Remove normalizedToDiscrete and normalizedToContinuous

This commit is contained in:
parent 3e17b0a463
commit 5fb5d2639f
@@ -190,6 +190,10 @@ let percentiles = distPlus => {
 let adjustBoth = discreteProbabilityMassFraction => {
   let yMaxDiscreteDomainFactor = discreteProbabilityMassFraction;
   let yMaxContinuousDomainFactor = 1.0 -. discreteProbabilityMassFraction;
+
+  // use the bigger proportion, such that whichever is the bigger proportion, the yMax is 1.
+
+
   let yMax = (yMaxDiscreteDomainFactor > 0.5 ? yMaxDiscreteDomainFactor : yMaxContinuousDomainFactor);
   (
     yMax /. yMaxDiscreteDomainFactor,

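Editor's note, to make the scaling above concrete: a minimal sketch of adjustBoth on its own, assuming the second element of the returned tuple mirrors the first with yMaxContinuousDomainFactor (that line is cut off in the hunk). With a discrete mass fraction of 0.2 the continuous side is the bigger proportion, so it keeps yMax = 1.0 and the discrete side is stretched by 4.0.

// Sketch, not part of the commit; the second tuple element is an assumption.
let adjustBoth = discreteProbabilityMassFraction => {
  let yMaxDiscreteDomainFactor = discreteProbabilityMassFraction;
  let yMaxContinuousDomainFactor = 1.0 -. discreteProbabilityMassFraction;
  let yMax =
    yMaxDiscreteDomainFactor > 0.5
      ? yMaxDiscreteDomainFactor : yMaxContinuousDomainFactor;
  (yMax /. yMaxDiscreteDomainFactor, yMax /. yMaxContinuousDomainFactor);
};

let (discreteScale, continuousScale) = adjustBoth(0.2);
// discreteScale == 4.0 (0.8 /. 0.2), continuousScale == 1.0 (0.8 /. 0.8)
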
@@ -231,8 +235,10 @@ module DistPlusChart = {
   let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
   let discreteProbabilityMassFraction =
     distPlus |> DistPlus.T.toDiscreteProbabilityMassFraction;
+
   let (yMaxDiscreteDomainFactor, yMaxContinuousDomainFactor) =
     adjustBoth(discreteProbabilityMassFraction);
+
   <DistributionPlot
     xScale={config.xLog ? "log" : "linear"}
     yScale={config.yLog ? "log" : "linear"}

@@ -427,7 +427,7 @@ export class DistPlotD3 {
   addLollipopsChart(common) {
     const data = this.getDataPoints('discrete');

-    const yMin = 0.; //d3.min(this.attrs.data.discrete.ys);
+    const yMin = 0.;
     const yMax = d3.max(this.attrs.data.discrete.ys);

     // X axis.

@@ -223,9 +223,6 @@ module T =
       |> updateIntegralSumCache(Some(1.0));
   };

-  let normalizedToContinuous = t => Some(t |> normalize);
-  let normalizedToDiscrete = _ => None;
-
   let mean = (t: t) => {
     let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0;
     let indefiniteIntegralLinear = (p, a, b) =>

@@ -173,9 +173,6 @@ module T =
       |> updateIntegralSumCache(Some(1.0));
   };

-  let normalizedToContinuous = _ => None;
-  let normalizedToDiscrete = t => Some(t); // TODO: this should be normalized!
-
   let downsample = (i, t: t): t => {
     // It's not clear how to downsample a set of discrete points in a meaningful way.
     // The best we can do is to clip off the smallest values.

@@ -69,30 +69,6 @@ module T =
     t |> updateShape(truncatedShape);
   };

-  // TODO: is this still needed?
-  let normalizedToContinuous = (t: t) => {
-    t
-    |> toShape
-    |> Shape.T.normalizedToContinuous
-    |> E.O.fmap(
-         Continuous.T.mapY(
-           domainIncludedProbabilityMassAdjustment(t),
-         ),
-       );
-  };
-
-  // TODO: is this still needed?
-  let normalizedToDiscrete = (t: t) => {
-    t
-    |> toShape
-    |> Shape.T.normalizedToDiscrete
-    |> E.O.fmap(
-         Discrete.T.mapY(
-           domainIncludedProbabilityMassAdjustment(t),
-         ),
-       );
-  };
-
   let xToY = (f, t: t) =>
     t
     |> toShape

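Editor's note: the two helpers removed above were the only code shown here that applied domainIncludedProbabilityMassAdjustment to the normalized shape, and the "TODO: is this still needed?" question is left open by the commit. If that adjustment does turn out to be needed, a caller could reapply it by hand. A hedged sketch using only names that appear in the removed code; that Shape.T.normalize composed with Shape.T.toContinuous reproduces the old behaviour is an assumption.

// Sketch only, not part of the commit.
let adjustedContinuous = (t: t) =>
  t
  |> toShape
  |> Shape.T.normalize
  |> Shape.T.toContinuous
  |> E.O.fmap(
       Continuous.T.mapY(domainIncludedProbabilityMassAdjustment(t)),
     );
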
@@ -10,8 +10,6 @@ module type dist = {
   let toContinuous: t => option(DistTypes.continuousShape);
   let toDiscrete: t => option(DistTypes.discreteShape);
   let normalize: t => t;
-  let normalizedToContinuous: t => option(DistTypes.continuousShape);
-  let normalizedToDiscrete: t => option(DistTypes.discreteShape);
   let toDiscreteProbabilityMassFraction: t => float;
   let downsample: (int, t) => t;
   let truncate: (option(float), option(float), t) => t;

@@ -43,8 +41,6 @@ module Dist = (T: dist) => {
   let toDiscrete = T.toDiscrete;
   let normalize = T.normalize;
   let truncate = T.truncate;
-  let normalizedToContinuous = T.normalizedToContinuous;
-  let normalizedToDiscrete = T.normalizedToDiscrete;
   let mean = T.mean;
   let variance = T.variance;

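Editor's note: with normalizedToContinuous and normalizedToDiscrete dropped from the dist signature and from the Dist functor, callers that still want a normalized shape can compose the functions that remain. A minimal sketch at a hypothetical call site; someDist stands in for any DistTypes.shape value, and the assumption is that normalize followed by toContinuous / toDiscrete is the intended replacement.

// Hypothetical call site, not part of the commit.
let normalizedContinuous: option(DistTypes.continuousShape) =
  someDist |> Shape.T.normalize |> Shape.T.toContinuous;

let normalizedDiscrete: option(DistTypes.discreteShape) =
  someDist |> Shape.T.normalize |> Shape.T.toDiscrete;
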
@@ -135,11 +135,6 @@ module T =
     {...t, discrete: downsampledDiscrete, continuous: downsampledContinuous};
   };

-  let normalizedToContinuous = (t: t) => Some(normalize(t).continuous);
-
-  let normalizedToDiscrete = ({discrete} as t: t) =>
-    Some(normalize(t).discrete);
-
   let integral = (t: t) => {
     switch (t.integralCache) {
     | Some(cache) => cache

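Editor's note: for the mixed case the removed helpers were thin wrappers around normalize, so the same values stay available inline. The expressions below are lifted directly from the removed lines and assume they are written inside the same module, with t a value of the mixed type.

// Equivalent inline expressions, not part of the commit.
let continuousPart = normalize(t).continuous;
let discretePart = normalize(t).discrete;
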
@@ -148,18 +148,6 @@ module T =
       Continuous.T.toDiscreteProbabilityMassFraction,
     ));

-  let normalizedToDiscrete =
-    mapToAll((
-      Mixed.T.normalizedToDiscrete,
-      Discrete.T.normalizedToDiscrete,
-      Continuous.T.normalizedToDiscrete,
-    ));
-  let normalizedToContinuous =
-    mapToAll((
-      Mixed.T.normalizedToContinuous,
-      Discrete.T.normalizedToContinuous,
-      Continuous.T.normalizedToContinuous,
-    ));
   let minX = mapToAll((Mixed.T.minX, Discrete.T.minX, Continuous.T.minX));
   let integral =
     mapToAll((