Merge pull request #64 from foretold-app/seb/epic-fixes

Fixes for a few more of the issues in the bugs doc

Commit: 4072c47b70
@@ -142,12 +142,15 @@ module DemoDist = {
 },
 ~distPlusIngredients,
 ~shouldDownsample=options.downsampleTo |> E.O.isSome,
-~recommendedLength=options.downsampleTo |> E.O.default(10000),
+~recommendedLength=options.downsampleTo |> E.O.default(100),
 (),
 );
 let response = DistPlusRenderer.run(inputs);
 switch (RenderTypes.DistPlusRenderer.Outputs.distplus(response)) {
-| Some(distPlus) => <DistPlusPlot distPlus />
+| Some(distPlus) => {
+let normalizedDistPlus = DistPlus.T.normalize(distPlus);
+<DistPlusPlot distPlus={normalizedDistPlus} />;
+}
 | _ =>
 "Correct Guesstimator string input to show a distribution."
 |> R.ste

@@ -171,7 +174,7 @@ let make = () => {
 ~onSubmit=({state}) => {None},
 ~initialState={
 //guesstimatorString: "mm(normal(-10, 2), uniform(18, 25), lognormal({mean: 10, stdev: 8}), triangular(31,40,50))",
-guesstimatorString: "normal(0, 10) * 100", // , triangular(30, 40, 60)
+guesstimatorString: "mm(1, 2, 3, normal(2, 1))", // , triangular(30, 40, 60)
 domainType: "Complete",
 xPoint: "50.0",
 xPoint2: "60.0",
@@ -176,7 +176,8 @@ module Convert = {
 let continuousShape: Types.continuousShape = {
 xyShape,
 interpolation: `Linear,
-knownIntegralSum: None,
+integralSumCache: None,
+integralCache: None,
 };

 let integral = XYShape.Analysis.integrateContinuousShape(continuousShape);

@@ -188,7 +189,8 @@ module Convert = {
 ys,
 },
 interpolation: `Linear,
-knownIntegralSum: Some(1.0),
+integralSumCache: Some(1.0),
+integralCache: None,
 };
 continuousShape;
 };

@@ -674,7 +676,7 @@ module State = {
 /* create a cdf from a pdf */
 let _pdf = Continuous.T.normalize(pdf);

-let cdf = Continuous.T.integral(~cache=None, _pdf);
+let cdf = Continuous.T.integral(_pdf);
 let xs = [||];
 let ys = [||];
 for (i in 1 to 999) {
@@ -51,13 +51,13 @@ let table = (distPlus, x) => {
 </td>
 <td className="px-4 py-2 border ">
 {distPlus
-|> DistPlus.T.Integral.xToY(~cache=None, x)
+|> DistPlus.T.Integral.xToY(x)
 |> E.Float.with2DigitsPrecision
 |> ReasonReact.string}
 </td>
 <td className="px-4 py-2 border ">
 {distPlus
-|> DistPlus.T.Integral.sum(~cache=None)
+|> DistPlus.T.Integral.sum
 |> E.Float.with2DigitsPrecision
 |> ReasonReact.string}
 </td>

@@ -70,15 +70,9 @@ let table = (distPlus, x) => {
 <td className="px-4 py-2">
 {"Continuous Total" |> ReasonReact.string}
 </td>
-<td className="px-4 py-2">
-{"Scaled Continuous Total" |> ReasonReact.string}
-</td>
 <td className="px-4 py-2">
 {"Discrete Total" |> ReasonReact.string}
 </td>
-<td className="px-4 py-2">
-{"Scaled Discrete Total" |> ReasonReact.string}
-</td>
 </tr>
 </thead>
 <tbody>

@@ -87,17 +81,7 @@ let table = (distPlus, x) => {
 {distPlus
 |> DistPlus.T.toContinuous
 |> E.O.fmap(
-Continuous.T.Integral.sum(~cache=None),
-)
-|> E.O.fmap(E.Float.with2DigitsPrecision)
-|> E.O.default("")
-|> ReasonReact.string}
-</td>
-<td className="px-4 py-2 border ">
-{distPlus
-|> DistPlus.T.normalizedToContinuous
-|> E.O.fmap(
-Continuous.T.Integral.sum(~cache=None),
+Continuous.T.Integral.sum
 )
 |> E.O.fmap(E.Float.with2DigitsPrecision)
 |> E.O.default("")

@@ -106,15 +90,7 @@ let table = (distPlus, x) => {
 <td className="px-4 py-2 border ">
 {distPlus
 |> DistPlus.T.toDiscrete
-|> E.O.fmap(Discrete.T.Integral.sum(~cache=None))
-|> E.O.fmap(E.Float.with2DigitsPrecision)
-|> E.O.default("")
-|> ReasonReact.string}
-</td>
-<td className="px-4 py-2 border ">
-{distPlus
-|> DistPlus.T.normalizedToDiscrete
-|> E.O.fmap(Discrete.T.Integral.sum(~cache=None))
+|> E.O.fmap(Discrete.T.Integral.sum)
 |> E.O.fmap(E.Float.with2DigitsPrecision)
 |> E.O.default("")
 |> ReasonReact.string}
@@ -143,42 +119,42 @@ let percentiles = distPlus => {
 <tr>
 <td className="px-4 py-2 border">
 {distPlus
-|> DistPlus.T.Integral.yToX(~cache=None, 0.01)
+|> DistPlus.T.Integral.yToX(0.01)
 |> showFloat}
 </td>
 <td className="px-4 py-2 border">
 {distPlus
-|> DistPlus.T.Integral.yToX(~cache=None, 0.05)
+|> DistPlus.T.Integral.yToX(0.05)
 |> showFloat}
 </td>
 <td className="px-4 py-2 border">
 {distPlus
-|> DistPlus.T.Integral.yToX(~cache=None, 0.25)
+|> DistPlus.T.Integral.yToX(0.25)
 |> showFloat}
 </td>
 <td className="px-4 py-2 border">
 {distPlus
-|> DistPlus.T.Integral.yToX(~cache=None, 0.5)
+|> DistPlus.T.Integral.yToX(0.5)
 |> showFloat}
 </td>
 <td className="px-4 py-2 border">
 {distPlus
-|> DistPlus.T.Integral.yToX(~cache=None, 0.75)
+|> DistPlus.T.Integral.yToX(0.75)
 |> showFloat}
 </td>
 <td className="px-4 py-2 border">
 {distPlus
-|> DistPlus.T.Integral.yToX(~cache=None, 0.95)
+|> DistPlus.T.Integral.yToX(0.95)
 |> showFloat}
 </td>
 <td className="px-4 py-2 border">
 {distPlus
-|> DistPlus.T.Integral.yToX(~cache=None, 0.99)
+|> DistPlus.T.Integral.yToX(0.99)
 |> showFloat}
 </td>
 <td className="px-4 py-2 border">
 {distPlus
-|> DistPlus.T.Integral.yToX(~cache=None, 0.99999)
+|> DistPlus.T.Integral.yToX(0.99999)
 |> showFloat}
 </td>
 </tr>
@@ -214,6 +190,10 @@ let percentiles = distPlus => {
 let adjustBoth = discreteProbabilityMassFraction => {
 let yMaxDiscreteDomainFactor = discreteProbabilityMassFraction;
 let yMaxContinuousDomainFactor = 1.0 -. discreteProbabilityMassFraction;
+
+// use the bigger proportion, such that whichever is the bigger proportion, the yMax is 1.
+
 let yMax = (yMaxDiscreteDomainFactor > 0.5 ? yMaxDiscreteDomainFactor : yMaxContinuousDomainFactor);
 (
 yMax /. yMaxDiscreteDomainFactor,
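Note: a quick worked example of the scaling above (not part of the commit, just restating the arithmetic). With discreteProbabilityMassFraction = 0.3 the two factors are 0.3 and 0.7, yMax picks the larger one (0.7), and the first returned ratio is 0.7 /. 0.3 ≈ 2.33, while the corresponding continuous ratio would be 0.7 /. 0.7 = 1.0 — so the component holding the larger share of probability mass is the one drawn at full height, matching the new comment.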
@@ -225,10 +205,11 @@ module DistPlusChart = {
 [@react.component]
 let make = (~distPlus: DistTypes.distPlus, ~config: chartConfig, ~onHover) => {
 open DistPlus;
-let discrete = distPlus |> T.normalizedToDiscrete |> E.O.fmap(Discrete.getShape);
+let discrete = distPlus |> T.toDiscrete |> E.O.fmap(Discrete.getShape);
 let continuous =
 distPlus
-|> T.normalizedToContinuous
+|> T.toContinuous
 |> E.O.fmap(Continuous.getShape);
 let range = T.xTotalRange(distPlus);

@@ -236,7 +217,7 @@ module DistPlusChart = {
 // let minX =
 // switch (
 // distPlus
-// |> DistPlus.T.Integral.yToX(~cache=None, 0.0001),
+// |> DistPlus.T.Integral.yToX(0.0001),
 // range,
 // ) {
 // | (min, Some(range)) => Some(min -. range *. 0.001)

@@ -244,18 +225,20 @@ module DistPlusChart = {
 // };

 let minX = {
-distPlus |> DistPlus.T.Integral.yToX(~cache=None, 0.00001);
+distPlus |> DistPlus.T.Integral.yToX(0.00001);
 };

 let maxX = {
-distPlus |> DistPlus.T.Integral.yToX(~cache=None, 0.99);
+distPlus |> DistPlus.T.Integral.yToX(0.99999);
 };

 let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
 let discreteProbabilityMassFraction =
 distPlus |> DistPlus.T.toDiscreteProbabilityMassFraction;

 let (yMaxDiscreteDomainFactor, yMaxContinuousDomainFactor) =
 adjustBoth(discreteProbabilityMassFraction);

 <DistributionPlot
 xScale={config.xLog ? "log" : "linear"}
 yScale={config.yLog ? "log" : "linear"}

@@ -283,11 +266,11 @@ module IntegralChart = {
 |> Continuous.toLinear
 |> E.O.fmap(Continuous.getShape);
 let minX = {
-distPlus |> DistPlus.T.Integral.yToX(~cache=None, 0.00001);
+distPlus |> DistPlus.T.Integral.yToX(0.00001);
 };

 let maxX = {
-distPlus |> DistPlus.T.Integral.yToX(~cache=None, 0.99);
+distPlus |> DistPlus.T.Integral.yToX(0.99999);
 };
 let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
 <DistributionPlot

@@ -334,6 +317,7 @@ let make = (~distPlus: DistTypes.distPlus) => {
 let (x, setX) = React.useState(() => 0.);
 let (state, dispatch) =
 React.useReducer(DistPlusPlotReducer.reducer, DistPlusPlotReducer.init);

 <div>
 {state.distributions
 |> E.L.fmapi((index, config) =>

@@ -427,7 +427,7 @@ export class DistPlotD3 {
 addLollipopsChart(common) {
 const data = this.getDataPoints('discrete');

-const yMin = 0.; //d3.min(this.attrs.data.discrete.ys);
+const yMin = 0.;
 const yMax = d3.max(this.attrs.data.discrete.ys);

 // X axis.
@@ -17,25 +17,25 @@ let toDiscretePointMassesFromTriangulars =
 let n = s |> XYShape.T.length;
 // first, double up the leftmost and rightmost points:
 let {xs, ys}: XYShape.T.t = s;
-let _ = Js.Array.unshift(xs[0], xs);
-let _ = Js.Array.unshift(ys[0], ys);
-let _ = Js.Array.push(xs[n - 1], xs);
-let _ = Js.Array.push(ys[n - 1], ys);
+Js.Array.unshift(xs[0], xs) |> ignore;
+Js.Array.unshift(ys[0], ys) |> ignore;
+Js.Array.push(xs[n - 1], xs) |> ignore;
+Js.Array.push(ys[n - 1], ys) |> ignore;
 let n = E.A.length(xs);
 // squares and neighbourly products of the xs
 let xsSq: array(float) = Belt.Array.makeUninitializedUnsafe(n);
 let xsProdN1: array(float) = Belt.Array.makeUninitializedUnsafe(n - 1);
 let xsProdN2: array(float) = Belt.Array.makeUninitializedUnsafe(n - 2);
 for (i in 0 to n - 1) {
-let _ = Belt.Array.set(xsSq, i, xs[i] *. xs[i]);
+Belt.Array.set(xsSq, i, xs[i] *. xs[i]) |> ignore;
 ();
 };
 for (i in 0 to n - 2) {
-let _ = Belt.Array.set(xsProdN1, i, xs[i] *. xs[i + 1]);
+Belt.Array.set(xsProdN1, i, xs[i] *. xs[i + 1]) |> ignore;
 ();
 };
 for (i in 0 to n - 3) {
-let _ = Belt.Array.set(xsProdN2, i, xs[i] *. xs[i + 2]);
+Belt.Array.set(xsProdN2, i, xs[i] *. xs[i + 2]) |> ignore;
 ();
 };
 // means and variances
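Note: the mechanical change running through this hunk (and several below) is `let _ = expr;` → `expr |> ignore;`. Both discard a return value that is only produced as a by-product — for example, `Js.Array.push` returns the new array length — but `ignore` states that intent explicitly instead of shadowing a wildcard binding. A minimal standalone sketch of the idiom, with a hypothetical array `xs` (illustrative, not from the commit):

    let xs = [|1., 2., 3.|];
    /* push returns the new length; only the mutation matters here */
    Js.Array.push(4., xs) |> ignore;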
@@ -45,12 +45,11 @@ let toDiscretePointMassesFromTriangulars =

 if (inverse) {
 for (i in 1 to n - 2) {
-let _ =
-Belt.Array.set(
-masses,
-i - 1,
-(xs[i + 1] -. xs[i - 1]) *. ys[i] /. 2.,
-);
+Belt.Array.set(
+masses,
+i - 1,
+(xs[i + 1] -. xs[i - 1]) *. ys[i] /. 2.,
+) |> ignore;

 // this only works when the whole triange is either on the left or on the right of zero
 let a = xs[i - 1];

@@ -71,43 +70,39 @@ let toDiscretePointMassesFromTriangulars =
 -. inverseMean
 ** 2.;

-let _ = Belt.Array.set(means, i - 1, inverseMean);
+Belt.Array.set(means, i - 1, inverseMean) |> ignore;

-let _ = Belt.Array.set(variances, i - 1, inverseVar);
+Belt.Array.set(variances, i - 1, inverseVar) |> ignore;
 ();
 };

 {n: n - 2, masses, means, variances};
 } else {
 for (i in 1 to n - 2) {

 // area of triangle = width * height / 2
-let _ =
-Belt.Array.set(
-masses,
-i - 1,
-(xs[i + 1] -. xs[i - 1]) *. ys[i] /. 2.,
-);
+Belt.Array.set(
+masses,
+i - 1,
+(xs[i + 1] -. xs[i - 1]) *. ys[i] /. 2.,
+) |> ignore;

 // means of triangle = (a + b + c) / 3
-let _ =
-Belt.Array.set(means, i - 1, (xs[i - 1] +. xs[i] +. xs[i + 1]) /. 3.);
+Belt.Array.set(means, i - 1, (xs[i - 1] +. xs[i] +. xs[i + 1]) /. 3.) |> ignore;

 // variance of triangle = (a^2 + b^2 + c^2 - ab - ac - bc) / 18
-let _ =
-Belt.Array.set(
-variances,
-i - 1,
-(
-xsSq[i - 1]
-+. xsSq[i]
-+. xsSq[i + 1]
--. xsProdN1[i - 1]
--. xsProdN1[i]
--. xsProdN2[i - 1]
-)
-/. 18.,
-);
+Belt.Array.set(
+variances,
+i - 1,
+(
+xsSq[i - 1]
++. xsSq[i]
++. xsSq[i + 1]
+-. xsProdN1[i - 1]
+-. xsProdN1[i]
+-. xsProdN2[i - 1]
+)
+/. 18.,
+) |> ignore;
 ();
 };
 {n: n - 2, masses, means, variances};
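Note: written out, the moment formulas the comments in this hunk refer to, for a triangle with corners a = xs[i - 1], b = xs[i], c = xs[i + 1] and height y_i = ys[i] (these restate the code and its comments, nothing new):

    \text{mass} = \frac{(c - a)\, y_i}{2}, \qquad
    \mu = \frac{a + b + c}{3}, \qquad
    \sigma^2 = \frac{a^2 + b^2 + c^2 - ab - ac - bc}{18}.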
@@ -115,7 +110,11 @@ let toDiscretePointMassesFromTriangulars =
 };

 let combineShapesContinuousContinuous =
-(op: ExpressionTypes.algebraicOperation, s1: DistTypes.xyShape, s2: DistTypes.xyShape)
+(
+op: ExpressionTypes.algebraicOperation,
+s1: DistTypes.xyShape,
+s2: DistTypes.xyShape,
+)
 : DistTypes.xyShape => {
 let t1n = s1 |> XYShape.T.length;
 let t2n = s2 |> XYShape.T.length;

@@ -123,10 +122,11 @@ let combineShapesContinuousContinuous =
 // if we add the two distributions, we should probably use normal filters.
 // if we multiply the two distributions, we should probably use lognormal filters.
 let t1m = toDiscretePointMassesFromTriangulars(s1);
-let t2m = switch (op) {
+let t2m =
+switch (op) {
 | `Divide => toDiscretePointMassesFromTriangulars(~inverse=true, s2)
 | _ => toDiscretePointMassesFromTriangulars(~inverse=false, s2)
 };

 let combineMeansFn =
 switch (op) {

@@ -163,7 +163,7 @@ let combineShapesContinuousContinuous =
 for (i in 0 to t1m.n - 1) {
 for (j in 0 to t2m.n - 1) {
 let k = i * t2m.n + j;
-let _ = Belt.Array.set(masses, k, t1m.masses[i] *. t2m.masses[j]);
+Belt.Array.set(masses, k, t1m.masses[i] *. t2m.masses[j]) |> ignore;

 let mean = combineMeansFn(t1m.means[i], t2m.means[j]);
 let variance =

@@ -173,8 +173,8 @@ let combineShapesContinuousContinuous =
 t1m.means[i],
 t2m.means[j],
 );
-let _ = Belt.Array.set(means, k, mean);
-let _ = Belt.Array.set(variances, k, variance);
+Belt.Array.set(means, k, mean) |> ignore;
+Belt.Array.set(variances, k, variance) |> ignore;
 // update bounds
 let minX = mean -. 2. *. sqrt(variance) *. 1.644854;
 let maxX = mean +. 2. *. sqrt(variance) *. 1.644854;
@@ -190,66 +190,46 @@ let combineShapesContinuousContinuous =
 // we now want to create a set of target points. For now, let's just evenly distribute 200 points between
 // between the outputMinX and outputMaxX
 let nOut = 300;
-let outputXs: array(float) = E.A.Floats.range(outputMinX^, outputMaxX^, nOut);
+let outputXs: array(float) =
+E.A.Floats.range(outputMinX^, outputMaxX^, nOut);
 let outputYs: array(float) = Belt.Array.make(nOut, 0.0);
 // now, for each of the outputYs, accumulate from a Gaussian kernel over each input point.
-for (j in 0 to E.A.length(masses) - 1) { // go through all of the result points
-let _ = if (variances[j] > 0. && masses[j] > 0.) {
-for (i in 0 to E.A.length(outputXs) - 1) { // go through all of the target points
+for (j in 0 to E.A.length(masses) - 1) {
+// go through all of the result points
+if (variances[j] > 0. && masses[j] > 0.) {
+for (i in 0 to E.A.length(outputXs) - 1) {
+// go through all of the target points
 let dx = outputXs[i] -. means[j];
-let contribution = masses[j] *. exp(-. (dx ** 2.) /. (2. *. variances[j])) /. (sqrt(2. *. 3.14159276 *. variances[j]));
-let _ = Belt.Array.set(outputYs, i, outputYs[i] +. contribution);
-();
+let contribution =
+masses[j]
+*. exp(-. (dx ** 2.) /. (2. *. variances[j]))
+/. sqrt(2. *. 3.14159276 *. variances[j]);
+Belt.Array.set(outputYs, i, outputYs[i] +. contribution) |> ignore;
 };
-();
 };
-();
 };

 {xs: outputXs, ys: outputYs};
 };

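Note: the `contribution` term above is a Gaussian kernel centred on each point mass; restating the code with m_j = masses[j], mu_j = means[j], sigma_j^2 = variances[j] (the code approximates pi as 3.14159276):

    \text{contribution}_{ij}
      = \frac{m_j}{\sqrt{2\pi\sigma_j^2}}
        \exp\!\left(-\frac{(x_i - \mu_j)^2}{2\sigma_j^2}\right),
    \qquad y_i \mathrel{+}= \text{contribution}_{ij}.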
-let toDiscretePointMassesFromDiscrete = (s: DistTypes.xyShape): pointMassesWithMoments => {
-let n = s |> XYShape.T.length;
+let toDiscretePointMassesFromDiscrete =
+(s: DistTypes.xyShape): pointMassesWithMoments => {
 let {xs, ys}: XYShape.T.t = s;
 let n = E.A.length(xs);

-let masses: array(float) = Belt.Array.makeUninitializedUnsafe(n); // doesn't include the fake first and last points
-let means: array(float) = Belt.Array.makeUninitializedUnsafe(n);
-let variances: array(float) = Belt.Array.makeUninitializedUnsafe(n);
-
-for (i in 0 to n - 1) {
-let _ =
-Belt.Array.set(
-masses,
-i,
-ys[i]
-);
-
-let _ =
-Belt.Array.set(
-means,
-i,
-xs[i]
-);
-
-let _ =
-Belt.Array.set(
-variances,
-i,
-0.0
-);
-();
-};
+let masses: array(float) = Belt.Array.makeBy(n, i => ys[i]);
+let means: array(float) = Belt.Array.makeBy(n, i => xs[i]);
+let variances: array(float) = Belt.Array.makeBy(n, i => 0.0);

 {n, masses, means, variances};
 };

-let combineShapesContinuousDiscreteAdd =
-(op: ExpressionTypes.algebraicOperation, s1: DistTypes.xyShape, s2: DistTypes.xyShape)
+let combineShapesContinuousDiscrete =
+(op: ExpressionTypes.algebraicOperation, continuousShape: DistTypes.xyShape, discreteShape: DistTypes.xyShape)
 : DistTypes.xyShape => {
-let t1n = s1 |> XYShape.T.length;
-let t2n = s2 |> XYShape.T.length;
+let t1n = continuousShape |> XYShape.T.length;
+let t2n = discreteShape |> XYShape.T.length;

 // each x pair is added/subtracted
 let fn = Operation.Algebraic.toFn(op);
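Note: `Belt.Array.makeBy(n, f)` builds `[|f(0), ..., f(n - 1)|]` directly, which is what lets the commit drop the `makeUninitializedUnsafe` + `set` loop in `toDiscretePointMassesFromDiscrete`. A small standalone illustration of the same pattern, with a hypothetical `ys` array (illustrative, not from the commit):

    let ys = [|0.25, 0.5, 0.25|];
    /* one mass per point, mirroring the initialisation above */
    let masses = Belt.Array.makeBy(Belt.Array.length(ys), i => ys[i]);
    let variances = Belt.Array.makeBy(Belt.Array.length(ys), _ => 0.0);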
@@ -257,126 +237,48 @@ let combineShapesContinuousDiscreteAdd =
 let outXYShapes: array(array((float, float))) =
 Belt.Array.makeUninitializedUnsafe(t2n);

-for (j in 0 to t2n - 1) {
-// for each one of the discrete points
-// create a new distribution, as long as the original continuous one
-let dxyShape: array((float, float)) =
-Belt.Array.makeUninitializedUnsafe(t1n);
-for (i in 0 to t1n - 1) {
-let _ =
-Belt.Array.set(
-dxyShape,
-i,
-(fn(s1.xs[i], s2.xs[j]), s1.ys[i] *. s2.ys[j]),
-);
-();
-};
-let _ = Belt.Array.set(outXYShapes, j, dxyShape);
-();
-};
+switch (op) {
+| `Add
+| `Subtract =>
+for (j in 0 to t2n - 1) {
+// creates a new continuous shape for each one of the discrete points, and collects them in outXYShapes.
+let dxyShape: array((float, float)) =
+Belt.Array.makeUninitializedUnsafe(t1n);
+for (i in 0 to t1n - 1) {
+Belt.Array.set(
+dxyShape,
+i,
+(fn(continuousShape.xs[i], discreteShape.xs[j]),
+continuousShape.ys[i] *. discreteShape.ys[j]),
+) |> ignore;
+();
+};
+Belt.Array.set(outXYShapes, j, dxyShape) |> ignore;
+();
+}
+| `Multiply
+| `Divide =>
+for (j in 0 to t2n - 1) {
+// creates a new continuous shape for each one of the discrete points, and collects them in outXYShapes.
+let dxyShape: array((float, float)) =
+Belt.Array.makeUninitializedUnsafe(t1n);
+for (i in 0 to t1n - 1) {
+Belt.Array.set(
+dxyShape,
+i,
+(fn(continuousShape.xs[i], discreteShape.xs[j]), continuousShape.ys[i] *. discreteShape.ys[j] /. discreteShape.xs[j]),
+) |> ignore;
+();
+};
+Belt.Array.set(outXYShapes, j, dxyShape) |> ignore;
+();
+}
+};

 outXYShapes
-|> E.A.fold_left(XYShape.PointwiseCombination.combineLinear((+.)), XYShape.T.empty);
-};
-
-let combineShapesContinuousDiscreteMultiply =
-(op: ExpressionTypes.algebraicOperation, s1: DistTypes.xyShape, s2: DistTypes.xyShape)
-: DistTypes.xyShape => {
-let t1n = s1 |> XYShape.T.length;
-let t2n = s2 |> XYShape.T.length;
-
-let t1m = toDiscretePointMassesFromTriangulars(s1);
-let t2m = toDiscretePointMassesFromDiscrete(s2);
-
-[... the rest of the removed combineShapesContinuousDiscreteMultiply body duplicated the combineMeansFn / combineVariancesFn switches, the point-mass convolution loop, and the Gaussian-kernel accumulation shown above in combineShapesContinuousContinuous ...]
-
-{xs: outputXs, ys: outputYs};
-};
-
-let combineShapesContinuousDiscrete =
-(op: ExpressionTypes.algebraicOperation, s1: DistTypes.xyShape, s2: DistTypes.xyShape)
-: DistTypes.xyShape => {
-switch (op) {
-| `Add
-| `Subtract => combineShapesContinuousDiscreteAdd(op, s1, s2);
-| `Multiply
-| `Divide => combineShapesContinuousDiscreteMultiply(op, s1, s2);
-};
-
+|> E.A.fmap(XYShape.T.fromZippedArray)
+|> E.A.fold_left(
+XYShape.PointwiseCombination.combine((+.),
+XYShape.XtoY.continuousInterpolator(`Linear, `UseZero)),
+XYShape.T.empty);
 };
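Note: in the `Multiply`/`Divide` branch above, each shifted copy of the continuous shape is weighted by `discreteShape.ys[j]` and additionally divided by `discreteShape.xs[j]`. One way to read this (an interpretation of the code, not something stated in the commit): multiplying a continuous variable by a constant c moves the x-coordinates through `fn`, and to keep total probability fixed the density must shrink by the same factor, f_{cX}(y) = f_X(y / c) / |c| — the extra division is that Jacobian factor.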
@@ -3,30 +3,45 @@ open Distributions;
 type t = DistTypes.continuousShape;
 let getShape = (t: t) => t.xyShape;
 let interpolation = (t: t) => t.interpolation;
-let make = (interpolation, xyShape, knownIntegralSum): t => {
+let make = (~interpolation=`Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
 xyShape,
 interpolation,
-knownIntegralSum,
+integralSumCache,
+integralCache,
 };
-let shapeMap = (fn, {xyShape, interpolation, knownIntegralSum}: t): t => {
+let shapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): t => {
 xyShape: fn(xyShape),
 interpolation,
-knownIntegralSum,
+integralSumCache,
+integralCache,
 };
 let lastY = (t: t) => t |> getShape |> XYShape.T.lastY;
 let oShapeMap =
-(fn, {xyShape, interpolation, knownIntegralSum}: t)
+(fn, {xyShape, interpolation, integralSumCache, integralCache}: t)
 : option(DistTypes.continuousShape) =>
-fn(xyShape) |> E.O.fmap(make(interpolation, _, knownIntegralSum));
+fn(xyShape) |> E.O.fmap(make(~interpolation, ~integralSumCache, ~integralCache));

+let emptyIntegral: DistTypes.continuousShape = {
+xyShape: {xs: [|neg_infinity|], ys: [|0.0|]},
+interpolation: `Linear,
+integralSumCache: Some(0.0),
+integralCache: None,
+};
 let empty: DistTypes.continuousShape = {
 xyShape: XYShape.T.empty,
 interpolation: `Linear,
-knownIntegralSum: Some(0.0),
+integralSumCache: Some(0.0),
+integralCache: Some(emptyIntegral),
 };

+let stepwiseToLinear = (t: t): t =>
+make(~integralSumCache=t.integralSumCache, ~integralCache=t.integralCache, XYShape.Range.stepwiseToLinear(t.xyShape));
+
 let combinePointwise =
 (
-~knownIntegralSumsFn,
+~integralSumCachesFn=(_, _) => None,
+~integralCachesFn: (t, t) => option(t) =(_, _) => None,
+~distributionType: DistTypes.distributionType = `PDF,
 fn: (float, float) => float,
 t1: DistTypes.continuousShape,
 t2: DistTypes.continuousShape,
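Note: with `make` now taking optional labelled arguments, most call sites only pass the shape and the interpolation/caches default. A usage sketch (illustrative only, assuming some `xyShape: DistTypes.xyShape` value in scope):

    let c1 = make(xyShape);                            /* `Linear, no caches */
    let c2 = make(~interpolation=`Stepwise, xyShape);
    let c3 = make(~integralSumCache=Some(1.0), xyShape);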
@@ -36,59 +51,91 @@ let combinePointwise =
 // can just sum them up. Otherwise, all bets are off.
 let combinedIntegralSum =
 Common.combineIntegralSums(
-knownIntegralSumsFn,
-t1.knownIntegralSum,
-t2.knownIntegralSum,
+integralSumCachesFn,
+t1.integralSumCache,
+t2.integralSumCache,
 );

+// TODO: does it ever make sense to pointwise combine the integrals here?
+// It could be done for pointwise additions, but is that ever needed?
+
+// If combining stepwise and linear, we must convert the stepwise to linear first,
+// i.e. add a point at the bottom of each step
+let (t1, t2) = switch (t1.interpolation, t2.interpolation) {
+| (`Linear, `Linear) => (t1, t2);
+| (`Stepwise, `Stepwise) => (t1, t2);
+| (`Linear, `Stepwise) => (t1, stepwiseToLinear(t2));
+| (`Stepwise, `Linear) => (stepwiseToLinear(t1), t2);
+};
+
+let extrapolation = switch (distributionType) {
+| `PDF => `UseZero
+| `CDF => `UseOutermostPoints
+};
+
+let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation);
+
 make(
-`Linear,
-XYShape.PointwiseCombination.combineLinear(
-~fn=(+.),
+~integralSumCache=combinedIntegralSum,
+XYShape.PointwiseCombination.combine(
+(+.),
+interpolator,
 t1.xyShape,
 t2.xyShape,
 ),
-combinedIntegralSum,
 );
 };

 let toLinear = (t: t): option(t) => {
 switch (t) {
-| {interpolation: `Stepwise, xyShape, knownIntegralSum} =>
+| {interpolation: `Stepwise, xyShape, integralSumCache, integralCache} =>
 xyShape
 |> XYShape.Range.stepsToContinuous
-|> E.O.fmap(make(`Linear, _, knownIntegralSum))
+|> E.O.fmap(make(~integralSumCache, ~integralCache))
 | {interpolation: `Linear} => Some(t)
 };
 };
 let shapeFn = (fn, t: t) => t |> getShape |> fn;
-let updateKnownIntegralSum = (knownIntegralSum, t: t): t => {
+
+let updateIntegralSumCache = (integralSumCache, t: t): t => {
 ...t,
-knownIntegralSum,
+integralSumCache,
+};
+
+let updateIntegralCache = (integralCache, t: t): t => {
+...t,
+integralCache,
 };

 let reduce =
 (
-~knownIntegralSumsFn: (float, float) => option(float)=(_, _) => None,
+~integralSumCachesFn: (float, float) => option(float)=(_, _) => None,
+~integralCachesFn: (t, t) => option(t)=(_, _) => None,
 fn,
 continuousShapes,
 ) =>
 continuousShapes
-|> E.A.fold_left(combinePointwise(~knownIntegralSumsFn, fn), empty);
+|> E.A.fold_left(combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn), empty);

-let mapY = (~knownIntegralSumFn=_ => None, fn, t: t) => {
-let u = E.O.bind(_, knownIntegralSumFn);
-let yMapFn = shapeMap(XYShape.T.mapY(fn));
-t |> yMapFn |> updateKnownIntegralSum(u(t.knownIntegralSum));
+let mapY = (~integralSumCacheFn=_ => None,
+~integralCacheFn=_ => None,
+~fn, t: t) => {
+make(
+~interpolation=t.interpolation,
+~integralSumCache=t.integralSumCache |> E.O.bind(_, integralSumCacheFn),
+~integralCache=t.integralCache |> E.O.bind(_, integralCacheFn),
+t |> getShape |> XYShape.T.mapY(fn),
+);
 };

-let scaleBy = (~scale=1.0, t: t): t => {
+let rec scaleBy = (~scale=1.0, t: t): t => {
+let scaledIntegralSumCache = E.O.bind(t.integralSumCache, v => Some(scale *. v));
+let scaledIntegralCache = E.O.bind(t.integralCache, v => Some(scaleBy(~scale, v)));
+
 t
-|> mapY((r: float) => r *. scale)
-|> updateKnownIntegralSum(
-E.O.bind(t.knownIntegralSum, v => Some(scale *. v)),
-);
+|> mapY(~fn=(r: float) => r *. scale)
+|> updateIntegralSumCache(scaledIntegralSumCache)
+|> updateIntegralCache(scaledIntegralCache)
 };

 module T =
|
||||||
let minX = shapeFn(XYShape.T.minX);
|
let minX = shapeFn(XYShape.T.minX);
|
||||||
let maxX = shapeFn(XYShape.T.maxX);
|
let maxX = shapeFn(XYShape.T.maxX);
|
||||||
let mapY = mapY;
|
let mapY = mapY;
|
||||||
|
let updateIntegralCache = updateIntegralCache;
|
||||||
let toDiscreteProbabilityMassFraction = _ => 0.0;
|
let toDiscreteProbabilityMassFraction = _ => 0.0;
|
||||||
let toShape = (t: t): DistTypes.shape => Continuous(t);
|
let toShape = (t: t): DistTypes.shape => Continuous(t);
|
||||||
let xToY = (f, {interpolation, xyShape}: t) => {
|
let xToY = (f, {interpolation, xyShape}: t) => {
|
||||||
|
@ -121,63 +169,56 @@ module T =
|
||||||
|> XYShape.T.zip
|
|> XYShape.T.zip
|
||||||
|> XYShape.Zipped.filterByX(x => x >= lc && x <= rc);
|
|> XYShape.Zipped.filterByX(x => x >= lc && x <= rc);
|
||||||
|
|
||||||
let eps = (t |> getShape |> XYShape.T.xTotalRange) *. 0.0001;
|
|
||||||
|
|
||||||
let leftNewPoint =
|
let leftNewPoint =
|
||||||
leftCutoff |> E.O.dimap(lc => [|(lc -. eps, 0.)|], _ => [||]);
|
leftCutoff |> E.O.dimap(lc => [|(lc -. epsilon_float, 0.)|], _ => [||]);
|
||||||
let rightNewPoint =
|
let rightNewPoint =
|
||||||
rightCutoff |> E.O.dimap(rc => [|(rc +. eps, 0.)|], _ => [||]);
|
rightCutoff |> E.O.dimap(rc => [|(rc +. epsilon_float, 0.)|], _ => [||]);
|
||||||
|
|
||||||
let truncatedZippedPairsWithNewPoints =
|
let truncatedZippedPairsWithNewPoints =
|
||||||
E.A.concatMany([|leftNewPoint, truncatedZippedPairs, rightNewPoint|]);
|
E.A.concatMany([|leftNewPoint, truncatedZippedPairs, rightNewPoint|]);
|
||||||
let truncatedShape =
|
let truncatedShape =
|
||||||
XYShape.T.fromZippedArray(truncatedZippedPairsWithNewPoints);
|
XYShape.T.fromZippedArray(truncatedZippedPairsWithNewPoints);
|
||||||
|
|
||||||
make(`Linear, truncatedShape, None);
|
make(truncatedShape)
|
||||||
};
|
};
|
||||||
|
|
||||||
// TODO: This should work with stepwise plots.
|
// TODO: This should work with stepwise plots.
|
||||||
let integral = (~cache, t) =>
|
let integral = (t) =>
|
||||||
if (t |> getShape |> XYShape.T.length > 0) {
|
switch (getShape(t) |> XYShape.T.isEmpty, t.integralCache) {
|
||||||
switch (cache) {
|
| (true, _) => emptyIntegral
|
||||||
| Some(cache) => cache
|
| (false, Some(cache)) => cache
|
||||||
| None =>
|
| (false, None) =>
|
||||||
t
|
t
|
||||||
|> getShape
|
|> getShape
|
||||||
|> XYShape.Range.integrateWithTriangles
|
|> XYShape.Range.integrateWithTriangles
|
||||||
|> E.O.toExt("This should not have happened")
|
|> E.O.toExt("This should not have happened")
|
||||||
|> make(`Linear, _, None)
|
|> make
|
||||||
};
|
|
||||||
} else {
|
|
||||||
make(`Linear, {xs: [|neg_infinity|], ys: [|0.0|]}, None);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let downsample = (~cache=None, length, t): t =>
|
let downsample = (length, t): t =>
|
||||||
t
|
t
|
||||||
|> shapeMap(
|
|> shapeMap(
|
||||||
XYShape.XsConversion.proportionByProbabilityMass(
|
XYShape.XsConversion.proportionByProbabilityMass(
|
||||||
length,
|
length,
|
||||||
integral(~cache, t).xyShape,
|
integral(t).xyShape,
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
let integralEndY = (~cache, t: t) =>
|
let integralEndY = (t: t) =>
|
||||||
t.knownIntegralSum |> E.O.default(t |> integral(~cache) |> lastY);
|
t.integralSumCache |> E.O.default(t |> integral |> lastY);
|
||||||
let integralXtoY = (~cache, f, t: t) =>
|
let integralXtoY = (f, t: t) =>
|
||||||
t |> integral(~cache) |> shapeFn(XYShape.XtoY.linear(f));
|
t |> integral |> shapeFn(XYShape.XtoY.linear(f));
|
||||||
let integralYtoX = (~cache, f, t: t) =>
|
let integralYtoX = (f, t: t) =>
|
||||||
t |> integral(~cache) |> shapeFn(XYShape.YtoX.linear(f));
|
t |> integral |> shapeFn(XYShape.YtoX.linear(f));
|
||||||
let toContinuous = t => Some(t);
|
let toContinuous = t => Some(t);
|
||||||
let toDiscrete = _ => None;
|
let toDiscrete = _ => None;
|
||||||
|
|
||||||
let normalize = (t: t): t => {
|
let normalize = (t: t): t => {
|
||||||
t
|
t
|
||||||
|> scaleBy(~scale=1. /. integralEndY(~cache=None, t))
|
|> updateIntegralCache(Some(integral(t)))
|
||||||
|> updateKnownIntegralSum(Some(1.0));
|
|> scaleBy(~scale=1. /. integralEndY(t))
|
||||||
|
|> updateIntegralSumCache(Some(1.0));
|
||||||
};
|
};
|
||||||
|
|
||||||
let normalizedToContinuous = t => Some(t |> normalize);
|
|
||||||
let normalizedToDiscrete = _ => None;
|
|
||||||
|
|
||||||
let mean = (t: t) => {
|
let mean = (t: t) => {
|
||||||
let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0;
|
let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0;
|
||||||
let indefiniteIntegralLinear = (p, a, b) =>
|
let indefiniteIntegralLinear = (p, a, b) =>
|
||||||
|
@ -205,23 +246,28 @@ let combineAlgebraicallyWithDiscrete =
|
||||||
t1: t,
|
t1: t,
|
||||||
t2: DistTypes.discreteShape,
|
t2: DistTypes.discreteShape,
|
||||||
) => {
|
) => {
|
||||||
let s1 = t1 |> getShape;
|
let t1s = t1 |> getShape;
|
||||||
let s2 = t2.xyShape;
|
let t2s = t2.xyShape; // TODO would like to use Discrete.getShape here, but current file structure doesn't allow for that
|
||||||
let t1n = s1 |> XYShape.T.length;
|
|
||||||
let t2n = s2 |> XYShape.T.length;
|
if (XYShape.T.isEmpty(t1s) || XYShape.T.isEmpty(t2s)) {
|
||||||
if (t1n == 0 || t2n == 0) {
|
|
||||||
empty;
|
empty;
|
||||||
} else {
|
} else {
|
||||||
let combinedShape =
|
let continuousAsLinear = switch (t1.interpolation) {
|
||||||
AlgebraicShapeCombination.combineShapesContinuousDiscrete(op, s1, s2);
|
| `Linear => t1;
|
||||||
|
| `Stepwise => stepwiseToLinear(t1)
|
||||||
|
};
|
||||||
|
|
||||||
|
let combinedShape = AlgebraicShapeCombination.combineShapesContinuousDiscrete(op, continuousAsLinear |> getShape, t2s);
|
||||||
|
|
||||||
let combinedIntegralSum =
|
let combinedIntegralSum =
|
||||||
Common.combineIntegralSums(
|
Common.combineIntegralSums(
|
||||||
(a, b) => Some(a *. b),
|
(a, b) => Some(a *. b),
|
||||||
t1.knownIntegralSum,
|
t1.integralSumCache,
|
||||||
t2.knownIntegralSum,
|
t2.integralSumCache,
|
||||||
);
|
);
|
||||||
// return a new Continuous distribution
|
|
||||||
make(`Linear, combinedShape, combinedIntegralSum);
|
// TODO: It could make sense to automatically transform the integrals here (shift or scale)
|
||||||
|
make(~interpolation=t1.interpolation, ~integralSumCache=combinedIntegralSum, combinedShape)
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -239,10 +285,10 @@ let combineAlgebraically =
|
||||||
let combinedIntegralSum =
|
let combinedIntegralSum =
|
||||||
Common.combineIntegralSums(
|
Common.combineIntegralSums(
|
||||||
(a, b) => Some(a *. b),
|
(a, b) => Some(a *. b),
|
||||||
t1.knownIntegralSum,
|
t1.integralSumCache,
|
||||||
t2.knownIntegralSum,
|
t2.integralSumCache,
|
||||||
);
|
);
|
||||||
// return a new Continuous distribution
|
// return a new Continuous distribution
|
||||||
make(`Linear, combinedShape, combinedIntegralSum);
|
make(~integralSumCache=combinedIntegralSum, combinedShape);
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
|
@@ -2,23 +2,37 @@ open Distributions;

 type t = DistTypes.discreteShape;

-let make = (xyShape, knownIntegralSum): t => {xyShape, knownIntegralSum};
-let shapeMap = (fn, {xyShape, knownIntegralSum}: t): t => {
+let make = (~integralSumCache=None, ~integralCache=None, xyShape): t => {xyShape, integralSumCache, integralCache};
+let shapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): t => {
 xyShape: fn(xyShape),
-knownIntegralSum,
+integralSumCache,
+integralCache
 };
 let getShape = (t: t) => t.xyShape;
-let oShapeMap = (fn, {xyShape, knownIntegralSum}: t): option(t) =>
-fn(xyShape) |> E.O.fmap(make(_, knownIntegralSum));
+let oShapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): option(t) =>
+fn(xyShape) |> E.O.fmap(make(~integralSumCache, ~integralCache));

+let emptyIntegral: DistTypes.continuousShape = {
+xyShape: {xs: [|neg_infinity|], ys: [|0.0|]},
+interpolation: `Stepwise,
+integralSumCache: Some(0.0),
+integralCache: None,
+};
+let empty: DistTypes.discreteShape = {
+xyShape: XYShape.T.empty,
+integralSumCache: Some(0.0),
+integralCache: Some(emptyIntegral),
+};

-let empty: t = {xyShape: XYShape.T.empty, knownIntegralSum: Some(0.0)};
 let shapeFn = (fn, t: t) => t |> getShape |> fn;

 let lastY = (t: t) => t |> getShape |> XYShape.T.lastY;

 let combinePointwise =
 (
-~knownIntegralSumsFn,
+~integralSumCachesFn = (_, _) => None,
+~integralCachesFn: (DistTypes.continuousShape, DistTypes.continuousShape) => option(DistTypes.continuousShape) = (_, _) => None,
 fn,
 t1: DistTypes.discreteShape,
 t2: DistTypes.discreteShape,

@@ -26,38 +40,47 @@ let combinePointwise =
 : DistTypes.discreteShape => {
 let combinedIntegralSum =
 Common.combineIntegralSums(
-knownIntegralSumsFn,
-t1.knownIntegralSum,
-t2.knownIntegralSum,
+integralSumCachesFn,
+t1.integralSumCache,
+t2.integralSumCache,
 );

+// TODO: does it ever make sense to pointwise combine the integrals here?
+// It could be done for pointwise additions, but is that ever needed?
+
 make(
+~integralSumCache=combinedIntegralSum,
 XYShape.PointwiseCombination.combine(
-~xsSelection=ALL_XS,
-~xToYSelection=XYShape.XtoY.stepwiseIfAtX,
-~fn=(a, b) => fn(E.O.default(0.0, a), E.O.default(0.0, b)), // stepwiseIfAtX returns option(float), so this fn needs to handle None
+(+.),
+XYShape.XtoY.discreteInterpolator,
 t1.xyShape,
 t2.xyShape,
 ),
-combinedIntegralSum,
 );
 };

 let reduce =
-(~knownIntegralSumsFn=(_, _) => None, fn, discreteShapes)
-: DistTypes.discreteShape =>
+(~integralSumCachesFn=(_, _) => None,
+~integralCachesFn=(_, _) => None,
+fn, discreteShapes)
+: DistTypes.discreteShape =>
 discreteShapes
-|> E.A.fold_left(combinePointwise(~knownIntegralSumsFn, fn), empty);
+|> E.A.fold_left(combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn), empty);

-let updateKnownIntegralSum = (knownIntegralSum, t: t): t => {
+let updateIntegralSumCache = (integralSumCache, t: t): t => {
 ...t,
-knownIntegralSum,
+integralSumCache,
+};
+
+let updateIntegralCache = (integralCache, t: t): t => {
+...t,
+integralCache,
 };

 /* This multiples all of the data points together and creates a new discrete distribution from the results.
 Data points at the same xs get added together. It may be a good idea to downsample t1 and t2 before and/or the result after. */
 let combineAlgebraically =
-(op: ExpressionTypes.algebraicOperation, t1: t, t2: t) => {
+(op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t => {
 let t1s = t1 |> getShape;
 let t2s = t2 |> getShape;
 let t1n = t1s |> XYShape.T.length;
|
||||||
let combinedIntegralSum =
|
let combinedIntegralSum =
|
||||||
Common.combineIntegralSums(
|
Common.combineIntegralSums(
|
||||||
(s1, s2) => Some(s1 *. s2),
|
(s1, s2) => Some(s1 *. s2),
|
||||||
t1.knownIntegralSum,
|
t1.integralSumCache,
|
||||||
t2.knownIntegralSum,
|
t2.integralSumCache,
|
||||||
);
|
);
|
||||||
|
|
||||||
let fn = Operation.Algebraic.toFn(op);
|
let fn = Operation.Algebraic.toFn(op);
|
||||||
|
@ -87,84 +110,85 @@ let combineAlgebraically =
|
||||||
|
|
||||||
let combinedShape = XYShape.T.fromZippedArray(rxys);
|
let combinedShape = XYShape.T.fromZippedArray(rxys);
|
||||||
|
|
||||||
make(combinedShape, combinedIntegralSum);
|
make(~integralSumCache=combinedIntegralSum, combinedShape);
|
||||||
};
|
};
|
||||||
|
|
||||||
let mapY = (~knownIntegralSumFn=previousKnownIntegralSum => None, fn, t: t) => {
|
let mapY = (~integralSumCacheFn=_ => None,
|
||||||
let u = E.O.bind(_, knownIntegralSumFn);
|
~integralCacheFn=_ => None,
|
||||||
let yMapFn = shapeMap(XYShape.T.mapY(fn));
|
~fn, t: t) => {
|
||||||
|
make(
|
||||||
t |> yMapFn |> updateKnownIntegralSum(u(t.knownIntegralSum));
|
~integralSumCache=t.integralSumCache |> E.O.bind(_, integralSumCacheFn),
|
||||||
|
~integralCache=t.integralCache |> E.O.bind(_, integralCacheFn),
|
||||||
|
t |> getShape |> XYShape.T.mapY(fn),
|
||||||
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
let scaleBy = (~scale=1.0, t: t): t => {
|
let scaleBy = (~scale=1.0, t: t): t => {
|
||||||
|
let scaledIntegralSumCache = t.integralSumCache |> E.O.fmap((*.)(scale));
|
||||||
|
let scaledIntegralCache = t.integralCache |> E.O.fmap(Continuous.scaleBy(~scale));
|
||||||
|
|
||||||
t
|
t
|
||||||
|> mapY((r: float) => r *. scale)
|
|> mapY(~fn=(r: float) => r *. scale)
|
||||||
|> updateKnownIntegralSum(
|
|> updateIntegralSumCache(scaledIntegralSumCache)
|
||||||
E.O.bind(t.knownIntegralSum, v => Some(scale *. v)),
|
|> updateIntegralCache(scaledIntegralCache)
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
module T =
|
module T =
|
||||||
Dist({
|
Dist({
|
||||||
type t = DistTypes.discreteShape;
|
type t = DistTypes.discreteShape;
|
||||||
type integral = DistTypes.continuousShape;
|
type integral = DistTypes.continuousShape;
|
||||||
let integral = (~cache, t) =>
|
let integral = (t) =>
|
||||||
if (t |> getShape |> XYShape.T.length > 0) {
|
switch (getShape(t) |> XYShape.T.isEmpty, t.integralCache) {
|
||||||
switch (cache) {
|
| (true, _) => emptyIntegral
|
||||||
| Some(c) => c
|
| (false, Some(c)) => c
|
||||||
| None =>
|
| (false, None) => {
|
||||||
Continuous.make(
|
let ts = getShape(t);
|
||||||
`Stepwise,
|
// The first xy of this integral should always be the zero, to ensure nice plotting
|
||||||
XYShape.T.accumulateYs((+.), getShape(t)),
|
let firstX = ts |> XYShape.T.minX;
|
||||||
None,
|
let prependedZeroPoint: XYShape.T.t = {xs: [|firstX -. epsilon_float|], ys: [|0.|]};
|
||||||
)
|
let integralShape =
|
||||||
};
|
ts
|
||||||
} else {
|
|> XYShape.T.concat(prependedZeroPoint)
|
||||||
Continuous.make(
|
|> XYShape.T.accumulateYs((+.));
|
||||||
`Stepwise,
|
|
||||||
{xs: [|neg_infinity|], ys: [|0.0|]},
|
Continuous.make(~interpolation=`Stepwise, integralShape);
|
||||||
None,
|
}
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let integralEndY = (~cache, t: t) =>
|
let integralEndY = (t: t) =>
|
||||||
t.knownIntegralSum
|
t.integralSumCache
|
||||||
|> E.O.default(t |> integral(~cache) |> Continuous.lastY);
|
|> E.O.default(t |> integral |> Continuous.lastY);
|
||||||
let minX = shapeFn(XYShape.T.minX);
|
let minX = shapeFn(XYShape.T.minX);
|
||||||
let maxX = shapeFn(XYShape.T.maxX);
|
let maxX = shapeFn(XYShape.T.maxX);
|
||||||
let toDiscreteProbabilityMassFraction = _ => 1.0;
|
let toDiscreteProbabilityMassFraction = _ => 1.0;
|
||||||
let mapY = mapY;
|
let mapY = mapY;
|
||||||
|
let updateIntegralCache = updateIntegralCache;
|
||||||
let toShape = (t: t): DistTypes.shape => Discrete(t);
|
let toShape = (t: t): DistTypes.shape => Discrete(t);
|
||||||
let toContinuous = _ => None;
|
let toContinuous = _ => None;
|
||||||
let toDiscrete = t => Some(t);
|
let toDiscrete = t => Some(t);
|
||||||
|
|
||||||
let normalize = (t: t): t => {
|
let normalize = (t: t): t => {
|
||||||
t
|
t
|
||||||
|> scaleBy(~scale=1. /. integralEndY(~cache=None, t))
|
|> scaleBy(~scale=1. /. integralEndY(t))
|
||||||
|> updateKnownIntegralSum(Some(1.0));
|
|> updateIntegralSumCache(Some(1.0));
|
||||||
};
|
};
|
||||||
|
|
||||||
let normalizedToContinuous = _ => None;
|
let downsample = (i, t: t): t => {
|
||||||
let normalizedToDiscrete = t => Some(t); // TODO: this should be normalized!
|
|
||||||
|
|
||||||
let downsample = (~cache=None, i, t: t): t => {
|
|
||||||
// It's not clear how to downsample a set of discrete points in a meaningful way.
|
// It's not clear how to downsample a set of discrete points in a meaningful way.
|
||||||
// The best we can do is to clip off the smallest values.
|
// The best we can do is to clip off the smallest values.
|
||||||
let currentLength = t |> getShape |> XYShape.T.length;
|
let currentLength = t |> getShape |> XYShape.T.length;
|
||||||
|
|
||||||
if (i < currentLength && i >= 1 && currentLength > 1) {
|
if (i < currentLength && i >= 1 && currentLength > 1) {
|
||||||
let clippedShape =
|
t
|
||||||
t
|
|> getShape
|
||||||
|> getShape
|
|> XYShape.T.zip
|
||||||
|> XYShape.T.zip
|
|> XYShape.Zipped.sortByY
|
||||||
|> XYShape.Zipped.sortByY
|
|> Belt.Array.reverse
|
||||||
|> Belt.Array.reverse
|
|> Belt.Array.slice(_, ~offset=0, ~len=i)
|
||||||
|> Belt.Array.slice(_, ~offset=0, ~len=i)
|
|> XYShape.Zipped.sortByX
|
||||||
|> XYShape.Zipped.sortByX
|
|> XYShape.T.fromZippedArray
|
||||||
|> XYShape.T.fromZippedArray;
|
|> make;
|
||||||
|
|
||||||
make(clippedShape, None); // if someone needs the sum, they'll have to recompute it
|
|
||||||
} else {
|
} else {
|
||||||
t;
|
t;
|
||||||
};
|
};
|
||||||
|
@ -172,17 +196,15 @@ module T =
|
||||||
|
|
||||||
let truncate =
|
let truncate =
|
||||||
(leftCutoff: option(float), rightCutoff: option(float), t: t): t => {
|
(leftCutoff: option(float), rightCutoff: option(float), t: t): t => {
|
||||||
let truncatedShape =
|
t
|
||||||
t
|
|> getShape
|
||||||
|> getShape
|
|> XYShape.T.zip
|
||||||
|> XYShape.T.zip
|
|> XYShape.Zipped.filterByX(x =>
|
||||||
|> XYShape.Zipped.filterByX(x =>
|
x >= E.O.default(neg_infinity, leftCutoff)
|
||||||
x >= E.O.default(neg_infinity, leftCutoff)
|
&& x <= E.O.default(infinity, rightCutoff)
|
||||||
|| x <= E.O.default(infinity, rightCutoff)
|
)
|
||||||
)
|
|> XYShape.T.fromZippedArray
|
||||||
|> XYShape.T.fromZippedArray;
|
|> make;
|
||||||
|
|
||||||
make(truncatedShape, None);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let xToY = (f, t) =>
|
let xToY = (f, t) =>
|
||||||
|
@ -192,11 +214,11 @@ module T =
|
||||||
|> E.O.default(0.0)
|
|> E.O.default(0.0)
|
||||||
|> DistTypes.MixedPoint.makeDiscrete;
|
|> DistTypes.MixedPoint.makeDiscrete;
|
||||||
|
|
||||||
let integralXtoY = (~cache, f, t) =>
|
let integralXtoY = (f, t) =>
|
||||||
t |> integral(~cache) |> Continuous.getShape |> XYShape.XtoY.linear(f);
|
t |> integral |> Continuous.getShape |> XYShape.XtoY.linear(f);
|
||||||
|
|
||||||
let integralYtoX = (~cache, f, t) =>
|
let integralYtoX = (f, t) =>
|
||||||
t |> integral(~cache) |> Continuous.getShape |> XYShape.YtoX.linear(f);
|
t |> integral |> Continuous.getShape |> XYShape.YtoX.linear(f);
|
||||||
|
|
||||||
let mean = (t: t): float => {
|
let mean = (t: t): float => {
|
||||||
let s = getShape(t);
|
let s = getShape(t);
|
||||||
|
|
|
@ -2,8 +2,7 @@ open DistTypes;
|
||||||
|
|
||||||
type t = DistTypes.distPlus;
|
type t = DistTypes.distPlus;
|
||||||
|
|
||||||
let shapeIntegral = shape =>
|
let shapeIntegral = shape => Shape.T.Integral.get(shape);
|
||||||
Shape.T.Integral.get(~cache=None, shape);
|
|
||||||
let make =
|
let make =
|
||||||
(
|
(
|
||||||
~shape,
|
~shape,
|
||||||
|
@ -59,7 +58,6 @@ module T =
|
||||||
let normalize = (t: t): t => {
|
let normalize = (t: t): t => {
|
||||||
let normalizedShape = t |> toShape |> Shape.T.normalize;
|
let normalizedShape = t |> toShape |> Shape.T.normalize;
|
||||||
t |> updateShape(normalizedShape);
|
t |> updateShape(normalizedShape);
|
||||||
// TODO: also adjust for domainIncludedProbabilityMass here.
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let truncate = (leftCutoff, rightCutoff, t: t): t => {
|
let truncate = (leftCutoff, rightCutoff, t: t): t => {
|
||||||
|
@ -71,28 +69,6 @@ module T =
|
||||||
t |> updateShape(truncatedShape);
|
t |> updateShape(truncatedShape);
|
||||||
};
|
};
|
||||||
|
|
||||||
let normalizedToContinuous = (t: t) => {
|
|
||||||
t
|
|
||||||
|> toShape
|
|
||||||
|> Shape.T.normalizedToContinuous
|
|
||||||
|> E.O.fmap(
|
|
||||||
Continuous.T.mapY(
|
|
||||||
domainIncludedProbabilityMassAdjustment(t),
|
|
||||||
),
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
let normalizedToDiscrete = (t: t) => {
|
|
||||||
t
|
|
||||||
|> toShape
|
|
||||||
|> Shape.T.normalizedToDiscrete
|
|
||||||
|> E.O.fmap(
|
|
||||||
Discrete.T.mapY(
|
|
||||||
domainIncludedProbabilityMassAdjustment(t),
|
|
||||||
),
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
let xToY = (f, t: t) =>
|
let xToY = (f, t: t) =>
|
||||||
t
|
t
|
||||||
|> toShape
|
|> toShape
|
||||||
|
@ -105,34 +81,36 @@ module T =
|
||||||
shapeFn(Shape.T.toDiscreteProbabilityMassFraction);
|
shapeFn(Shape.T.toDiscreteProbabilityMassFraction);
|
||||||
|
|
||||||
// This bit is kind of awkward, could probably use rethinking.
|
// This bit is kind of awkward, could probably use rethinking.
|
||||||
let integral = (~cache, t: t) =>
|
let integral = (t: t) =>
|
||||||
updateShape(Continuous(t.integralCache), t);
|
updateShape(Continuous(t.integralCache), t);
|
||||||
|
|
||||||
let downsample = (~cache=None, i, t): t =>
|
let updateIntegralCache = (integralCache: option(DistTypes.continuousShape), t) =>
|
||||||
|
update(~integralCache=E.O.default(t.integralCache, integralCache), t);
|
||||||
|
|
||||||
|
let downsample = (i, t): t =>
|
||||||
updateShape(t |> toShape |> Shape.T.downsample(i), t);
|
updateShape(t |> toShape |> Shape.T.downsample(i), t);
|
||||||
// todo: adjust for limit, maybe?
|
// todo: adjust for limit, maybe?
|
||||||
let mapY =
|
let mapY =
|
||||||
(
|
(
|
||||||
~knownIntegralSumFn=previousIntegralSum => None,
|
~integralSumCacheFn=previousIntegralSum => None,
|
||||||
fn,
|
~integralCacheFn=previousIntegralCache => None,
|
||||||
|
~fn,
|
||||||
{shape, _} as t: t,
|
{shape, _} as t: t,
|
||||||
)
|
)
|
||||||
: t =>
|
: t =>
|
||||||
Shape.T.mapY(~knownIntegralSumFn, fn, shape)
|
Shape.T.mapY(~integralSumCacheFn, ~fn, shape)
|
||||||
|> updateShape(_, t);
|
|> updateShape(_, t);
|
||||||
|
|
||||||
// get the total of everything
|
// get the total of everything
|
||||||
let integralEndY = (~cache as _, t: t) => {
|
let integralEndY = (t: t) => {
|
||||||
Shape.T.Integral.sum(
|
Shape.T.Integral.sum(
|
||||||
~cache=Some(t.integralCache),
|
|
||||||
toShape(t),
|
toShape(t),
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
// TODO: Fix this below, obviously. Adjust for limits
|
// TODO: Fix this below, obviously. Adjust for limits
|
||||||
let integralXtoY = (~cache as _, f, t: t) => {
|
let integralXtoY = (f, t: t) => {
|
||||||
Shape.T.Integral.xToY(
|
Shape.T.Integral.xToY(
|
||||||
~cache=Some(t.integralCache),
|
|
||||||
f,
|
f,
|
||||||
toShape(t),
|
toShape(t),
|
||||||
)
|
)
|
||||||
|
@ -140,8 +118,8 @@ module T =
|
||||||
};
|
};
|
||||||
|
|
||||||
// TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
|
// TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
|
||||||
let integralYtoX = (~cache as _, f, t: t) => {
|
let integralYtoX = (f, t: t) => {
|
||||||
Shape.T.Integral.yToX(~cache=None, f, toShape(t));
|
Shape.T.Integral.yToX(f, toShape(t));
|
||||||
};
|
};
|
||||||
|
|
||||||
let mean = (t: t) => {
|
let mean = (t: t) => {
|
||||||
|
|
|
@ -23,6 +23,6 @@
|
||||||
include DistPlus.T.Integral;
|
include DistPlus.T.Integral;
|
||||||
let xToY = (f: TimeTypes.timeInVector, t: t) => {
|
let xToY = (f: TimeTypes.timeInVector, t: t) => {
|
||||||
timeInVectorToX(f, t)
|
timeInVectorToX(f, t)
|
||||||
|> E.O.fmap(x => DistPlus.T.Integral.xToY(~cache=None, x, t));
|
|> E.O.fmap(x => DistPlus.T.Integral.xToY(x, t));
|
||||||
};
|
};
|
||||||
};
|
};
|
@@ -9,25 +9,45 @@ type domain =
   | RightLimited(domainLimit)
   | LeftAndRightLimited(domainLimit, domainLimit);

+type distributionType = [
+  | `PDF
+  | `CDF
+];

 type xyShape = {
   xs: array(float),
   ys: array(float),
 };

+type interpolationStrategy = [
+  | `Stepwise
+  | `Linear
+];
+type extrapolationStrategy = [
+  | `UseZero
+  | `UseOutermostPoints
+];
+
+type interpolator = (xyShape, int, float) => float;

 type continuousShape = {
   xyShape,
-  interpolation: [ | `Stepwise | `Linear],
-  knownIntegralSum: option(float),
+  interpolation: interpolationStrategy,
+  integralSumCache: option(float),
+  integralCache: option(continuousShape),
 };

 type discreteShape = {
   xyShape,
-  knownIntegralSum: option(float),
+  integralSumCache: option(float),
+  integralCache: option(continuousShape),
 };

 type mixedShape = {
   continuous: continuousShape,
   discrete: discreteShape,
+  integralSumCache: option(float),
+  integralCache: option(continuousShape),
 };

 type shapeMonad('a, 'b, 'c) =
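The two new fields are caches, not part of a shape's meaning: `integralSumCache` remembers the total mass when it is known, and `integralCache` remembers an already-computed CDF shape. Both are options because only some operations can keep them valid. A rough sketch of the rule the rest of this diff follows (names below are illustrative, not part of the codebase): a linear rescale can update a cached sum in place, while any other pointwise map should drop it so it gets recomputed on demand.

```reason
/* Sketch only: how a cached integral sum is treated under different maps. */
let scaleIntegralSumCache = (scale: float, cache: option(float)): option(float) =>
  switch (cache) {
  | Some(sum) => Some(sum *. scale) /* scaling is linear, so the cached sum just scales */
  | None => None
  };

/* For a non-linear map of the ys there is no cheap update, so the cache is dropped. */
let dropIntegralSumCache = (_cache: option(float)): option(float) => None;
```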
@@ -4,22 +4,22 @@ module type dist = {
   let minX: t => float;
   let maxX: t => float;
   let mapY:
-    (~knownIntegralSumFn: float => option(float)=?, float => float, t) => t;
+    (~integralSumCacheFn: float => option(float)=?, ~integralCacheFn: DistTypes.continuousShape => option(DistTypes.continuousShape)=?, ~fn: float => float, t) => t;
   let xToY: (float, t) => DistTypes.mixedPoint;
   let toShape: t => DistTypes.shape;
   let toContinuous: t => option(DistTypes.continuousShape);
   let toDiscrete: t => option(DistTypes.discreteShape);
   let normalize: t => t;
-  let normalizedToContinuous: t => option(DistTypes.continuousShape);
-  let normalizedToDiscrete: t => option(DistTypes.discreteShape);
   let toDiscreteProbabilityMassFraction: t => float;
-  let downsample: (~cache: option(integral)=?, int, t) => t;
+  let downsample: (int, t) => t;
   let truncate: (option(float), option(float), t) => t;

-  let integral: (~cache: option(integral), t) => integral;
-  let integralEndY: (~cache: option(integral), t) => float;
-  let integralXtoY: (~cache: option(integral), float, t) => float;
-  let integralYtoX: (~cache: option(integral), float, t) => float;
+  let updateIntegralCache: (option(DistTypes.continuousShape), t) => t;
+  let integral: (t) => integral;
+  let integralEndY: (t) => float;
+  let integralXtoY: (float, t) => float;
+  let integralYtoX: (float, t) => float;

   let mean: t => float;
   let variance: t => float;

@@ -41,11 +41,11 @@ module Dist = (T: dist) => {
   let toDiscrete = T.toDiscrete;
   let normalize = T.normalize;
   let truncate = T.truncate;
-  let normalizedToContinuous = T.normalizedToContinuous;
-  let normalizedToDiscrete = T.normalizedToDiscrete;
   let mean = T.mean;
   let variance = T.variance;

+  let updateIntegralCache = T.updateIntegralCache;

   module Integral = {
     type t = T.integral;
     let get = T.integral;

@@ -59,10 +59,23 @@ module Common = {
   let combineIntegralSums =
       (
         combineFn: (float, float) => option(float),
-        t1KnownIntegralSum: option(float),
-        t2KnownIntegralSum: option(float),
+        t1IntegralSumCache: option(float),
+        t2IntegralSumCache: option(float),
       ) => {
-    switch (t1KnownIntegralSum, t2KnownIntegralSum) {
+    switch (t1IntegralSumCache, t2IntegralSumCache) {
+    | (None, _)
+    | (_, None) => None
+    | (Some(s1), Some(s2)) => combineFn(s1, s2)
+    };
+  };
+
+  let combineIntegrals =
+      (
+        combineFn: (DistTypes.continuousShape, DistTypes.continuousShape) => option(DistTypes.continuousShape),
+        t1IntegralCache: option(DistTypes.continuousShape),
+        t2IntegralCache: option(DistTypes.continuousShape),
+      ) => {
+    switch (t1IntegralCache, t2IntegralCache) {
     | (None, _)
     | (_, None) => None
     | (Some(s1), Some(s2)) => combineFn(s1, s2)
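`combineIntegralSums` and `combineIntegrals` only keep a cache when both operands have one. A rough usage sketch follows; other files reach these helpers via `open Distributions`, so the fully qualified path shown here is an assumption.

```reason
let product = (s1, s2) => Some(s1 *. s2);

/* Both caches known: the combined cache is kept. */
let a = Distributions.Common.combineIntegralSums(product, Some(1.0), Some(0.5)); /* Some(0.5) */

/* Either cache missing: the result is None and the sum is recomputed when needed. */
let b = Distributions.Common.combineIntegralSums(product, Some(1.0), None); /* None */
```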
|
|
|
@ -1,7 +1,7 @@
|
||||||
open Distributions;
|
open Distributions;
|
||||||
|
|
||||||
type t = DistTypes.mixedShape;
|
type t = DistTypes.mixedShape;
|
||||||
let make = (~continuous, ~discrete): t => {continuous, discrete};
|
let make = (~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete): t => {continuous, discrete, integralSumCache, integralCache};
|
||||||
|
|
||||||
let totalLength = (t: t): int => {
|
let totalLength = (t: t): int => {
|
||||||
let continuousLength =
|
let continuousLength =
|
||||||
|
@ -11,29 +11,20 @@ let totalLength = (t: t): int => {
|
||||||
continuousLength + discreteLength;
|
continuousLength + discreteLength;
|
||||||
};
|
};
|
||||||
|
|
||||||
let scaleBy = (~scale=1.0, {discrete, continuous}: t): t => {
|
let scaleBy = (~scale=1.0, t: t): t => {
|
||||||
let scaledDiscrete = Discrete.scaleBy(~scale, discrete);
|
let scaledDiscrete = Discrete.scaleBy(~scale, t.discrete);
|
||||||
let scaledContinuous = Continuous.scaleBy(~scale, continuous);
|
let scaledContinuous = Continuous.scaleBy(~scale, t.continuous);
|
||||||
make(~discrete=scaledDiscrete, ~continuous=scaledContinuous);
|
let scaledIntegralCache = E.O.bind(t.integralCache, v => Some(Continuous.scaleBy(~scale, v)));
|
||||||
|
let scaledIntegralSumCache = E.O.bind(t.integralSumCache, s => Some(s *. scale));
|
||||||
|
make(~discrete=scaledDiscrete, ~continuous=scaledContinuous, ~integralSumCache=scaledIntegralSumCache, ~integralCache=scaledIntegralCache);
|
||||||
};
|
};
|
||||||
|
|
||||||
let toContinuous = ({continuous}: t) => Some(continuous);
|
let toContinuous = ({continuous}: t) => Some(continuous);
|
||||||
let toDiscrete = ({discrete}: t) => Some(discrete);
|
let toDiscrete = ({discrete}: t) => Some(discrete);
|
||||||
|
|
||||||
let combinePointwise = (~knownIntegralSumsFn, fn, t1: t, t2: t) => {
|
let updateIntegralCache = (integralCache, t: t): t => {
|
||||||
let reducedDiscrete =
|
...t,
|
||||||
[|t1, t2|]
|
integralCache,
|
||||||
|> E.A.fmap(toDiscrete)
|
|
||||||
|> E.A.O.concatSomes
|
|
||||||
|> Discrete.reduce(~knownIntegralSumsFn, fn);
|
|
||||||
|
|
||||||
let reducedContinuous =
|
|
||||||
[|t1, t2|]
|
|
||||||
|> E.A.fmap(toContinuous)
|
|
||||||
|> E.A.O.concatSomes
|
|
||||||
|> Continuous.reduce(~knownIntegralSumsFn, fn);
|
|
||||||
|
|
||||||
make(~discrete=reducedDiscrete, ~continuous=reducedContinuous);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
module T =
|
module T =
|
||||||
|
@ -47,6 +38,8 @@ module T =
|
||||||
max(Continuous.T.maxX(continuous), Discrete.T.maxX(discrete));
|
max(Continuous.T.maxX(continuous), Discrete.T.maxX(discrete));
|
||||||
let toShape = (t: t): DistTypes.shape => Mixed(t);
|
let toShape = (t: t): DistTypes.shape => Mixed(t);
|
||||||
|
|
||||||
|
let updateIntegralCache = updateIntegralCache;
|
||||||
|
|
||||||
let toContinuous = toContinuous;
|
let toContinuous = toContinuous;
|
||||||
let toDiscrete = toDiscrete;
|
let toDiscrete = toDiscrete;
|
||||||
|
|
||||||
|
@ -61,29 +54,35 @@ module T =
|
||||||
let truncatedDiscrete =
|
let truncatedDiscrete =
|
||||||
Discrete.T.truncate(leftCutoff, rightCutoff, discrete);
|
Discrete.T.truncate(leftCutoff, rightCutoff, discrete);
|
||||||
|
|
||||||
make(~discrete=truncatedDiscrete, ~continuous=truncatedContinuous);
|
make(~integralSumCache=None, ~integralCache=None, ~discrete=truncatedDiscrete, ~continuous=truncatedContinuous);
|
||||||
};
|
};
|
||||||
|
|
||||||
let normalize = (t: t): t => {
|
let normalize = (t: t): t => {
|
||||||
|
let continuousIntegral = Continuous.T.Integral.get(t.continuous);
|
||||||
|
let discreteIntegral = Discrete.T.Integral.get(t.discrete);
|
||||||
|
|
||||||
|
let continuous = t.continuous |> Continuous.updateIntegralCache(Some(continuousIntegral));
|
||||||
|
let discrete = t.discrete |> Discrete.updateIntegralCache(Some(discreteIntegral));
|
||||||
|
|
||||||
let continuousIntegralSum =
|
let continuousIntegralSum =
|
||||||
Continuous.T.Integral.sum(~cache=None, t.continuous);
|
Continuous.T.Integral.sum(continuous);
|
||||||
let discreteIntegralSum =
|
let discreteIntegralSum =
|
||||||
Discrete.T.Integral.sum(~cache=None, t.discrete);
|
Discrete.T.Integral.sum(discrete);
|
||||||
let totalIntegralSum = continuousIntegralSum +. discreteIntegralSum;
|
let totalIntegralSum = continuousIntegralSum +. discreteIntegralSum;
|
||||||
|
|
||||||
let newContinuousSum = continuousIntegralSum /. totalIntegralSum;
|
let newContinuousSum = continuousIntegralSum /. totalIntegralSum;
|
||||||
let newDiscreteSum = discreteIntegralSum /. totalIntegralSum;
|
let newDiscreteSum = discreteIntegralSum /. totalIntegralSum;
|
||||||
|
|
||||||
let normalizedContinuous =
|
let normalizedContinuous =
|
||||||
t.continuous
|
continuous
|
||||||
|> Continuous.scaleBy(~scale=1. /. newContinuousSum)
|
|> Continuous.scaleBy(~scale=newContinuousSum /. continuousIntegralSum)
|
||||||
|> Continuous.updateKnownIntegralSum(Some(newContinuousSum));
|
|> Continuous.updateIntegralSumCache(Some(newContinuousSum));
|
||||||
let normalizedDiscrete =
|
let normalizedDiscrete =
|
||||||
t.discrete
|
discrete
|
||||||
|> Discrete.scaleBy(~scale=1. /. newDiscreteSum)
|
|> Discrete.scaleBy(~scale=newDiscreteSum /. discreteIntegralSum)
|
||||||
|> Discrete.updateKnownIntegralSum(Some(newDiscreteSum));
|
|> Discrete.updateIntegralSumCache(Some(newDiscreteSum));
|
||||||
|
|
||||||
make(~continuous=normalizedContinuous, ~discrete=normalizedDiscrete);
|
make(~integralSumCache=Some(1.0), ~integralCache=None, ~continuous=normalizedContinuous, ~discrete=normalizedDiscrete);
|
||||||
};
|
};
|
||||||
|
|
||||||
let xToY = (x, t: t) => {
|
let xToY = (x, t: t) => {
|
||||||
|
@ -97,23 +96,22 @@ module T =
|
||||||
|
|
||||||
let toDiscreteProbabilityMassFraction = ({discrete, continuous}: t) => {
|
let toDiscreteProbabilityMassFraction = ({discrete, continuous}: t) => {
|
||||||
let discreteIntegralSum =
|
let discreteIntegralSum =
|
||||||
Discrete.T.Integral.sum(~cache=None, discrete);
|
Discrete.T.Integral.sum(discrete);
|
||||||
let continuousIntegralSum =
|
let continuousIntegralSum =
|
||||||
Continuous.T.Integral.sum(~cache=None, continuous);
|
Continuous.T.Integral.sum(continuous);
|
||||||
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
|
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
|
||||||
|
|
||||||
discreteIntegralSum /. totalIntegralSum;
|
discreteIntegralSum /. totalIntegralSum;
|
||||||
};
|
};
|
||||||
|
|
||||||
let downsample = (~cache=None, count, {discrete, continuous}: t): t => {
|
let downsample = (count, t: t): t => {
|
||||||
// We will need to distribute the new xs fairly between the discrete and continuous shapes.
|
// We will need to distribute the new xs fairly between the discrete and continuous shapes.
|
||||||
// The easiest way to do this is to simply go by the previous probability masses.
|
// The easiest way to do this is to simply go by the previous probability masses.
|
||||||
|
|
||||||
// The cache really isn't helpful here, because we would need two separate caches
|
|
||||||
let discreteIntegralSum =
|
let discreteIntegralSum =
|
||||||
Discrete.T.Integral.sum(~cache=None, discrete);
|
Discrete.T.Integral.sum(t.discrete);
|
||||||
let continuousIntegralSum =
|
let continuousIntegralSum =
|
||||||
Continuous.T.Integral.sum(~cache=None, continuous);
|
Continuous.T.Integral.sum(t.continuous);
|
||||||
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
|
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
|
||||||
|
|
||||||
// TODO: figure out what to do when the totalIntegralSum is zero.
|
// TODO: figure out what to do when the totalIntegralSum is zero.
|
||||||
|
@ -123,7 +121,7 @@ module T =
|
||||||
int_of_float(
|
int_of_float(
|
||||||
float_of_int(count) *. (discreteIntegralSum /. totalIntegralSum),
|
float_of_int(count) *. (discreteIntegralSum /. totalIntegralSum),
|
||||||
),
|
),
|
||||||
discrete,
|
t.discrete,
|
||||||
);
|
);
|
||||||
|
|
||||||
let downsampledContinuous =
|
let downsampledContinuous =
|
||||||
|
@ -131,75 +129,71 @@ module T =
|
||||||
int_of_float(
|
int_of_float(
|
||||||
float_of_int(count) *. (continuousIntegralSum /. totalIntegralSum),
|
float_of_int(count) *. (continuousIntegralSum /. totalIntegralSum),
|
||||||
),
|
),
|
||||||
continuous,
|
t.continuous,
|
||||||
);
|
);
|
||||||
|
|
||||||
{discrete: downsampledDiscrete, continuous: downsampledContinuous};
|
{...t, discrete: downsampledDiscrete, continuous: downsampledContinuous};
|
||||||
};
|
};
|
||||||
|
|
||||||
let normalizedToContinuous = (t: t) => Some(normalize(t).continuous);
|
let integral = (t: t) => {
|
||||||
|
switch (t.integralCache) {
|
||||||
let normalizedToDiscrete = ({discrete} as t: t) =>
|
|
||||||
Some(normalize(t).discrete);
|
|
||||||
|
|
||||||
let integral = (~cache, {continuous, discrete}: t) => {
|
|
||||||
switch (cache) {
|
|
||||||
| Some(cache) => cache
|
| Some(cache) => cache
|
||||||
| None =>
|
| None =>
|
||||||
// note: if the underlying shapes aren't normalized, then these integrals won't be either!
|
// note: if the underlying shapes aren't normalized, then these integrals won't be either -- but that's the way it should be.
|
||||||
let continuousIntegral =
|
let continuousIntegral = Continuous.T.Integral.get(t.continuous);
|
||||||
Continuous.T.Integral.get(~cache=None, continuous);
|
let discreteIntegral = Continuous.stepwiseToLinear(Discrete.T.Integral.get(t.discrete));
|
||||||
let discreteIntegral = Discrete.T.Integral.get(~cache=None, discrete);
|
|
||||||
|
|
||||||
Continuous.make(
|
Continuous.make(
|
||||||
`Linear,
|
XYShape.PointwiseCombination.combine(
|
||||||
XYShape.PointwiseCombination.combineLinear(
|
(+.),
|
||||||
~fn=(+.),
|
XYShape.XtoY.continuousInterpolator(`Linear, `UseOutermostPoints),
|
||||||
Continuous.getShape(continuousIntegral),
|
Continuous.getShape(continuousIntegral),
|
||||||
Continuous.getShape(discreteIntegral),
|
Continuous.getShape(discreteIntegral),
|
||||||
),
|
),
|
||||||
None,
|
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
let integralEndY = (~cache, t: t) => {
|
let integralEndY = (t: t) => {
|
||||||
integral(~cache, t) |> Continuous.lastY;
|
t |> integral |> Continuous.lastY;
|
||||||
};
|
};
|
||||||
|
|
||||||
let integralXtoY = (~cache, f, t) => {
|
let integralXtoY = (f, t) => {
|
||||||
t |> integral(~cache) |> Continuous.getShape |> XYShape.XtoY.linear(f);
|
t |> integral |> Continuous.getShape |> XYShape.XtoY.linear(f);
|
||||||
};
|
};
|
||||||
|
|
||||||
let integralYtoX = (~cache, f, t) => {
|
let integralYtoX = (f, t) => {
|
||||||
t |> integral(~cache) |> Continuous.getShape |> XYShape.YtoX.linear(f);
|
t |> integral |> Continuous.getShape |> XYShape.YtoX.linear(f);
|
||||||
};
|
};
|
||||||
|
|
||||||
// This pipes all ys (continuous and discrete) through fn.
|
// This pipes all ys (continuous and discrete) through fn.
|
||||||
// If mapY is a linear operation, we might be able to update the knownIntegralSums as well;
|
// If mapY is a linear operation, we might be able to update the integralSumCaches as well;
|
||||||
// if not, they'll be set to None.
|
// if not, they'll be set to None.
|
||||||
let mapY =
|
let mapY =
|
||||||
(
|
(
|
||||||
~knownIntegralSumFn=previousIntegralSum => None,
|
~integralSumCacheFn=previousIntegralSum => None,
|
||||||
fn,
|
~integralCacheFn=previousIntegral => None,
|
||||||
{discrete, continuous}: t,
|
~fn,
|
||||||
|
t: t,
|
||||||
)
|
)
|
||||||
: t => {
|
: t => {
|
||||||
let u = E.O.bind(_, knownIntegralSumFn);
|
let yMappedDiscrete: DistTypes.discreteShape =
|
||||||
|
t.discrete
|
||||||
|
|> Discrete.T.mapY(~fn)
|
||||||
|
|> Discrete.updateIntegralSumCache(E.O.bind(t.discrete.integralSumCache, integralSumCacheFn))
|
||||||
|
|> Discrete.updateIntegralCache(E.O.bind(t.discrete.integralCache, integralCacheFn));
|
||||||
|
|
||||||
let yMappedDiscrete =
|
let yMappedContinuous: DistTypes.continuousShape =
|
||||||
discrete
|
t.continuous
|
||||||
|> Discrete.T.mapY(fn)
|
|> Continuous.T.mapY(~fn)
|
||||||
|> Discrete.updateKnownIntegralSum(u(discrete.knownIntegralSum));
|
|> Continuous.updateIntegralSumCache(E.O.bind(t.continuous.integralSumCache, integralSumCacheFn))
|
||||||
|
|> Continuous.updateIntegralCache(E.O.bind(t.continuous.integralCache, integralCacheFn));
|
||||||
let yMappedContinuous =
|
|
||||||
continuous
|
|
||||||
|> Continuous.T.mapY(fn)
|
|
||||||
|> Continuous.updateKnownIntegralSum(u(continuous.knownIntegralSum));
|
|
||||||
|
|
||||||
{
|
{
|
||||||
discrete: yMappedDiscrete,
|
discrete: yMappedDiscrete,
|
||||||
continuous: Continuous.T.mapY(fn, continuous),
|
continuous: yMappedContinuous,
|
||||||
|
integralSumCache: E.O.bind(t.integralSumCache, integralSumCacheFn),
|
||||||
|
integralCache: E.O.bind(t.integralCache, integralCacheFn),
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -208,10 +202,8 @@ module T =
|
||||||
let continuousMean = Continuous.T.mean(continuous);
|
let continuousMean = Continuous.T.mean(continuous);
|
||||||
|
|
||||||
// the combined mean is the weighted sum of the two:
|
// the combined mean is the weighted sum of the two:
|
||||||
let discreteIntegralSum =
|
let discreteIntegralSum = Discrete.T.Integral.sum(discrete);
|
||||||
Discrete.T.Integral.sum(~cache=None, discrete);
|
let continuousIntegralSum = Continuous.T.Integral.sum(continuous);
|
||||||
let continuousIntegralSum =
|
|
||||||
Continuous.T.Integral.sum(~cache=None, continuous);
|
|
||||||
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
|
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
|
||||||
|
|
||||||
(
|
(
|
||||||
|
@ -225,10 +217,8 @@ module T =
|
||||||
|
|
||||||
let variance = ({discrete, continuous} as t: t): float => {
|
let variance = ({discrete, continuous} as t: t): float => {
|
||||||
// the combined mean is the weighted sum of the two:
|
// the combined mean is the weighted sum of the two:
|
||||||
let discreteIntegralSum =
|
let discreteIntegralSum = Discrete.T.Integral.sum(discrete);
|
||||||
Discrete.T.Integral.sum(~cache=None, discrete);
|
let continuousIntegralSum = Continuous.T.Integral.sum(continuous);
|
||||||
let continuousIntegralSum =
|
|
||||||
Continuous.T.Integral.sum(~cache=None, continuous);
|
|
||||||
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
|
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
|
||||||
|
|
||||||
let getMeanOfSquares = ({discrete, continuous}: t) => {
|
let getMeanOfSquares = ({discrete, continuous}: t) => {
|
||||||
|
@ -279,27 +269,64 @@ let combineAlgebraically =
|
||||||
let ccConvResult =
|
let ccConvResult =
|
||||||
Continuous.combineAlgebraically(
|
Continuous.combineAlgebraically(
|
||||||
op,
|
op,
|
||||||
t1d.continuous,
|
t1.continuous,
|
||||||
t2d.continuous,
|
t2.continuous,
|
||||||
);
|
);
|
||||||
let dcConvResult =
|
let dcConvResult =
|
||||||
Continuous.combineAlgebraicallyWithDiscrete(
|
Continuous.combineAlgebraicallyWithDiscrete(
|
||||||
op,
|
op,
|
||||||
t2d.continuous,
|
t2.continuous,
|
||||||
t1d.discrete,
|
t1.discrete,
|
||||||
);
|
);
|
||||||
let cdConvResult =
|
let cdConvResult =
|
||||||
Continuous.combineAlgebraicallyWithDiscrete(
|
Continuous.combineAlgebraicallyWithDiscrete(
|
||||||
op,
|
op,
|
||||||
t1d.continuous,
|
t1.continuous,
|
||||||
t2d.discrete,
|
t2.discrete,
|
||||||
);
|
);
|
||||||
let continuousConvResult =
|
let continuousConvResult =
|
||||||
Continuous.reduce((+.), [|ccConvResult, dcConvResult, cdConvResult|]);
|
Continuous.reduce((+.), [|ccConvResult, dcConvResult, cdConvResult|]);
|
||||||
|
|
||||||
// ... finally, discrete (*) discrete => discrete, obviously:
|
// ... finally, discrete (*) discrete => discrete, obviously:
|
||||||
let discreteConvResult =
|
let discreteConvResult =
|
||||||
Discrete.combineAlgebraically(op, t1d.discrete, t2d.discrete);
|
Discrete.combineAlgebraically(op, t1.discrete, t2.discrete);
|
||||||
|
|
||||||
{discrete: discreteConvResult, continuous: continuousConvResult};
|
let combinedIntegralSum =
|
||||||
|
Common.combineIntegralSums(
|
||||||
|
(a, b) => Some(a *. b),
|
||||||
|
t1.integralSumCache,
|
||||||
|
t2.integralSumCache,
|
||||||
|
);
|
||||||
|
|
||||||
|
{discrete: discreteConvResult, continuous: continuousConvResult, integralSumCache: combinedIntegralSum, integralCache: None};
|
||||||
|
};
|
||||||
|
|
||||||
|
let combinePointwise = (~integralSumCachesFn = (_, _) => None, ~integralCachesFn = (_, _) => None, fn, t1: t, t2: t): t => {
|
||||||
|
let reducedDiscrete =
|
||||||
|
[|t1, t2|]
|
||||||
|
|> E.A.fmap(toDiscrete)
|
||||||
|
|> E.A.O.concatSomes
|
||||||
|
|> Discrete.reduce(~integralSumCachesFn, ~integralCachesFn, fn);
|
||||||
|
|
||||||
|
let reducedContinuous =
|
||||||
|
[|t1, t2|]
|
||||||
|
|> E.A.fmap(toContinuous)
|
||||||
|
|> E.A.O.concatSomes
|
||||||
|
|> Continuous.reduce(~integralSumCachesFn, ~integralCachesFn, fn);
|
||||||
|
|
||||||
|
let combinedIntegralSum =
|
||||||
|
Common.combineIntegralSums(
|
||||||
|
integralSumCachesFn,
|
||||||
|
t1.integralSumCache,
|
||||||
|
t2.integralSumCache,
|
||||||
|
);
|
||||||
|
|
||||||
|
let combinedIntegral =
|
||||||
|
Common.combineIntegrals(
|
||||||
|
integralCachesFn,
|
||||||
|
t1.integralCache,
|
||||||
|
t2.integralCache,
|
||||||
|
);
|
||||||
|
|
||||||
|
make(~integralSumCache=combinedIntegralSum, ~integralCache=combinedIntegral, ~discrete=reducedDiscrete, ~continuous=reducedContinuous);
|
||||||
};
|
};
|
||||||
|
|
|
@ -9,8 +9,8 @@ type assumptions = {
|
||||||
};
|
};
|
||||||
|
|
||||||
let buildSimple = (~continuous: option(DistTypes.continuousShape), ~discrete: option(DistTypes.discreteShape)): option(DistTypes.shape) => {
|
let buildSimple = (~continuous: option(DistTypes.continuousShape), ~discrete: option(DistTypes.discreteShape)): option(DistTypes.shape) => {
|
||||||
let continuous = continuous |> E.O.default(Continuous.make(`Linear, {xs: [||], ys: [||]}, Some(0.0)));
|
let continuous = continuous |> E.O.default(Continuous.make(~integralSumCache=Some(0.0), {xs: [||], ys: [||]}));
|
||||||
let discrete = discrete |> E.O.default(Discrete.make({xs: [||], ys: [||]}, Some(0.0)));
|
let discrete = discrete |> E.O.default(Discrete.make(~integralSumCache=Some(0.0), {xs: [||], ys: [||]}));
|
||||||
let cLength =
|
let cLength =
|
||||||
continuous
|
continuous
|
||||||
|> Continuous.getShape
|
|> Continuous.getShape
|
||||||
|
@ -22,15 +22,13 @@ let buildSimple = (~continuous: option(DistTypes.continuousShape), ~discrete: op
|
||||||
| (0 | 1, _) => Some(Discrete(discrete))
|
| (0 | 1, _) => Some(Discrete(discrete))
|
||||||
| (_, 0) => Some(Continuous(continuous))
|
| (_, 0) => Some(Continuous(continuous))
|
||||||
| (_, _) =>
|
| (_, _) =>
|
||||||
let discreteProbabilityMassFraction =
|
|
||||||
Discrete.T.Integral.sum(~cache=None, discrete);
|
|
||||||
let discrete = Discrete.T.normalize(discrete);
|
|
||||||
let continuous = Continuous.T.normalize(continuous);
|
|
||||||
let mixedDist =
|
let mixedDist =
|
||||||
Mixed.make(
|
Mixed.make(
|
||||||
|
~integralSumCache=None,
|
||||||
|
~integralCache=None,
|
||||||
~continuous,
|
~continuous,
|
||||||
~discrete
|
~discrete,
|
||||||
);
|
);
|
||||||
Some(Mixed(mixedDist));
|
Some(Mixed(mixedDist));
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
|
@ -15,42 +15,54 @@ let fmap = ((fn1, fn2, fn3), t: t): t =>
|
||||||
| Continuous(m) => Continuous(fn3(m))
|
| Continuous(m) => Continuous(fn3(m))
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
let toMixed =
|
let toMixed =
|
||||||
mapToAll((
|
mapToAll((
|
||||||
m => m,
|
m => m,
|
||||||
d => Mixed.make(~discrete=d, ~continuous=Continuous.empty),
|
d => Mixed.make(~integralSumCache=d.integralSumCache, ~integralCache=d.integralCache, ~discrete=d, ~continuous=Continuous.empty),
|
||||||
c => Mixed.make(~discrete=Discrete.empty, ~continuous=c),
|
c => Mixed.make(~integralSumCache=c.integralSumCache, ~integralCache=c.integralCache, ~discrete=Discrete.empty, ~continuous=c),
|
||||||
));
|
));
|
||||||
|
|
||||||
let combineAlgebraically =
|
let combineAlgebraically =
|
||||||
(op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t => {
|
(op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t => {
|
||||||
switch (t1, t2) {
|
switch (t1, t2) {
|
||||||
| (Continuous(m1), Continuous(m2)) =>
|
| (Continuous(m1), Continuous(m2)) =>
|
||||||
DistTypes.Continuous(Continuous.combineAlgebraically(op, m1, m2))
|
Continuous.combineAlgebraically(op, m1, m2) |> Continuous.T.toShape;
|
||||||
|
| (Continuous(m1), Discrete(m2))
|
||||||
|
| (Discrete(m2), Continuous(m1)) =>
|
||||||
|
Continuous.combineAlgebraicallyWithDiscrete(op, m1, m2) |> Continuous.T.toShape
|
||||||
| (Discrete(m1), Discrete(m2)) =>
|
| (Discrete(m1), Discrete(m2)) =>
|
||||||
DistTypes.Discrete(Discrete.combineAlgebraically(op, m1, m2))
|
Discrete.combineAlgebraically(op, m1, m2) |> Discrete.T.toShape
|
||||||
| (m1, m2) =>
|
| (m1, m2) =>
|
||||||
DistTypes.Mixed(
|
Mixed.combineAlgebraically(
|
||||||
Mixed.combineAlgebraically(op, toMixed(m1), toMixed(m2)),
|
op,
|
||||||
|
toMixed(m1),
|
||||||
|
toMixed(m2),
|
||||||
)
|
)
|
||||||
|
|> Mixed.T.toShape
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
let combinePointwise =
|
let combinePointwise =
|
||||||
(~knownIntegralSumsFn=(_, _) => None, fn, t1: t, t2: t) =>
|
(~integralSumCachesFn: (float, float) => option(float) = (_, _) => None,
|
||||||
|
~integralCachesFn: (DistTypes.continuousShape, DistTypes.continuousShape) => option(DistTypes.continuousShape) = (_, _) => None,
|
||||||
|
fn,
|
||||||
|
t1: t,
|
||||||
|
t2: t) =>
|
||||||
switch (t1, t2) {
|
switch (t1, t2) {
|
||||||
| (Continuous(m1), Continuous(m2)) =>
|
| (Continuous(m1), Continuous(m2)) =>
|
||||||
DistTypes.Continuous(
|
DistTypes.Continuous(
|
||||||
Continuous.combinePointwise(~knownIntegralSumsFn, fn, m1, m2),
|
Continuous.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
|
||||||
)
|
)
|
||||||
| (Discrete(m1), Discrete(m2)) =>
|
| (Discrete(m1), Discrete(m2)) =>
|
||||||
DistTypes.Discrete(
|
DistTypes.Discrete(
|
||||||
Discrete.combinePointwise(~knownIntegralSumsFn, fn, m1, m2),
|
Discrete.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
|
||||||
)
|
)
|
||||||
| (m1, m2) =>
|
| (m1, m2) =>
|
||||||
DistTypes.Mixed(
|
DistTypes.Mixed(
|
||||||
Mixed.combinePointwise(
|
Mixed.combinePointwise(
|
||||||
~knownIntegralSumsFn,
|
~integralSumCachesFn,
|
||||||
|
~integralCachesFn,
|
||||||
fn,
|
fn,
|
||||||
toMixed(m1),
|
toMixed(m1),
|
||||||
toMixed(m2),
|
toMixed(m2),
|
||||||
|
@ -58,15 +70,6 @@ let combinePointwise =
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
// TODO: implement these functions
|
|
||||||
let pdf = (f: float, t: t): float => {
|
|
||||||
0.0;
|
|
||||||
};
|
|
||||||
|
|
||||||
let inv = (f: float, t: t): float => {
|
|
||||||
0.0;
|
|
||||||
};
|
|
||||||
|
|
||||||
module T =
|
module T =
|
||||||
Dist({
|
Dist({
|
||||||
type t = DistTypes.shape;
|
type t = DistTypes.shape;
|
||||||
|
@ -84,7 +87,7 @@ module T =
|
||||||
let toContinuous = t => None;
|
let toContinuous = t => None;
|
||||||
let toDiscrete = t => None;
|
let toDiscrete = t => None;
|
||||||
|
|
||||||
let downsample = (~cache=None, i, t) =>
|
let downsample = (i, t) =>
|
||||||
fmap(
|
fmap(
|
||||||
(
|
(
|
||||||
Mixed.T.downsample(i),
|
Mixed.T.downsample(i),
|
||||||
|
@ -105,8 +108,21 @@ module T =
|
||||||
);
|
);
|
||||||
|
|
||||||
let toDiscreteProbabilityMassFraction = t => 0.0;
|
let toDiscreteProbabilityMassFraction = t => 0.0;
|
||||||
|
|
||||||
let normalize =
|
let normalize =
|
||||||
fmap((Mixed.T.normalize, Discrete.T.normalize, Continuous.T.normalize));
|
fmap((
|
||||||
|
Mixed.T.normalize,
|
||||||
|
Discrete.T.normalize,
|
||||||
|
Continuous.T.normalize
|
||||||
|
));
|
||||||
|
|
||||||
|
let updateIntegralCache = (integralCache, t: t): t =>
|
||||||
|
fmap((
|
||||||
|
Mixed.T.updateIntegralCache(integralCache),
|
||||||
|
Discrete.T.updateIntegralCache(integralCache),
|
||||||
|
Continuous.T.updateIntegralCache(integralCache),
|
||||||
|
), t);
|
||||||
|
|
||||||
let toContinuous =
|
let toContinuous =
|
||||||
mapToAll((
|
mapToAll((
|
||||||
Mixed.T.toContinuous,
|
Mixed.T.toContinuous,
|
||||||
|
@ -127,51 +143,39 @@ module T =
|
||||||
Continuous.T.toDiscreteProbabilityMassFraction,
|
Continuous.T.toDiscreteProbabilityMassFraction,
|
||||||
));
|
));
|
||||||
|
|
||||||
let normalizedToDiscrete =
|
|
||||||
mapToAll((
|
|
||||||
Mixed.T.normalizedToDiscrete,
|
|
||||||
Discrete.T.normalizedToDiscrete,
|
|
||||||
Continuous.T.normalizedToDiscrete,
|
|
||||||
));
|
|
||||||
let normalizedToContinuous =
|
|
||||||
mapToAll((
|
|
||||||
Mixed.T.normalizedToContinuous,
|
|
||||||
Discrete.T.normalizedToContinuous,
|
|
||||||
Continuous.T.normalizedToContinuous,
|
|
||||||
));
|
|
||||||
let minX = mapToAll((Mixed.T.minX, Discrete.T.minX, Continuous.T.minX));
|
let minX = mapToAll((Mixed.T.minX, Discrete.T.minX, Continuous.T.minX));
|
||||||
let integral = (~cache) =>
|
let integral =
|
||||||
mapToAll((
|
mapToAll((
|
||||||
Mixed.T.Integral.get(~cache=None),
|
Mixed.T.Integral.get,
|
||||||
Discrete.T.Integral.get(~cache=None),
|
Discrete.T.Integral.get,
|
||||||
Continuous.T.Integral.get(~cache=None),
|
Continuous.T.Integral.get,
|
||||||
));
|
));
|
||||||
let integralEndY = (~cache) =>
|
let integralEndY =
|
||||||
mapToAll((
|
mapToAll((
|
||||||
Mixed.T.Integral.sum(~cache=None),
|
Mixed.T.Integral.sum,
|
||||||
Discrete.T.Integral.sum(~cache),
|
Discrete.T.Integral.sum,
|
||||||
Continuous.T.Integral.sum(~cache=None),
|
Continuous.T.Integral.sum,
|
||||||
));
|
));
|
||||||
let integralXtoY = (~cache, f) => {
|
let integralXtoY = (f) => {
|
||||||
mapToAll((
|
mapToAll((
|
||||||
Mixed.T.Integral.xToY(~cache, f),
|
Mixed.T.Integral.xToY(f),
|
||||||
Discrete.T.Integral.xToY(~cache, f),
|
Discrete.T.Integral.xToY(f),
|
||||||
Continuous.T.Integral.xToY(~cache, f),
|
Continuous.T.Integral.xToY(f),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let integralYtoX = (~cache, f) => {
|
let integralYtoX = (f) => {
|
||||||
mapToAll((
|
mapToAll((
|
||||||
Mixed.T.Integral.yToX(~cache, f),
|
Mixed.T.Integral.yToX(f),
|
||||||
Discrete.T.Integral.yToX(~cache, f),
|
Discrete.T.Integral.yToX(f),
|
||||||
Continuous.T.Integral.yToX(~cache, f),
|
Continuous.T.Integral.yToX(f),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let maxX = mapToAll((Mixed.T.maxX, Discrete.T.maxX, Continuous.T.maxX));
|
let maxX = mapToAll((Mixed.T.maxX, Discrete.T.maxX, Continuous.T.maxX));
|
||||||
let mapY = (~knownIntegralSumFn=previousIntegralSum => None, fn) =>
|
let mapY = (~integralSumCacheFn=previousIntegralSum => None, ~integralCacheFn=previousIntegral=>None, ~fn) =>
|
||||||
fmap((
|
fmap((
|
||||||
Mixed.T.mapY(~knownIntegralSumFn, fn),
|
Mixed.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
|
||||||
Discrete.T.mapY(~knownIntegralSumFn, fn),
|
Discrete.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
|
||||||
Continuous.T.mapY(~knownIntegralSumFn, fn),
|
Continuous.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
|
||||||
));
|
));
|
||||||
|
|
||||||
let mean = (t: t): float =>
|
let mean = (t: t): float =>
|
||||||
|
@ -189,6 +193,14 @@ module T =
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
|
let pdf = (f: float, t: t) => {
|
||||||
|
let mixedPoint: DistTypes.mixedPoint = T.xToY(f, t);
|
||||||
|
mixedPoint.continuous +. mixedPoint.discrete;
|
||||||
|
};
|
||||||
|
|
||||||
|
let inv = T.Integral.yToX;
|
||||||
|
let cdf = T.Integral.xToY;
|
||||||
|
|
||||||
let doN = (n, fn) => {
|
let doN = (n, fn) => {
|
||||||
let items = Belt.Array.make(n, 0.0);
|
let items = Belt.Array.make(n, 0.0);
|
||||||
for (x in 0 to n - 1) {
|
for (x in 0 to n - 1) {
|
||||||
|
@ -198,21 +210,24 @@ let doN = (n, fn) => {
|
||||||
items;
|
items;
|
||||||
};
|
};
|
||||||
|
|
||||||
let sample = (cache, t: t): float => {
|
let sample = (t: t): float => {
|
||||||
let randomItem = Random.float(1.);
|
let randomItem = Random.float(1.);
|
||||||
let bar = T.Integral.yToX(~cache, randomItem, t);
|
let bar = t |> T.Integral.yToX(randomItem);
|
||||||
bar;
|
bar;
|
||||||
};
|
};
|
||||||
|
|
||||||
let sampleNRendered = (n, dist) => {
|
let sampleNRendered = (n, dist) => {
|
||||||
let integralCache = T.Integral.get(~cache=None, dist);
|
let integralCache = T.Integral.get(dist);
|
||||||
doN(n, () => sample(Some(integralCache), dist));
|
let distWithUpdatedIntegralCache = T.updateIntegralCache(Some(integralCache), dist);
|
||||||
|
|
||||||
|
doN(n, () => sample(distWithUpdatedIntegralCache));
|
||||||
};
|
};
|
||||||
|
|
||||||
let operate = (distToFloatOp: ExpressionTypes.distToFloatOperation, s) =>
|
let operate = (distToFloatOp: ExpressionTypes.distToFloatOperation, s): float =>
|
||||||
switch (distToFloatOp) {
|
switch (distToFloatOp) {
|
||||||
| `Pdf(f) => pdf(f, s)
|
| `Pdf(f) => pdf(f, s)
|
||||||
|
| `Cdf(f) => pdf(f, s)
|
||||||
| `Inv(f) => inv(f, s)
|
| `Inv(f) => inv(f, s)
|
||||||
| `Sample => sample(None, s)
|
| `Sample => sample(s)
|
||||||
| `Mean => T.mean(s)
|
| `Mean => T.mean(s)
|
||||||
};
|
};
|
||||||
|
|
|
@ -19,6 +19,7 @@ module T = {
|
||||||
let ys = (t: t) => t.ys;
|
let ys = (t: t) => t.ys;
|
||||||
let length = (t: t) => E.A.length(t.xs);
|
let length = (t: t) => E.A.length(t.xs);
|
||||||
let empty = {xs: [||], ys: [||]};
|
let empty = {xs: [||], ys: [||]};
|
||||||
|
let isEmpty = (t: t) => length(t) == 0;
|
||||||
let minX = (t: t) => t |> xs |> E.A.Sorted.min |> extImp;
|
let minX = (t: t) => t |> xs |> E.A.Sorted.min |> extImp;
|
||||||
let maxX = (t: t) => t |> xs |> E.A.Sorted.max |> extImp;
|
let maxX = (t: t) => t |> xs |> E.A.Sorted.max |> extImp;
|
||||||
let firstY = (t: t) => t |> ys |> E.A.first |> extImp;
|
let firstY = (t: t) => t |> ys |> E.A.first |> extImp;
|
||||||
|
@ -32,6 +33,11 @@ module T = {
|
||||||
let accumulateYs = (fn, p: t) => {
|
let accumulateYs = (fn, p: t) => {
|
||||||
fromArray((p.xs, E.A.accumulate(fn, p.ys)));
|
fromArray((p.xs, E.A.accumulate(fn, p.ys)));
|
||||||
};
|
};
|
||||||
|
let concat = (t1: t, t2: t) => {
|
||||||
|
let cxs = Array.concat([t1.xs, t2.xs]);
|
||||||
|
let cys = Array.concat([t1.ys, t2.ys]);
|
||||||
|
{xs: cxs, ys: cys};
|
||||||
|
};
|
||||||
let fromZippedArray = (pairs: array((float, float))): t =>
|
let fromZippedArray = (pairs: array((float, float))): t =>
|
||||||
pairs |> Belt.Array.unzip |> fromArray;
|
pairs |> Belt.Array.unzip |> fromArray;
|
||||||
let equallyDividedXs = (t: t, newLength) => {
|
let equallyDividedXs = (t: t, newLength) => {
|
||||||
|
@ -137,6 +143,63 @@ module XtoY = {
|
||||||
};
|
};
|
||||||
n;
|
n;
|
||||||
};
|
};
|

+  /* Returns a between-points-interpolating function that can be used with PointwiseCombination.combine.
+     Interpolation can either be stepwise (using the value on the left) or linear. Extrapolation can be `UseZero or `UseOutermostPoints. */
+  let continuousInterpolator = (interpolation: DistTypes.interpolationStrategy, extrapolation: DistTypes.extrapolationStrategy): interpolator => {
+    switch (interpolation, extrapolation) {
+    | (`Linear, `UseZero) => (t: T.t, leftIndex: int, x: float) => {
+        if (leftIndex < 0) {
+          0.0
+        } else if (leftIndex >= T.length(t) - 1) {
+          0.0
+        } else {
+          let x1 = t.xs[leftIndex];
+          let x2 = t.xs[leftIndex + 1];
+          let y1 = t.ys[leftIndex];
+          let y2 = t.ys[leftIndex + 1];
+          let fraction = (x -. x1) /. (x2 -. x1);
+          y1 *. (1. -. fraction) +. y2 *. fraction;
+        };
+      }
+    | (`Linear, `UseOutermostPoints) => (t: T.t, leftIndex: int, x: float) => {
+        if (leftIndex < 0) {
+          t.ys[0];
+        } else if (leftIndex >= T.length(t) - 1) {
+          t.ys[T.length(t) - 1]
+        } else {
+          let x1 = t.xs[leftIndex];
+          let x2 = t.xs[leftIndex + 1];
+          let y1 = t.ys[leftIndex];
+          let y2 = t.ys[leftIndex + 1];
+          let fraction = (x -. x1) /. (x2 -. x1);
+          y1 *. (1. -. fraction) +. y2 *. fraction;
+        };
+      }
+    | (`Stepwise, `UseZero) => (t: T.t, leftIndex: int, x: float) => {
+        if (leftIndex < 0) {
+          0.0
+        } else if (leftIndex >= T.length(t) - 1) {
+          0.0
+        } else {
+          t.ys[leftIndex];
+        }
+      }
+    | (`Stepwise, `UseOutermostPoints) => (t: T.t, leftIndex: int, x: float) => {
+        if (leftIndex < 0) {
+          t.ys[0];
+        } else if (leftIndex >= T.length(t) - 1) {
+          t.ys[T.length(t) - 1]
+        } else {
+          t.ys[leftIndex];
+        }
+      }
+    }
+  };
+
+  /* Returns a between-points-interpolating function that can be used with PointwiseCombination.combine.
+     For discrete distributions, the probability density between points is zero, so we just return zero here. */
+  let discreteInterpolator: interpolator = (t: T.t, leftIndex: int, x: float) => 0.0;
 };
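A rough usage sketch of the interpolators above, assuming they are reached as `XYShape.XtoY.continuousInterpolator` and that `leftIndex` is the index of the point at or immediately left of `x` (the expected values are easy to check by hand against the code above):

```reason
let shape: XYShape.T.t = {xs: [|0., 10.|], ys: [|1., 3.|]};
let interp = XYShape.XtoY.continuousInterpolator(`Linear, `UseOutermostPoints);

let mid = interp(shape, 0, 5.);     /* 2.0 - halfway between the two points */
let below = interp(shape, -1, -4.); /* 1.0 - clamped to the leftmost y */
let above = interp(shape, 1, 42.);  /* 3.0 - clamped to the rightmost y */
```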
||||||
|
|
||||||
module XsConversion = {
|
module XsConversion = {
|
||||||
|
@ -171,24 +234,22 @@ module Zipped = {
|
||||||
};
|
};
|
||||||
|
|
||||||
module PointwiseCombination = {
|
module PointwiseCombination = {
|
||||||
type xsSelection =
|
|
||||||
| ALL_XS
|
|
||||||
| XS_EVENLY_DIVIDED(int);
|
|
||||||
|
|
||||||
let combineLinear = [%raw {| // : (float => float => float, T.t, T.t) => T.t
|
// t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
|
||||||
|
let combine = [%raw {| // : (float => float => float, T.t, T.t, bool) => T.t
|
||||||
// This function combines two xyShapes by looping through both of them simultaneously.
|
// This function combines two xyShapes by looping through both of them simultaneously.
|
||||||
// It always moves on to the next smallest x, whether that's in the first or second input's xs,
|
// It always moves on to the next smallest x, whether that's in the first or second input's xs,
|
||||||
// and interpolates the value on the other side, thus accumulating xs and ys.
|
// and interpolates the value on the other side, thus accumulating xs and ys.
|
||||||
// In this implementation (unlike in XtoY.linear above), values outside of a shape's xs are considered 0.0 (instead of firstY or lastY).
|
|
||||||
// This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.
|
// This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.
|
||||||
|
|
||||||
function(fn, t1, t2) {
|
function(fn, interpolator, t1, t2) {
|
||||||
let t1n = t1.xs.length;
|
let t1n = t1.xs.length;
|
||||||
let t2n = t2.xs.length;
|
let t2n = t2.xs.length;
|
||||||
let outX = [];
|
let outX = [];
|
||||||
let outY = [];
|
let outY = [];
|
||||||
let i = -1;
|
let i = -1;
|
||||||
let j = -1;
|
let j = -1;
|
||||||
|
|
||||||
while (i <= t1n - 1 && j <= t2n - 1) {
|
while (i <= t1n - 1 && j <= t2n - 1) {
|
||||||
let x, ya, yb;
|
let x, ya, yb;
|
||||||
if (j == t2n - 1 && i < t1n - 1 ||
|
if (j == t2n - 1 && i < t1n - 1 ||
|
||||||
|
@ -198,8 +259,7 @@ module PointwiseCombination = {
|
||||||
x = t1.xs[i];
|
x = t1.xs[i];
|
||||||
ya = t1.ys[i];
|
ya = t1.ys[i];
|
||||||
|
|
||||||
let bFraction = (x - (t2.xs[j] || x)) / ((t2.xs[j+1] || x) - (t2.xs[j] || 0));
|
yb = interpolator(t2, j, x);
|
||||||
yb = (t2.ys[j] || 0) * (1-bFraction) + (t2.ys[j+1] || 0) * bFraction;
|
|
||||||
} else if (i == t1n - 1 && j < t2n - 1 ||
|
} else if (i == t1n - 1 && j < t2n - 1 ||
|
||||||
t1.xs[i+1] > t2.xs[j+1]) { // if b has to catch up to a, or if a is already done
|
t1.xs[i+1] > t2.xs[j+1]) { // if b has to catch up to a, or if a is already done
|
||||||
j++;
|
j++;
|
||||||
|
@ -207,8 +267,7 @@ module PointwiseCombination = {
|
||||||
x = t2.xs[j];
|
x = t2.xs[j];
|
||||||
yb = t2.ys[j];
|
yb = t2.ys[j];
|
||||||
|
|
||||||
let aFraction = (x - (t1.xs[i] || x)) / ((t1.xs[i+1] || x) - (t1.xs[i] || 0));
|
ya = interpolator(t1, i, x);
|
||||||
ya = (t1.ys[i] || 0) * (1-aFraction) + (t1.ys[i+1] || 0) * aFraction;
|
|
||||||
} else if (i < t1n - 1 && j < t2n && t1.xs[i+1] === t2.xs[j+1]) { // if they happen to be equal, move both ahead
|
} else if (i < t1n - 1 && j < t2n && t1.xs[i+1] === t2.xs[j+1]) { // if they happen to be equal, move both ahead
|
||||||
i++;
|
i++;
|
||||||
j++;
|
j++;
|
||||||
|
@ -227,15 +286,16 @@ module PointwiseCombination = {
|
||||||
outX.push(x);
|
outX.push(x);
|
||||||
outY.push(fn(ya, yb));
|
outY.push(fn(ya, yb));
|
||||||
}
|
}
|
||||||
|
|
||||||
return {xs: outX, ys: outY};
|
return {xs: outX, ys: outY};
|
||||||
}
|
}
|
||||||
|}];
|
|}];
|
||||||
|
|
||||||
let combine =
|
let combineEvenXs =
|
||||||
(
|
(
|
||||||
~xToYSelection: (float, T.t) => 'a,
|
|
||||||
~xsSelection=ALL_XS,
|
|
||||||
~fn,
|
~fn,
|
||||||
|
~xToYSelection,
|
||||||
|
sampleCount,
|
||||||
t1: T.t,
|
t1: T.t,
|
||||||
t2: T.t,
|
t2: T.t,
|
||||||
) => {
|
) => {
|
||||||
|
@ -245,25 +305,15 @@ module PointwiseCombination = {
|
||||||
| (0, _) => t2
|
| (0, _) => t2
|
||||||
| (_, 0) => t1
|
| (_, 0) => t1
|
||||||
| (_, _) => {
|
| (_, _) => {
|
||||||
let allXs =
|
let allXs = Ts.equallyDividedXs([|t1, t2|], sampleCount);
|
||||||
switch (xsSelection) {
|
|
||||||
| ALL_XS => Ts.allXs([|t1, t2|])
|
|
||||||
| XS_EVENLY_DIVIDED(sampleCount) =>
|
|
||||||
Ts.equallyDividedXs([|t1, t2|], sampleCount)
|
|
||||||
};
|
|
||||||
|
|
||||||
let allYs =
|
let allYs = allXs |> E.A.fmap(x => fn(xToYSelection(x, t1), xToYSelection(x, t2)));
|
||||||
allXs |> E.A.fmap(x => fn(xToYSelection(x, t1), xToYSelection(x, t2)));
|
|
||||||
|
|
||||||
T.fromArrays(allXs, allYs);
|
T.fromArrays(allXs, allYs);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
//let combineLinear = combine(~xToYSelection=XtoY.linear);
|
|
||||||
let combineStepwise = combine(~xToYSelection=XtoY.stepwiseIncremental);
|
|
||||||
let combineIfAtX = combine(~xToYSelection=XtoY.stepwiseIfAtX);
|
|
||||||
|
|
||||||
// TODO: I'd bet this is pretty slow. Maybe it would be faster to intersperse Xs and Ys separately.
|
// TODO: I'd bet this is pretty slow. Maybe it would be faster to intersperse Xs and Ys separately.
|
||||||
let intersperse = (t1: T.t, t2: T.t) => {
|
let intersperse = (t1: T.t, t2: T.t) => {
|
||||||
E.A.intersperse(T.zip(t1), T.zip(t2)) |> T.fromZippedArray;
|
E.A.intersperse(T.zip(t1), T.zip(t2)) |> T.fromZippedArray;
|
||||||
|
@ -324,8 +374,31 @@ module Range = {
|
||||||
|
|
||||||
let derivative = mapYsBasedOnRanges(delta_y_over_delta_x);
|
let derivative = mapYsBasedOnRanges(delta_y_over_delta_x);
|

-  // TODO: It would be nicer if this the diff didn't change the first element, and also maybe if there were a more elegant way of doing this.
+  let stepwiseToLinear = ({xs, ys}: T.t): T.t => {
+    // adds points at the bottom of each step.
+    let length = E.A.length(xs);
+    let newXs: array(float) = Belt.Array.makeUninitializedUnsafe(2 * length);
+    let newYs: array(float) = Belt.Array.makeUninitializedUnsafe(2 * length);
+
+    Belt.Array.set(newXs, 0, xs[0] -. epsilon_float) |> ignore;
+    Belt.Array.set(newYs, 0, 0.) |> ignore;
+    Belt.Array.set(newXs, 1, xs[0]) |> ignore;
+    Belt.Array.set(newYs, 1, ys[0]) |> ignore;
+
+    for (i in 1 to E.A.length(xs) - 1) {
+      Belt.Array.set(newXs, i * 2, xs[i] -. epsilon_float) |> ignore;
+      Belt.Array.set(newYs, i * 2, ys[i-1]) |> ignore;
+      Belt.Array.set(newXs, i * 2 + 1, xs[i]) |> ignore;
+      Belt.Array.set(newYs, i * 2 + 1, ys[i]) |> ignore;
+      ();
+    };
+
+    {xs: newXs, ys: newYs};
+  };
+
+  // TODO: I think this isn't needed by any functions anymore.
   let stepsToContinuous = t => {
+    // TODO: It would be nicer if this the diff didn't change the first element, and also maybe if there were a more elegant way of doing this.
     let diff = T.xTotalRange(t) |> (r => r *. 0.00001);
     let items =
       switch (E.A.toRanges(Belt.Array.zip(t.xs, t.ys))) {
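`stepwiseToLinear` doubles the points: just before each step it inserts a point carrying the previous step's value (zero before the first step), so that linear interpolation over the new shape reproduces the original steps. A rough worked example, assuming the function is exposed on `XYShape.Range` as in the hunk above:

```reason
let stepwise: XYShape.T.t = {xs: [|1., 2.|], ys: [|5., 8.|]};
let linear = XYShape.Range.stepwiseToLinear(stepwise);
/* linear.xs == [|1. -. epsilon_float, 1., 2. -. epsilon_float, 2.|]
   linear.ys == [|0., 5., 5., 8.|] */
```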
||||||
|
@ -356,10 +429,10 @@ let pointLogScore = (prediction, answer) =>
|
||||||
};
|
};
|
||||||
|
|
||||||
let logScorePoint = (sampleCount, t1, t2) =>
|
let logScorePoint = (sampleCount, t1, t2) =>
|
||||||
PointwiseCombination.combine(
|
PointwiseCombination.combineEvenXs(
|
||||||
~xsSelection=XS_EVENLY_DIVIDED(sampleCount),
|
|
||||||
~xToYSelection=XtoY.linear,
|
|
||||||
~fn=pointLogScore,
|
~fn=pointLogScore,
|
||||||
|
~xToYSelection=XtoY.linear,
|
||||||
|
sampleCount,
|
||||||
t1,
|
t1,
|
||||||
t2,
|
t2,
|
||||||
)
|
)
|
||||||

@@ -20,61 +20,15 @@ module AlgebraicCombination = {
| _ => Ok(`AlgebraicCombination((operation, t1, t2)))
};

- let tryCombination = (n, algebraicOp, t1: node, t2: node) => {
+ let combinationByRendering =
- let sampleN =
+ (evaluationParams, algebraicOp, t1: node, t2: node)
- mapRenderable(Shape.sampleNRendered(n), SymbolicDist.T.sampleN(n));
+ : result(node, string) => {
- switch (sampleN(t1), sampleN(t2)) {
+ E.R.merge(
- | (Some(a), Some(b)) =>
+ Render.ensureIsRenderedAndGetShape(evaluationParams, t1),
- Some(
+ Render.ensureIsRenderedAndGetShape(evaluationParams, t2),
- Belt.Array.zip(a, b)
+ )
- |> E.A.fmap(((a, b)) => Operation.Algebraic.toFn(algebraicOp, a, b)),
+ |> E.R.fmap(((a, b)) =>
- )
+ `RenderedDist(Shape.combineAlgebraically(algebraicOp, a, b))
- | _ => None
- };
- };

- let renderIfNotRendered = (params, t) =>
- !renderable(t)
- ? switch (render(params, t)) {
- | Ok(r) => Ok(r)
- | Error(e) => Error(e)
- }
- : Ok(t);

- let combineAsShapes =
- (evaluationParams: evaluationParams, algebraicOp, t1: node, t2: node) => {
- let i1 = renderIfNotRendered(evaluationParams, t1);
- let i2 = renderIfNotRendered(evaluationParams, t2);
- E.R.merge(i1, i2)
- |> E.R.bind(
- _,
- ((a, b)) => {
- let samples =
- tryCombination(
- evaluationParams.samplingInputs.sampleCount,
- algebraicOp,
- a,
- b,
- );
- let shape =
- samples
- |> E.O.fmap(
- Samples.T.fromSamples(
- ~samplingInputs={
- sampleCount:
- Some(evaluationParams.samplingInputs.sampleCount),
- outputXYPoints:
- Some(evaluationParams.samplingInputs.outputXYPoints),
- kernelWidth: evaluationParams.samplingInputs.kernelWidth,
- },
- ),
- )
- |> E.O.bind(_, (r: RenderTypes.ShapeRenderer.Sampling.outputs) =>
- r.shape
- )
- |> E.O.toResult("No response");
- shape |> E.R.fmap(r => `Normalize(`RenderedDist(r)));
- },
);
};

@@ -92,7 +46,7 @@ module AlgebraicCombination = {
_,
fun
| `SymbolicDist(d) as t => Ok(t)
- | _ => combineAsShapes(evaluationParams, algebraicOp, t1, t2),
+ | _ => combinationByRendering(evaluationParams, algebraicOp, t1, t2),
);
};

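The rewrite above leans on merging two results and mapping over the pair, instead of threading options through sampling. A minimal, self-contained sketch of that merge pattern (the merge below is a local stand-in; the repo's E.R.merge is assumed to behave like this):

/* Stand-in for E.R.merge: succeed only when both inputs succeed. */
let merge = (a: result('a, 'e), b: result('b, 'e)): result(('a, 'b), 'e) =>
  switch (a, b) {
  | (Ok(x), Ok(y)) => Ok((x, y))
  | (Error(e), _) => Error(e)
  | (_, Error(e)) => Error(e)
  };

/* Both operands rendered: we get the pair of shapes to combine. */
let ok = merge(Ok("shape1"), Ok("shape2")); /* Ok(("shape1", "shape2")) */
/* Any failure short-circuits the whole combination. */
let err = merge(Error("did not render"), Ok("shape2")); /* Error("did not render") */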
@@ -101,16 +55,18 @@ module VerticalScaling = {
(evaluationParams: evaluationParams, scaleOp, t, scaleBy) => {
// scaleBy has to be a single float, otherwise we'll return an error.
let fn = Operation.Scale.toFn(scaleOp);
- let knownIntegralSumFn = Operation.Scale.toKnownIntegralSumFn(scaleOp);
+ let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(scaleOp);
- let renderedShape = render(evaluationParams, t);
+ let integralCacheFn = Operation.Scale.toIntegralCacheFn(scaleOp);
+ let renderedShape = Render.render(evaluationParams, t);

switch (renderedShape, scaleBy) {
| (Ok(`RenderedDist(rs)), `SymbolicDist(`Float(sm))) =>
Ok(
`RenderedDist(
Shape.T.mapY(
- ~knownIntegralSumFn=knownIntegralSumFn(sm),
+ ~integralSumCacheFn=integralSumCacheFn(sm),
- fn(sm),
+ ~integralCacheFn=integralCacheFn(sm),
+ ~fn=fn(sm),
rs,
),
),
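The cached-sum bookkeeping above is plain arithmetic: if every y of a shape is multiplied by k, the shape's total mass is multiplied by k as well, so the cache can be updated without re-integrating (the Exponentiate and Log cases give up and return None). A self-contained sketch of that rule:

/* Same rule as Operation.Scale.toIntegralSumCacheFn(`Multiply):
   new cached mass = scale factor k times the old cached mass, if one exists. */
let scaledIntegralSumCache = (k: float, cachedSum: option(float)) =>
  Belt.Option.map(cachedSum, s => k *. s);

/* A normalized shape (cached mass 1.0) scaled by 0.25 carries mass 0.25. */
let example = scaledIntegralSumCache(0.25, Some(1.0)); /* Some(0.25) */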
@@ -122,13 +78,23 @@ module VerticalScaling = {
};

module PointwiseCombination = {
- let pointwiseAdd = (evaluationParams: evaluationParams, t1, t2) => {
+ let pointwiseAdd = (evaluationParams: evaluationParams, t1: t, t2: t) => {
- switch (render(evaluationParams, t1), render(evaluationParams, t2)) {
+ switch (Render.render(evaluationParams, t1), Render.render(evaluationParams, t2)) {
| (Ok(`RenderedDist(rs1)), Ok(`RenderedDist(rs2))) =>
Ok(
`RenderedDist(
Shape.combinePointwise(
- ~knownIntegralSumsFn=(a, b) => Some(a +. b),
+ ~integralSumCachesFn=(a, b) => Some(a +. b),
+ ~integralCachesFn=
+ (a, b) =>
+ Some(
+ Continuous.combinePointwise(
+ ~distributionType=`CDF,
+ (+.),
+ a,
+ b,
+ ),
+ ),
(+.),
rs1,
rs2,

@@ -141,7 +107,7 @@ module PointwiseCombination = {
};
};

- let pointwiseMultiply = (evaluationParams: evaluationParams, t1, t2) => {
+ let pointwiseMultiply = (evaluationParams: evaluationParams, t1: t, t2: t) => {
// TODO: construct a function that we can easily sample from, to construct
// a RenderedDist. Use the xMin and xMax of the rendered shapes to tell the sampling function where to look.
Error(

@@ -150,7 +116,12 @@ module PointwiseCombination = {
};

let operationToLeaf =
- (evaluationParams: evaluationParams, pointwiseOp, t1, t2) => {
+ (
+ evaluationParams: evaluationParams,
+ pointwiseOp: pointwiseOperation,
+ t1: t,
+ t2: t,
+ ) => {
switch (pointwiseOp) {
| `Add => pointwiseAdd(evaluationParams, t1, t2)
| `Multiply => pointwiseMultiply(evaluationParams, t1, t2)

@@ -163,7 +134,7 @@ module Truncate = {
switch (leftCutoff, rightCutoff, t) {
| (None, None, t) => `Solution(t)
| (Some(lc), Some(rc), t) when lc > rc =>
- `Error("Left truncation bound must be smaller than right bound.")
+ `Error("Left truncation bound must be smaller than right truncation bound.")
| (lc, rc, `SymbolicDist(`Uniform(u))) =>
`Solution(
`SymbolicDist(`Uniform(SymbolicDist.Uniform.truncate(lc, rc, u))),

@@ -174,9 +145,9 @@ module Truncate = {

let truncateAsShape =
(evaluationParams: evaluationParams, leftCutoff, rightCutoff, t) => {
- // TODO: use named args in renderToShape; if we're lucky we can at least get the tail
+ // TODO: use named args for xMin/xMax in renderToShape; if we're lucky we can at least get the tail
// of a distribution we otherwise wouldn't get at all
- switch (render(evaluationParams, t)) {
+ switch (Render.ensureIsRendered(evaluationParams, t)) {
| Ok(`RenderedDist(rs)) =>
Ok(`RenderedDist(Shape.T.truncate(leftCutoff, rightCutoff, rs)))
| Error(e) => Error(e)

@@ -1,7 +1,13 @@
type algebraicOperation = [ | `Add | `Multiply | `Subtract | `Divide];
type pointwiseOperation = [ | `Add | `Multiply];
type scaleOperation = [ | `Multiply | `Exponentiate | `Log];
- type distToFloatOperation = [ | `Pdf(float) | `Inv(float) | `Mean | `Sample];
+ type distToFloatOperation = [
+ | `Pdf(float)
+ | `Cdf(float)
+ | `Inv(float)
+ | `Mean
+ | `Sample
+ ];

module ExpressionTree = {
type node = [
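Every consumer that pattern-matches on distToFloatOperation now needs a Cdf branch. A minimal, self-contained sketch of such a match (the describe helper is hypothetical, just to show the shape of the dispatch that Operation.DistToFloat.format and SymbolicDist.T.operate cover further down in this diff):

type distToFloatOperation = [ | `Pdf(float) | `Cdf(float) | `Inv(float) | `Mean | `Sample];

/* Hypothetical helper: label each operation. */
let describe = (op: distToFloatOperation) =>
  switch (op) {
  | `Pdf(x) => "density at " ++ Js.Float.toString(x)
  | `Cdf(x) => "cumulative probability at " ++ Js.Float.toString(x)
  | `Inv(p) => "quantile at " ++ Js.Float.toString(p)
  | `Mean => "mean"
  | `Sample => "a random draw"
  };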
@@ -31,26 +37,42 @@ module ExpressionTree = {
let evaluateNode = (evaluationParams: evaluationParams) =>
evaluationParams.evaluateNode(evaluationParams);

- let render = (evaluationParams: evaluationParams, r) =>
- evaluateNode(evaluationParams, `Render(r));

let evaluateAndRetry = (evaluationParams, fn, node) =>
node
|> evaluationParams.evaluateNode(evaluationParams)
|> E.R.bind(_, fn(evaluationParams));

- let renderable =
+ module Render = {
- fun
+ type t = node;
- | `SymbolicDist(_) => true
- | `RenderedDist(_) => true
+ let render = (evaluationParams: evaluationParams, r) =>
- | _ => false;
+ `Render(r) |> evaluateNode(evaluationParams);

+ let ensureIsRendered = (params, t) =>
+ switch (t) {
+ | `RenderedDist(_) => Ok(t)
+ | _ =>
+ switch (render(params, t)) {
+ | Ok(`RenderedDist(r)) => Ok(`RenderedDist(r))
+ | Ok(_) => Error("Did not render as requested")
+ | Error(e) => Error(e)
+ }
+ };

+ let ensureIsRenderedAndGetShape = (params, t) =>
+ switch (ensureIsRendered(params, t)) {
+ | Ok(`RenderedDist(r)) => Ok(r)
+ | Ok(_) => Error("Did not render as requested")
+ | Error(e) => Error(e)
+ };

+ let getShape = (item: node) =>
+ switch (item) {
+ | `RenderedDist(r) => Some(r)
+ | _ => None
+ };
+ };

- let mapRenderable = (renderedFn, symFn, item: node) =>
- switch (item) {
- | `SymbolicDist(s) => Some(symFn(s))
- | `RenderedDist(r) => Some(renderedFn(r))
- | _ => None
- };
};

type simplificationResult = [

@@ -237,7 +237,8 @@ module MathAdtToDistDst = {
args: array(result(ExpressionTypes.ExpressionTree.node, string)),
) => {
let toOkAlgebraic = r => Ok(`AlgebraicCombination(r));
- let toOkTrunctate = r => Ok(`Truncate(r));
+ let toOkTruncate = r => Ok(`Truncate(r));
+ let toOkFloatFromDist = r => Ok(`FloatFromDist(r));
switch (name, args) {
| ("add", [|Ok(l), Ok(r)|]) => toOkAlgebraic((`Add, l, r))
| ("add", _) => Error("Addition needs two operands")

@@ -249,11 +250,11 @@ module MathAdtToDistDst = {
| ("divide", _) => Error("Division needs two operands")
| ("pow", _) => Error("Exponentiation is not yet supported.")
| ("leftTruncate", [|Ok(d), Ok(`SymbolicDist(`Float(lc)))|]) =>
- toOkTrunctate((Some(lc), None, d))
+ toOkTruncate((Some(lc), None, d))
| ("leftTruncate", _) =>
Error("leftTruncate needs two arguments: the expression and the cutoff")
| ("rightTruncate", [|Ok(d), Ok(`SymbolicDist(`Float(rc)))|]) =>
- toOkTrunctate((None, Some(rc), d))
+ toOkTruncate((None, Some(rc), d))
| ("rightTruncate", _) =>
Error(
"rightTruncate needs two arguments: the expression and the cutoff",

@@ -266,9 +267,19 @@ module MathAdtToDistDst = {
Ok(`SymbolicDist(`Float(rc))),
|],
) =>
- toOkTrunctate((Some(lc), Some(rc), d))
+ toOkTruncate((Some(lc), Some(rc), d))
| ("truncate", _) =>
Error("truncate needs three arguments: the expression and both cutoffs")
+ | ("pdf", [|Ok(d), Ok(`SymbolicDist(`Float(v)))|]) =>
+ toOkFloatFromDist((`Pdf(v), d))
+ | ("cdf", [|Ok(d), Ok(`SymbolicDist(`Float(v)))|]) =>
+ toOkFloatFromDist((`Cdf(v), d))
+ | ("inv", [|Ok(d), Ok(`SymbolicDist(`Float(v)))|]) =>
+ toOkFloatFromDist((`Inv(v), d))
+ | ("mean", [|Ok(d)|]) =>
+ toOkFloatFromDist((`Mean, d))
+ | ("sample", [|Ok(d)|]) =>
+ toOkFloatFromDist((`Sample, d))
| _ => Error("This type not currently supported")
};
};
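With these arms in place, the math parser should accept strings along the lines of "cdf(normal(0, 1), 0)" or "mean(mm(1, 2, 3))" and turn them into FloatFromDist nodes; the example strings are illustrative, not taken from this PR. A reduced, self-contained sketch of the name-to-operation dispatch, with the surrounding parser machinery stripped away:

/* The core of the new name -> distToFloatOperation mapping. */
let toDistToFloatOp = (name, v: float) =>
  switch (name) {
  | "pdf" => Some(`Pdf(v))
  | "cdf" => Some(`Cdf(v))
  | "inv" => Some(`Inv(v))
  | _ => None
  };

/* toDistToFloatOp("cdf", 0.0) == Some(`Cdf(0.0));
   the full parser then wraps this as `FloatFromDist((`Cdf(0.0), <dist node>)). */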

@@ -316,11 +327,12 @@ module MathAdtToDistDst = {
| "pow"
| "leftTruncate"
| "rightTruncate"
- | "truncate" => operationParser(name, parseArgs())
+ | "truncate"
- | "mean" as n
+ | "mean"
- | "inv" as n
+ | "inv"
- | "sample" as n
+ | "sample"
- | "pdf" as n
+ | "cdf"
+ | "pdf" => operationParser(name, parseArgs())
| n => Error(n ++ "(...) is not currently supported")
};
};

@@ -41,6 +41,7 @@ module DistToFloat = {

let format = (operation, value) =>
switch (operation) {
+ | `Cdf(f) => {j|cdf(x=$f,$value)|j}
| `Pdf(f) => {j|pdf(x=$f,$value)|j}
| `Inv(f) => {j|inv(x=$f,$value)|j}
| `Sample => "sample($value)"

@@ -63,11 +64,17 @@ module Scale = {
| `Log => {j|verticalLog($value, $scaleBy) |j}
};

- let toKnownIntegralSumFn =
+ let toIntegralSumCacheFn =
fun
| `Multiply => ((a, b) => Some(a *. b))
| `Exponentiate => ((_, _) => None)
| `Log => ((_, _) => None);

+ let toIntegralCacheFn =
+ fun
+ | `Multiply => ((a, b) => None) // TODO: this could probably just be multiplied out (using Continuous.scaleBy)
+ | `Exponentiate => ((_, _) => None)
+ | `Log => ((_, _) => None);
};

module T = {

src/distPlus/expressionTree/SamplingDistribution.re (new file, 80 lines)
@@ -0,0 +1,80 @@
+ open ExpressionTypes.ExpressionTree;
+
+ let isSamplingDistribution: node => bool =
+ fun
+ | `SymbolicDist(_) => true
+ | `RenderedDist(_) => true
+ | _ => false;
+
+ let renderIfIsNotSamplingDistribution = (params, t) =>
+ !isSamplingDistribution(t)
+ ? switch (Render.render(params, t)) {
+ | Ok(r) => Ok(r)
+ | Error(e) => Error(e)
+ }
+ : Ok(t);
+
+ let map = (~renderedDistFn, ~symbolicDistFn, node: node) =>
+ node
+ |> (
+ fun
+ | `RenderedDist(r) => Some(renderedDistFn(r))
+ | `SymbolicDist(s) => Some(symbolicDistFn(s))
+ | _ => None
+ );
+
+ let sampleN = n =>
+ map(
+ ~renderedDistFn=Shape.sampleNRendered(n),
+ ~symbolicDistFn=SymbolicDist.T.sampleN(n),
+ );
+
+ let getCombinationSamples = (n, algebraicOp, t1: node, t2: node) => {
+ switch (sampleN(n, t1), sampleN(n, t2)) {
+ | (Some(a), Some(b)) =>
+ Some(
+ Belt.Array.zip(a, b)
+ |> E.A.fmap(((a, b)) => Operation.Algebraic.toFn(algebraicOp, a, b)),
+ )
+ | _ => None
+ };
+ };
+
+ let combineShapesUsingSampling =
+ (evaluationParams: evaluationParams, algebraicOp, t1: node, t2: node) => {
+ let i1 = renderIfIsNotSamplingDistribution(evaluationParams, t1);
+ let i2 = renderIfIsNotSamplingDistribution(evaluationParams, t2);
+ E.R.merge(i1, i2)
+ |> E.R.bind(
+ _,
+ ((a, b)) => {
+ let samples =
+ getCombinationSamples(
+ evaluationParams.samplingInputs.sampleCount,
+ algebraicOp,
+ a,
+ b,
+ );
+
+ // todo: This bottom part should probably be somewhere else.
+ let shape =
+ samples
+ |> E.O.fmap(
+ Samples.T.fromSamples(
+ ~samplingInputs={
+ sampleCount:
+ Some(evaluationParams.samplingInputs.sampleCount),
+ outputXYPoints:
+ Some(evaluationParams.samplingInputs.outputXYPoints),
+ kernelWidth: evaluationParams.samplingInputs.kernelWidth,
+ },
+ ),
+ )
+ |> E.O.bind(_, (r: RenderTypes.ShapeRenderer.Sampling.outputs) =>
+ r.shape
+ )
+ |> E.O.toResult("No response");
+ shape |> E.R.fmap(r => `Normalize(`RenderedDist(r)));
+ },
+ );
+ };
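Taken together, this file implements algebraic combination by Monte-Carlo: sample both operands n times, apply the operation samplewise, then fit a shape to the resulting samples. A self-contained sketch of the sampling step in plain Reason (no repo modules; Box-Muller is used here only as a stand-in sampler, and the numbers in the final comment follow from the usual variance-addition rule):

let pi = 4.0 *. atan(1.0);

/* Box-Muller: one draw from Normal(mean, stdev). */
let normalSample = (mean, stdev) => {
  let u1 = 1.0 -. Random.float(1.0); /* keep u1 > 0 so log(u1) is finite */
  let u2 = Random.float(1.0);
  mean +. stdev *. sqrt((-2.0) *. log(u1)) *. cos(2.0 *. pi *. u2);
};

/* Sample each operand n times, zip the draws, and combine them pointwise. */
let combineBySampling = (n, sampleA, sampleB, op) =>
  Belt.Array.zip(
    Belt.Array.makeBy(n, _ => sampleA()),
    Belt.Array.makeBy(n, _ => sampleB()),
  )
  |> Belt.Array.map(_, ((a, b)) => op(a, b));

/* Normal(0, 1) + Normal(5, 2): the samples cluster around mean 5 with
   standard deviation sqrt(1 + 4), roughly 2.24. */
let sums =
  combineBySampling(
    1000,
    () => normalSample(0.0, 1.0),
    () => normalSample(5.0, 2.0),
    (+.),
  );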

@@ -120,7 +120,7 @@ module T = {
|> E.FloatFloatMap.fmap(r => r /. length)
|> E.FloatFloatMap.toArray
|> XYShape.T.fromZippedArray
- |> Discrete.make(_, None);
+ |> Discrete.make;

let pdf =
continuousPart |> E.A.length > 5

@@ -150,7 +150,7 @@ module T = {
~outputXYPoints=samplingInputs.outputXYPoints,
formatUnitWidth(usedUnitWidth),
)
- |> Continuous.make(`Linear, _, None)
+ |> Continuous.make
|> (r => Some((r, foo)));
}
: None;

@@ -3,6 +3,7 @@ open SymbolicTypes;
module Exponential = {
type t = exponential;
let pdf = (x, t: t) => Jstat.exponential##pdf(x, t.rate);
+ let cdf = (x, t: t) => Jstat.exponential##cdf(x, t.rate);
let inv = (p, t: t) => Jstat.exponential##inv(p, t.rate);
let sample = (t: t) => Jstat.exponential##sample(t.rate);
let mean = (t: t) => Ok(Jstat.exponential##mean(t.rate));

@@ -12,6 +13,7 @@ module Exponential = {
module Cauchy = {
type t = cauchy;
let pdf = (x, t: t) => Jstat.cauchy##pdf(x, t.local, t.scale);
+ let cdf = (x, t: t) => Jstat.cauchy##cdf(x, t.local, t.scale);
let inv = (p, t: t) => Jstat.cauchy##inv(p, t.local, t.scale);
let sample = (t: t) => Jstat.cauchy##sample(t.local, t.scale);
let mean = (_: t) => Error("Cauchy distributions have no mean value.");

@@ -21,6 +23,7 @@ module Cauchy = {
module Triangular = {
type t = triangular;
let pdf = (x, t: t) => Jstat.triangular##pdf(x, t.low, t.high, t.medium);
+ let cdf = (x, t: t) => Jstat.triangular##cdf(x, t.low, t.high, t.medium);
let inv = (p, t: t) => Jstat.triangular##inv(p, t.low, t.high, t.medium);
let sample = (t: t) => Jstat.triangular##sample(t.low, t.high, t.medium);
let mean = (t: t) => Ok(Jstat.triangular##mean(t.low, t.high, t.medium));

@@ -30,6 +33,7 @@ module Triangular = {
module Normal = {
type t = normal;
let pdf = (x, t: t) => Jstat.normal##pdf(x, t.mean, t.stdev);
+ let cdf = (x, t: t) => Jstat.normal##cdf(x, t.mean, t.stdev);

let from90PercentCI = (low, high) => {
let mean = E.A.Floats.mean([|low, high|]);

@@ -72,6 +76,7 @@ module Normal = {
module Beta = {
type t = beta;
let pdf = (x, t: t) => Jstat.beta##pdf(x, t.alpha, t.beta);
+ let cdf = (x, t: t) => Jstat.beta##cdf(x, t.alpha, t.beta);
let inv = (p, t: t) => Jstat.beta##inv(p, t.alpha, t.beta);
let sample = (t: t) => Jstat.beta##sample(t.alpha, t.beta);
let mean = (t: t) => Ok(Jstat.beta##mean(t.alpha, t.beta));

@@ -81,6 +86,7 @@ module Beta = {
module Lognormal = {
type t = lognormal;
let pdf = (x, t: t) => Jstat.lognormal##pdf(x, t.mu, t.sigma);
+ let cdf = (x, t: t) => Jstat.lognormal##cdf(x, t.mu, t.sigma);
let inv = (p, t: t) => Jstat.lognormal##inv(p, t.mu, t.sigma);
let mean = (t: t) => Ok(Jstat.lognormal##mean(t.mu, t.sigma));
let sample = (t: t) => Jstat.lognormal##sample(t.mu, t.sigma);

@@ -126,6 +132,7 @@ module Lognormal = {
module Uniform = {
type t = uniform;
let pdf = (x, t: t) => Jstat.uniform##pdf(x, t.low, t.high);
+ let cdf = (x, t: t) => Jstat.uniform##cdf(x, t.low, t.high);
let inv = (p, t: t) => Jstat.uniform##inv(p, t.low, t.high);
let sample = (t: t) => Jstat.uniform##sample(t.low, t.high);
let mean = (t: t) => Ok(Jstat.uniform##mean(t.low, t.high));

@@ -140,6 +147,7 @@ module Uniform = {
module Float = {
type t = float;
let pdf = (x, t: t) => x == t ? 1.0 : 0.0;
+ let cdf = (x, t: t) => x >= t ? 1.0 : 0.0;
let inv = (p, t: t) => p < t ? 0.0 : 1.0;
let mean = (t: t) => Ok(t);
let sample = (t: t) => t;

@@ -162,6 +170,18 @@ module T = {
| `Float(n) => Float.pdf(x, n)
};

+ let cdf = (x, dist) =>
+ switch (dist) {
+ | `Normal(n) => Normal.cdf(x, n)
+ | `Triangular(n) => Triangular.cdf(x, n)
+ | `Exponential(n) => Exponential.cdf(x, n)
+ | `Cauchy(n) => Cauchy.cdf(x, n)
+ | `Lognormal(n) => Lognormal.cdf(x, n)
+ | `Uniform(n) => Uniform.cdf(x, n)
+ | `Beta(n) => Beta.cdf(x, n)
+ | `Float(n) => Float.cdf(x, n)
+ };

let inv = (x, dist) =>
switch (dist) {
| `Normal(n) => Normal.inv(x, n)

@@ -244,6 +264,7 @@ module T = {

let operate = (distToFloatOp: ExpressionTypes.distToFloatOperation, s) =>
switch (distToFloatOp) {
+ | `Cdf(f) => Ok(cdf(f, s))
| `Pdf(f) => Ok(pdf(f, s))
| `Inv(f) => Ok(inv(f, s))
| `Sample => Ok(sample(s))
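With the Cdf case wired through operate, callers can ask for cumulative probabilities directly. A hedged usage sketch (the mean/stdev record fields are the ones Normal.cdf reads above; the exact way the Normal variant's payload is constructed may differ slightly in SymbolicTypes):

/* P(X <= 1) for X ~ Normal(0, 1) is the standard-normal CDF at 1, about 0.841. */
let p = SymbolicDist.T.operate(`Cdf(1.0), `Normal({mean: 0.0, stdev: 1.0}));
/* expected: Ok(0.8413...) */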

@@ -295,10 +316,14 @@ module T = {
let toShape = (sampleCount, d: symbolicDist): DistTypes.shape =>
switch (d) {
| `Float(v) =>
- Discrete(Discrete.make({xs: [|v|], ys: [|1.0|]}, Some(1.0)))
+ Discrete(
+ Discrete.make(~integralSumCache=Some(1.0), {xs: [|v|], ys: [|1.0|]}),
+ )
| _ =>
let xs = interpolateXs(~xSelection=`ByWeight, d, sampleCount);
let ys = xs |> E.A.fmap(x => pdf(x, d));
- Continuous(Continuous.make(`Linear, {xs, ys}, Some(1.0)));
+ Continuous(
+ Continuous.make(~integralSumCache=Some(1.0), {xs, ys}),
+ );
};
};