Merge pull request #67 from foretold-app/epic-expression-tree
Epic expression tree
This commit is contained in:
commit
e86d959c1b
|
@ -3,417 +3,413 @@ open Expect;
|
||||||
|
|
||||||
let shape: DistTypes.xyShape = {xs: [|1., 4., 8.|], ys: [|8., 9., 2.|]};
|
let shape: DistTypes.xyShape = {xs: [|1., 4., 8.|], ys: [|8., 9., 2.|]};
|
||||||
|
|
||||||
let makeTest = (~only=false, str, item1, item2) =>
|
// let makeTest = (~only=false, str, item1, item2) =>
|
||||||
only
|
// only
|
||||||
? Only.test(str, () =>
|
// ? Only.test(str, () =>
|
||||||
expect(item1) |> toEqual(item2)
|
// expect(item1) |> toEqual(item2)
|
||||||
)
|
// )
|
||||||
: test(str, () =>
|
// : test(str, () =>
|
||||||
expect(item1) |> toEqual(item2)
|
// expect(item1) |> toEqual(item2)
|
||||||
);
|
// );
|
||||||
|
|
||||||
let makeTestCloseEquality = (~only=false, str, item1, item2, ~digits) =>
|
// let makeTestCloseEquality = (~only=false, str, item1, item2, ~digits) =>
|
||||||
only
|
// only
|
||||||
? Only.test(str, () =>
|
// ? Only.test(str, () =>
|
||||||
expect(item1) |> toBeSoCloseTo(item2, ~digits)
|
// expect(item1) |> toBeSoCloseTo(item2, ~digits)
|
||||||
)
|
// )
|
||||||
: test(str, () =>
|
// : test(str, () =>
|
||||||
expect(item1) |> toBeSoCloseTo(item2, ~digits)
|
// expect(item1) |> toBeSoCloseTo(item2, ~digits)
|
||||||
);
|
// );
|
||||||
|
|
||||||
describe("Shape", () => {
|
// describe("Shape", () => {
|
||||||
describe("Continuous", () => {
|
// describe("Continuous", () => {
|
||||||
open Distributions.Continuous;
|
// open Continuous;
|
||||||
let continuous = make(`Linear, shape);
|
// let continuous = make(`Linear, shape, None);
|
||||||
makeTest("minX", T.minX(continuous), 1.0);
|
// makeTest("minX", T.minX(continuous), 1.0);
|
||||||
makeTest("maxX", T.maxX(continuous), 8.0);
|
// makeTest("maxX", T.maxX(continuous), 8.0);
|
||||||
makeTest(
|
// makeTest(
|
||||||
"mapY",
|
// "mapY",
|
||||||
T.mapY(r => r *. 2.0, continuous) |> getShape |> (r => r.ys),
|
// T.mapY(r => r *. 2.0, continuous) |> getShape |> (r => r.ys),
|
||||||
[|16., 18.0, 4.0|],
|
// [|16., 18.0, 4.0|],
|
||||||
);
|
// );
|
||||||
describe("xToY", () => {
|
// describe("xToY", () => {
|
||||||
describe("when Linear", () => {
|
// describe("when Linear", () => {
|
||||||
makeTest(
|
// makeTest(
|
||||||
"at 4.0",
|
// "at 4.0",
|
||||||
T.xToY(4., continuous),
|
// T.xToY(4., continuous),
|
||||||
{continuous: 9.0, discrete: 0.0},
|
// {continuous: 9.0, discrete: 0.0},
|
||||||
);
|
// );
|
||||||
// Note: This below is weird to me, I'm not sure if it's what we want really.
|
// // Note: This below is weird to me, I'm not sure if it's what we want really.
|
||||||
makeTest(
|
// makeTest(
|
||||||
"at 0.0",
|
// "at 0.0",
|
||||||
T.xToY(0., continuous),
|
// T.xToY(0., continuous),
|
||||||
{continuous: 8.0, discrete: 0.0},
|
// {continuous: 8.0, discrete: 0.0},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"at 5.0",
|
// "at 5.0",
|
||||||
T.xToY(5., continuous),
|
// T.xToY(5., continuous),
|
||||||
{continuous: 7.25, discrete: 0.0},
|
// {continuous: 7.25, discrete: 0.0},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"at 10.0",
|
// "at 10.0",
|
||||||
T.xToY(10., continuous),
|
// T.xToY(10., continuous),
|
||||||
{continuous: 2.0, discrete: 0.0},
|
// {continuous: 2.0, discrete: 0.0},
|
||||||
);
|
// );
|
||||||
});
|
// });
|
||||||
describe("when Stepwise", () => {
|
// describe("when Stepwise", () => {
|
||||||
let continuous = make(`Stepwise, shape);
|
// let continuous = make(`Stepwise, shape, None);
|
||||||
makeTest(
|
// makeTest(
|
||||||
"at 4.0",
|
// "at 4.0",
|
||||||
T.xToY(4., continuous),
|
// T.xToY(4., continuous),
|
||||||
{continuous: 9.0, discrete: 0.0},
|
// {continuous: 9.0, discrete: 0.0},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"at 0.0",
|
// "at 0.0",
|
||||||
T.xToY(0., continuous),
|
// T.xToY(0., continuous),
|
||||||
{continuous: 0.0, discrete: 0.0},
|
// {continuous: 0.0, discrete: 0.0},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"at 5.0",
|
// "at 5.0",
|
||||||
T.xToY(5., continuous),
|
// T.xToY(5., continuous),
|
||||||
{continuous: 9.0, discrete: 0.0},
|
// {continuous: 9.0, discrete: 0.0},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"at 10.0",
|
// "at 10.0",
|
||||||
T.xToY(10., continuous),
|
// T.xToY(10., continuous),
|
||||||
{continuous: 2.0, discrete: 0.0},
|
// {continuous: 2.0, discrete: 0.0},
|
||||||
);
|
// );
|
||||||
});
|
// });
|
||||||
});
|
// });
|
||||||
makeTest(
|
// makeTest(
|
||||||
"integral",
|
// "integral",
|
||||||
T.Integral.get(~cache=None, continuous) |> getShape,
|
// T.Integral.get(~cache=None, continuous) |> getShape,
|
||||||
{xs: [|1.0, 4.0, 8.0|], ys: [|0.0, 25.5, 47.5|]},
|
// {xs: [|1.0, 4.0, 8.0|], ys: [|0.0, 25.5, 47.5|]},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"toLinear",
|
// "toLinear",
|
||||||
{
|
// {
|
||||||
let continuous =
|
// let continuous =
|
||||||
make(`Stepwise, {xs: [|1., 4., 8.|], ys: [|0.1, 5., 1.0|]});
|
// make(`Stepwise, {xs: [|1., 4., 8.|], ys: [|0.1, 5., 1.0|]}, None);
|
||||||
continuous |> toLinear |> E.O.fmap(getShape);
|
// continuous |> toLinear |> E.O.fmap(getShape);
|
||||||
},
|
// },
|
||||||
Some({
|
// Some({
|
||||||
xs: [|1.00007, 1.00007, 4.0, 4.00007, 8.0, 8.00007|],
|
// xs: [|1.00007, 1.00007, 4.0, 4.00007, 8.0, 8.00007|],
|
||||||
ys: [|0.0, 0.1, 0.1, 5.0, 5.0, 1.0|],
|
// ys: [|0.0, 0.1, 0.1, 5.0, 5.0, 1.0|],
|
||||||
}),
|
// }),
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"toLinear",
|
// "toLinear",
|
||||||
{
|
// {
|
||||||
let continuous = make(`Stepwise, {xs: [|0.0|], ys: [|0.3|]});
|
// let continuous = make(`Stepwise, {xs: [|0.0|], ys: [|0.3|]}, None);
|
||||||
continuous |> toLinear |> E.O.fmap(getShape);
|
// continuous |> toLinear |> E.O.fmap(getShape);
|
||||||
},
|
// },
|
||||||
Some({xs: [|0.0|], ys: [|0.3|]}),
|
// Some({xs: [|0.0|], ys: [|0.3|]}),
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"integralXToY",
|
// "integralXToY",
|
||||||
T.Integral.xToY(~cache=None, 0.0, continuous),
|
// T.Integral.xToY(~cache=None, 0.0, continuous),
|
||||||
0.0,
|
// 0.0,
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"integralXToY",
|
// "integralXToY",
|
||||||
T.Integral.xToY(~cache=None, 2.0, continuous),
|
// T.Integral.xToY(~cache=None, 2.0, continuous),
|
||||||
8.5,
|
// 8.5,
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"integralXToY",
|
// "integralXToY",
|
||||||
T.Integral.xToY(~cache=None, 100.0, continuous),
|
// T.Integral.xToY(~cache=None, 100.0, continuous),
|
||||||
47.5,
|
// 47.5,
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"integralEndY",
|
// "integralEndY",
|
||||||
continuous
|
// continuous
|
||||||
|> T.scaleToIntegralSum(~intendedSum=1.0)
|
// |> T.normalize //scaleToIntegralSum(~intendedSum=1.0)
|
||||||
|> T.Integral.sum(~cache=None),
|
// |> T.Integral.sum(~cache=None),
|
||||||
1.0,
|
// 1.0,
|
||||||
);
|
// );
|
||||||
});
|
// });
|
||||||
|
|
||||||
describe("Discrete", () => {
|
// describe("Discrete", () => {
|
||||||
open Distributions.Discrete;
|
// open Discrete;
|
||||||
let shape: DistTypes.xyShape = {
|
// let shape: DistTypes.xyShape = {
|
||||||
xs: [|1., 4., 8.|],
|
// xs: [|1., 4., 8.|],
|
||||||
ys: [|0.3, 0.5, 0.2|],
|
// ys: [|0.3, 0.5, 0.2|],
|
||||||
};
|
// };
|
||||||
let discrete = shape;
|
// let discrete = make(shape, None);
|
||||||
makeTest("minX", T.minX(discrete), 1.0);
|
// makeTest("minX", T.minX(discrete), 1.0);
|
||||||
makeTest("maxX", T.maxX(discrete), 8.0);
|
// makeTest("maxX", T.maxX(discrete), 8.0);
|
||||||
makeTest(
|
// makeTest(
|
||||||
"mapY",
|
// "mapY",
|
||||||
T.mapY(r => r *. 2.0, discrete) |> (r => r.ys),
|
// T.mapY(r => r *. 2.0, discrete) |> (r => getShape(r).ys),
|
||||||
[|0.6, 1.0, 0.4|],
|
// [|0.6, 1.0, 0.4|],
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"xToY at 4.0",
|
// "xToY at 4.0",
|
||||||
T.xToY(4., discrete),
|
// T.xToY(4., discrete),
|
||||||
{discrete: 0.5, continuous: 0.0},
|
// {discrete: 0.5, continuous: 0.0},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"xToY at 0.0",
|
// "xToY at 0.0",
|
||||||
T.xToY(0., discrete),
|
// T.xToY(0., discrete),
|
||||||
{discrete: 0.0, continuous: 0.0},
|
// {discrete: 0.0, continuous: 0.0},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"xToY at 5.0",
|
// "xToY at 5.0",
|
||||||
T.xToY(5., discrete),
|
// T.xToY(5., discrete),
|
||||||
{discrete: 0.0, continuous: 0.0},
|
// {discrete: 0.0, continuous: 0.0},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"scaleBy",
|
// "scaleBy",
|
||||||
T.scaleBy(~scale=4.0, discrete),
|
// scaleBy(~scale=4.0, discrete),
|
||||||
{xs: [|1., 4., 8.|], ys: [|1.2, 2.0, 0.8|]},
|
// make({xs: [|1., 4., 8.|], ys: [|1.2, 2.0, 0.8|]}, None),
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"scaleToIntegralSum",
|
// "normalize, then scale by 4.0",
|
||||||
T.scaleToIntegralSum(~intendedSum=4.0, discrete),
|
// discrete
|
||||||
{xs: [|1., 4., 8.|], ys: [|1.2, 2.0, 0.8|]},
|
// |> T.normalize
|
||||||
);
|
// |> scaleBy(~scale=4.0),
|
||||||
makeTest(
|
// make({xs: [|1., 4., 8.|], ys: [|1.2, 2.0, 0.8|]}, None),
|
||||||
"scaleToIntegralSum: back and forth",
|
// );
|
||||||
discrete
|
// makeTest(
|
||||||
|> T.scaleToIntegralSum(~intendedSum=4.0)
|
// "scaleToIntegralSum: back and forth",
|
||||||
|> T.scaleToIntegralSum(~intendedSum=1.0),
|
// discrete
|
||||||
discrete,
|
// |> T.normalize
|
||||||
);
|
// |> scaleBy(~scale=4.0)
|
||||||
makeTest(
|
// |> T.normalize,
|
||||||
"integral",
|
// discrete,
|
||||||
T.Integral.get(~cache=None, discrete),
|
// );
|
||||||
Distributions.Continuous.make(
|
// makeTest(
|
||||||
`Stepwise,
|
// "integral",
|
||||||
{xs: [|1., 4., 8.|], ys: [|0.3, 0.8, 1.0|]},
|
// T.Integral.get(~cache=None, discrete),
|
||||||
),
|
// Continuous.make(
|
||||||
);
|
// `Stepwise,
|
||||||
makeTest(
|
// {xs: [|1., 4., 8.|], ys: [|0.3, 0.8, 1.0|]},
|
||||||
"integral with 1 element",
|
// None
|
||||||
T.Integral.get(~cache=None, {xs: [|0.0|], ys: [|1.0|]}),
|
// ),
|
||||||
Distributions.Continuous.make(`Stepwise, {xs: [|0.0|], ys: [|1.0|]}),
|
// );
|
||||||
);
|
// makeTest(
|
||||||
makeTest(
|
// "integral with 1 element",
|
||||||
"integralXToY",
|
// T.Integral.get(~cache=None, Discrete.make({xs: [|0.0|], ys: [|1.0|]}, None)),
|
||||||
T.Integral.xToY(~cache=None, 6.0, discrete),
|
// Continuous.make(`Stepwise, {xs: [|0.0|], ys: [|1.0|]}, None),
|
||||||
0.9,
|
// );
|
||||||
);
|
// makeTest(
|
||||||
makeTest("integralEndY", T.Integral.sum(~cache=None, discrete), 1.0);
|
// "integralXToY",
|
||||||
makeTest("mean", T.mean(discrete), 3.9);
|
// T.Integral.xToY(~cache=None, 6.0, discrete),
|
||||||
makeTestCloseEquality(
|
// 0.9,
|
||||||
"variance",
|
// );
|
||||||
T.variance(discrete),
|
// makeTest("integralEndY", T.Integral.sum(~cache=None, discrete), 1.0);
|
||||||
5.89,
|
// makeTest("mean", T.mean(discrete), 3.9);
|
||||||
~digits=7,
|
// makeTestCloseEquality(
|
||||||
);
|
// "variance",
|
||||||
});
|
// T.variance(discrete),
|
||||||
|
// 5.89,
|
||||||
|
// ~digits=7,
|
||||||
|
// );
|
||||||
|
// });
|
||||||
|
|
||||||
describe("Mixed", () => {
|
// describe("Mixed", () => {
|
||||||
open Distributions.Mixed;
|
// open Distributions.Mixed;
|
||||||
let discrete: DistTypes.xyShape = {
|
// let discreteShape: DistTypes.xyShape = {
|
||||||
xs: [|1., 4., 8.|],
|
// xs: [|1., 4., 8.|],
|
||||||
ys: [|0.3, 0.5, 0.2|],
|
// ys: [|0.3, 0.5, 0.2|],
|
||||||
};
|
// };
|
||||||
let continuous =
|
// let discrete = Discrete.make(discreteShape, None);
|
||||||
Distributions.Continuous.make(
|
// let continuous =
|
||||||
`Linear,
|
// Continuous.make(
|
||||||
{xs: [|3., 7., 14.|], ys: [|0.058, 0.082, 0.124|]},
|
// `Linear,
|
||||||
)
|
// {xs: [|3., 7., 14.|], ys: [|0.058, 0.082, 0.124|]},
|
||||||
|> Distributions.Continuous.T.scaleToIntegralSum(~intendedSum=1.0);
|
// None
|
||||||
let mixed =
|
// )
|
||||||
MixedShapeBuilder.build(
|
// |> Continuous.T.normalize; //scaleToIntegralSum(~intendedSum=1.0);
|
||||||
~continuous,
|
// let mixed = Mixed.make(
|
||||||
~discrete,
|
// ~continuous,
|
||||||
~assumptions={
|
// ~discrete,
|
||||||
continuous: ADDS_TO_CORRECT_PROBABILITY,
|
// );
|
||||||
discrete: ADDS_TO_CORRECT_PROBABILITY,
|
// makeTest("minX", T.minX(mixed), 1.0);
|
||||||
discreteProbabilityMass: Some(0.5),
|
// makeTest("maxX", T.maxX(mixed), 14.0);
|
||||||
},
|
// makeTest(
|
||||||
)
|
// "mapY",
|
||||||
|> E.O.toExn("");
|
// T.mapY(r => r *. 2.0, mixed),
|
||||||
makeTest("minX", T.minX(mixed), 1.0);
|
// Mixed.make(
|
||||||
makeTest("maxX", T.maxX(mixed), 14.0);
|
// ~continuous=
|
||||||
makeTest(
|
// Continuous.make(
|
||||||
"mapY",
|
// `Linear,
|
||||||
T.mapY(r => r *. 2.0, mixed),
|
// {
|
||||||
Distributions.Mixed.make(
|
// xs: [|3., 7., 14.|],
|
||||||
~continuous=
|
// ys: [|
|
||||||
Distributions.Continuous.make(
|
// 0.11588411588411589,
|
||||||
`Linear,
|
// 0.16383616383616384,
|
||||||
{
|
// 0.24775224775224775,
|
||||||
xs: [|3., 7., 14.|],
|
// |],
|
||||||
ys: [|
|
// },
|
||||||
0.11588411588411589,
|
// None
|
||||||
0.16383616383616384,
|
// ),
|
||||||
0.24775224775224775,
|
// ~discrete=Discrete.make({xs: [|1., 4., 8.|], ys: [|0.6, 1.0, 0.4|]}, None)
|
||||||
|],
|
// ),
|
||||||
},
|
// );
|
||||||
),
|
// makeTest(
|
||||||
~discrete={xs: [|1., 4., 8.|], ys: [|0.6, 1.0, 0.4|]},
|
// "xToY at 4.0",
|
||||||
~discreteProbabilityMassFraction=0.5,
|
// T.xToY(4., mixed),
|
||||||
),
|
// {discrete: 0.25, continuous: 0.03196803196803197},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"xToY at 4.0",
|
// "xToY at 0.0",
|
||||||
T.xToY(4., mixed),
|
// T.xToY(0., mixed),
|
||||||
{discrete: 0.25, continuous: 0.03196803196803197},
|
// {discrete: 0.0, continuous: 0.028971028971028972},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest(
|
||||||
"xToY at 0.0",
|
// "xToY at 5.0",
|
||||||
T.xToY(0., mixed),
|
// T.xToY(7., mixed),
|
||||||
{discrete: 0.0, continuous: 0.028971028971028972},
|
// {discrete: 0.0, continuous: 0.04095904095904096},
|
||||||
);
|
// );
|
||||||
makeTest(
|
// makeTest("integralEndY", T.Integral.sum(~cache=None, mixed), 1.0);
|
||||||
"xToY at 5.0",
|
// makeTest(
|
||||||
T.xToY(7., mixed),
|
// "scaleBy",
|
||||||
{discrete: 0.0, continuous: 0.04095904095904096},
|
// Mixed.scaleBy(~scale=2.0, mixed),
|
||||||
);
|
// Mixed.make(
|
||||||
makeTest("integralEndY", T.Integral.sum(~cache=None, mixed), 1.0);
|
// ~continuous=
|
||||||
makeTest(
|
// Continuous.make(
|
||||||
"scaleBy",
|
// `Linear,
|
||||||
T.scaleBy(~scale=2.0, mixed),
|
// {
|
||||||
Distributions.Mixed.make(
|
// xs: [|3., 7., 14.|],
|
||||||
~continuous=
|
// ys: [|
|
||||||
Distributions.Continuous.make(
|
// 0.11588411588411589,
|
||||||
`Linear,
|
// 0.16383616383616384,
|
||||||
{
|
// 0.24775224775224775,
|
||||||
xs: [|3., 7., 14.|],
|
// |],
|
||||||
ys: [|
|
// },
|
||||||
0.11588411588411589,
|
// None
|
||||||
0.16383616383616384,
|
// ),
|
||||||
0.24775224775224775,
|
// ~discrete=Discrete.make({xs: [|1., 4., 8.|], ys: [|0.6, 1.0, 0.4|]}, None),
|
||||||
|],
|
// ),
|
||||||
},
|
// );
|
||||||
),
|
// makeTest(
|
||||||
~discrete={xs: [|1., 4., 8.|], ys: [|0.6, 1.0, 0.4|]},
|
// "integral",
|
||||||
~discreteProbabilityMassFraction=0.5,
|
// T.Integral.get(~cache=None, mixed),
|
||||||
),
|
// Continuous.make(
|
||||||
);
|
// `Linear,
|
||||||
makeTest(
|
// {
|
||||||
"integral",
|
// xs: [|1.00007, 1.00007, 3., 4., 4.00007, 7., 8., 8.00007, 14.|],
|
||||||
T.Integral.get(~cache=None, mixed),
|
// ys: [|
|
||||||
Distributions.Continuous.make(
|
// 0.0,
|
||||||
`Linear,
|
// 0.0,
|
||||||
{
|
// 0.15,
|
||||||
xs: [|1.00007, 1.00007, 3., 4., 4.00007, 7., 8., 8.00007, 14.|],
|
// 0.18496503496503497,
|
||||||
ys: [|
|
// 0.4349674825174825,
|
||||||
0.0,
|
// 0.5398601398601399,
|
||||||
0.0,
|
// 0.5913086913086913,
|
||||||
0.15,
|
// 0.6913122927072927,
|
||||||
0.18496503496503497,
|
// 1.0,
|
||||||
0.4349674825174825,
|
// |],
|
||||||
0.5398601398601399,
|
// },
|
||||||
0.5913086913086913,
|
// None,
|
||||||
0.6913122927072927,
|
// ),
|
||||||
1.0,
|
// );
|
||||||
|],
|
// });
|
||||||
},
|
|
||||||
),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("Distplus", () => {
|
// describe("Distplus", () => {
|
||||||
open Distributions.DistPlus;
|
// open DistPlus;
|
||||||
let discrete: DistTypes.xyShape = {
|
// let discreteShape: DistTypes.xyShape = {
|
||||||
xs: [|1., 4., 8.|],
|
// xs: [|1., 4., 8.|],
|
||||||
ys: [|0.3, 0.5, 0.2|],
|
// ys: [|0.3, 0.5, 0.2|],
|
||||||
};
|
// };
|
||||||
let continuous =
|
// let discrete = Discrete.make(discreteShape, None);
|
||||||
Distributions.Continuous.make(
|
// let continuous =
|
||||||
`Linear,
|
// Continuous.make(
|
||||||
{xs: [|3., 7., 14.|], ys: [|0.058, 0.082, 0.124|]},
|
// `Linear,
|
||||||
)
|
// {xs: [|3., 7., 14.|], ys: [|0.058, 0.082, 0.124|]},
|
||||||
|> Distributions.Continuous.T.scaleToIntegralSum(~intendedSum=1.0);
|
// None
|
||||||
let mixed =
|
// )
|
||||||
MixedShapeBuilder.build(
|
// |> Continuous.T.normalize; //scaleToIntegralSum(~intendedSum=1.0);
|
||||||
~continuous,
|
// let mixed =
|
||||||
~discrete,
|
// Mixed.make(
|
||||||
~assumptions={
|
// ~continuous,
|
||||||
continuous: ADDS_TO_CORRECT_PROBABILITY,
|
// ~discrete,
|
||||||
discrete: ADDS_TO_CORRECT_PROBABILITY,
|
// );
|
||||||
discreteProbabilityMass: Some(0.5),
|
// let distPlus =
|
||||||
},
|
// DistPlus.make(
|
||||||
)
|
// ~shape=Mixed(mixed),
|
||||||
|> E.O.toExn("");
|
// ~guesstimatorString=None,
|
||||||
let distPlus =
|
// (),
|
||||||
Distributions.DistPlus.make(
|
// );
|
||||||
~shape=Mixed(mixed),
|
// makeTest("minX", T.minX(distPlus), 1.0);
|
||||||
~guesstimatorString=None,
|
// makeTest("maxX", T.maxX(distPlus), 14.0);
|
||||||
(),
|
// makeTest(
|
||||||
);
|
// "xToY at 4.0",
|
||||||
makeTest("minX", T.minX(distPlus), 1.0);
|
// T.xToY(4., distPlus),
|
||||||
makeTest("maxX", T.maxX(distPlus), 14.0);
|
// {discrete: 0.25, continuous: 0.03196803196803197},
|
||||||
makeTest(
|
// );
|
||||||
"xToY at 4.0",
|
// makeTest(
|
||||||
T.xToY(4., distPlus),
|
// "xToY at 0.0",
|
||||||
{discrete: 0.25, continuous: 0.03196803196803197},
|
// T.xToY(0., distPlus),
|
||||||
);
|
// {discrete: 0.0, continuous: 0.028971028971028972},
|
||||||
makeTest(
|
// );
|
||||||
"xToY at 0.0",
|
// makeTest(
|
||||||
T.xToY(0., distPlus),
|
// "xToY at 5.0",
|
||||||
{discrete: 0.0, continuous: 0.028971028971028972},
|
// T.xToY(7., distPlus),
|
||||||
);
|
// {discrete: 0.0, continuous: 0.04095904095904096},
|
||||||
makeTest(
|
// );
|
||||||
"xToY at 5.0",
|
// makeTest("integralEndY", T.Integral.sum(~cache=None, distPlus), 1.0);
|
||||||
T.xToY(7., distPlus),
|
// makeTest(
|
||||||
{discrete: 0.0, continuous: 0.04095904095904096},
|
// "integral",
|
||||||
);
|
// T.Integral.get(~cache=None, distPlus) |> T.toContinuous,
|
||||||
makeTest("integralEndY", T.Integral.sum(~cache=None, distPlus), 1.0);
|
// Some(
|
||||||
makeTest(
|
// Continuous.make(
|
||||||
"integral",
|
// `Linear,
|
||||||
T.Integral.get(~cache=None, distPlus) |> T.toContinuous,
|
// {
|
||||||
Some(
|
// xs: [|1.00007, 1.00007, 3., 4., 4.00007, 7., 8., 8.00007, 14.|],
|
||||||
Distributions.Continuous.make(
|
// ys: [|
|
||||||
`Linear,
|
// 0.0,
|
||||||
{
|
// 0.0,
|
||||||
xs: [|1.00007, 1.00007, 3., 4., 4.00007, 7., 8., 8.00007, 14.|],
|
// 0.15,
|
||||||
ys: [|
|
// 0.18496503496503497,
|
||||||
0.0,
|
// 0.4349674825174825,
|
||||||
0.0,
|
// 0.5398601398601399,
|
||||||
0.15,
|
// 0.5913086913086913,
|
||||||
0.18496503496503497,
|
// 0.6913122927072927,
|
||||||
0.4349674825174825,
|
// 1.0,
|
||||||
0.5398601398601399,
|
// |],
|
||||||
0.5913086913086913,
|
// },
|
||||||
0.6913122927072927,
|
// None,
|
||||||
1.0,
|
// ),
|
||||||
|],
|
// ),
|
||||||
},
|
// );
|
||||||
),
|
// });
|
||||||
),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("Shape", () => {
|
// describe("Shape", () => {
|
||||||
let mean = 10.0;
|
// let mean = 10.0;
|
||||||
let stdev = 4.0;
|
// let stdev = 4.0;
|
||||||
let variance = stdev ** 2.0;
|
// let variance = stdev ** 2.0;
|
||||||
let numSamples = 10000;
|
// let numSamples = 10000;
|
||||||
open Distributions.Shape;
|
// open Distributions.Shape;
|
||||||
let normal: SymbolicDist.dist = `Normal({mean, stdev});
|
// let normal: SymbolicTypes.symbolicDist = `Normal({mean, stdev});
|
||||||
let normalShape = SymbolicDist.GenericSimple.toShape(normal, numSamples);
|
// let normalShape = ExpressionTree.toShape(numSamples, `SymbolicDist(normal));
|
||||||
let lognormal = SymbolicDist.Lognormal.fromMeanAndStdev(mean, stdev);
|
// let lognormal = SymbolicDist.Lognormal.fromMeanAndStdev(mean, stdev);
|
||||||
let lognormalShape =
|
// let lognormalShape = ExpressionTree.toShape(numSamples, `SymbolicDist(lognormal));
|
||||||
SymbolicDist.GenericSimple.toShape(lognormal, numSamples);
|
|
||||||
|
|
||||||
makeTestCloseEquality(
|
// makeTestCloseEquality(
|
||||||
"Mean of a normal",
|
// "Mean of a normal",
|
||||||
T.mean(normalShape),
|
// T.mean(normalShape),
|
||||||
mean,
|
// mean,
|
||||||
~digits=2,
|
// ~digits=2,
|
||||||
);
|
// );
|
||||||
makeTestCloseEquality(
|
// makeTestCloseEquality(
|
||||||
"Variance of a normal",
|
// "Variance of a normal",
|
||||||
T.variance(normalShape),
|
// T.variance(normalShape),
|
||||||
variance,
|
// variance,
|
||||||
~digits=1,
|
// ~digits=1,
|
||||||
);
|
// );
|
||||||
makeTestCloseEquality(
|
// makeTestCloseEquality(
|
||||||
"Mean of a lognormal",
|
// "Mean of a lognormal",
|
||||||
T.mean(lognormalShape),
|
// T.mean(lognormalShape),
|
||||||
mean,
|
// mean,
|
||||||
~digits=2,
|
// ~digits=2,
|
||||||
);
|
// );
|
||||||
makeTestCloseEquality(
|
// makeTestCloseEquality(
|
||||||
"Variance of a lognormal",
|
// "Variance of a lognormal",
|
||||||
T.variance(lognormalShape),
|
// T.variance(lognormalShape),
|
||||||
variance,
|
// variance,
|
||||||
~digits=0,
|
// ~digits=0,
|
||||||
);
|
// );
|
||||||
});
|
// });
|
||||||
});
|
// });
|
||||||
|
|
|
@ -27,7 +27,6 @@
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@foretold/components": "0.0.6",
|
"@foretold/components": "0.0.6",
|
||||||
"@foretold/guesstimator": "1.0.11",
|
|
||||||
"@glennsl/bs-json": "^5.0.2",
|
"@glennsl/bs-json": "^5.0.2",
|
||||||
"antd": "3.17.0",
|
"antd": "3.17.0",
|
||||||
"autoprefixer": "9.7.4",
|
"autoprefixer": "9.7.4",
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
let entries = EntryTypes.[Continuous.entry];
|
let entries = EntryTypes.[Continuous2.entry,ExpressionTreeExamples.entry];
|
|
@ -19,8 +19,9 @@ let timeDist ={
|
||||||
let setup = dist =>
|
let setup = dist =>
|
||||||
RenderTypes.DistPlusRenderer.make(~distPlusIngredients=dist,())
|
RenderTypes.DistPlusRenderer.make(~distPlusIngredients=dist,())
|
||||||
|> DistPlusRenderer.run
|
|> DistPlusRenderer.run
|
||||||
|> RenderTypes.DistPlusRenderer.Outputs.distplus
|
|> E.R.fmap(distPlus => <DistPlusPlot distPlus />)
|
||||||
|> R.O.fmapOrNull(distPlus => <DistPlusPlot distPlus />);
|
|> E.R.toOption
|
||||||
|
|> E.O.toExn("")
|
||||||
|
|
||||||
let simpleExample = (name, guesstimatorString) =>
|
let simpleExample = (name, guesstimatorString) =>
|
||||||
<>
|
<>
|
||||||
|
@ -84,4 +85,4 @@ let distributions = () =>
|
||||||
</div>
|
</div>
|
||||||
</div>;
|
</div>;
|
||||||
|
|
||||||
let entry = EntryTypes.(entry(~title="Pdf", ~render=distributions));
|
let entry = EntryTypes.(entry(~title="Mixed Distributions", ~render=distributions));
|
72
showcase/entries/ExpressionTreeExamples.re
Normal file
72
showcase/entries/ExpressionTreeExamples.re
Normal file
|
@ -0,0 +1,72 @@
|
||||||
|
let setup = dist =>
|
||||||
|
RenderTypes.DistPlusRenderer.make(~distPlusIngredients=dist, ())
|
||||||
|
|> DistPlusRenderer.run
|
||||||
|
|> E.R.fmap(distPlus => <DistPlusPlot distPlus />)
|
||||||
|
|> E.R.toOption
|
||||||
|
|> E.O.toExn("")
|
||||||
|
|
||||||
|
let simpleExample = (guesstimatorString, ~problem="", ()) =>
|
||||||
|
<>
|
||||||
|
<p> {guesstimatorString |> ReasonReact.string} </p>
|
||||||
|
<p> {problem |> (e => "problem: " ++ e) |> ReasonReact.string} </p>
|
||||||
|
{setup(
|
||||||
|
RenderTypes.DistPlusRenderer.Ingredients.make(~guesstimatorString, ()),
|
||||||
|
)}
|
||||||
|
</>;
|
||||||
|
|
||||||
|
let distributions = () =>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<h2 className="text-gray-800 text-xl font-bold">
|
||||||
|
{"Initial Section" |> ReasonReact.string}
|
||||||
|
</h2>
|
||||||
|
{simpleExample(
|
||||||
|
"normal(-1, 1) + normal(5, 2)",
|
||||||
|
~problem="Tails look too flat",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
{simpleExample(
|
||||||
|
"mm(normal(4,2), normal(10,1))",
|
||||||
|
~problem="Tails look too flat",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
{simpleExample(
|
||||||
|
"normal(-1, 1) * normal(5, 2)",
|
||||||
|
~problem="This looks really weird",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
{simpleExample(
|
||||||
|
"normal(1,2) * normal(2,2) * normal(3,1)",
|
||||||
|
~problem="Seems like important parts are cut off",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
{simpleExample(
|
||||||
|
"mm(uniform(0, 1) , normal(3,2))",
|
||||||
|
~problem="Uniform distribution seems to break multimodal",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
{simpleExample(
|
||||||
|
"truncate(mm(1 to 10, 10 to 30), 10, 20)",
|
||||||
|
~problem="Truncate seems to have no effect",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
{simpleExample(
|
||||||
|
"normal(5,2)*(10^3)",
|
||||||
|
~problem="Multiplied items should be evaluated.",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
{simpleExample(
|
||||||
|
"normal(5,10*3)",
|
||||||
|
~problem="At least simple operations in the distributions should be evaluated.",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
{simpleExample(
|
||||||
|
"normal(5,10)^3",
|
||||||
|
~problem="Exponentiation not yet supported",
|
||||||
|
(),
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>;
|
||||||
|
|
||||||
|
let entry =
|
||||||
|
EntryTypes.(entry(~title="ExpressionTree", ~render=distributions));
|
14
src/App.re
14
src/App.re
|
@ -1,8 +1,6 @@
|
||||||
type route =
|
type route =
|
||||||
| Model(string)
|
| Model(string)
|
||||||
| DistBuilder
|
| DistBuilder
|
||||||
| DistBuilder2
|
|
||||||
| DistBuilder3
|
|
||||||
| Drawer
|
| Drawer
|
||||||
| Home
|
| Home
|
||||||
| NotFound;
|
| NotFound;
|
||||||
|
@ -11,8 +9,6 @@ let routeToPath = route =>
|
||||||
switch (route) {
|
switch (route) {
|
||||||
| Model(modelId) => "/m/" ++ modelId
|
| Model(modelId) => "/m/" ++ modelId
|
||||||
| DistBuilder => "/dist-builder"
|
| DistBuilder => "/dist-builder"
|
||||||
| DistBuilder2 => "/dist-builder2"
|
|
||||||
| DistBuilder3 => "/dist-builder3"
|
|
||||||
| Drawer => "/drawer"
|
| Drawer => "/drawer"
|
||||||
| Home => "/"
|
| Home => "/"
|
||||||
| _ => "/"
|
| _ => "/"
|
||||||
|
@ -75,12 +71,6 @@ module Menu = {
|
||||||
<Item href={routeToPath(DistBuilder)} key="dist-builder">
|
<Item href={routeToPath(DistBuilder)} key="dist-builder">
|
||||||
{"Dist Builder" |> R.ste}
|
{"Dist Builder" |> R.ste}
|
||||||
</Item>
|
</Item>
|
||||||
<Item href={routeToPath(DistBuilder2)} key="dist-builder-2">
|
|
||||||
{"Dist Builder 2" |> R.ste}
|
|
||||||
</Item>
|
|
||||||
<Item href={routeToPath(DistBuilder3)} key="dist-builder-3">
|
|
||||||
{"Dist Builder 3" |> R.ste}
|
|
||||||
</Item>
|
|
||||||
<Item href={routeToPath(Drawer)} key="drawer">
|
<Item href={routeToPath(Drawer)} key="drawer">
|
||||||
{"Drawer" |> R.ste}
|
{"Drawer" |> R.ste}
|
||||||
</Item>
|
</Item>
|
||||||
|
@ -97,8 +87,6 @@ let make = () => {
|
||||||
switch (url.path) {
|
switch (url.path) {
|
||||||
| ["m", modelId] => Model(modelId)
|
| ["m", modelId] => Model(modelId)
|
||||||
| ["dist-builder"] => DistBuilder
|
| ["dist-builder"] => DistBuilder
|
||||||
| ["dist-builder2"] => DistBuilder2
|
|
||||||
| ["dist-builder3"] => DistBuilder3
|
|
||||||
| ["drawer"] => Drawer
|
| ["drawer"] => Drawer
|
||||||
| [] => Home
|
| [] => Home
|
||||||
| _ => NotFound
|
| _ => NotFound
|
||||||
|
@ -113,8 +101,6 @@ let make = () => {
|
||||||
| None => <div> {"Page is not found" |> R.ste} </div>
|
| None => <div> {"Page is not found" |> R.ste} </div>
|
||||||
}
|
}
|
||||||
| DistBuilder => <DistBuilder />
|
| DistBuilder => <DistBuilder />
|
||||||
| DistBuilder2 => <DistBuilder2 />
|
|
||||||
| DistBuilder3 => <DistBuilder3 />
|
|
||||||
| Drawer => <Drawer />
|
| Drawer => <Drawer />
|
||||||
| Home => <Home />
|
| Home => <Home />
|
||||||
| _ => <div> {"Page is not found" |> R.ste} </div>
|
| _ => <div> {"Page is not found" |> R.ste} </div>
|
||||||
|
|
|
@ -17,7 +17,7 @@ module FormConfig = [%lenses
|
||||||
//
|
//
|
||||||
sampleCount: string,
|
sampleCount: string,
|
||||||
outputXYPoints: string,
|
outputXYPoints: string,
|
||||||
truncateTo: string,
|
downsampleTo: string,
|
||||||
kernelWidth: string,
|
kernelWidth: string,
|
||||||
}
|
}
|
||||||
];
|
];
|
||||||
|
@ -25,7 +25,7 @@ module FormConfig = [%lenses
|
||||||
type options = {
|
type options = {
|
||||||
sampleCount: int,
|
sampleCount: int,
|
||||||
outputXYPoints: int,
|
outputXYPoints: int,
|
||||||
truncateTo: option(int),
|
downsampleTo: option(int),
|
||||||
kernelWidth: option(float),
|
kernelWidth: option(float),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -115,7 +115,7 @@ type inputs = {
|
||||||
samplingInputs: RenderTypes.ShapeRenderer.Sampling.inputs,
|
samplingInputs: RenderTypes.ShapeRenderer.Sampling.inputs,
|
||||||
guesstimatorString: string,
|
guesstimatorString: string,
|
||||||
length: int,
|
length: int,
|
||||||
shouldTruncateSampledDistribution: int,
|
shouldDownsampleSampledDistribution: int,
|
||||||
};
|
};
|
||||||
|
|
||||||
module DemoDist = {
|
module DemoDist = {
|
||||||
|
@ -141,17 +141,16 @@ module DemoDist = {
|
||||||
kernelWidth: options.kernelWidth,
|
kernelWidth: options.kernelWidth,
|
||||||
},
|
},
|
||||||
~distPlusIngredients,
|
~distPlusIngredients,
|
||||||
~shouldTruncate=options.truncateTo |> E.O.isSome,
|
~shouldDownsample=options.downsampleTo |> E.O.isSome,
|
||||||
~recommendedLength=options.truncateTo |> E.O.default(10000),
|
~recommendedLength=options.downsampleTo |> E.O.default(1000),
|
||||||
(),
|
(),
|
||||||
);
|
);
|
||||||
let response = DistPlusRenderer.run(inputs);
|
let response = DistPlusRenderer.run(inputs);
|
||||||
Js.log(response);
|
switch (response) {
|
||||||
switch (RenderTypes.DistPlusRenderer.Outputs.distplus(response)) {
|
| Ok(distPlus) =>
|
||||||
| Some(distPlus) => <DistPlusPlot distPlus />
|
let normalizedDistPlus = DistPlus.T.normalize(distPlus);
|
||||||
| _ =>
|
<DistPlusPlot distPlus=normalizedDistPlus />;
|
||||||
"Correct Guesstimator string input to show a distribution."
|
| Error(r) => r |> R.ste
|
||||||
|> R.ste
|
|
||||||
};
|
};
|
||||||
| _ =>
|
| _ =>
|
||||||
"Nothing to show. Try to change the distribution description."
|
"Nothing to show. Try to change the distribution description."
|
||||||
|
@ -171,7 +170,8 @@ let make = () => {
|
||||||
~schema,
|
~schema,
|
||||||
~onSubmit=({state}) => {None},
|
~onSubmit=({state}) => {None},
|
||||||
~initialState={
|
~initialState={
|
||||||
guesstimatorString: "mm(normal(-10, 2), uniform(18, 25), lognormal({mean: 10, stdev: 8}), triangular(31,40,50))",
|
//guesstimatorString: "mm(normal(-10, 2), uniform(18, 25), lognormal({mean: 10, stdev: 8}), triangular(31,40,50))",
|
||||||
|
guesstimatorString: "mm(1, 2, 3, normal(2, 1))", // , triangular(30, 40, 60)
|
||||||
domainType: "Complete",
|
domainType: "Complete",
|
||||||
xPoint: "50.0",
|
xPoint: "50.0",
|
||||||
xPoint2: "60.0",
|
xPoint2: "60.0",
|
||||||
|
@ -181,9 +181,9 @@ let make = () => {
|
||||||
zero: MomentRe.momentNow(),
|
zero: MomentRe.momentNow(),
|
||||||
unit: "days",
|
unit: "days",
|
||||||
sampleCount: "30000",
|
sampleCount: "30000",
|
||||||
outputXYPoints: "10000",
|
outputXYPoints: "1000",
|
||||||
truncateTo: "1000",
|
downsampleTo: "",
|
||||||
kernelWidth: "5",
|
kernelWidth: "",
|
||||||
},
|
},
|
||||||
(),
|
(),
|
||||||
);
|
);
|
||||||
|
@ -210,7 +210,7 @@ let make = () => {
|
||||||
let sampleCount = reform.state.values.sampleCount |> Js.Float.fromString;
|
let sampleCount = reform.state.values.sampleCount |> Js.Float.fromString;
|
||||||
let outputXYPoints =
|
let outputXYPoints =
|
||||||
reform.state.values.outputXYPoints |> Js.Float.fromString;
|
reform.state.values.outputXYPoints |> Js.Float.fromString;
|
||||||
let truncateTo = reform.state.values.truncateTo |> Js.Float.fromString;
|
let downsampleTo = reform.state.values.downsampleTo |> Js.Float.fromString;
|
||||||
let kernelWidth = reform.state.values.kernelWidth |> Js.Float.fromString;
|
let kernelWidth = reform.state.values.kernelWidth |> Js.Float.fromString;
|
||||||
|
|
||||||
let domain =
|
let domain =
|
||||||
|
@ -252,20 +252,20 @@ let make = () => {
|
||||||
};
|
};
|
||||||
|
|
||||||
let options =
|
let options =
|
||||||
switch (sampleCount, outputXYPoints, truncateTo) {
|
switch (sampleCount, outputXYPoints, downsampleTo) {
|
||||||
| (_, _, _)
|
| (_, _, _)
|
||||||
when
|
when
|
||||||
!Js.Float.isNaN(sampleCount)
|
!Js.Float.isNaN(sampleCount)
|
||||||
&& !Js.Float.isNaN(outputXYPoints)
|
&& !Js.Float.isNaN(outputXYPoints)
|
||||||
&& !Js.Float.isNaN(truncateTo)
|
&& !Js.Float.isNaN(downsampleTo)
|
||||||
&& sampleCount > 10.
|
&& sampleCount > 10.
|
||||||
&& outputXYPoints > 10. =>
|
&& outputXYPoints > 10. =>
|
||||||
Some({
|
Some({
|
||||||
sampleCount: sampleCount |> int_of_float,
|
sampleCount: sampleCount |> int_of_float,
|
||||||
outputXYPoints: outputXYPoints |> int_of_float,
|
outputXYPoints: outputXYPoints |> int_of_float,
|
||||||
truncateTo:
|
downsampleTo:
|
||||||
int_of_float(truncateTo) > 0
|
int_of_float(downsampleTo) > 0
|
||||||
? Some(int_of_float(truncateTo)) : None,
|
? Some(int_of_float(downsampleTo)) : None,
|
||||||
kernelWidth: kernelWidth == 0.0 ? None : Some(kernelWidth),
|
kernelWidth: kernelWidth == 0.0 ? None : Some(kernelWidth),
|
||||||
})
|
})
|
||||||
| _ => None
|
| _ => None
|
||||||
|
@ -287,7 +287,7 @@ let make = () => {
|
||||||
reform.state.values.unit,
|
reform.state.values.unit,
|
||||||
reform.state.values.sampleCount,
|
reform.state.values.sampleCount,
|
||||||
reform.state.values.outputXYPoints,
|
reform.state.values.outputXYPoints,
|
||||||
reform.state.values.truncateTo,
|
reform.state.values.downsampleTo,
|
||||||
reform.state.values.kernelWidth,
|
reform.state.values.kernelWidth,
|
||||||
reloader |> string_of_int,
|
reloader |> string_of_int,
|
||||||
|],
|
|],
|
||||||
|
@ -481,7 +481,10 @@ let make = () => {
|
||||||
/>
|
/>
|
||||||
</Col>
|
</Col>
|
||||||
<Col span=4>
|
<Col span=4>
|
||||||
<FieldFloat field=FormConfig.TruncateTo label="Truncate To" />
|
<FieldFloat
|
||||||
|
field=FormConfig.DownsampleTo
|
||||||
|
label="Downsample To"
|
||||||
|
/>
|
||||||
</Col>
|
</Col>
|
||||||
<Col span=4>
|
<Col span=4>
|
||||||
<FieldFloat field=FormConfig.KernelWidth label="Kernel Width" />
|
<FieldFloat field=FormConfig.KernelWidth label="Kernel Width" />
|
||||||
|
|
|
@ -1,105 +0,0 @@
|
||||||
open BsReform;
|
|
||||||
open Antd.Grid;
|
|
||||||
|
|
||||||
module FormConfig = [%lenses type state = {guesstimatorString: string}];
|
|
||||||
|
|
||||||
module Form = ReForm.Make(FormConfig);
|
|
||||||
|
|
||||||
let schema = Form.Validation.Schema([||]);
|
|
||||||
|
|
||||||
module FieldString = {
|
|
||||||
[@react.component]
|
|
||||||
let make = (~field, ~label) => {
|
|
||||||
<Form.Field
|
|
||||||
field
|
|
||||||
render={({handleChange, error, value, validate}) =>
|
|
||||||
<Antd.Form.Item label={label |> R.ste}>
|
|
||||||
<Antd.Input
|
|
||||||
value
|
|
||||||
onChange={BsReform.Helpers.handleChange(handleChange)}
|
|
||||||
onBlur={_ => validate()}
|
|
||||||
/>
|
|
||||||
</Antd.Form.Item>
|
|
||||||
}
|
|
||||||
/>;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
module Styles = {
|
|
||||||
open Css;
|
|
||||||
let dist = style([padding(em(1.))]);
|
|
||||||
let spacer = style([marginTop(em(1.))]);
|
|
||||||
};
|
|
||||||
|
|
||||||
module DemoDist = {
|
|
||||||
[@react.component]
|
|
||||||
let make = (~guesstimatorString: string) => {
|
|
||||||
let (ys, xs, isEmpty) =
|
|
||||||
DistEditor.getPdfFromUserInput(guesstimatorString);
|
|
||||||
let inside =
|
|
||||||
isEmpty
|
|
||||||
? "Nothing to show" |> R.ste
|
|
||||||
: {
|
|
||||||
let distPlus =
|
|
||||||
Distributions.DistPlus.make(
|
|
||||||
~shape=
|
|
||||||
Continuous(
|
|
||||||
Distributions.Continuous.make(`Linear, {xs, ys}),
|
|
||||||
),
|
|
||||||
~domain=Complete,
|
|
||||||
~unit=UnspecifiedDistribution,
|
|
||||||
~guesstimatorString=None,
|
|
||||||
(),
|
|
||||||
)
|
|
||||||
|> Distributions.DistPlus.T.scaleToIntegralSum(~intendedSum=1.0);
|
|
||||||
<DistPlusPlot distPlus />;
|
|
||||||
};
|
|
||||||
<Antd.Card title={"Distribution" |> R.ste}>
|
|
||||||
<div className=Styles.spacer />
|
|
||||||
inside
|
|
||||||
</Antd.Card>;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
[@react.component]
|
|
||||||
let make = () => {
|
|
||||||
let reform =
|
|
||||||
Form.use(
|
|
||||||
~validationStrategy=OnDemand,
|
|
||||||
~schema,
|
|
||||||
~onSubmit=({state}) => {None},
|
|
||||||
~initialState={guesstimatorString: "lognormal(6.1, 1)"},
|
|
||||||
(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let demoDist =
|
|
||||||
React.useMemo1(
|
|
||||||
() => {
|
|
||||||
<DemoDist
|
|
||||||
guesstimatorString={reform.state.values.guesstimatorString}
|
|
||||||
/>
|
|
||||||
},
|
|
||||||
[|reform.state.values.guesstimatorString|],
|
|
||||||
);
|
|
||||||
|
|
||||||
<div>
|
|
||||||
<div className=Styles.spacer />
|
|
||||||
demoDist
|
|
||||||
<div className=Styles.spacer />
|
|
||||||
<Antd.Card title={"Distribution Form" |> R.ste}>
|
|
||||||
<Form.Provider value=reform>
|
|
||||||
<Antd.Form>
|
|
||||||
<Row _type=`flex>
|
|
||||||
<Col span=12>
|
|
||||||
<FieldString
|
|
||||||
field=FormConfig.GuesstimatorString
|
|
||||||
label="Guesstimator String"
|
|
||||||
/>
|
|
||||||
</Col>
|
|
||||||
</Row>
|
|
||||||
</Antd.Form>
|
|
||||||
</Form.Provider>
|
|
||||||
</Antd.Card>
|
|
||||||
<div className=Styles.spacer />
|
|
||||||
</div>;
|
|
||||||
};
|
|
|
@ -1,114 +0,0 @@
|
||||||
open BsReform;
|
|
||||||
open Antd.Grid;
|
|
||||||
|
|
||||||
module FormConfig = [%lenses type state = {guesstimatorString: string}];
|
|
||||||
|
|
||||||
module Form = ReForm.Make(FormConfig);
|
|
||||||
|
|
||||||
let schema = Form.Validation.Schema([||]);
|
|
||||||
|
|
||||||
module FieldString = {
|
|
||||||
[@react.component]
|
|
||||||
let make = (~field, ~label) => {
|
|
||||||
<Form.Field
|
|
||||||
field
|
|
||||||
render={({handleChange, error, value, validate}) =>
|
|
||||||
<Antd.Form.Item label={label |> R.ste}>
|
|
||||||
<Antd.Input
|
|
||||||
value
|
|
||||||
onChange={BsReform.Helpers.handleChange(handleChange)}
|
|
||||||
onBlur={_ => validate()}
|
|
||||||
/>
|
|
||||||
</Antd.Form.Item>
|
|
||||||
}
|
|
||||||
/>;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
module Styles = {
|
|
||||||
open Css;
|
|
||||||
let dist = style([padding(em(1.))]);
|
|
||||||
let spacer = style([marginTop(em(1.))]);
|
|
||||||
};
|
|
||||||
|
|
||||||
module DemoDist = {
|
|
||||||
[@react.component]
|
|
||||||
let make = (~guesstimatorString: string) => {
|
|
||||||
let parsed1 = MathJsParser.fromString(guesstimatorString);
|
|
||||||
let shape =
|
|
||||||
switch (parsed1) {
|
|
||||||
| Ok(r) => Some(SymbolicDist.toShape(10000, r))
|
|
||||||
| _ => None
|
|
||||||
};
|
|
||||||
|
|
||||||
let str =
|
|
||||||
switch (parsed1) {
|
|
||||||
| Ok(r) => SymbolicDist.toString(r)
|
|
||||||
| Error(e) => e
|
|
||||||
};
|
|
||||||
|
|
||||||
let inside =
|
|
||||||
shape
|
|
||||||
|> E.O.fmap(shape => {
|
|
||||||
let distPlus =
|
|
||||||
Distributions.DistPlus.make(
|
|
||||||
~shape,
|
|
||||||
~domain=Complete,
|
|
||||||
~unit=UnspecifiedDistribution,
|
|
||||||
~guesstimatorString=None,
|
|
||||||
(),
|
|
||||||
)
|
|
||||||
|> Distributions.DistPlus.T.scaleToIntegralSum(~intendedSum=1.0);
|
|
||||||
<DistPlusPlot distPlus />;
|
|
||||||
})
|
|
||||||
|> E.O.default(ReasonReact.null);
|
|
||||||
<Antd.Card title={"Distribution" |> R.ste}>
|
|
||||||
<div className=Styles.spacer />
|
|
||||||
inside
|
|
||||||
{str |> ReasonReact.string}
|
|
||||||
</Antd.Card>;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
[@react.component]
|
|
||||||
let make = () => {
|
|
||||||
let reform =
|
|
||||||
Form.use(
|
|
||||||
~validationStrategy=OnDemand,
|
|
||||||
~schema,
|
|
||||||
~onSubmit=({state}) => {None},
|
|
||||||
~initialState={guesstimatorString: "mm(1 to 100, 50 to 200, [.5,.5])"},
|
|
||||||
(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let demoDist =
|
|
||||||
React.useMemo1(
|
|
||||||
() => {
|
|
||||||
<DemoDist
|
|
||||||
guesstimatorString={reform.state.values.guesstimatorString}
|
|
||||||
/>
|
|
||||||
},
|
|
||||||
[|reform.state.values.guesstimatorString|],
|
|
||||||
);
|
|
||||||
|
|
||||||
<div>
|
|
||||||
<div className=Styles.spacer />
|
|
||||||
demoDist
|
|
||||||
<div className=Styles.spacer />
|
|
||||||
<Antd.Card title={"Distribution Form" |> R.ste}>
|
|
||||||
<Form.Provider value=reform>
|
|
||||||
<Antd.Form>
|
|
||||||
<Row _type=`flex>
|
|
||||||
<Col span=12>
|
|
||||||
<FieldString
|
|
||||||
field=FormConfig.GuesstimatorString
|
|
||||||
label="Guesstimator String"
|
|
||||||
/>
|
|
||||||
</Col>
|
|
||||||
</Row>
|
|
||||||
</Antd.Form>
|
|
||||||
</Form.Provider>
|
|
||||||
</Antd.Card>
|
|
||||||
<div className=Styles.spacer />
|
|
||||||
</div>;
|
|
||||||
};
|
|
|
@ -161,7 +161,6 @@ module Convert = {
|
||||||
let canvasShapeToContinuousShape =
|
let canvasShapeToContinuousShape =
|
||||||
(~canvasShape: Types.canvasShape, ~canvasElement: Dom.element)
|
(~canvasShape: Types.canvasShape, ~canvasElement: Dom.element)
|
||||||
: Types.continuousShape => {
|
: Types.continuousShape => {
|
||||||
|
|
||||||
let xs = canvasShape.xValues;
|
let xs = canvasShape.xValues;
|
||||||
let hs = canvasShape.hs;
|
let hs = canvasShape.hs;
|
||||||
let rectangle: Types.rectangle =
|
let rectangle: Types.rectangle =
|
||||||
|
@ -170,13 +169,15 @@ module Convert = {
|
||||||
let paddingFactorY = CanvasContext.paddingFactorX(rectangle.height);
|
let paddingFactorY = CanvasContext.paddingFactorX(rectangle.height);
|
||||||
let windowScrollY: float = [%raw "window.scrollY"];
|
let windowScrollY: float = [%raw "window.scrollY"];
|
||||||
|
|
||||||
let y0Line = bottom+.windowScrollY-.paddingFactorY;
|
let y0Line = bottom +. windowScrollY -. paddingFactorY;
|
||||||
let ys = E.A.fmap( h => y0Line -. h, hs);
|
let ys = E.A.fmap(h => y0Line -. h, hs);
|
||||||
|
|
||||||
let xyShape: Types.xyShape = {xs, ys};
|
let xyShape: Types.xyShape = {xs, ys};
|
||||||
let continuousShape: Types.continuousShape = {
|
let continuousShape: Types.continuousShape = {
|
||||||
xyShape,
|
xyShape,
|
||||||
interpolation: `Linear,
|
interpolation: `Linear,
|
||||||
|
integralSumCache: None,
|
||||||
|
integralCache: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let integral = XYShape.Analysis.integrateContinuousShape(continuousShape);
|
let integral = XYShape.Analysis.integrateContinuousShape(continuousShape);
|
||||||
|
@ -188,6 +189,8 @@ module Convert = {
|
||||||
ys,
|
ys,
|
||||||
},
|
},
|
||||||
interpolation: `Linear,
|
interpolation: `Linear,
|
||||||
|
integralSumCache: Some(1.0),
|
||||||
|
integralCache: None,
|
||||||
};
|
};
|
||||||
continuousShape;
|
continuousShape;
|
||||||
};
|
};
|
||||||
|
@ -289,8 +292,8 @@ module Draw = {
|
||||||
/*
|
/*
|
||||||
let continuousShape =
|
let continuousShape =
|
||||||
Convert.canvasShapeToContinuousShape(~canvasShape, ~canvasElement);
|
Convert.canvasShapeToContinuousShape(~canvasShape, ~canvasElement);
|
||||||
let mean = Distributions.Continuous.T.mean(continuousShape);
|
let mean = Continuous.T.mean(continuousShape);
|
||||||
let variance = Distributions.Continuous.T.variance(continuousShape);
|
let variance = Continuous.T.variance(continuousShape);
|
||||||
let meanLocation =
|
let meanLocation =
|
||||||
Convert.findClosestInOrderedArrayDangerously(mean, canvasShape.xValues);
|
Convert.findClosestInOrderedArrayDangerously(mean, canvasShape.xValues);
|
||||||
let meanLocationCanvasX = canvasShape.ws[meanLocation];
|
let meanLocationCanvasX = canvasShape.ws[meanLocation];
|
||||||
|
@ -386,24 +389,29 @@ module Draw = {
|
||||||
let stdev = 15.0;
|
let stdev = 15.0;
|
||||||
let numSamples = 3000;
|
let numSamples = 3000;
|
||||||
|
|
||||||
let normal: SymbolicDist.dist = `Normal({mean, stdev});
|
let normal: SymbolicTypes.symbolicDist = `Normal({mean, stdev});
|
||||||
let normalShape = SymbolicDist.GenericSimple.toShape(normal, numSamples);
|
let normalShape =
|
||||||
|
ExpressionTree.toShape(
|
||||||
|
numSamples,
|
||||||
|
{sampleCount: 10000, outputXYPoints: 10000, kernelWidth: None},
|
||||||
|
`SymbolicDist(normal),
|
||||||
|
) |> E.R.toExn;
|
||||||
let xyShape: Types.xyShape =
|
let xyShape: Types.xyShape =
|
||||||
switch (normalShape) {
|
switch (normalShape) {
|
||||||
| Mixed(_) => {xs: [||], ys: [||]}
|
| Mixed(_) => {xs: [||], ys: [||]}
|
||||||
| Discrete(_) => {xs: [||], ys: [||]}
|
| Discrete(_) => {xs: [||], ys: [||]}
|
||||||
| Continuous(m) => Distributions.Continuous.getShape(m)
|
| Continuous(m) => Continuous.getShape(m)
|
||||||
};
|
};
|
||||||
|
|
||||||
/* // To use a lognormal instead:
|
/* // To use a lognormal instead:
|
||||||
let lognormal = SymbolicDist.Lognormal.fromMeanAndStdev(mean, stdev);
|
let lognormal = SymbolicTypes.Lognormal.fromMeanAndStdev(mean, stdev);
|
||||||
let lognormalShape =
|
let lognormalShape =
|
||||||
SymbolicDist.GenericSimple.toShape(lognormal, numSamples);
|
SymbolicTypes.GenericSimple.toShape(lognormal, numSamples);
|
||||||
let lognormalXYShape: Types.xyShape =
|
let lognormalXYShape: Types.xyShape =
|
||||||
switch (lognormalShape) {
|
switch (lognormalShape) {
|
||||||
| Mixed(_) => {xs: [||], ys: [||]}
|
| Mixed(_) => {xs: [||], ys: [||]}
|
||||||
| Discrete(_) => {xs: [||], ys: [||]}
|
| Discrete(_) => {xs: [||], ys: [||]}
|
||||||
| Continuous(m) => Distributions.Continuous.getShape(m)
|
| Continuous(m) => Continuous.getShape(m)
|
||||||
};
|
};
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
@ -666,14 +674,9 @@ module State = {
|
||||||
Convert.canvasShapeToContinuousShape(~canvasShape, ~canvasElement);
|
Convert.canvasShapeToContinuousShape(~canvasShape, ~canvasElement);
|
||||||
|
|
||||||
/* create a cdf from a pdf */
|
/* create a cdf from a pdf */
|
||||||
let _pdf =
|
let _pdf = Continuous.T.normalize(pdf);
|
||||||
Distributions.Continuous.T.scaleToIntegralSum(
|
|
||||||
~cache=None,
|
|
||||||
~intendedSum=1.0,
|
|
||||||
pdf,
|
|
||||||
);
|
|
||||||
|
|
||||||
let cdf = Distributions.Continuous.T.integral(~cache=None, _pdf);
|
let cdf = Continuous.T.integral(_pdf);
|
||||||
let xs = [||];
|
let xs = [||];
|
||||||
let ys = [||];
|
let ys = [||];
|
||||||
for (i in 1 to 999) {
|
for (i in 1 to 999) {
|
||||||
|
|
|
@ -37,27 +37,27 @@ let table = (distPlus, x) => {
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border ">
|
<td className="px-4 py-2 border ">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.xToY(x)
|
|> DistPlus.T.xToY(x)
|
||||||
|> DistTypes.MixedPoint.toDiscreteValue
|
|> DistTypes.MixedPoint.toDiscreteValue
|
||||||
|> Js.Float.toPrecisionWithPrecision(_, ~digits=7)
|
|> Js.Float.toPrecisionWithPrecision(_, ~digits=7)
|
||||||
|> ReasonReact.string}
|
|> ReasonReact.string}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border ">
|
<td className="px-4 py-2 border ">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.xToY(x)
|
|> DistPlus.T.xToY(x)
|
||||||
|> DistTypes.MixedPoint.toContinuousValue
|
|> DistTypes.MixedPoint.toContinuousValue
|
||||||
|> Js.Float.toPrecisionWithPrecision(_, ~digits=7)
|
|> Js.Float.toPrecisionWithPrecision(_, ~digits=7)
|
||||||
|> ReasonReact.string}
|
|> ReasonReact.string}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border ">
|
<td className="px-4 py-2 border ">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.xToY(~cache=None, x)
|
|> DistPlus.T.Integral.xToY(x)
|
||||||
|> E.Float.with2DigitsPrecision
|
|> E.Float.with2DigitsPrecision
|
||||||
|> ReasonReact.string}
|
|> ReasonReact.string}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border ">
|
<td className="px-4 py-2 border ">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.sum(~cache=None)
|
|> DistPlus.T.Integral.sum
|
||||||
|> E.Float.with2DigitsPrecision
|
|> E.Float.with2DigitsPrecision
|
||||||
|> ReasonReact.string}
|
|> ReasonReact.string}
|
||||||
</td>
|
</td>
|
||||||
|
@ -70,24 +70,18 @@ let table = (distPlus, x) => {
|
||||||
<td className="px-4 py-2">
|
<td className="px-4 py-2">
|
||||||
{"Continuous Total" |> ReasonReact.string}
|
{"Continuous Total" |> ReasonReact.string}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2">
|
|
||||||
{"Scaled Continuous Total" |> ReasonReact.string}
|
|
||||||
</td>
|
|
||||||
<td className="px-4 py-2">
|
<td className="px-4 py-2">
|
||||||
{"Discrete Total" |> ReasonReact.string}
|
{"Discrete Total" |> ReasonReact.string}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2">
|
|
||||||
{"Scaled Discrete Total" |> ReasonReact.string}
|
|
||||||
</td>
|
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr>
|
<tr>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.toContinuous
|
|> DistPlus.T.toContinuous
|
||||||
|> E.O.fmap(
|
|> E.O.fmap(
|
||||||
Distributions.Continuous.T.Integral.sum(~cache=None),
|
Continuous.T.Integral.sum
|
||||||
)
|
)
|
||||||
|> E.O.fmap(E.Float.with2DigitsPrecision)
|
|> E.O.fmap(E.Float.with2DigitsPrecision)
|
||||||
|> E.O.default("")
|
|> E.O.default("")
|
||||||
|
@ -95,26 +89,8 @@ let table = (distPlus, x) => {
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border ">
|
<td className="px-4 py-2 border ">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.toScaledContinuous
|
|> DistPlus.T.toDiscrete
|
||||||
|> E.O.fmap(
|
|> E.O.fmap(Discrete.T.Integral.sum)
|
||||||
Distributions.Continuous.T.Integral.sum(~cache=None),
|
|
||||||
)
|
|
||||||
|> E.O.fmap(E.Float.with2DigitsPrecision)
|
|
||||||
|> E.O.default("")
|
|
||||||
|> ReasonReact.string}
|
|
||||||
</td>
|
|
||||||
<td className="px-4 py-2 border ">
|
|
||||||
{distPlus
|
|
||||||
|> Distributions.DistPlus.T.toDiscrete
|
|
||||||
|> E.O.fmap(Distributions.Discrete.T.Integral.sum(~cache=None))
|
|
||||||
|> E.O.fmap(E.Float.with2DigitsPrecision)
|
|
||||||
|> E.O.default("")
|
|
||||||
|> ReasonReact.string}
|
|
||||||
</td>
|
|
||||||
<td className="px-4 py-2 border ">
|
|
||||||
{distPlus
|
|
||||||
|> Distributions.DistPlus.T.toScaledDiscrete
|
|
||||||
|> E.O.fmap(Distributions.Discrete.T.Integral.sum(~cache=None))
|
|
||||||
|> E.O.fmap(E.Float.with2DigitsPrecision)
|
|> E.O.fmap(E.Float.with2DigitsPrecision)
|
||||||
|> E.O.default("")
|
|> E.O.default("")
|
||||||
|> ReasonReact.string}
|
|> ReasonReact.string}
|
||||||
|
@ -143,42 +119,42 @@ let percentiles = distPlus => {
|
||||||
<tr>
|
<tr>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.01)
|
|> DistPlus.T.Integral.yToX(0.01)
|
||||||
|> showFloat}
|
|> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.05)
|
|> DistPlus.T.Integral.yToX(0.05)
|
||||||
|> showFloat}
|
|> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.25)
|
|> DistPlus.T.Integral.yToX(0.25)
|
||||||
|> showFloat}
|
|> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.5)
|
|> DistPlus.T.Integral.yToX(0.5)
|
||||||
|> showFloat}
|
|> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.75)
|
|> DistPlus.T.Integral.yToX(0.75)
|
||||||
|> showFloat}
|
|> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.95)
|
|> DistPlus.T.Integral.yToX(0.95)
|
||||||
|> showFloat}
|
|> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.99)
|
|> DistPlus.T.Integral.yToX(0.99)
|
||||||
|> showFloat}
|
|> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus
|
{distPlus
|
||||||
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.99999)
|
|> DistPlus.T.Integral.yToX(0.99999)
|
||||||
|> showFloat}
|
|> showFloat}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
@ -197,13 +173,13 @@ let percentiles = distPlus => {
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr>
|
<tr>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus |> Distributions.DistPlus.T.mean |> showFloat}
|
{distPlus |> DistPlus.T.mean |> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus |> Distributions.DistPlus.T.variance |> (r => r ** 0.5) |> showFloat}
|
{distPlus |> DistPlus.T.variance |> (r => r ** 0.5) |> showFloat}
|
||||||
</td>
|
</td>
|
||||||
<td className="px-4 py-2 border">
|
<td className="px-4 py-2 border">
|
||||||
{distPlus |> Distributions.DistPlus.T.variance |> showFloat}
|
{distPlus |> DistPlus.T.variance |> showFloat}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
</tbody>
|
</tbody>
|
||||||
|
@ -211,34 +187,37 @@ let percentiles = distPlus => {
|
||||||
</div>;
|
</div>;
|
||||||
};
|
};
|
||||||
|
|
||||||
let adjustBoth = discreteProbabilityMass => {
|
let adjustBoth = discreteProbabilityMassFraction => {
|
||||||
let yMaxDiscreteDomainFactor = discreteProbabilityMass;
|
let yMaxDiscreteDomainFactor = discreteProbabilityMassFraction;
|
||||||
let yMaxContinuousDomainFactor = 1.0 -. discreteProbabilityMass;
|
let yMaxContinuousDomainFactor = 1.0 -. discreteProbabilityMassFraction;
|
||||||
let yMax =
|
|
||||||
yMaxDiscreteDomainFactor > yMaxContinuousDomainFactor
|
// use the bigger proportion, such that whichever is the bigger proportion, the yMax is 1.
|
||||||
? yMaxDiscreteDomainFactor : yMaxContinuousDomainFactor;
|
|
||||||
|
|
||||||
|
let yMax = (yMaxDiscreteDomainFactor > 0.5 ? yMaxDiscreteDomainFactor : yMaxContinuousDomainFactor);
|
||||||
(
|
(
|
||||||
1.0 /. (yMaxDiscreteDomainFactor /. yMax),
|
yMax /. yMaxDiscreteDomainFactor,
|
||||||
1.0 /. (yMaxContinuousDomainFactor /. yMax),
|
yMax /. yMaxContinuousDomainFactor,
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
module DistPlusChart = {
[@react.component]
let make = (~distPlus: DistTypes.distPlus, ~config: chartConfig, ~onHover) => {
open Distributions.DistPlus;
open DistPlus;
let discrete = distPlus |> T.toScaledDiscrete;
let discrete = distPlus |> T.toDiscrete |> E.O.fmap(Discrete.getShape);
let continuous =
distPlus
|> T.toScaledContinuous
|> T.toContinuous
|> E.O.fmap(Distributions.Continuous.getShape);
|> E.O.fmap(Continuous.getShape);
let range = T.xTotalRange(distPlus);

// // We subtract a bit from the range to make sure that it fits. Maybe this should be done in d3 instead.
// let minX =
// switch (
// distPlus
// |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.0001),
// |> DistPlus.T.Integral.yToX(0.0001),
// range,
// ) {
// | (min, Some(range)) => Some(min -. range *. 0.001)

@ -246,18 +225,20 @@ module DistPlusChart = {

// };

let minX = {
distPlus |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.00001);
distPlus |> DistPlus.T.Integral.yToX(0.00001);
};

let maxX = {
distPlus |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.99);
distPlus |> DistPlus.T.Integral.yToX(0.99999);
};

let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
let toDiscreteProbabilityMass =
let discreteProbabilityMassFraction =
distPlus |> Distributions.DistPlus.T.toDiscreteProbabilityMass;
distPlus |> DistPlus.T.toDiscreteProbabilityMassFraction;

let (yMaxDiscreteDomainFactor, yMaxContinuousDomainFactor) =
adjustBoth(toDiscreteProbabilityMass);
adjustBoth(discreteProbabilityMassFraction);

<DistributionPlot
xScale={config.xLog ? "log" : "linear"}
yScale={config.yLog ? "log" : "linear"}

@ -278,18 +259,18 @@ module DistPlusChart = {

module IntegralChart = {
[@react.component]
let make = (~distPlus: DistTypes.distPlus, ~config: chartConfig, ~onHover) => {
open Distributions.DistPlus;
open DistPlus;
let integral = distPlus.integralCache;
let continuous =
integral
|> Distributions.Continuous.toLinear
|> Continuous.toLinear
|> E.O.fmap(Distributions.Continuous.getShape);
|> E.O.fmap(Continuous.getShape);
let minX = {
distPlus |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.00001);
distPlus |> DistPlus.T.Integral.yToX(0.00001);
};

let maxX = {
distPlus |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.99);
distPlus |> DistPlus.T.Integral.yToX(0.99999);
};

let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
<DistributionPlot

@ -336,10 +317,11 @@ let make = (~distPlus: DistTypes.distPlus) => {

let (x, setX) = React.useState(() => 0.);
let (state, dispatch) =
React.useReducer(DistPlusPlotReducer.reducer, DistPlusPlotReducer.init);

<div>
{state.distributions
|> E.L.fmapi((index, config) =>
<div className="flex">
<div className="flex" key={string_of_int(index)}>
<div className="w-4/5">
<Chart distPlus config onHover={r => {setX(_ => r)}} />
</div>

@ -427,7 +427,7 @@ export class DistPlotD3 {

addLollipopsChart(common) {
const data = this.getDataPoints('discrete');

const yMin = d3.min(this.attrs.data.discrete.ys);
const yMin = 0.;
const yMax = d3.max(this.attrs.data.discrete.ys);

// X axis.

@ -1,3 +0,0 @@

[@bs.module "./main.js"]
external getPdfFromUserInput: string => (array(float), array(float), bool) =
"get_pdf_from_user_input";
@ -1,247 +0,0 @@

const _math = require("mathjs");
const math = _math.create(_math.all);
const jStat = require("jstat");

/**
 * This module defines an abstract BinnedDistribution class, which
 * should be implemented for each distribution. You need to decide
 * how to bin the distribution (use _adabin unless there's a nicer
 * way for your distr) and how to choose the distribution's support.
 */

math.import({
  normal: jStat.normal,
  beta: jStat.beta,
  lognormal: jStat.lognormal,
  uniform: jStat.uniform
});

class BaseDistributionBinned {
  /**
   * @param args
   */
  constructor(args) {
    this._set_props();
    this.max_bin_size = 0.005;
    this.min_bin_size = 0;
    this.increment = 0.0001;
    this.desired_delta = 0.001;
    this.start_bin_size = 0.0001;

    [this.params, this.pdf_func, this.sample] = this.get_params_and_pdf_func(
      args
    );

    [this.start_point, this.end_point] = this.get_bounds();
    [this.pdf_vals, this.divider_pts] = this.bin();
  }

  /**
   * this is hacky but class properties aren't always supported
   * @private
   */
  _set_props() {
    throw new Error("NotImplementedError");
  }

  // Adaptive binning. Specify a desired change in density to get adjusted bin sizes.
  /**
   * @returns {(number[]|[*])[]}
   * @private
   */
  _adabin() {
    let point = this.start_point;
    let vals = [this.pdf_func(point)];
    let divider_pts = [point];
    let support = this.end_point - this.start_point;
    let bin_size = this.start_bin_size * support;

    while (point < this.end_point) {
      let val = this.pdf_func(point + bin_size);
      if (Math.abs(val - vals[vals.length - 1]) > this.desired_delta) {
        while (
          (Math.abs(val - vals[vals.length - 1]) > this.desired_delta) &
          (bin_size - this.increment * support > this.min_bin_size)
        ) {
          bin_size -= this.increment;
          val = this.pdf_func(point + bin_size);
        }
      } else if (Math.abs(val - vals[vals.length - 1]) < this.desired_delta) {
        while (
          (Math.abs(val - vals[vals.length - 1]) < this.desired_delta) &
          (bin_size < this.max_bin_size)
        ) {
          bin_size += this.increment;
          val = this.pdf_func(point + bin_size);
        }
      }
      point += bin_size;
      vals.push(val);
      divider_pts.push(point);
    }
    vals = vals.map((_, idx) => vals[idx] / 2 + vals[idx + 1] / 2);
    vals = vals.slice(0, -1);
    return [vals, divider_pts];
  }

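A usage sketch of the adaptive binning (this call pattern follows get_distributions further down, which instantiates these classes with a parsed argument array):

const { distrs } = require("./distribution.js");

// a standard normal, binned adaptively
const norm = new distrs.normal([0, 1]);
const widths = norm.divider_pts.slice(1).map((p, i) => p - norm.divider_pts[i]);
// bin widths shrink where the pdf changes fastest (the flanks of the bell)
// and grow in the flat tails, keeping the per-bin density change near desired_delta
console.log(norm.pdf_vals.length, widths.length); // one pdf value per bin
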
  bin() {
    throw new Error("NotImplementedError");
  }

  get_bounds() {
    throw new Error("NotImplementedError");
  }

  /**
   * @param args
   * @returns {(any|(function(*=): *))[]}
   */
  get_params_and_pdf_func(args) {
    let args_str = args.toString() + ")";
    let substr = this.name + ".pdf(x, " + args_str;
    let compiled = math.compile(substr);

    function pdf_func(x) {
      return compiled.evaluate({ x: x });
    }

    let mc_compiled = math.compile(this.name + ".sample(" + args_str);
    let kv_pairs = this.param_names.map((val, idx) => [val, args[idx]]);
    let params = Object.fromEntries(new Map(kv_pairs));
    return [params, pdf_func, mc_compiled.evaluate];
  }
}

class NormalDistributionBinned extends BaseDistributionBinned {
  /**
   * @private
   */
  _set_props() {
    this.name = "normal";
    this.param_names = ["mean", "std"];
  }

  /**
   * @returns {(number|*)[]}
   */
  get_bounds() {
    return [
      this.params.mean - 4 * this.params.std,
      this.params.mean + 4 * this.params.std
    ];
  }

  /**
   * @returns {[[*], [*]]}
   */
  bin() {
    return this._adabin(this.params.std);
  }
}

class UniformDistributionBinned extends BaseDistributionBinned {
  /**
   * @private
   */
  _set_props() {
    this.name = "uniform";
    this.param_names = ["start_point", "end_point"];
    this.num_bins = 200;
  }

  /**
   * @returns {*[]}
   */
  get_bounds() {
    return [this.params.start_point, this.params.end_point];
  }

  /**
   * @returns {(*[])[]}
   */
  bin() {
    let divider_pts = evenly_spaced_grid(
      this.params.start_point,
      this.params.end_point,
      this.num_bins
    );
    let vals = divider_pts.map(x =>
      this.pdf_func(this.params.start_point / 2 + this.params.end_point / 2)
    );
    vals = vals.slice(0, -1);
    return [vals, divider_pts];
  }
}

class LogNormalDistributionBinned extends BaseDistributionBinned {
  /**
   * @private
   */
  _set_props() {
    this.name = "lognormal";
    this.param_names = ["normal_mean", "normal_std"];
    this.n_bounds_samples = 10000;
    this.n_largest_bound_sample = 10;
  }

  /**
   * @param samples
   * @param n
   * @returns {any}
   * @private
   */
  _nth_largest(samples, n) {
    var largest_buffer = Array(n).fill(-Infinity);
    for (const sample of samples) {
      if (sample > largest_buffer[n - 1]) {
        var i = n;
        while ((i > 0) & (sample > largest_buffer[i - 1])) {
          i -= 1;
        }
        largest_buffer[i] = sample;
      }
    }
    return largest_buffer[n - 1];
  }

  /**
   * @returns {(*|any)[]}
   */
  get_bounds() {
    let samples = Array(this.n_bounds_samples)
      .fill(0)
      .map(() => this.sample());
    return [
      math.min(samples),
      this._nth_largest(samples, this.n_largest_bound_sample)
    ];
  }

  /**
   * @returns {[[*], [*]]}
   */
  bin() {
    return this._adabin();
  }
}

/**
 * @param start
 * @param stop
 * @param numel
 * @returns {*[]}
 */
function evenly_spaced_grid(start, stop, numel) {
  return Array(numel)
    .fill(0)
    .map((_, idx) => start + (idx / numel) * (stop - start));
}

const distrs = {
  normal: NormalDistributionBinned,
  lognormal: LogNormalDistributionBinned,
  uniform: UniformDistributionBinned
};

exports.distrs = distrs;
@ -1,364 +0,0 @@

const _math = require("mathjs");
const bst = require("binary-search-tree");

const distrs = require("./distribution.js").distrs;
const parse = require("./parse.js");
const math = _math.create(_math.all);

const NUM_MC_SAMPLES = 3000;
const OUTPUT_GRID_NUMEL = 3000;

/**
 * The main algorithmic work is done by functions in this module.
 * It also contains the main function, taking the user's string
 * and returning pdf values and x's.
 */

/**
 * @param start
 * @param stop
 * @param numel
 * @returns {*[]}
 */
function evenly_spaced_grid(start, stop, numel) {
  return Array(numel)
    .fill(0)
    .map((_, idx) => start + (idx / numel) * (stop - start));
}

/**
 * Takes an array of strings like "normal(0, 1)" and
 * returns the corresponding distribution objects
 * @param substrings
 * @returns {*}
 */
function get_distributions(substrings) {
  let names_and_args = substrings.map(parse.get_distr_name_and_args);
  let pdfs = names_and_args.map(x => new distrs[x[0]](x[1]));
  return pdfs;
}

/**
 * Update the binary search tree with bin points of
 * deterministic_pdf transformed by transform_func
 * (transform_func can be a stochastic func with parameters
 * sampled from mc_distrs).
 *
 * @param transform_func
 * @param deterministic_pdf
 * @param mc_distrs
 * @param track_idx
 * @param num_mc_samples
 * @param bst_pts_and_idxs
 * @returns {(number)[]}
 */
function update_transformed_divider_points_bst(
  transform_func,
  deterministic_pdf,
  mc_distrs,
  track_idx,
  num_mc_samples,
  bst_pts_and_idxs
) {
  var transformed_pts = [];
  var pdf_inner_idxs = [];
  var factors = [];
  var start_pt = Infinity;
  var end_pt = -Infinity;
  let use_mc = mc_distrs.length > 0;
  var num_outer_iters = use_mc ? num_mc_samples : 1;

  for (let sample_idx = 0; sample_idx < num_outer_iters; ++sample_idx) {
    var this_transformed_pts = deterministic_pdf.divider_pts;
    if (use_mc) {
      let samples = mc_distrs.map(x => x.sample());
      this_transformed_pts = this_transformed_pts.map(x =>
        transform_func([x].concat(samples))
      );
    } else {
      this_transformed_pts = this_transformed_pts.map(x => transform_func([x]));
    }
    var this_transformed_pts_paired = [];
    for (let tp_idx = 0; tp_idx < this_transformed_pts.length - 1; tp_idx++) {
      let sorted = [
        this_transformed_pts[tp_idx],
        this_transformed_pts[tp_idx + 1]
      ].sort((a, b) => a - b);
      if (sorted[0] < start_pt) {
        start_pt = sorted[0];
      }
      if (sorted[1] > end_pt) {
        end_pt = sorted[1];
      }
      this_transformed_pts_paired.push(sorted);
    }

    transformed_pts = transformed_pts.concat(this_transformed_pts_paired);

    pdf_inner_idxs = pdf_inner_idxs.concat([
      ...Array(this_transformed_pts_paired.length).keys()
    ]);
    var this_factors = [];
    for (let idx = 0; idx < this_transformed_pts_paired.length; idx++) {
      this_factors.push(
        (deterministic_pdf.divider_pts[idx + 1] -
          deterministic_pdf.divider_pts[idx]) /
          (this_transformed_pts_paired[idx][1] -
            this_transformed_pts_paired[idx][0])
      );
    }
    factors = factors.concat(this_factors);
  }
  for (let i = 0; i < transformed_pts.length; ++i) {
    bst_pts_and_idxs.insert(transformed_pts[i][0], {
      start: transformed_pts[i][0],
      end: transformed_pts[i][1],
      idx: [track_idx, pdf_inner_idxs[i]],
      factor: factors[i] / num_outer_iters
    });
  }
  return [start_pt, end_pt];
}

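A toy illustration of the bookkeeping above (same binary-search-tree package; the shift-by-one transform and three bins are my example): each transformed bin is stored keyed by its transformed start point, so the intervals relevant to any output bin can be pulled out later with betweenBounds:

const bst = require("binary-search-tree");

const tree = new bst.AVLTree();
const divider_pts = [0, 0.5, 1, 1.5]; // bin edges of a source pdf
const transform = x => x + 1;          // stand-in deterministic transform
for (let i = 0; i < divider_pts.length - 1; i++) {
  const ends = [transform(divider_pts[i]), transform(divider_pts[i + 1])]
    .sort((a, b) => a - b);
  tree.insert(ends[0], { start: ends[0], end: ends[1], idx: i });
}
// transformed intervals whose start falls in [1, 2):
console.log(tree.betweenBounds({ $gte: 1, $lt: 2 }).map(iv => iv.idx)); // [0, 1]
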
/**
 * Take the binary search tree with transformed bin points,
 * and an array of pdf values associated with the bins,
 * and return a pdf over an evenly spaced grid.
 *
 * @param pdf_vals
 * @param bst_pts_and_idxs
 * @param output_grid
 * @returns {[]}
 */
function get_final_pdf(pdf_vals, bst_pts_and_idxs, output_grid) {
  var offset = output_grid[1] / 2 - output_grid[0] / 2;
  var active_intervals = new Map();
  var active_endpoints = new bst.AVLTree();
  var final_pdf_vals = [];

  for (
    let out_grid_idx = 0;
    out_grid_idx < output_grid.length;
    ++out_grid_idx
  ) {
    let startpoints_within_bin = bst_pts_and_idxs.betweenBounds({
      $gte: output_grid[out_grid_idx] - offset,
      $lt: output_grid[out_grid_idx] + offset
    });
    for (let interval of startpoints_within_bin) {
      active_intervals.set(interval.idx, [
        interval.start,
        interval.end,
        interval.factor
      ]);
      active_endpoints.insert(interval.end, interval.idx);
    }
    var contrib = 0;
    for (let [pdf_idx, bounds_and_ratio] of active_intervals.entries()) {
      let overlap_start = Math.max(
        output_grid[out_grid_idx] - offset,
        bounds_and_ratio[0]
      );
      let overlap_end = Math.min(
        output_grid[out_grid_idx] + offset,
        bounds_and_ratio[1]
      );
      let interval_size = bounds_and_ratio[1] - bounds_and_ratio[0];
      let contrib_frac =
        interval_size === 0
          ? 0
          : (overlap_end - overlap_start) * bounds_and_ratio[2];
      let t = contrib_frac * pdf_vals[pdf_idx[0]][pdf_idx[1]];
      contrib += t;
    }
    final_pdf_vals.push(contrib);
    let endpoints_within_bin = active_endpoints.betweenBounds({
      $gte: output_grid[out_grid_idx] - offset,
      $lt: output_grid[out_grid_idx] + offset
    });
    for (let interval_idx of endpoints_within_bin) {
      active_intervals.delete(interval_idx);
    }
  }

  return final_pdf_vals;
}

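In effect, each output bin accumulates mass from every transformed interval overlapping it: overlap width, times the interval's density factor, times the source pdf value for that bin. A one-line numeric check of that rule (the numbers are made up):

// interval [0.2, 0.6] with factor 0.5, source pdf value 2.0, output bin [0.4, 0.8]:
const contrib = (Math.min(0.8, 0.6) - Math.max(0.4, 0.2)) * 0.5 * 2.0; // 0.2
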
/**
 * @param {string} str
 * @param {string} char
 * @returns {number}
 */
function get_count_of_chars(str, char) {
  return str.split(char).length - 1;
}

/**
 * Entrypoint. Pass user input strings to this function,
 * get the corresponding pdf values and input points back.
 * If the pdf requires monte carlo (it contains a between-distr function)
 * we first determine which distr to make deterministic
 * and which to sample from. This is decided based on which
 * choice gives the least variance.
 *
 * @param user_input_string
 * @returns {([]|*[])[]}
 */
function get_pdf_from_user_input(user_input_string) {
  try {
    const count_opened_bracket = get_count_of_chars(user_input_string, '(');
    const count_closed_bracket = get_count_of_chars(user_input_string, ')');
    if (count_opened_bracket !== count_closed_bracket) {
      throw new Error('Counts of opening and closing brackets are not equal.');
    }

    let parsed = parse.parse_initial_string(user_input_string);
    let mm_args = parse.separate_mm_args(parsed.mm_args_string);
    const is_mm = mm_args.distrs.length > 0;
    if (!parsed.outer_string) {
      throw new Error('Parse string is empty.');
    }

    let tree = new bst.AVLTree();
    let possible_start_pts = [];
    let possible_end_pts = [];
    let all_vals = [];
    let weights = is_mm ? math.compile(mm_args.weights).evaluate()._data : [1];
    let weights_sum = weights.reduce((a, b) => a + b);
    weights = weights.map(x => x / weights_sum);
    let n_iters = is_mm ? mm_args.distrs.length : 1;

    for (let i = 0; i < n_iters; ++i) {
      let distr_string = is_mm ? mm_args.distrs[i] : parsed.outer_string;
      var [deterministic_pdf, mc_distrs] = choose_pdf_func(distr_string);
      var grid_transform = get_grid_transform(distr_string);
      var [start_pt, end_pt] = update_transformed_divider_points_bst(
        grid_transform,
        deterministic_pdf,
        mc_distrs,
        i,
        NUM_MC_SAMPLES,
        tree
      );
      possible_start_pts.push(start_pt);
      possible_end_pts.push(end_pt);
      all_vals.push(deterministic_pdf.pdf_vals.map(x => x * weights[i]));
    }

    start_pt = Math.min(...possible_start_pts);
    end_pt = Math.max(...possible_end_pts);

    let output_grid = evenly_spaced_grid(start_pt, end_pt, OUTPUT_GRID_NUMEL);
    let final_pdf_vals = get_final_pdf(all_vals, tree, output_grid);

    return [final_pdf_vals, output_grid, false];
  } catch (e) {
    return [[], [], true];
  }
}

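A usage sketch of the entrypoint (the input string is my example; the return shape of pdf values, x grid, and error flag matches the external binding removed above):

const { get_pdf_from_user_input } = require("./main.js");

const [ys, xs, isError] = get_pdf_from_user_input(
  "mm(normal(0, 1), uniform(3, 5), [0.5, 0.5])"
);
if (!isError) {
  console.log(xs.length, ys.length); // both OUTPUT_GRID_NUMEL (3000) points
}
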
/**
 * @param vals
 * @returns {number}
 */
function variance(vals) {
  var vari = 0;
  for (let i = 0; i < vals[0].length; ++i) {
    let mean = 0;
    let this_vari = 0;
    for (let val of vals) {
      mean += val[i] / vals.length;
    }
    for (let val of vals) {
      this_vari += (val[i] - mean) ** 2;
    }
    vari += this_vari;
  }
  return vari;
}

/**
 * @param array
 * @param idx
 * @returns {*[]}
 */
function pluck_from_array(array, idx) {
  return [array[idx], array.slice(0, idx).concat(array.slice(idx + 1))];
}

/**
 * If distr_string requires MC, try all possible
 * choices for the deterministic distribution,
 * and pick the one with the least variance.
 * It's much better to sample from a normal than a lognormal.
 *
 * @param distr_string
 * @returns {(*|*[])[]|*[]}
 */
function choose_pdf_func(distr_string) {
  var variances = [];
  let transform_func = get_grid_transform(distr_string);
  let substrings = parse.get_distr_substrings(distr_string);
  var pdfs = get_distributions(substrings);
  if (pdfs.length === 1) {
    return [pdfs[0], []];
  }
  var start_pt = 0;
  var end_pt = 0;
  for (let i = 0; i < pdfs.length; ++i) {
    var outputs = [];
    for (let j = 0; j < 20; ++j) {
      let tree = new bst.AVLTree();
      let [deterministic_pdf, mc_distrs] = pluck_from_array(pdfs, i);
      let [this_start_pt, this_end_pt] = update_transformed_divider_points_bst(
        transform_func,
        deterministic_pdf,
        mc_distrs,
        0,
        10,
        tree
      );
      [start_pt, end_pt] =
        j === 0 ? [this_start_pt, this_end_pt] : [start_pt, end_pt];
      var output_grid = evenly_spaced_grid(start_pt, end_pt, 100);
      let final_pdf_vals = get_final_pdf(
        [deterministic_pdf.pdf_vals],
        tree,
        output_grid
      );
      outputs.push(final_pdf_vals);
    }
    variances.push(variance(outputs));
  }
  let best_variance = Math.min(...variances);
  let best_idx = variances
    .map((val, idx) => [val, idx])
    .filter(x => x[0] === best_variance)[0][1];
  let mc_distrs = pdfs.slice(0, best_idx).concat(pdfs.slice(best_idx + 1));
  return [pdfs[best_idx], mc_distrs];
}

/**
 * @param distr_string
 * @returns {function(*): *}
 */
function get_grid_transform(distr_string) {
  let substrings = parse.get_distr_substrings(distr_string);
  let arg_strings = [];
  for (let i = 0; i < substrings.length; ++i) {
    distr_string = distr_string.replace(substrings[i], "x_" + i.toString());
    arg_strings.push("x_" + i.toString());
  }
  let compiled = math.compile(distr_string);

  function grid_transform(x) {
    let kv_pairs = arg_strings.map((val, idx) => [val, x[idx]]);
    let args_obj = Object.fromEntries(new Map(kv_pairs));
    return compiled.evaluate(args_obj);
  }

  return grid_transform;
}

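To make get_grid_transform concrete (the input string is my example): each distribution substring is replaced by a placeholder, so "normal(0, 1) + lognormal(0, 1)" compiles to the mathjs expression "x_0 + x_1", which is then evaluated at the bin points above:

// within this module:
const transform = get_grid_transform("normal(0, 1) + lognormal(0, 1)");
console.log(transform([2, 3])); // 5, i.e. "x_0 + x_1" with x_0 = 2, x_1 = 3
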
exports.get_pdf_from_user_input = get_pdf_from_user_input;
@ -1,139 +0,0 @@

const _math = require("mathjs");
const math = _math.create(_math.all);

// Functions for parsing/processing user input strings are here

// @todo: Do not use objects.
const DISTR_REGEXS = [
  /beta\(/g,
  /(log)?normal\(/g,
  /multimodal\(/g,
  /mm\(/g,
  /uniform\(/g
];

/**
 * @param user_input_string
 * @returns {{mm_args_string: string, outer_string: string}}
 */
function parse_initial_string(user_input_string) {
  let outer_output_string = "";
  let mm_args_string = "";
  let idx = 0;

  while (idx < user_input_string.length) {
    if (
      user_input_string.substring(idx - 11, idx) === "multimodal(" ||
      user_input_string.substring(idx - 3, idx) === "mm("
    ) {
      let num_open_brackets = 1;
      while (num_open_brackets > 0 && idx < user_input_string.length) {
        mm_args_string += user_input_string[idx];
        idx += 1;
        if (user_input_string[idx] === ")") {
          num_open_brackets -= 1;
        } else if (user_input_string[idx] === "(") {
          num_open_brackets += 1;
        }
      }
      outer_output_string += ")";
      idx += 1;
    } else {
      outer_output_string += user_input_string[idx];
      idx += 1;
    }
  }

  return {
    outer_string: outer_output_string,
    mm_args_string: mm_args_string
  };
}

/**
 * @param mm_args_string
 * @returns {{distrs: [], weights: string}}
 */
function separate_mm_args(mm_args_string) {
  if (mm_args_string.endsWith(",")) {
    mm_args_string = mm_args_string.slice(0, -1);
  }
  let args_array = [];
  let num_open_brackets = 0;
  let arg_substring = "";
  for (let char of mm_args_string) {
    if (num_open_brackets === 0 && char === ",") {
      args_array.push(arg_substring.trim());
      arg_substring = "";
    } else {
      if (char === ")" || char === "]") {
        num_open_brackets -= 1;
      } else if (char === "(" || char === "[") {
        num_open_brackets += 1;
      }
      arg_substring += char;
    }
  }
  return {
    distrs: args_array,
    weights: arg_substring.trim()
  };
}

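A worked example of these two parsers on a multimodal input (the string is my example; the splits follow directly from the code above):

const parse = require("./parse.js");

const parsed = parse.parse_initial_string(
  "mm(normal(0, 1), uniform(3, 5), [0.5, 0.5])"
);
// parsed.outer_string   -> "mm()"
// parsed.mm_args_string -> "normal(0, 1), uniform(3, 5), [0.5, 0.5]"

const mm = parse.separate_mm_args(parsed.mm_args_string);
// mm.distrs  -> ["normal(0, 1)", "uniform(3, 5)"]
// mm.weights -> "[0.5, 0.5]"
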
/**
 * @param distr_string
 * @returns {[]}
 */
function get_distr_substrings(distr_string) {
  let substrings = [];
  for (let regex of DISTR_REGEXS) {
    let matches = distr_string.matchAll(regex);
    for (let match of matches) {
      let idx = match.index + match[0].length;
      let num_open_brackets = 1;
      let distr_substring = "";
      while (num_open_brackets !== 0 && idx < distr_string.length) {
        distr_substring += distr_string[idx];
        if (distr_string[idx] === "(") {
          num_open_brackets += 1;
        } else if (distr_string[idx] === ")") {
          num_open_brackets -= 1;
        }
        idx += 1;
      }
      substrings.push((match[0] + distr_substring).trim());
    }
  }

  return substrings;
}

/**
 * @param substr
 * @returns {(string|*)[]}
 */
function get_distr_name_and_args(substr) {
  let distr_name = "";
  let args_str = "";
  let args_flag = false;
  for (let char of substr) {
    if (!args_flag && char !== "(") {
      distr_name += char;
    }
    if (args_flag && char !== ")") {
      args_str += char;
    }
    if (char === "(") {
      args_str += "[";
      args_flag = true;
    }
  }
  args_str += "]";
  let args = math.compile(args_str).evaluate()._data;
  return [distr_name, args];
}

exports.get_distr_name_and_args = get_distr_name_and_args;
exports.get_distr_substrings = get_distr_substrings;
exports.separate_mm_args = separate_mm_args;
exports.parse_initial_string = parse_initial_string;

src/distPlus/distribution/AlgebraicShapeCombination.re (new file, 284 lines)
@ -0,0 +1,284 @@

type pointMassesWithMoments = {
  n: int,
  masses: array(float),
  means: array(float),
  variances: array(float),
};

/* This function takes a continuous distribution and efficiently approximates it as
   point masses that have variances associated with them.
   We estimate the means and variances from overlapping triangular distributions which we imagine are making up the
   XYShape.
   We can then use the algebra of random variables to "convolve" the point masses and their variances,
   and finally reconstruct a new distribution from them, e.g. using a Fast Gauss Transform or Raykar et al. (2007). */
let toDiscretePointMassesFromTriangulars =
    (~inverse=false, s: XYShape.T.t): pointMassesWithMoments => {
  // TODO: what if there is only one point in the distribution?
  let n = s |> XYShape.T.length;
  // first, double up the leftmost and rightmost points:
  let {xs, ys}: XYShape.T.t = s;
  Js.Array.unshift(xs[0], xs) |> ignore;
  Js.Array.unshift(ys[0], ys) |> ignore;
  Js.Array.push(xs[n], xs) |> ignore; // after the unshift, the original last point sits at index n
  Js.Array.push(ys[n], ys) |> ignore;
  let n = E.A.length(xs);
  // squares and neighbourly products of the xs
  let xsSq: array(float) = Belt.Array.makeUninitializedUnsafe(n);
  let xsProdN1: array(float) = Belt.Array.makeUninitializedUnsafe(n - 1);
  let xsProdN2: array(float) = Belt.Array.makeUninitializedUnsafe(n - 2);
  for (i in 0 to n - 1) {
    Belt.Array.set(xsSq, i, xs[i] *. xs[i]) |> ignore;
    ();
  };
  for (i in 0 to n - 2) {
    Belt.Array.set(xsProdN1, i, xs[i] *. xs[i + 1]) |> ignore;
    ();
  };
  for (i in 0 to n - 3) {
    Belt.Array.set(xsProdN2, i, xs[i] *. xs[i + 2]) |> ignore;
    ();
  };
  // means and variances
  let masses: array(float) = Belt.Array.makeUninitializedUnsafe(n - 2); // doesn't include the fake first and last points
  let means: array(float) = Belt.Array.makeUninitializedUnsafe(n - 2);
  let variances: array(float) = Belt.Array.makeUninitializedUnsafe(n - 2);

  if (inverse) {
    for (i in 1 to n - 2) {
      Belt.Array.set(
        masses,
        i - 1,
        (xs[i + 1] -. xs[i - 1]) *. ys[i] /. 2.,
      ) |> ignore;

      // this only works when the whole triangle is either on the left or on the right of zero
      let a = xs[i - 1];
      let c = xs[i];
      let b = xs[i + 1];

      // These are the moments of the reciprocal of a triangular distribution, as symbolically integrated by Mathematica.
      // They're probably pretty close to invMean ~ 1/mean = 3/(a+b+c) and invVar. But I haven't worked out
      // the worst case error, so for now let's use these monster equations
      let inverseMean =
        2.
        *. (a *. log(a /. c) /. (a -. c) +. b *. log(c /. b) /. (b -. c))
        /. (a -. b);
      let inverseVar =
        2.
        *. (log(c /. a) /. (a -. c) +. b *. log(b /. c) /. (b -. c))
        /. (a -. b)
        -. inverseMean
        ** 2.;

      Belt.Array.set(means, i - 1, inverseMean) |> ignore;

      Belt.Array.set(variances, i - 1, inverseVar) |> ignore;
      ();
    };

    {n: n - 2, masses, means, variances};
  } else {
    for (i in 1 to n - 2) {
      // area of triangle = width * height / 2
      Belt.Array.set(
        masses,
        i - 1,
        (xs[i + 1] -. xs[i - 1]) *. ys[i] /. 2.,
      ) |> ignore;

      // mean of triangle = (a + b + c) / 3
      Belt.Array.set(means, i - 1, (xs[i - 1] +. xs[i] +. xs[i + 1]) /. 3.) |> ignore;

      // variance of triangle = (a^2 + b^2 + c^2 - ab - ac - bc) / 18
      Belt.Array.set(
        variances,
        i - 1,
        (
          xsSq[i - 1]
          +. xsSq[i]
          +. xsSq[i + 1]
          -. xsProdN1[i - 1]
          -. xsProdN1[i]
          -. xsProdN2[i - 1]
        )
        /. 18.,
      ) |> ignore;
      ();
    };
    {n: n - 2, masses, means, variances};
  };
};

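For reference, the moments the two branches implement, in standard notation (this restates the formulas in the code above; take a triangular density on [a, b] with mode c):

\mathbb{E}[X] = \frac{a + b + c}{3},
\qquad
\operatorname{Var}(X) = \frac{a^2 + b^2 + c^2 - ab - ac - bc}{18},

and, for the reciprocal (valid only when the triangle does not straddle zero, as the comment notes):

\mathbb{E}[1/X] = \frac{2}{a - b}\left(\frac{a \ln(a/c)}{a - c} + \frac{b \ln(c/b)}{b - c}\right),
\qquad
\operatorname{Var}(1/X) = \frac{2}{a - b}\left(\frac{\ln(c/a)}{a - c} + \frac{b \ln(b/c)}{b - c}\right) - \mathbb{E}[1/X]^2.
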
let combineShapesContinuousContinuous =
    (
      op: ExpressionTypes.algebraicOperation,
      s1: DistTypes.xyShape,
      s2: DistTypes.xyShape,
    )
    : DistTypes.xyShape => {
  let t1n = s1 |> XYShape.T.length;
  let t2n = s2 |> XYShape.T.length;

  // if we add the two distributions, we should probably use normal filters.
  // if we multiply the two distributions, we should probably use lognormal filters.
  let t1m = toDiscretePointMassesFromTriangulars(s1);
  let t2m =
    switch (op) {
    | `Divide => toDiscretePointMassesFromTriangulars(~inverse=true, s2)
    | _ => toDiscretePointMassesFromTriangulars(~inverse=false, s2)
    };

  let combineMeansFn =
    switch (op) {
    | `Add => ((m1, m2) => m1 +. m2)
    | `Subtract => ((m1, m2) => m1 -. m2)
    | `Multiply => ((m1, m2) => m1 *. m2)
    | `Divide => ((m1, mInv2) => m1 *. mInv2)
    }; // note: here, mInv2 = mean(1 / t2) ~= 1 / mean(t2)

  // converts the variances and means of the two inputs into the variance of the output
  let combineVariancesFn =
    switch (op) {
    | `Add => ((v1, v2, _, _) => v1 +. v2)
    | `Subtract => ((v1, v2, _, _) => v1 +. v2)
    | `Multiply => (
        (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
      )
    | `Divide => (
        (v1, vInv2, m1, mInv2) =>
          v1 *. vInv2 +. v1 *. mInv2 ** 2. +. vInv2 *. m1 ** 2.
      )
    };

  // TODO: If operating on two positive-domain distributions, we should take that into account
  let outputMinX: ref(float) = ref(infinity);
  let outputMaxX: ref(float) = ref(neg_infinity);
  let masses: array(float) =
    Belt.Array.makeUninitializedUnsafe(t1m.n * t2m.n);
  let means: array(float) =
    Belt.Array.makeUninitializedUnsafe(t1m.n * t2m.n);
  let variances: array(float) =
    Belt.Array.makeUninitializedUnsafe(t1m.n * t2m.n);
  // then convolve the two sets of pointMassesWithMoments
  for (i in 0 to t1m.n - 1) {
    for (j in 0 to t2m.n - 1) {
      let k = i * t2m.n + j;
      Belt.Array.set(masses, k, t1m.masses[i] *. t2m.masses[j]) |> ignore;

      let mean = combineMeansFn(t1m.means[i], t2m.means[j]);
      let variance =
        combineVariancesFn(
          t1m.variances[i],
          t2m.variances[j],
          t1m.means[i],
          t2m.means[j],
        );
      Belt.Array.set(means, k, mean) |> ignore;
      Belt.Array.set(variances, k, variance) |> ignore;
      // update bounds
      let minX = mean -. 2. *. sqrt(variance) *. 1.644854;
      let maxX = mean +. 2. *. sqrt(variance) *. 1.644854;
      if (minX < outputMinX^) {
        outputMinX := minX;
      };
      if (maxX > outputMaxX^) {
        outputMaxX := maxX;
      };
    };
  };

  // we now want to create a set of target points. For now, let's just evenly distribute 300 points
  // between the outputMinX and outputMaxX
  let nOut = 300;
  let outputXs: array(float) =
    E.A.Floats.range(outputMinX^, outputMaxX^, nOut);
  let outputYs: array(float) = Belt.Array.make(nOut, 0.0);
  // now, for each of the outputYs, accumulate from a Gaussian kernel over each input point.
  for (j in 0 to E.A.length(masses) - 1) {
    // go through all of the result points
    if (variances[j] > 0. && masses[j] > 0.) {
      for (i in 0 to E.A.length(outputXs) - 1) {
        // go through all of the target points
        let dx = outputXs[i] -. means[j];
        let contribution =
          masses[j]
          *. exp(-. (dx ** 2.) /. (2. *. variances[j]))
          /. sqrt(2. *. 3.14159265 *. variances[j]);
        Belt.Array.set(outputYs, i, outputYs[i] +. contribution) |> ignore;
      };
    };
  };

  {xs: outputXs, ys: outputYs};
};

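The reconstruction step above is plain Gaussian kernel accumulation; a JavaScript sketch of the same loop (the names are mine):

function reconstructPdf(masses, means, variances, xs) {
  const ys = xs.map(() => 0);
  for (let j = 0; j < masses.length; j++) {
    if (variances[j] > 0 && masses[j] > 0) {
      for (let i = 0; i < xs.length; i++) {
        // normal pdf with mean means[j] and variance variances[j], weighted by mass
        const dx = xs[i] - means[j];
        ys[i] +=
          (masses[j] * Math.exp(-(dx ** 2) / (2 * variances[j]))) /
          Math.sqrt(2 * Math.PI * variances[j]);
      }
    }
  }
  return ys;
}
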
let toDiscretePointMassesFromDiscrete =
    (s: DistTypes.xyShape): pointMassesWithMoments => {
  let {xs, ys}: XYShape.T.t = s;
  let n = E.A.length(xs);

  let masses: array(float) = Belt.Array.makeBy(n, i => ys[i]);
  let means: array(float) = Belt.Array.makeBy(n, i => xs[i]);
  let variances: array(float) = Belt.Array.makeBy(n, i => 0.0);

  {n, masses, means, variances};
};

let combineShapesContinuousDiscrete =
    (op: ExpressionTypes.algebraicOperation, continuousShape: DistTypes.xyShape, discreteShape: DistTypes.xyShape)
    : DistTypes.xyShape => {

  let t1n = continuousShape |> XYShape.T.length;
  let t2n = discreteShape |> XYShape.T.length;

  // each x pair is added/subtracted
  let fn = Operation.Algebraic.toFn(op);

  let outXYShapes: array(array((float, float))) =
    Belt.Array.makeUninitializedUnsafe(t2n);

  switch (op) {
  | `Add
  | `Subtract =>
    for (j in 0 to t2n - 1) {
      // creates a new continuous shape for each one of the discrete points, and collects them in outXYShapes.
      let dxyShape: array((float, float)) =
        Belt.Array.makeUninitializedUnsafe(t1n);
      for (i in 0 to t1n - 1) {
        Belt.Array.set(
          dxyShape,
          i,
          (fn(continuousShape.xs[i], discreteShape.xs[j]),
           continuousShape.ys[i] *. discreteShape.ys[j]),
        ) |> ignore;
        ();
      };
      Belt.Array.set(outXYShapes, j, dxyShape) |> ignore;
      ();
    }
  | `Multiply
  | `Divide =>
    for (j in 0 to t2n - 1) {
      // creates a new continuous shape for each one of the discrete points, and collects them in outXYShapes.
      let dxyShape: array((float, float)) =
        Belt.Array.makeUninitializedUnsafe(t1n);
      for (i in 0 to t1n - 1) {
        Belt.Array.set(
          dxyShape,
          i,
          (fn(continuousShape.xs[i], discreteShape.xs[j]), continuousShape.ys[i] *. discreteShape.ys[j] /. discreteShape.xs[j]),
        ) |> ignore;
        ();
      };
      Belt.Array.set(outXYShapes, j, dxyShape) |> ignore;
      ();
    }
  };

  outXYShapes
  |> E.A.fmap(XYShape.T.fromZippedArray)
  |> E.A.fold_left(
       XYShape.PointwiseCombination.combine((+.),
         XYShape.XtoY.continuousInterpolator(`Linear, `UseZero)),
       XYShape.T.empty);
};
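The Add/Subtract case above amounts to shift-and-scale: one copy of the continuous shape per discrete point, shifted by that point's x and scaled by its mass, summed pointwise afterwards. A JavaScript sketch for addition (the names are mine):

function combineContinuousDiscreteAdd(cont, disc) {
  // one shifted, mass-scaled copy of the continuous shape per discrete point;
  // the ReasonML code then folds the copies together pointwise with (+.)
  return disc.xs.map((d, j) => ({
    xs: cont.xs.map(x => x + d),
    ys: cont.ys.map(y => y * disc.ys[j]),
  }));
}
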
src/distPlus/distribution/Continuous.re (new file, 294 lines)
@ -0,0 +1,294 @@

open Distributions;

type t = DistTypes.continuousShape;
let getShape = (t: t) => t.xyShape;
let interpolation = (t: t) => t.interpolation;
let make = (~interpolation=`Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
  xyShape,
  interpolation,
  integralSumCache,
  integralCache,
};
let shapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): t => {
  xyShape: fn(xyShape),
  interpolation,
  integralSumCache,
  integralCache,
};
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY;
let oShapeMap =
    (fn, {xyShape, interpolation, integralSumCache, integralCache}: t)
    : option(DistTypes.continuousShape) =>
  fn(xyShape) |> E.O.fmap(make(~interpolation, ~integralSumCache, ~integralCache));

let emptyIntegral: DistTypes.continuousShape = {
  xyShape: {xs: [|neg_infinity|], ys: [|0.0|]},
  interpolation: `Linear,
  integralSumCache: Some(0.0),
  integralCache: None,
};
let empty: DistTypes.continuousShape = {
  xyShape: XYShape.T.empty,
  interpolation: `Linear,
  integralSumCache: Some(0.0),
  integralCache: Some(emptyIntegral),
};

let stepwiseToLinear = (t: t): t =>
  make(~integralSumCache=t.integralSumCache, ~integralCache=t.integralCache, XYShape.Range.stepwiseToLinear(t.xyShape));

let combinePointwise =
    (
      ~integralSumCachesFn=(_, _) => None,
      ~integralCachesFn: (t, t) => option(t) =(_, _) => None,
      ~distributionType: DistTypes.distributionType = `PDF,
      fn: (float, float) => float,
      t1: DistTypes.continuousShape,
      t2: DistTypes.continuousShape,
    )
    : DistTypes.continuousShape => {
  // If we're adding the distributions, and we know the total of each, then we
  // can just sum them up. Otherwise, all bets are off.
  let combinedIntegralSum =
    Common.combineIntegralSums(
      integralSumCachesFn,
      t1.integralSumCache,
      t2.integralSumCache,
    );

  // TODO: does it ever make sense to pointwise combine the integrals here?
  // It could be done for pointwise additions, but is that ever needed?

  // If combining stepwise and linear, we must convert the stepwise to linear first,
  // i.e. add a point at the bottom of each step
  let (t1, t2) = switch (t1.interpolation, t2.interpolation) {
  | (`Linear, `Linear) => (t1, t2);
  | (`Stepwise, `Stepwise) => (t1, t2);
  | (`Linear, `Stepwise) => (t1, stepwiseToLinear(t2));
  | (`Stepwise, `Linear) => (stepwiseToLinear(t1), t2);
  };

  let extrapolation = switch (distributionType) {
  | `PDF => `UseZero
  | `CDF => `UseOutermostPoints
  };

  let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation);

  make(
    ~integralSumCache=combinedIntegralSum,
    XYShape.PointwiseCombination.combine(
      fn,
      interpolator,
      t1.xyShape,
      t2.xyShape,
    ),
  );
};

let toLinear = (t: t): option(t) => {
  switch (t) {
  | {interpolation: `Stepwise, xyShape, integralSumCache, integralCache} =>
    xyShape
    |> XYShape.Range.stepsToContinuous
    |> E.O.fmap(make(~integralSumCache, ~integralCache))
  | {interpolation: `Linear} => Some(t)
  };
};
let shapeFn = (fn, t: t) => t |> getShape |> fn;

let updateIntegralSumCache = (integralSumCache, t: t): t => {
  ...t,
  integralSumCache,
};

let updateIntegralCache = (integralCache, t: t): t => {
  ...t,
  integralCache,
};

let reduce =
    (
      ~integralSumCachesFn: (float, float) => option(float)=(_, _) => None,
      ~integralCachesFn: (t, t) => option(t)=(_, _) => None,
      fn,
      continuousShapes,
    ) =>
  continuousShapes
  |> E.A.fold_left(combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn), empty);

let mapY = (~integralSumCacheFn=_ => None,
            ~integralCacheFn=_ => None,
            ~fn, t: t) => {
  make(
    ~interpolation=t.interpolation,
    ~integralSumCache=t.integralSumCache |> E.O.bind(_, integralSumCacheFn),
    ~integralCache=t.integralCache |> E.O.bind(_, integralCacheFn),
    t |> getShape |> XYShape.T.mapY(fn),
  );
};

let rec scaleBy = (~scale=1.0, t: t): t => {
  let scaledIntegralSumCache = E.O.bind(t.integralSumCache, v => Some(scale *. v));
  let scaledIntegralCache = E.O.bind(t.integralCache, v => Some(scaleBy(~scale, v)));

  t
  |> mapY(~fn=(r: float) => r *. scale)
  |> updateIntegralSumCache(scaledIntegralSumCache)
  |> updateIntegralCache(scaledIntegralCache)
};

module T =
  Dist({
    type t = DistTypes.continuousShape;
    type integral = DistTypes.continuousShape;
    let minX = shapeFn(XYShape.T.minX);
    let maxX = shapeFn(XYShape.T.maxX);
    let mapY = mapY;
    let updateIntegralCache = updateIntegralCache;
    let toDiscreteProbabilityMassFraction = _ => 0.0;
    let toShape = (t: t): DistTypes.shape => Continuous(t);
    let xToY = (f, {interpolation, xyShape}: t) => {
      (
        switch (interpolation) {
        | `Stepwise =>
          xyShape |> XYShape.XtoY.stepwiseIncremental(f) |> E.O.default(0.0)
        | `Linear => xyShape |> XYShape.XtoY.linear(f)
        }
      )
      |> DistTypes.MixedPoint.makeContinuous;
    };

    let truncate =
        (leftCutoff: option(float), rightCutoff: option(float), t: t) => {
      let lc = E.O.default(neg_infinity, leftCutoff);
      let rc = E.O.default(infinity, rightCutoff);
      let truncatedZippedPairs =
        t
        |> getShape
        |> XYShape.T.zip
        |> XYShape.Zipped.filterByX(x => x >= lc && x <= rc);

      let leftNewPoint =
        leftCutoff |> E.O.dimap(lc => [|(lc -. epsilon_float, 0.)|], _ => [||]);
      let rightNewPoint =
        rightCutoff |> E.O.dimap(rc => [|(rc +. epsilon_float, 0.)|], _ => [||]);

      let truncatedZippedPairsWithNewPoints =
        E.A.concatMany([|leftNewPoint, truncatedZippedPairs, rightNewPoint|]);
      let truncatedShape =
        XYShape.T.fromZippedArray(truncatedZippedPairsWithNewPoints);

      make(truncatedShape)
    };

    // TODO: This should work with stepwise plots.
    let integral = (t) =>
      switch (getShape(t) |> XYShape.T.isEmpty, t.integralCache) {
      | (true, _) => emptyIntegral
      | (false, Some(cache)) => cache
      | (false, None) =>
        t
        |> getShape
        |> XYShape.Range.integrateWithTriangles
        |> E.O.toExt("This should not have happened")
        |> make
      };

    let downsample = (length, t): t =>
      t
      |> shapeMap(
           XYShape.XsConversion.proportionByProbabilityMass(
             length,
             integral(t).xyShape,
           ),
         );
    let integralEndY = (t: t) =>
      t.integralSumCache |> E.O.default(t |> integral |> lastY);
    let integralXtoY = (f, t: t) =>
      t |> integral |> shapeFn(XYShape.XtoY.linear(f));
    let integralYtoX = (f, t: t) =>
      t |> integral |> shapeFn(XYShape.YtoX.linear(f));
    let toContinuous = t => Some(t);
    let toDiscrete = _ => None;

    let normalize = (t: t): t => {
      t
      |> updateIntegralCache(Some(integral(t)))
      |> scaleBy(~scale=1. /. integralEndY(t))
      |> updateIntegralSumCache(Some(1.0));
    };

    let mean = (t: t) => {
      let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0;
      let indefiniteIntegralLinear = (p, a, b) =>
        a *. p ** 2.0 /. 2.0 +. b *. p ** 3.0 /. 3.0;

      XYShape.Analysis.integrateContinuousShape(
        ~indefiniteIntegralStepwise,
        ~indefiniteIntegralLinear,
        t,
      );
    };
    let variance = (t: t): float =>
      XYShape.Analysis.getVarianceDangerously(
        t,
        mean,
        XYShape.Analysis.getMeanOfSquaresContinuousShape,
      );
  });
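The indefinite integrals in mean above are the standard piecewise ones for \mathbb{E}[X] = \int x \, f(x) \, dx:

\int x \, h_1 \, dx = \frac{h_1 x^2}{2} \quad \text{(stepwise segment of height } h_1\text{)},
\qquad
\int x (a + b x) \, dx = \frac{a x^2}{2} + \frac{b x^3}{3} \quad \text{(linear segment } f(x) = a + b x\text{)}.
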
/* This simply creates multiple copies of the continuous distribution, scaled and shifted according to
   each discrete data point, and then adds them all together. */
let combineAlgebraicallyWithDiscrete =
    (
      op: ExpressionTypes.algebraicOperation,
      t1: t,
      t2: DistTypes.discreteShape,
    ) => {
  let t1s = t1 |> getShape;
  let t2s = t2.xyShape; // TODO would like to use Discrete.getShape here, but current file structure doesn't allow for that

  if (XYShape.T.isEmpty(t1s) || XYShape.T.isEmpty(t2s)) {
    empty;
  } else {
    let continuousAsLinear = switch (t1.interpolation) {
    | `Linear => t1;
    | `Stepwise => stepwiseToLinear(t1)
    };

    let combinedShape = AlgebraicShapeCombination.combineShapesContinuousDiscrete(op, continuousAsLinear |> getShape, t2s);

    let combinedIntegralSum =
      Common.combineIntegralSums(
        (a, b) => Some(a *. b),
        t1.integralSumCache,
        t2.integralSumCache,
      );

    // TODO: It could make sense to automatically transform the integrals here (shift or scale)
    make(~interpolation=t1.interpolation, ~integralSumCache=combinedIntegralSum, combinedShape)
  };
};

let combineAlgebraically =
    (op: ExpressionTypes.algebraicOperation, t1: t, t2: t) => {
  let s1 = t1 |> getShape;
  let s2 = t2 |> getShape;
  let t1n = s1 |> XYShape.T.length;
  let t2n = s2 |> XYShape.T.length;
  if (t1n == 0 || t2n == 0) {
    empty;
  } else {
    let combinedShape =
      AlgebraicShapeCombination.combineShapesContinuousContinuous(op, s1, s2);
    let combinedIntegralSum =
      Common.combineIntegralSums(
        (a, b) => Some(a *. b),
        t1.integralSumCache,
        t2.integralSumCache,
      );
    // return a new Continuous distribution
    make(~integralSumCache=combinedIntegralSum, combinedShape);
  };
};

src/distPlus/distribution/Discrete.re (new file, 232 lines)
@ -0,0 +1,232 @@
|
||||||
|
open Distributions;
|
||||||
|
|
||||||
|
type t = DistTypes.discreteShape;
|
||||||
|
|
||||||
|
let make = (~integralSumCache=None, ~integralCache=None, xyShape): t => {xyShape, integralSumCache, integralCache};
|
||||||
|
let shapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): t => {
|
||||||
|
xyShape: fn(xyShape),
|
||||||
|
integralSumCache,
|
||||||
|
integralCache
|
||||||
|
};
|
||||||
|
let getShape = (t: t) => t.xyShape;
|
||||||
|
let oShapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): option(t) =>
|
||||||
|
fn(xyShape) |> E.O.fmap(make(~integralSumCache, ~integralCache));
|
||||||
|
|
||||||
|
let emptyIntegral: DistTypes.continuousShape = {
|
||||||
|
xyShape: {xs: [|neg_infinity|], ys: [|0.0|]},
|
||||||
|
interpolation: `Stepwise,
|
||||||
|
integralSumCache: Some(0.0),
|
||||||
|
integralCache: None,
|
||||||
|
};
|
||||||
|
let empty: DistTypes.discreteShape = {
|
||||||
|
xyShape: XYShape.T.empty,
|
||||||
|
integralSumCache: Some(0.0),
|
||||||
|
integralCache: Some(emptyIntegral),
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
let shapeFn = (fn, t: t) => t |> getShape |> fn;
|
||||||
|
|
||||||
|
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY;
|
||||||
|
|
||||||
|
let combinePointwise =
|
||||||
|
(
|
||||||
|
~integralSumCachesFn = (_, _) => None,
|
||||||
|
~integralCachesFn: (DistTypes.continuousShape, DistTypes.continuousShape) => option(DistTypes.continuousShape) = (_, _) => None,
|
||||||
|
fn,
|
||||||
|
t1: DistTypes.discreteShape,
|
||||||
|
t2: DistTypes.discreteShape,
|
||||||
|
)
|
||||||
|
: DistTypes.discreteShape => {
|
||||||
|
let combinedIntegralSum =
|
||||||
|
Common.combineIntegralSums(
|
||||||
|
integralSumCachesFn,
|
||||||
|
t1.integralSumCache,
|
||||||
|
t2.integralSumCache,
|
||||||
|
);
|
||||||
|
|
||||||
|
// TODO: does it ever make sense to pointwise combine the integrals here?
|
||||||
|
// It could be done for pointwise additions, but is that ever needed?
|
||||||
|
|
||||||
|
make(
|
||||||
|
~integralSumCache=combinedIntegralSum,
|
||||||
|
XYShape.PointwiseCombination.combine(
|
||||||
|
(+.),
|
||||||
|
XYShape.XtoY.discreteInterpolator,
|
||||||
|
t1.xyShape,
|
||||||
|
t2.xyShape,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
let reduce =
|
||||||
|
(~integralSumCachesFn=(_, _) => None,
|
||||||
|
~integralCachesFn=(_, _) => None,
|
||||||
|
fn, discreteShapes)
|
||||||
|
: DistTypes.discreteShape =>
|
||||||
|
discreteShapes
|
||||||
|
|> E.A.fold_left(combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn), empty);
|
||||||
|
|
||||||
|
let updateIntegralSumCache = (integralSumCache, t: t): t => {
|
||||||
|
...t,
|
||||||
|
integralSumCache,
|
||||||
|
};
|
||||||
|
|
||||||
|
let updateIntegralCache = (integralCache, t: t): t => {
|
||||||
|
...t,
|
||||||
|
integralCache,
|
||||||
|
};

/* This multiplies all of the data points together and creates a new discrete distribution from the results.
   Data points at the same xs get added together. It may be a good idea to downsample t1 and t2 before, and/or the result after. */
let combineAlgebraically =
    (op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t => {
  let t1s = t1 |> getShape;
  let t2s = t2 |> getShape;
  let t1n = t1s |> XYShape.T.length;
  let t2n = t2s |> XYShape.T.length;

  let combinedIntegralSum =
    Common.combineIntegralSums(
      (s1, s2) => Some(s1 *. s2),
      t1.integralSumCache,
      t2.integralSumCache,
    );

  let fn = Operation.Algebraic.toFn(op);
  let xToYMap = E.FloatFloatMap.empty();

  for (i in 0 to t1n - 1) {
    for (j in 0 to t2n - 1) {
      let x = fn(t1s.xs[i], t2s.xs[j]);
      let cv = xToYMap |> E.FloatFloatMap.get(x) |> E.O.default(0.);
      let my = t1s.ys[i] *. t2s.ys[j];
      let _ = Belt.MutableMap.set(xToYMap, x, cv +. my);
      ();
    };
  };

  let rxys = xToYMap |> E.FloatFloatMap.toArray |> XYShape.Zipped.sortByX;

  let combinedShape = XYShape.T.fromZippedArray(rxys);

  make(~integralSumCache=combinedIntegralSum, combinedShape);
};
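
// A worked sketch (illustrative values): convolving two fair coin flips, assuming
// `Add is one of ExpressionTypes.algebraicOperation's cases. Each input is
// {xs: [|0., 1.|], ys: [|0.5, 0.5|]}; the nested loop yields xs {0, 1, 1, 2} with
// multiplied masses of 0.25 each, and the two x=1 entries are summed in xToYMap:
//   combineAlgebraically(`Add, coin, coin);
//   // => xs: [|0., 1., 2.|], ys: [|0.25, 0.5, 0.25|]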

let mapY =
    (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn, t: t) => {
  make(
    ~integralSumCache=t.integralSumCache |> E.O.bind(_, integralSumCacheFn),
    ~integralCache=t.integralCache |> E.O.bind(_, integralCacheFn),
    t |> getShape |> XYShape.T.mapY(fn),
  );
};

let scaleBy = (~scale=1.0, t: t): t => {
  let scaledIntegralSumCache = t.integralSumCache |> E.O.fmap((*.)(scale));
  let scaledIntegralCache = t.integralCache |> E.O.fmap(Continuous.scaleBy(~scale));

  t
  |> mapY(~fn=(r: float) => r *. scale)
  |> updateIntegralSumCache(scaledIntegralSumCache)
  |> updateIntegralCache(scaledIntegralCache);
};

module T =
  Dist({
    type t = DistTypes.discreteShape;
    type integral = DistTypes.continuousShape;
    let integral = (t) =>
      switch (getShape(t) |> XYShape.T.isEmpty, t.integralCache) {
      | (true, _) => emptyIntegral
      | (false, Some(c)) => c
      | (false, None) => {
          let ts = getShape(t);
          // The first xy of this integral should always be the zero, to ensure nice plotting
          let firstX = ts |> XYShape.T.minX;
          let prependedZeroPoint: XYShape.T.t = {xs: [|firstX -. epsilon_float|], ys: [|0.|]};
          let integralShape =
            ts
            |> XYShape.T.concat(prependedZeroPoint)
            |> XYShape.T.accumulateYs((+.));

          Continuous.make(~interpolation=`Stepwise, integralShape);
        }
      };
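
    // Worked sketch: for {xs: [|1., 2.|], ys: [|0.3, 0.7|]} the branch above yields
    // the stepwise CDF {xs: [|1. -. epsilon_float, 1., 2.|], ys: [|0., 0.3, 1.|]};
    // the prepended zero point anchors the integral at 0 for plotting.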

    let integralEndY = (t: t) =>
      t.integralSumCache
      |> E.O.default(t |> integral |> Continuous.lastY);
    let minX = shapeFn(XYShape.T.minX);
    let maxX = shapeFn(XYShape.T.maxX);
    let toDiscreteProbabilityMassFraction = _ => 1.0;
    let mapY = mapY;
    let updateIntegralCache = updateIntegralCache;
    let toShape = (t: t): DistTypes.shape => Discrete(t);
    let toContinuous = _ => None;
    let toDiscrete = t => Some(t);

    let normalize = (t: t): t => {
      t
      |> scaleBy(~scale=1. /. integralEndY(t))
      |> updateIntegralSumCache(Some(1.0));
    };

    let downsample = (i, t: t): t => {
      // It's not clear how to downsample a set of discrete points in a meaningful way.
      // The best we can do is to clip off the smallest values.
      let currentLength = t |> getShape |> XYShape.T.length;

      if (i < currentLength && i >= 1 && currentLength > 1) {
        t
        |> getShape
        |> XYShape.T.zip
        |> XYShape.Zipped.sortByY
        |> Belt.Array.reverse
        |> Belt.Array.slice(_, ~offset=0, ~len=i)
        |> XYShape.Zipped.sortByX
        |> XYShape.T.fromZippedArray
        |> make;
      } else {
        t;
      };
    };
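
    // Usage sketch (illustrative values): keeping the 2 highest-mass points of a
    // 3-point shape.
    //   downsample(2, make({xs: [|1., 2., 3.|], ys: [|0.1, 0.6, 0.3|]}));
    //   // => {xs: [|2., 3.|], ys: [|0.6, 0.3|]} (sorted back by x)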

    let truncate =
        (leftCutoff: option(float), rightCutoff: option(float), t: t): t => {
      t
      |> getShape
      |> XYShape.T.zip
      |> XYShape.Zipped.filterByX(x =>
           x >= E.O.default(neg_infinity, leftCutoff)
           && x <= E.O.default(infinity, rightCutoff)
         )
      |> XYShape.T.fromZippedArray
      |> make;
    };

    let xToY = (f, t) =>
      t
      |> getShape
      |> XYShape.XtoY.stepwiseIfAtX(f)
      |> E.O.default(0.0)
      |> DistTypes.MixedPoint.makeDiscrete;

    let integralXtoY = (f, t) =>
      t |> integral |> Continuous.getShape |> XYShape.XtoY.linear(f);

    let integralYtoX = (f, t) =>
      t |> integral |> Continuous.getShape |> XYShape.YtoX.linear(f);

    let mean = (t: t): float => {
      let s = getShape(t);
      E.A.reducei(s.xs, 0.0, (acc, x, i) => acc +. x *. s.ys[i]);
    };
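
    // Worked sketch: mean(make({xs: [|0., 10.|], ys: [|0.5, 0.5|]})) is
    // 0. *. 0.5 +. 10. *. 0.5 == 5., the probability-weighted sum of the xs.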
    let variance = (t: t): float => {
      let getMeanOfSquares = t =>
        t |> shapeMap(XYShape.Analysis.squareXYShape) |> mean;
      XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares);
    };
  });

129 src/distPlus/distribution/DistPlus.re (Normal file)
@ -0,0 +1,129 @@
open DistTypes;

type t = DistTypes.distPlus;

let shapeIntegral = shape => Shape.T.Integral.get(shape);
let make =
    (
      ~shape,
      ~guesstimatorString,
      ~domain=Complete,
      ~unit=UnspecifiedDistribution,
      (),
    )
    : t => {
  let integral = shapeIntegral(shape);
  {shape, domain, integralCache: integral, unit, guesstimatorString};
};

let update =
    (
      ~shape=?,
      ~integralCache=?,
      ~domain=?,
      ~unit=?,
      ~guesstimatorString=?,
      t: t,
    ) => {
  shape: E.O.default(t.shape, shape),
  integralCache: E.O.default(t.integralCache, integralCache),
  domain: E.O.default(t.domain, domain),
  unit: E.O.default(t.unit, unit),
  guesstimatorString: E.O.default(t.guesstimatorString, guesstimatorString),
};
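
// Usage sketch (assuming some existing `dist: t`): fields left unspecified keep
// their current values, so this only swaps the unit:
//   let cleared = update(~unit=UnspecifiedDistribution, dist);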

let updateShape = (shape, t) => {
  let integralCache = shapeIntegral(shape);
  update(~shape, ~integralCache, t);
};

let domainIncludedProbabilityMass = (t: t) =>
  Domain.includedProbabilityMass(t.domain);

let domainIncludedProbabilityMassAdjustment = (t: t, f) =>
  f *. Domain.includedProbabilityMass(t.domain);
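
// Worked sketch: if the domain excludes 20% of the probability mass, so that
// Domain.includedProbabilityMass(t.domain) == 0.8, then a point density f is
// reported as f *. 0.8 by the adjustment above.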

let toShape = ({shape, _}: t) => shape;

let shapeFn = (fn, {shape}: t) => fn(shape);

module T =
  Distributions.Dist({
    type t = DistTypes.distPlus;
    type integral = DistTypes.distPlus;
    let toShape = toShape;
    let toContinuous = shapeFn(Shape.T.toContinuous);
    let toDiscrete = shapeFn(Shape.T.toDiscrete);

    let normalize = (t: t): t => {
      let normalizedShape = t |> toShape |> Shape.T.normalize;
      t |> updateShape(normalizedShape);
    };

    let truncate = (leftCutoff, rightCutoff, t: t): t => {
      let truncatedShape =
        t
        |> toShape
        |> Shape.T.truncate(leftCutoff, rightCutoff);

      t |> updateShape(truncatedShape);
    };

    let xToY = (f, t: t) =>
      t
      |> toShape
      |> Shape.T.xToY(f)
      |> MixedPoint.fmap(domainIncludedProbabilityMassAdjustment(t));

    let minX = shapeFn(Shape.T.minX);
    let maxX = shapeFn(Shape.T.maxX);
    let toDiscreteProbabilityMassFraction =
      shapeFn(Shape.T.toDiscreteProbabilityMassFraction);

    // This bit is kind of awkward, could probably use rethinking.
    let integral = (t: t) =>
      updateShape(Continuous(t.integralCache), t);

    let updateIntegralCache = (integralCache: option(DistTypes.continuousShape), t) =>
      update(~integralCache=E.O.default(t.integralCache, integralCache), t);

    let downsample = (i, t): t =>
      updateShape(t |> toShape |> Shape.T.downsample(i), t);
    // todo: adjust for limit, maybe?
    let mapY =
        (
          ~integralSumCacheFn=previousIntegralSum => None,
          ~integralCacheFn=previousIntegralCache => None,
          ~fn,
          {shape, _} as t: t,
        )
        : t =>
      Shape.T.mapY(~integralSumCacheFn, ~fn, shape)
      |> updateShape(_, t);

    // get the total of everything
    let integralEndY = (t: t) => {
      Shape.T.Integral.sum(
        toShape(t),
      );
    };

    // TODO: Fix this below, obviously. Adjust for limits
    let integralXtoY = (f, t: t) => {
      Shape.T.Integral.xToY(
        f,
        toShape(t),
      )
      |> domainIncludedProbabilityMassAdjustment(t);
    };

    // TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
    let integralYtoX = (f, t: t) => {
      Shape.T.Integral.yToX(f, toShape(t));
    };

    let mean = (t: t) => {
      Shape.T.mean(t.shape);
    };
    let variance = (t: t) => Shape.T.variance(t.shape);
  });

28 src/distPlus/distribution/DistPlusTime.re (Normal file)
@ -0,0 +1,28 @@
open DistTypes;

type t = DistTypes.distPlus;

let unitToJson = ({unit}: t) => unit |> DistTypes.DistributionUnit.toJson;

let timeVector = ({unit}: t) =>
  switch (unit) {
  | TimeDistribution(timeVector) => Some(timeVector)
  | UnspecifiedDistribution => None
  };

let timeInVectorToX = (f: TimeTypes.timeInVector, t: t) => {
  let timeVector = t |> timeVector;
  timeVector |> E.O.fmap(TimeTypes.RelativeTimePoint.toXValue(_, f));
};

let xToY = (f: TimeTypes.timeInVector, t: t) => {
  timeInVectorToX(f, t) |> E.O.fmap(DistPlus.T.xToY(_, t));
};

module Integral = {
  include DistPlus.T.Integral;
  let xToY = (f: TimeTypes.timeInVector, t: t) => {
    timeInVectorToX(f, t)
    |> E.O.fmap(x => DistPlus.T.Integral.xToY(x, t));
  };
};

@ -9,22 +9,45 @@ type domain =
   | RightLimited(domainLimit)
   | LeftAndRightLimited(domainLimit, domainLimit);
 
+type distributionType = [
+  | `PDF
+  | `CDF
+];
+
 type xyShape = {
   xs: array(float),
   ys: array(float),
 };
 
+type interpolationStrategy = [
+  | `Stepwise
+  | `Linear
+];
+type extrapolationStrategy = [
+  | `UseZero
+  | `UseOutermostPoints
+];
+
+type interpolator = (xyShape, int, float) => float;
+
 type continuousShape = {
   xyShape,
-  interpolation: [ | `Stepwise | `Linear],
+  interpolation: interpolationStrategy,
+  integralSumCache: option(float),
+  integralCache: option(continuousShape),
 };
 
-type discreteShape = xyShape;
+type discreteShape = {
+  xyShape,
+  integralSumCache: option(float),
+  integralCache: option(continuousShape),
+};
 
 type mixedShape = {
   continuous: continuousShape,
   discrete: discreteShape,
-  discreteProbabilityMassFraction: float,
+  integralSumCache: option(float),
+  integralCache: option(continuousShape),
 };
 
 type shapeMonad('a, 'b, 'c) =
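
// Illustration (not part of the diff): a minimal value of the new `interpolator`
// type, assuming its `int` argument indexes the point at or left of the queried x.
// A stepwise rule can then just return that point's y:
//   let stepwise: interpolator = (shape, leftIndex, _x) => shape.ys[leftIndex];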

@ -3,20 +3,23 @@ module type dist = {
   type integral;
   let minX: t => float;
   let maxX: t => float;
-  let mapY: (float => float, t) => t;
+  let mapY:
+    (~integralSumCacheFn: float => option(float)=?, ~integralCacheFn: DistTypes.continuousShape => option(DistTypes.continuousShape)=?, ~fn: float => float, t) => t;
   let xToY: (float, t) => DistTypes.mixedPoint;
   let toShape: t => DistTypes.shape;
   let toContinuous: t => option(DistTypes.continuousShape);
   let toDiscrete: t => option(DistTypes.discreteShape);
-  let toScaledContinuous: t => option(DistTypes.continuousShape);
-  let toScaledDiscrete: t => option(DistTypes.discreteShape);
-  let toDiscreteProbabilityMass: t => float;
-  let truncate: (~cache: option(integral)=?, int, t) => t;
+  let normalize: t => t;
+  let toDiscreteProbabilityMassFraction: t => float;
+  let downsample: (int, t) => t;
+  let truncate: (option(float), option(float), t) => t;
 
-  let integral: (~cache: option(integral), t) => integral;
-  let integralEndY: (~cache: option(integral), t) => float;
-  let integralXtoY: (~cache: option(integral), float, t) => float;
-  let integralYtoX: (~cache: option(integral), float, t) => float;
+  let updateIntegralCache: (option(DistTypes.continuousShape), t) => t;
+
+  let integral: (t) => integral;
+  let integralEndY: (t) => float;
+  let integralXtoY: (float, t) => float;
+  let integralYtoX: (float, t) => float;
 
   let mean: t => float;
   let variance: t => float;

@ -31,18 +34,17 @@ module Dist = (T: dist) => {
   let xTotalRange = (t: t) => maxX(t) -. minX(t);
   let mapY = T.mapY;
   let xToY = T.xToY;
-  let truncate = T.truncate;
+  let downsample = T.downsample;
   let toShape = T.toShape;
-  let toDiscreteProbabilityMass = T.toDiscreteProbabilityMass;
+  let toDiscreteProbabilityMassFraction = T.toDiscreteProbabilityMassFraction;
   let toContinuous = T.toContinuous;
   let toDiscrete = T.toDiscrete;
-  let toScaledContinuous = T.toScaledContinuous;
-  let toScaledDiscrete = T.toScaledDiscrete;
+  let normalize = T.normalize;
+  let truncate = T.truncate;
   let mean = T.mean;
   let variance = T.variance;
 
-  // TODO: Move this to each class, have use integral to produce integral in DistPlus class.
-  let scaleBy = (~scale=1.0, t: t) => t |> mapY((r: float) => r *. scale);
+  let updateIntegralCache = T.updateIntegralCache;
 
   module Integral = {
     type t = T.integral;

@ -51,661 +53,32 @@ module Dist = (T: dist) => {
     let yToX = T.integralYtoX;
     let sum = T.integralEndY;
   };
 
-  // This is suboptimal because it could get the cache but doesn't here.
-  let scaleToIntegralSum =
-      (~cache: option(integral)=None, ~intendedSum=1.0, t: t) => {
-    let scale = intendedSum /. Integral.sum(~cache, t);
-    scaleBy(~scale, t);
-  };
 };
 
-module Continuous = {
-  type t = DistTypes.continuousShape;
-  let getShape = (t: t) => t.xyShape;
-  let interpolation = (t: t) => t.interpolation;
-  let make = (interpolation, xyShape): t => {xyShape, interpolation};
-  let shapeMap = (fn, {xyShape, interpolation}: t): t => {
-    xyShape: fn(xyShape),
-    interpolation,
-  };
-  let lastY = (t: t) => t |> getShape |> XYShape.T.lastY;
-  let oShapeMap =
-      (fn, {xyShape, interpolation}: t): option(DistTypes.continuousShape) =>
-    fn(xyShape) |> E.O.fmap(make(interpolation));
-
-  let toLinear = (t: t): option(t) => {
-    switch (t) {
-    | {interpolation: `Stepwise, xyShape} =>
-      xyShape |> XYShape.Range.stepsToContinuous |> E.O.fmap(make(`Linear))
-    | {interpolation: `Linear, _} => Some(t)
-    };
-  };
-  let shapeFn = (fn, t: t) => t |> getShape |> fn;
-
-  module T =
-    Dist({
-      type t = DistTypes.continuousShape;
-      type integral = DistTypes.continuousShape;
-      let minX = shapeFn(XYShape.T.minX);
-      let maxX = shapeFn(XYShape.T.maxX);
-      let toDiscreteProbabilityMass = _ => 0.0;
-      let mapY = fn => shapeMap(XYShape.T.mapY(fn));
-      let toShape = (t: t): DistTypes.shape => Continuous(t);
-      let xToY = (f, {interpolation, xyShape}: t) => {
-        (
-          switch (interpolation) {
-          | `Stepwise =>
-            xyShape
-            |> XYShape.XtoY.stepwiseIncremental(f)
-            |> E.O.default(0.0)
-          | `Linear => xyShape |> XYShape.XtoY.linear(f)
-          }
-        )
-        |> DistTypes.MixedPoint.makeContinuous;
-      };
-
-      // let combineWithFn = (t1: t, t2: t, fn: (float, float) => float) => {
-      //   switch(t1, t2){
-      //     | ({interpolation: `Stepwise}, {interpolation: `Stepwise}) => 3.0
-      //     | ({interpolation: `Linear}, {interpolation: `Linear}) => 3.0
-      //   }
-      // };
-
-      // TODO: This should work with stepwise plots.
-      let integral = (~cache, t) =>
-        switch (cache) {
-        | Some(cache) => cache
-        | None =>
-          t
-          |> getShape
-          |> XYShape.Range.integrateWithTriangles
-          |> E.O.toExt("This should not have happened")
-          |> make(`Linear)
-        };
-      let truncate = (~cache=None, length, t) =>
-        t
-        |> shapeMap(
-             XYShape.XsConversion.proportionByProbabilityMass(
-               length,
-               integral(~cache, t).xyShape,
-             ),
-           );
-      let integralEndY = (~cache, t) => t |> integral(~cache) |> lastY;
-      let integralXtoY = (~cache, f, t) =>
-        t |> integral(~cache) |> shapeFn(XYShape.XtoY.linear(f));
-      let integralYtoX = (~cache, f, t) =>
-        t |> integral(~cache) |> shapeFn(XYShape.YtoX.linear(f));
-      let toContinuous = t => Some(t);
-      let toDiscrete = _ => None;
-      let toScaledContinuous = t => Some(t);
-      let toScaledDiscrete = _ => None;
-
-      let mean = (t: t) => {
-        let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0;
-        let indefiniteIntegralLinear = (p, a, b) =>
-          a *. p ** 2.0 /. 2.0 +. b *. p ** 3.0 /. 3.0;
-        XYShape.Analysis.integrateContinuousShape(
-          ~indefiniteIntegralStepwise,
-          ~indefiniteIntegralLinear,
-          t,
-        );
-      };
-      let variance = (t: t): float =>
-        XYShape.Analysis.getVarianceDangerously(
-          t,
-          mean,
-          XYShape.Analysis.getMeanOfSquaresContinuousShape,
-        );
-    });
-};
-
-module Discrete = {
-  let sortedByY = (t: DistTypes.discreteShape) =>
-    t |> XYShape.T.zip |> XYShape.Zipped.sortByY;
-  let sortedByX = (t: DistTypes.discreteShape) =>
-    t |> XYShape.T.zip |> XYShape.Zipped.sortByX;
-  let empty = XYShape.T.empty;
-  let combine =
-      (fn, t1: DistTypes.discreteShape, t2: DistTypes.discreteShape)
-      : DistTypes.discreteShape => {
-    XYShape.Combine.combine(
-      ~xsSelection=ALL_XS,
-      ~xToYSelection=XYShape.XtoY.stepwiseIfAtX,
-      ~fn,
-      t1,
-      t2,
-    );
-  };
-  let _default0 = (fn, a, b) =>
-    fn(E.O.default(0.0, a), E.O.default(0.0, b));
-  let reduce = (fn, items) =>
-    items |> E.A.fold_left(combine(_default0(fn)), empty);
-
-  module T =
-    Dist({
-      type t = DistTypes.discreteShape;
-      type integral = DistTypes.continuousShape;
-      let integral = (~cache, t) =>
-        switch (cache) {
-        | Some(c) => c
-        | None => Continuous.make(`Stepwise, XYShape.T.accumulateYs((+.), t))
-        };
-      let integralEndY = (~cache, t) =>
-        t |> integral(~cache) |> Continuous.lastY;
-      let minX = XYShape.T.minX;
-      let maxX = XYShape.T.maxX;
-      let toDiscreteProbabilityMass = _ => 1.0;
-      let mapY = XYShape.T.mapY;
-      let toShape = (t: t): DistTypes.shape => Discrete(t);
-      let toContinuous = _ => None;
-      let toDiscrete = t => Some(t);
-      let toScaledContinuous = _ => None;
-      let toScaledDiscrete = t => Some(t);
-      let truncate = (~cache=None, i, t: t): DistTypes.discreteShape =>
-        t
-        |> XYShape.T.zip
-        |> XYShape.Zipped.sortByY
-        |> Belt.Array.reverse
-        |> Belt.Array.slice(_, ~offset=0, ~len=i)
-        |> XYShape.Zipped.sortByX
-        |> XYShape.T.fromZippedArray;
-
-      let xToY = (f, t) => {
-        XYShape.XtoY.stepwiseIfAtX(f, t)
-        |> E.O.default(0.0)
-        |> DistTypes.MixedPoint.makeDiscrete;
-      };
-
-      let integralXtoY = (~cache, f, t) =>
-        t
-        |> integral(~cache)
-        |> Continuous.getShape
-        |> XYShape.XtoY.linear(f);
-
-      let integralYtoX = (~cache, f, t) =>
-        t
-        |> integral(~cache)
-        |> Continuous.getShape
-        |> XYShape.YtoX.linear(f);
-
-      let mean = (t: t): float =>
-        E.A.reducei(t.xs, 0.0, (acc, x, i) => acc +. x *. t.ys[i]);
-      let variance = (t: t): float => {
-        let getMeanOfSquares = t =>
-          mean(XYShape.Analysis.squareXYShape(t));
-        XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares);
-      };
-    });
-};
-
-// TODO: I think this shouldn't assume continuous/discrete are normalized to 1.0, and thus should not need the discreteProbabilityMassFraction being separate.
-module Mixed = {
-  type t = DistTypes.mixedShape;
-  let make =
-      (~continuous, ~discrete, ~discreteProbabilityMassFraction)
-      : DistTypes.mixedShape => {
-    continuous,
-    discrete,
-    discreteProbabilityMassFraction,
-  };
-
-  // todo: Put into scaling module
-  let scaleDiscreteFn =
-      ({discreteProbabilityMassFraction}: DistTypes.mixedShape, f) =>
-    f *. discreteProbabilityMassFraction;
-
-  //TODO: Warning: This currently computes the integral, which is expensive.
-  let scaleContinuousFn =
-      ({discreteProbabilityMassFraction}: DistTypes.mixedShape, f) =>
-    f *. (1.0 -. discreteProbabilityMassFraction);
-
-  //TODO: Warning: This currently computes the integral, which is expensive.
-  let scaleContinuous = ({discreteProbabilityMassFraction}: t, continuous) =>
-    continuous
-    |> Continuous.T.scaleToIntegralSum(
-         ~intendedSum=1.0 -. discreteProbabilityMassFraction,
-       );
-
-  let scaleDiscrete = ({discreteProbabilityMassFraction}: t, disrete) =>
-    disrete
-    |> Discrete.T.scaleToIntegralSum(
-         ~intendedSum=discreteProbabilityMassFraction,
-       );
-
-  module T =
-    Dist({
-      type t = DistTypes.mixedShape;
-      type integral = DistTypes.continuousShape;
-      let minX = ({continuous, discrete}: t) => {
-        min(Continuous.T.minX(continuous), Discrete.T.minX(discrete));
-      };
-      let maxX = ({continuous, discrete}: t) =>
-        max(Continuous.T.maxX(continuous), Discrete.T.maxX(discrete));
-      let toShape = (t: t): DistTypes.shape => Mixed(t);
-      let toContinuous = ({continuous}: t) => Some(continuous);
-      let toDiscrete = ({discrete}: t) => Some(discrete);
-      let toDiscreteProbabilityMass = ({discreteProbabilityMassFraction}: t) => discreteProbabilityMassFraction;
-
-      let xToY = (f, {discrete, continuous} as t: t) => {
-        let c =
-          continuous
-          |> Continuous.T.xToY(f)
-          |> DistTypes.MixedPoint.fmap(scaleContinuousFn(t));
-        let d =
-          discrete
-          |> Discrete.T.xToY(f)
-          |> DistTypes.MixedPoint.fmap(scaleDiscreteFn(t));
-        DistTypes.MixedPoint.add(c, d);
-      };
-
-      // Warning: It's not clear how to update the discreteProbabilityMassFraction, so this may create small errors.
-      let truncate =
-          (
-            ~cache=None,
-            count,
-            {discrete, continuous, discreteProbabilityMassFraction}: t,
-          )
-          : t => {
-        {
-          discrete:
-            Discrete.T.truncate(
-              int_of_float(
-                float_of_int(count) *. discreteProbabilityMassFraction,
-              ),
-              discrete,
-            ),
-          continuous:
-            Continuous.T.truncate(
-              int_of_float(
-                float_of_int(count)
-                *. (1.0 -. discreteProbabilityMassFraction),
-              ),
-              continuous,
-            ),
-          discreteProbabilityMassFraction,
-        };
-      };
-
-      let toScaledContinuous = ({continuous} as t: t) =>
-        Some(scaleContinuous(t, continuous));
-
-      let toScaledDiscrete = ({discrete} as t: t) =>
-        Some(scaleDiscrete(t, discrete));
-
-      let integral =
-          (
-            ~cache,
-            {continuous, discrete, discreteProbabilityMassFraction}: t,
-          ) => {
-        switch (cache) {
-        | Some(cache) => cache
-        | None =>
-          let scaleContinuousBy =
-            (1.0 -. discreteProbabilityMassFraction)
-            /. (continuous |> Continuous.T.Integral.sum(~cache=None));
-
-          let scaleDiscreteBy =
-            discreteProbabilityMassFraction
-            /. (
-                 discrete
-                 |> Discrete.T.Integral.get(~cache=None)
-                 |> Continuous.toLinear
-                 |> E.O.fmap(Continuous.lastY)
-                 |> E.O.toExn("")
-               );
-
-          let cont =
-            continuous
-            |> Continuous.T.Integral.get(~cache=None)
-            |> Continuous.T.scaleBy(~scale=scaleContinuousBy);
-
-          let dist =
-            discrete
-            |> Discrete.T.Integral.get(~cache=None)
-            |> Continuous.toLinear
-            |> E.O.toExn("")
-            |> Continuous.T.scaleBy(~scale=scaleDiscreteBy);
-
-          let result =
-            Continuous.make(
-              `Linear,
-              XYShape.Combine.combineLinear(
-                ~fn=(+.),
-                Continuous.getShape(cont),
-                Continuous.getShape(dist),
-              ),
-            );
-          result;
-        };
-      };
-
-      let integralEndY = (~cache, t: t) => {
-        integral(~cache, t) |> Continuous.lastY;
-      };
-
-      let integralXtoY = (~cache, f, t) => {
-        t
-        |> integral(~cache)
-        |> Continuous.getShape
-        |> XYShape.XtoY.linear(f);
-      };
-
-      let integralYtoX = (~cache, f, t) => {
-        t
-        |> integral(~cache)
-        |> Continuous.getShape
-        |> XYShape.YtoX.linear(f);
-      };
-
-      // TODO: This part really needs to be rethought, I'm quite sure this is just broken. Mapping Ys would change the desired discreteProbabilityMassFraction.
-      let mapY =
-          (fn, {discrete, continuous, discreteProbabilityMassFraction}: t): t => {
-        {
-          discrete: Discrete.T.mapY(fn, discrete),
-          continuous: Continuous.T.mapY(fn, continuous),
-          discreteProbabilityMassFraction,
-        };
-      };
-
-      let mean = (t: t): float => {
-        let discreteProbabilityMassFraction =
-          t.discreteProbabilityMassFraction;
-        switch (discreteProbabilityMassFraction) {
-        | 1.0 => Discrete.T.mean(t.discrete)
-        | 0.0 => Continuous.T.mean(t.continuous)
-        | _ =>
-          Discrete.T.mean(t.discrete)
-          *. discreteProbabilityMassFraction
-          +. Continuous.T.mean(t.continuous)
-          *. (1.0 -. discreteProbabilityMassFraction)
-        };
-      };
-
-      let variance = (t: t): float => {
-        let discreteProbabilityMassFraction =
-          t.discreteProbabilityMassFraction;
-        let getMeanOfSquares = (t: t) => {
-          Discrete.T.mean(XYShape.Analysis.squareXYShape(t.discrete))
-          *. t.discreteProbabilityMassFraction
-          +. XYShape.Analysis.getMeanOfSquaresContinuousShape(t.continuous)
-          *. (1.0 -. t.discreteProbabilityMassFraction);
-        };
-        switch (discreteProbabilityMassFraction) {
-        | 1.0 => Discrete.T.variance(t.discrete)
-        | 0.0 => Continuous.T.variance(t.continuous)
-        | _ =>
-          XYShape.Analysis.getVarianceDangerously(
-            t,
-            mean,
-            getMeanOfSquares,
-          )
-        };
-      };
-    });
-};
-
-module Shape = {
-  module T =
-    Dist({
-      type t = DistTypes.shape;
-      type integral = DistTypes.continuousShape;
-
-      let mapToAll = ((fn1, fn2, fn3), t: t) =>
-        switch (t) {
-        | Mixed(m) => fn1(m)
-        | Discrete(m) => fn2(m)
-        | Continuous(m) => fn3(m)
-        };
-
-      let fmap = ((fn1, fn2, fn3), t: t): t =>
-        switch (t) {
-        | Mixed(m) => Mixed(fn1(m))
-        | Discrete(m) => Discrete(fn2(m))
-        | Continuous(m) => Continuous(fn3(m))
-        };
-
-      let xToY = f =>
-        mapToAll((
-          Mixed.T.xToY(f),
-          Discrete.T.xToY(f),
-          Continuous.T.xToY(f),
-        ));
-      let toShape = (t: t) => t;
-      let toContinuous =
-        mapToAll((
-          Mixed.T.toContinuous,
-          Discrete.T.toContinuous,
-          Continuous.T.toContinuous,
-        ));
-      let toDiscrete =
-        mapToAll((
-          Mixed.T.toDiscrete,
-          Discrete.T.toDiscrete,
-          Continuous.T.toDiscrete,
-        ));
-
-      let truncate = (~cache=None, i) =>
-        fmap((
-          Mixed.T.truncate(i),
-          Discrete.T.truncate(i),
-          Continuous.T.truncate(i),
-        ));
-
-      let toDiscreteProbabilityMass =
-        mapToAll((
-          Mixed.T.toDiscreteProbabilityMass,
-          Discrete.T.toDiscreteProbabilityMass,
-          Continuous.T.toDiscreteProbabilityMass,
-        ));
-
-      let toScaledDiscrete =
-        mapToAll((
-          Mixed.T.toScaledDiscrete,
-          Discrete.T.toScaledDiscrete,
-          Continuous.T.toScaledDiscrete,
-        ));
-      let toScaledContinuous =
-        mapToAll((
-          Mixed.T.toScaledContinuous,
-          Discrete.T.toScaledContinuous,
-          Continuous.T.toScaledContinuous,
-        ));
-      let minX = mapToAll((Mixed.T.minX, Discrete.T.minX, Continuous.T.minX));
-      let integral = (~cache) => {
-        mapToAll((
-          Mixed.T.Integral.get(~cache),
-          Discrete.T.Integral.get(~cache),
-          Continuous.T.Integral.get(~cache),
-        ));
-      };
-      let integralEndY = (~cache) =>
-        mapToAll((
-          Mixed.T.Integral.sum(~cache),
-          Discrete.T.Integral.sum(~cache),
-          Continuous.T.Integral.sum(~cache),
-        ));
-      let integralXtoY = (~cache, f) => {
-        mapToAll((
-          Mixed.T.Integral.xToY(~cache, f),
-          Discrete.T.Integral.xToY(~cache, f),
-          Continuous.T.Integral.xToY(~cache, f),
-        ));
-      };
-      let integralYtoX = (~cache, f) => {
-        mapToAll((
-          Mixed.T.Integral.yToX(~cache, f),
-          Discrete.T.Integral.yToX(~cache, f),
-          Continuous.T.Integral.yToX(~cache, f),
-        ));
-      };
-      let maxX = mapToAll((Mixed.T.maxX, Discrete.T.maxX, Continuous.T.maxX));
-      let mapY = fn =>
-        fmap((
-          Mixed.T.mapY(fn),
-          Discrete.T.mapY(fn),
-          Continuous.T.mapY(fn),
-        ));
-
-      let mean = (t: t): float =>
-        switch (t) {
-        | Mixed(m) => Mixed.T.mean(m)
-        | Discrete(m) => Discrete.T.mean(m)
-        | Continuous(m) => Continuous.T.mean(m)
-        };
-
-      let variance = (t: t): float =>
-        switch (t) {
-        | Mixed(m) => Mixed.T.variance(m)
-        | Discrete(m) => Discrete.T.variance(m)
-        | Continuous(m) => Continuous.T.variance(m)
-        };
-    });
-};
-
-module DistPlus = {
-  open DistTypes;
-
-  type t = DistTypes.distPlus;
-
-  let shapeIntegral = shape => Shape.T.Integral.get(~cache=None, shape);
-  let make =
-      (
-        ~shape,
-        ~guesstimatorString,
-        ~domain=Complete,
-        ~unit=UnspecifiedDistribution,
-        (),
-      )
-      : t => {
-    let integral = shapeIntegral(shape);
-    {shape, domain, integralCache: integral, unit, guesstimatorString};
-  };
-
-  let update =
-      (
-        ~shape=?,
-        ~integralCache=?,
-        ~domain=?,
-        ~unit=?,
-        ~guesstimatorString=?,
-        t: t,
-      ) => {
-    shape: E.O.default(t.shape, shape),
-    integralCache: E.O.default(t.integralCache, integralCache),
-    domain: E.O.default(t.domain, domain),
-    unit: E.O.default(t.unit, unit),
-    guesstimatorString: E.O.default(t.guesstimatorString, guesstimatorString),
-  };
-
-  let updateShape = (shape, t) => {
-    let integralCache = shapeIntegral(shape);
-    update(~shape, ~integralCache, t);
-  };
-
-  let domainIncludedProbabilityMass = (t: t) =>
-    Domain.includedProbabilityMass(t.domain);
-
-  let domainIncludedProbabilityMassAdjustment = (t: t, f) =>
-    f *. Domain.includedProbabilityMass(t.domain);
-
-  let toShape = ({shape, _}: t) => shape;
-
-  let shapeFn = (fn, {shape}: t) => fn(shape);
-
-  module T =
-    Dist({
-      type t = DistTypes.distPlus;
-      type integral = DistTypes.distPlus;
-      let toShape = toShape;
-      let toContinuous = shapeFn(Shape.T.toContinuous);
-      let toDiscrete = shapeFn(Shape.T.toDiscrete);
-      // todo: Adjust for total mass.
-
-      let toScaledContinuous = (t: t) => {
-        t
-        |> toShape
-        |> Shape.T.toScaledContinuous
-        |> E.O.fmap(
-             Continuous.T.mapY(domainIncludedProbabilityMassAdjustment(t)),
-           );
-      };
-
-      let toScaledDiscrete = (t: t) => {
-        t
-        |> toShape
-        |> Shape.T.toScaledDiscrete
-        |> E.O.fmap(
-             Discrete.T.mapY(domainIncludedProbabilityMassAdjustment(t)),
-           );
-      };
-
-      let xToY = (f, t: t) =>
-        t
-        |> toShape
-        |> Shape.T.xToY(f)
-        |> MixedPoint.fmap(domainIncludedProbabilityMassAdjustment(t));
-
-      let minX = shapeFn(Shape.T.minX);
-      let maxX = shapeFn(Shape.T.maxX);
-      let toDiscreteProbabilityMass =
-        shapeFn(Shape.T.toDiscreteProbabilityMass);
-
-      // This bit is kind of awkward, could probably use rethinking.
-      let integral = (~cache, t: t) =>
-        updateShape(Continuous(t.integralCache), t);
-
-      let truncate = (~cache=None, i, t) =>
-        updateShape(t |> toShape |> Shape.T.truncate(i), t);
-      // todo: adjust for limit, maybe?
-      let mapY = (fn, {shape, _} as t: t): t =>
-        Shape.T.mapY(fn, shape) |> updateShape(_, t);
-
-      let integralEndY = (~cache as _, t: t) =>
-        Shape.T.Integral.sum(~cache=Some(t.integralCache), toShape(t));
-
-      // TODO: Fix this below, obviously. Adjust for limits
-      let integralXtoY = (~cache as _, f, t: t) => {
-        Shape.T.Integral.xToY(~cache=Some(t.integralCache), f, toShape(t))
-        |> domainIncludedProbabilityMassAdjustment(t);
-      };
-
-      // TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
-      let integralYtoX = (~cache as _, f, t: t) => {
-        Shape.T.Integral.yToX(~cache=Some(t.integralCache), f, toShape(t));
-      };
-      let mean = (t: t) => Shape.T.mean(t.shape);
-      let variance = (t: t) => Shape.T.variance(t.shape);
-    });
-};
-
-module DistPlusTime = {
-  open DistTypes;
-
-  type t = DistTypes.distPlus;
-
-  let unitToJson = ({unit}: t) => unit |> DistTypes.DistributionUnit.toJson;
-
-  let timeVector = ({unit}: t) =>
-    switch (unit) {
-    | TimeDistribution(timeVector) => Some(timeVector)
-    | UnspecifiedDistribution => None
-    };
-
-  let timeInVectorToX = (f: TimeTypes.timeInVector, t: t) => {
-    let timeVector = t |> timeVector;
-    timeVector |> E.O.fmap(TimeTypes.RelativeTimePoint.toXValue(_, f));
-  };
-
-  let xToY = (f: TimeTypes.timeInVector, t: t) => {
-    timeInVectorToX(f, t) |> E.O.fmap(DistPlus.T.xToY(_, t));
-  };
-
-  module Integral = {
-    include DistPlus.T.Integral;
-    let xToY = (f: TimeTypes.timeInVector, t: t) => {
-      timeInVectorToX(f, t)
-      |> E.O.fmap(x => DistPlus.T.Integral.xToY(~cache=None, x, t));
-    };
-  };
-};
+
+module Common = {
+  let combineIntegralSums =
+      (
+        combineFn: (float, float) => option(float),
+        t1IntegralSumCache: option(float),
+        t2IntegralSumCache: option(float),
+      ) => {
+    switch (t1IntegralSumCache, t2IntegralSumCache) {
+    | (None, _)
+    | (_, None) => None
+    | (Some(s1), Some(s2)) => combineFn(s1, s2)
+    };
+  };
+
+  let combineIntegrals =
+      (
+        combineFn: (DistTypes.continuousShape, DistTypes.continuousShape) => option(DistTypes.continuousShape),
+        t1IntegralCache: option(DistTypes.continuousShape),
+        t2IntegralCache: option(DistTypes.continuousShape),
+      ) => {
+    switch (t1IntegralCache, t2IntegralCache) {
+    | (None, _)
+    | (_, None) => None
+    | (Some(s1), Some(s2)) => combineFn(s1, s2)
+    };
+  };
+};

332 src/distPlus/distribution/Mixed.re (Normal file)
@ -0,0 +1,332 @@
open Distributions;

type t = DistTypes.mixedShape;
let make = (~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete): t => {continuous, discrete, integralSumCache, integralCache};

let totalLength = (t: t): int => {
  let continuousLength =
    t.continuous |> Continuous.getShape |> XYShape.T.length;
  let discreteLength = t.discrete |> Discrete.getShape |> XYShape.T.length;

  continuousLength + discreteLength;
};

let scaleBy = (~scale=1.0, t: t): t => {
  let scaledDiscrete = Discrete.scaleBy(~scale, t.discrete);
  let scaledContinuous = Continuous.scaleBy(~scale, t.continuous);
  let scaledIntegralCache = E.O.bind(t.integralCache, v => Some(Continuous.scaleBy(~scale, v)));
  let scaledIntegralSumCache = E.O.bind(t.integralSumCache, s => Some(s *. scale));
  make(~discrete=scaledDiscrete, ~continuous=scaledContinuous, ~integralSumCache=scaledIntegralSumCache, ~integralCache=scaledIntegralCache);
};

let toContinuous = ({continuous}: t) => Some(continuous);
let toDiscrete = ({discrete}: t) => Some(discrete);

let updateIntegralCache = (integralCache, t: t): t => {
  ...t,
  integralCache,
};

module T =
  Dist({
    type t = DistTypes.mixedShape;
    type integral = DistTypes.continuousShape;
    let minX = ({continuous, discrete}: t) => {
      min(Continuous.T.minX(continuous), Discrete.T.minX(discrete));
    };
    let maxX = ({continuous, discrete}: t) =>
      max(Continuous.T.maxX(continuous), Discrete.T.maxX(discrete));
    let toShape = (t: t): DistTypes.shape => Mixed(t);

    let updateIntegralCache = updateIntegralCache;

    let toContinuous = toContinuous;
    let toDiscrete = toDiscrete;

    let truncate =
        (
          leftCutoff: option(float),
          rightCutoff: option(float),
          {discrete, continuous}: t,
        ) => {
      let truncatedContinuous =
        Continuous.T.truncate(leftCutoff, rightCutoff, continuous);
      let truncatedDiscrete =
        Discrete.T.truncate(leftCutoff, rightCutoff, discrete);

      make(~integralSumCache=None, ~integralCache=None, ~discrete=truncatedDiscrete, ~continuous=truncatedContinuous);
    };

    let normalize = (t: t): t => {
      let continuousIntegral = Continuous.T.Integral.get(t.continuous);
      let discreteIntegral = Discrete.T.Integral.get(t.discrete);

      let continuous = t.continuous |> Continuous.updateIntegralCache(Some(continuousIntegral));
      let discrete = t.discrete |> Discrete.updateIntegralCache(Some(discreteIntegral));

      let continuousIntegralSum =
        Continuous.T.Integral.sum(continuous);
      let discreteIntegralSum =
        Discrete.T.Integral.sum(discrete);
      let totalIntegralSum = continuousIntegralSum +. discreteIntegralSum;

      let newContinuousSum = continuousIntegralSum /. totalIntegralSum;
      let newDiscreteSum = discreteIntegralSum /. totalIntegralSum;

      let normalizedContinuous =
        continuous
        |> Continuous.scaleBy(~scale=newContinuousSum /. continuousIntegralSum)
        |> Continuous.updateIntegralSumCache(Some(newContinuousSum));
      let normalizedDiscrete =
        discrete
        |> Discrete.scaleBy(~scale=newDiscreteSum /. discreteIntegralSum)
        |> Discrete.updateIntegralSumCache(Some(newDiscreteSum));

      make(~integralSumCache=Some(1.0), ~integralCache=None, ~continuous=normalizedContinuous, ~discrete=normalizedDiscrete);
    };
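
    // Worked sketch: with continuous mass 0.6 and discrete mass 0.3, the total is
    // 0.9, so normalize rescales to newContinuousSum == 0.6 /. 0.9 (about 0.667)
    // and newDiscreteSum == 0.3 /. 0.9 (about 0.333), caching integralSumCache=Some(1.0).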

    let xToY = (x, t: t) => {
      // This evaluates the mixedShape at x, interpolating if necessary.
      // Note that we normalize the entire mixedShape first.
      let {continuous, discrete}: t = normalize(t);
      let c = Continuous.T.xToY(x, continuous);
      let d = Discrete.T.xToY(x, discrete);
      DistTypes.MixedPoint.add(c, d); // "add" here just combines the two values into a single MixedPoint.
    };

    let toDiscreteProbabilityMassFraction = ({discrete, continuous}: t) => {
      let discreteIntegralSum =
        Discrete.T.Integral.sum(discrete);
      let continuousIntegralSum =
        Continuous.T.Integral.sum(continuous);
      let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;

      discreteIntegralSum /. totalIntegralSum;
    };

    let downsample = (count, t: t): t => {
      // We will need to distribute the new xs fairly between the discrete and continuous shapes.
      // The easiest way to do this is to simply go by the previous probability masses.

      let discreteIntegralSum =
        Discrete.T.Integral.sum(t.discrete);
      let continuousIntegralSum =
        Continuous.T.Integral.sum(t.continuous);
      let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;

      // TODO: figure out what to do when the totalIntegralSum is zero.

      let downsampledDiscrete =
        Discrete.T.downsample(
          int_of_float(
            float_of_int(count) *. (discreteIntegralSum /. totalIntegralSum),
          ),
          t.discrete,
        );

      let downsampledContinuous =
        Continuous.T.downsample(
          int_of_float(
            float_of_int(count) *. (continuousIntegralSum /. totalIntegralSum),
          ),
          t.continuous,
        );

      {...t, discrete: downsampledDiscrete, continuous: downsampledContinuous};
    };
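
    // Worked sketch: downsampling to count=100 with discrete mass 0.25 and
    // continuous mass 0.75 allocates int_of_float(100. *. 0.25) == 25 points to
    // the discrete side and 75 to the continuous side.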

    let integral = (t: t) => {
      switch (t.integralCache) {
      | Some(cache) => cache
      | None =>
        // note: if the underlying shapes aren't normalized, then these integrals won't be either -- but that's the way it should be.
        let continuousIntegral = Continuous.T.Integral.get(t.continuous);
        let discreteIntegral = Continuous.stepwiseToLinear(Discrete.T.Integral.get(t.discrete));

        Continuous.make(
          XYShape.PointwiseCombination.combine(
            (+.),
            XYShape.XtoY.continuousInterpolator(`Linear, `UseOutermostPoints),
            Continuous.getShape(continuousIntegral),
            Continuous.getShape(discreteIntegral),
          ),
        );
      };
    };

    let integralEndY = (t: t) => {
      t |> integral |> Continuous.lastY;
    };

    let integralXtoY = (f, t) => {
      t |> integral |> Continuous.getShape |> XYShape.XtoY.linear(f);
    };

    let integralYtoX = (f, t) => {
      t |> integral |> Continuous.getShape |> XYShape.YtoX.linear(f);
    };

    // This pipes all ys (continuous and discrete) through fn.
    // If mapY is a linear operation, we might be able to update the integralSumCaches as well;
    // if not, they'll be set to None.
    let mapY =
        (
          ~integralSumCacheFn=previousIntegralSum => None,
          ~integralCacheFn=previousIntegral => None,
          ~fn,
          t: t,
        )
        : t => {
      let yMappedDiscrete: DistTypes.discreteShape =
        t.discrete
        |> Discrete.T.mapY(~fn)
        |> Discrete.updateIntegralSumCache(E.O.bind(t.discrete.integralSumCache, integralSumCacheFn))
        |> Discrete.updateIntegralCache(E.O.bind(t.discrete.integralCache, integralCacheFn));

      let yMappedContinuous: DistTypes.continuousShape =
        t.continuous
        |> Continuous.T.mapY(~fn)
        |> Continuous.updateIntegralSumCache(E.O.bind(t.continuous.integralSumCache, integralSumCacheFn))
        |> Continuous.updateIntegralCache(E.O.bind(t.continuous.integralCache, integralCacheFn));

      {
        discrete: yMappedDiscrete,
        continuous: yMappedContinuous,
        integralSumCache: E.O.bind(t.integralSumCache, integralSumCacheFn),
        integralCache: E.O.bind(t.integralCache, integralCacheFn),
      };
    };

    let mean = ({discrete, continuous}: t): float => {
      let discreteMean = Discrete.T.mean(discrete);
      let continuousMean = Continuous.T.mean(continuous);

      // the combined mean is the weighted sum of the two:
      let discreteIntegralSum = Discrete.T.Integral.sum(discrete);
      let continuousIntegralSum = Continuous.T.Integral.sum(continuous);
      let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;

      (
        discreteMean
        *. discreteIntegralSum
        +. continuousMean
        *. continuousIntegralSum
      )
      /. totalIntegralSum;
    };
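
    // Worked sketch: a discrete mean of 2. carrying mass 0.25 and a continuous
    // mean of 6. carrying mass 0.75 combine to (2. *. 0.25 +. 6. *. 0.75) /. 1.0 == 5.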

    let variance = ({discrete, continuous} as t: t): float => {
      // the combined mean is the weighted sum of the two:
      let discreteIntegralSum = Discrete.T.Integral.sum(discrete);
      let continuousIntegralSum = Continuous.T.Integral.sum(continuous);
      let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;

      let getMeanOfSquares = ({discrete, continuous}: t) => {
        let discreteMean =
          discrete
          |> Discrete.shapeMap(XYShape.Analysis.squareXYShape)
          |> Discrete.T.mean;
        let continuousMean =
          continuous |> XYShape.Analysis.getMeanOfSquaresContinuousShape;
        (
          discreteMean
          *. discreteIntegralSum
          +. continuousMean
          *. continuousIntegralSum
        )
        /. totalIntegralSum;
      };

      switch (discreteIntegralSum /. totalIntegralSum) {
      | 1.0 => Discrete.T.variance(discrete)
      | 0.0 => Continuous.T.variance(continuous)
      | _ =>
        XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
      };
    };
  });
|
|
||||||
|
let combineAlgebraically =
|
||||||
|
(op: ExpressionTypes.algebraicOperation, t1: t, t2: t)
|
||||||
|
: t => {
|
||||||
|
// Discrete convolution can cause a huge increase in the number of samples,
|
||||||
|
// so we'll first downsample.
|
||||||
|
|
||||||
|
// An alternative (to be explored in the future) may be to first perform the full convolution and then to downsample the result;
|
||||||
|
// to use non-uniform fast Fourier transforms (for addition only), add web workers or gpu.js, etc. ...
|
||||||
|
|
||||||
|
// we have to figure out where to downsample, and how to effectively
|
||||||
|
//let downsampleIfTooLarge = (t: t) => {
|
||||||
|
// let sqtl = sqrt(float_of_int(totalLength(t)));
|
||||||
|
// sqtl > 10 ? T.downsample(int_of_float(sqtl), t) : t;
|
||||||
|
//};
|
||||||
|
|
||||||
|
let t1d = t1;
|
||||||
|
let t2d = t2;
|
||||||
|
|
||||||
|
// continuous (*) continuous => continuous, but also
|
||||||
|
// discrete (*) continuous => continuous (and vice versa). We have to take care of all combos and then combine them:
|
||||||
|
let ccConvResult =
|
||||||
|
Continuous.combineAlgebraically(
|
||||||
|
op,
|
||||||
|
t1.continuous,
|
||||||
|
t2.continuous,
|
||||||
|
);
|
||||||
|
let dcConvResult =
|
||||||
|
Continuous.combineAlgebraicallyWithDiscrete(
|
||||||
|
op,
|
||||||
|
t2.continuous,
|
||||||
|
t1.discrete,
|
||||||
|
);
|
||||||
|
let cdConvResult =
|
||||||
|
Continuous.combineAlgebraicallyWithDiscrete(
|
||||||
|
op,
|
||||||
|
t1.continuous,
|
||||||
|
t2.discrete,
|
||||||
|
);
|
||||||
|
let continuousConvResult =
|
||||||
|
Continuous.reduce((+.), [|ccConvResult, dcConvResult, cdConvResult|]);
|
||||||
|
|
||||||
|
// ... finally, discrete (*) discrete => discrete, obviously:
|
||||||
|
let discreteConvResult =
|
||||||
|
Discrete.combineAlgebraically(op, t1.discrete, t2.discrete);
|
||||||
|
|
||||||
|
let combinedIntegralSum =
|
||||||
|
Common.combineIntegralSums(
|
||||||
|
(a, b) => Some(a *. b),
|
||||||
|
t1.integralSumCache,
|
||||||
|
t2.integralSumCache,
|
||||||
|
);
|
||||||
|
|
||||||
|
{discrete: discreteConvResult, continuous: continuousConvResult, integralSumCache: combinedIntegralSum, integralCache: None};
|
||||||
|
};
|
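
// (Added commentary) Of the four cross terms above, only discrete (*) discrete
// stays discrete; every term touching a continuous operand is folded into the
// single continuous result via Continuous.reduce((+.), ...).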

let combinePointwise = (~integralSumCachesFn = (_, _) => None, ~integralCachesFn = (_, _) => None, fn, t1: t, t2: t): t => {
  let reducedDiscrete =
    [|t1, t2|]
    |> E.A.fmap(toDiscrete)
    |> E.A.O.concatSomes
    |> Discrete.reduce(~integralSumCachesFn, ~integralCachesFn, fn);

  let reducedContinuous =
    [|t1, t2|]
    |> E.A.fmap(toContinuous)
    |> E.A.O.concatSomes
    |> Continuous.reduce(~integralSumCachesFn, ~integralCachesFn, fn);

  let combinedIntegralSum =
    Common.combineIntegralSums(
      integralSumCachesFn,
      t1.integralSumCache,
      t2.integralSumCache,
    );

  let combinedIntegral =
    Common.combineIntegrals(
      integralCachesFn,
      t1.integralCache,
      t2.integralCache,
    );

  make(~integralSumCache=combinedIntegralSum, ~integralCache=combinedIntegral, ~discrete=reducedDiscrete, ~continuous=reducedContinuous);
};

@ -8,98 +8,27 @@ type assumptions = {
   discreteProbabilityMass: option(float),
 };
 
-let buildSimple = (~continuous: option(DistTypes.continuousShape), ~discrete): option(DistTypes.shape) => {
-  let continuous = continuous |> E.O.default(Distributions.Continuous.make(`Linear, {xs: [||], ys: [||]}))
+let buildSimple = (~continuous: option(DistTypes.continuousShape), ~discrete: option(DistTypes.discreteShape)): option(DistTypes.shape) => {
+  let continuous = continuous |> E.O.default(Continuous.make(~integralSumCache=Some(0.0), {xs: [||], ys: [||]}));
+  let discrete = discrete |> E.O.default(Discrete.make(~integralSumCache=Some(0.0), {xs: [||], ys: [||]}));
   let cLength =
     continuous
-    |> Distributions.Continuous.getShape
+    |> Continuous.getShape
     |> XYShape.T.xs
     |> E.A.length;
-  let dLength = discrete |> XYShape.T.xs |> E.A.length;
+  let dLength = discrete |> Discrete.getShape |> XYShape.T.xs |> E.A.length;
   switch (cLength, dLength) {
   | (0 | 1, 0) => None
   | (0 | 1, _) => Some(Discrete(discrete))
   | (_, 0) => Some(Continuous(continuous))
   | (_, _) =>
-    let discreteProbabilityMassFraction =
-      Distributions.Discrete.T.Integral.sum(~cache=None, discrete);
-    let discrete =
-      Distributions.Discrete.T.scaleToIntegralSum(~intendedSum=1.0, discrete);
-    let continuous =
-      Distributions.Continuous.T.scaleToIntegralSum(
-        ~intendedSum=1.0,
-        continuous,
-      );
     let mixedDist =
-      Distributions.Mixed.make(
+      Mixed.make(
+        ~integralSumCache=None,
+        ~integralCache=None,
         ~continuous,
         ~discrete,
-        ~discreteProbabilityMassFraction,
       );
     Some(Mixed(mixedDist));
   };
 };
 
-
-// TODO: Delete, only being used in tests
-let build = (~continuous, ~discrete, ~assumptions) =>
-  switch (assumptions) {
-  | {
-      continuous: ADDS_TO_CORRECT_PROBABILITY,
-      discrete: ADDS_TO_CORRECT_PROBABILITY,
-      discreteProbabilityMass: Some(r),
-    } =>
-    // TODO: Fix this, it's wrong :(
-    Some(
-      Distributions.Mixed.make(
-        ~continuous,
-        ~discrete,
-        ~discreteProbabilityMassFraction=r,
-      ),
-    )
-
-  | {
-      continuous: ADDS_TO_1,
-      discrete: ADDS_TO_1,
-      discreteProbabilityMass: Some(r),
-    } =>
-    Some(
-      Distributions.Mixed.make(
-        ~continuous,
-        ~discrete,
-        ~discreteProbabilityMassFraction=r,
-      ),
-    )
-
-  | {
-      continuous: ADDS_TO_1,
-      discrete: ADDS_TO_1,
-      discreteProbabilityMass: None,
-    } =>
-    None
-
-  | {
-      continuous: ADDS_TO_CORRECT_PROBABILITY,
-      discrete: ADDS_TO_1,
-      discreteProbabilityMass: None,
-    } =>
-    None
-
-  | {
-      continuous: ADDS_TO_1,
-      discrete: ADDS_TO_CORRECT_PROBABILITY,
-      discreteProbabilityMass: None,
-    } =>
-    let discreteProbabilityMassFraction =
-      Distributions.Discrete.T.Integral.sum(~cache=None, discrete);
-    let discrete =
-      Distributions.Discrete.T.scaleToIntegralSum(~intendedSum=1.0, discrete);
-    Some(
-      Distributions.Mixed.make(
-        ~continuous,
-        ~discrete,
-        ~discreteProbabilityMassFraction,
-      ),
-    );
-  | _ => None
-  };
233
src/distPlus/distribution/Shape.re
Normal file
233
src/distPlus/distribution/Shape.re
Normal file
|
@ -0,0 +1,233 @@
|
||||||
|
open Distributions;

type t = DistTypes.shape;
let mapToAll = ((fn1, fn2, fn3), t: t) =>
  switch (t) {
  | Mixed(m) => fn1(m)
  | Discrete(m) => fn2(m)
  | Continuous(m) => fn3(m)
  };

let fmap = ((fn1, fn2, fn3), t: t): t =>
  switch (t) {
  | Mixed(m) => Mixed(fn1(m))
  | Discrete(m) => Discrete(fn2(m))
  | Continuous(m) => Continuous(fn3(m))
  };

let toMixed =
  mapToAll((
    m => m,
    d =>
      Mixed.make(
        ~integralSumCache=d.integralSumCache,
        ~integralCache=d.integralCache,
        ~discrete=d,
        ~continuous=Continuous.empty,
      ),
    c =>
      Mixed.make(
        ~integralSumCache=c.integralSumCache,
        ~integralCache=c.integralCache,
        ~discrete=Discrete.empty,
        ~continuous=c,
      ),
  ));

let combineAlgebraically =
    (op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t => {
  switch (t1, t2) {
  | (Continuous(m1), Continuous(m2)) =>
    Continuous.combineAlgebraically(op, m1, m2) |> Continuous.T.toShape
  | (Continuous(m1), Discrete(m2))
  | (Discrete(m2), Continuous(m1)) =>
    Continuous.combineAlgebraicallyWithDiscrete(op, m1, m2)
    |> Continuous.T.toShape
  | (Discrete(m1), Discrete(m2)) =>
    Discrete.combineAlgebraically(op, m1, m2) |> Discrete.T.toShape
  | (m1, m2) =>
    Mixed.combineAlgebraically(op, toMixed(m1), toMixed(m2))
    |> Mixed.T.toShape
  };
};
let combinePointwise =
    (
      ~integralSumCachesFn: (float, float) => option(float)=(_, _) => None,
      ~integralCachesFn:
         (DistTypes.continuousShape, DistTypes.continuousShape) =>
         option(DistTypes.continuousShape)=(_, _) => None,
      fn,
      t1: t,
      t2: t,
    ) =>
  switch (t1, t2) {
  | (Continuous(m1), Continuous(m2)) =>
    DistTypes.Continuous(
      Continuous.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
    )
  | (Discrete(m1), Discrete(m2)) =>
    DistTypes.Discrete(
      Discrete.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
    )
  | (m1, m2) =>
    DistTypes.Mixed(
      Mixed.combinePointwise(
        ~integralSumCachesFn,
        ~integralCachesFn,
        fn,
        toMixed(m1),
        toMixed(m2),
      ),
    )
  };
module T =
  Dist({
    type t = DistTypes.shape;
    type integral = DistTypes.continuousShape;

    let xToY = (f: float) =>
      mapToAll((
        Mixed.T.xToY(f),
        Discrete.T.xToY(f),
        Continuous.T.xToY(f),
      ));

    let toShape = (t: t) => t;

    let toContinuous = t => None;
    let toDiscrete = t => None;

    let downsample = (i, t) =>
      fmap(
        (
          Mixed.T.downsample(i),
          Discrete.T.downsample(i),
          Continuous.T.downsample(i),
        ),
        t,
      );

    let truncate = (leftCutoff, rightCutoff, t): t =>
      fmap(
        (
          Mixed.T.truncate(leftCutoff, rightCutoff),
          Discrete.T.truncate(leftCutoff, rightCutoff),
          Continuous.T.truncate(leftCutoff, rightCutoff),
        ),
        t,
      );

    let toDiscreteProbabilityMassFraction = t => 0.0;

    let normalize =
      fmap((
        Mixed.T.normalize,
        Discrete.T.normalize,
        Continuous.T.normalize,
      ));

    let updateIntegralCache = (integralCache, t: t): t =>
      fmap(
        (
          Mixed.T.updateIntegralCache(integralCache),
          Discrete.T.updateIntegralCache(integralCache),
          Continuous.T.updateIntegralCache(integralCache),
        ),
        t,
      );

    let toContinuous =
      mapToAll((
        Mixed.T.toContinuous,
        Discrete.T.toContinuous,
        Continuous.T.toContinuous,
      ));
    let toDiscrete =
      mapToAll((
        Mixed.T.toDiscrete,
        Discrete.T.toDiscrete,
        Continuous.T.toDiscrete,
      ));

    let toDiscreteProbabilityMassFraction =
      mapToAll((
        Mixed.T.toDiscreteProbabilityMassFraction,
        Discrete.T.toDiscreteProbabilityMassFraction,
        Continuous.T.toDiscreteProbabilityMassFraction,
      ));

    let minX = mapToAll((Mixed.T.minX, Discrete.T.minX, Continuous.T.minX));
    let integral =
      mapToAll((
        Mixed.T.Integral.get,
        Discrete.T.Integral.get,
        Continuous.T.Integral.get,
      ));
    let integralEndY =
      mapToAll((
        Mixed.T.Integral.sum,
        Discrete.T.Integral.sum,
        Continuous.T.Integral.sum,
      ));
    let integralXtoY = f => {
      mapToAll((
        Mixed.T.Integral.xToY(f),
        Discrete.T.Integral.xToY(f),
        Continuous.T.Integral.xToY(f),
      ));
    };
    let integralYtoX = f => {
      mapToAll((
        Mixed.T.Integral.yToX(f),
        Discrete.T.Integral.yToX(f),
        Continuous.T.Integral.yToX(f),
      ));
    };
    let maxX = mapToAll((Mixed.T.maxX, Discrete.T.maxX, Continuous.T.maxX));
    let mapY =
        (
          ~integralSumCacheFn=previousIntegralSum => None,
          ~integralCacheFn=previousIntegral => None,
          ~fn,
        ) =>
      fmap((
        Mixed.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
        Discrete.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
        Continuous.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
      ));

    let mean = (t: t): float =>
      switch (t) {
      | Mixed(m) => Mixed.T.mean(m)
      | Discrete(m) => Discrete.T.mean(m)
      | Continuous(m) => Continuous.T.mean(m)
      };

    let variance = (t: t): float =>
      switch (t) {
      | Mixed(m) => Mixed.T.variance(m)
      | Discrete(m) => Discrete.T.variance(m)
      | Continuous(m) => Continuous.T.variance(m)
      };
  });
let pdf = (f: float, t: t) => {
  let mixedPoint: DistTypes.mixedPoint = T.xToY(f, t);
  mixedPoint.continuous +. mixedPoint.discrete;
};

let inv = T.Integral.yToX;
let cdf = T.Integral.xToY;

let doN = (n, fn) => {
  let items = Belt.Array.make(n, 0.0);
  for (x in 0 to n - 1) {
    let _ = Belt.Array.set(items, x, fn());
    ();
  };
  items;
};

let sample = (t: t): float => {
  let randomItem = Random.float(1.);
  let bar = t |> T.Integral.yToX(randomItem);
  bar;
};

let sampleNRendered = (n, dist) => {
  let integralCache = T.Integral.get(dist);
  let distWithUpdatedIntegralCache =
    T.updateIntegralCache(Some(integralCache), dist);

  doN(n, () => sample(distWithUpdatedIntegralCache));
};

let operate = (distToFloatOp: ExpressionTypes.distToFloatOperation, s): float =>
  switch (distToFloatOp) {
  | `Pdf(f) => pdf(f, s)
  | `Cdf(f) => cdf(f, s)
  | `Inv(f) => inv(f, s)
  | `Sample => sample(s)
  | `Mean => T.mean(s)
  };
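`sample` above is inverse transform sampling: draw u uniformly on [0, 1] and push it through the inverse of the integral (the CDF). A minimal self-contained sketch of the same idea over a precomputed discrete CDF (hypothetical helper, not the library's API):

  // Find the first x whose cumulative mass reaches u; illustration only.
  let sampleFromCdf = (xs: array(float), cumYs: array(float)): float => {
    let u = Random.float(1.0);
    let n = Array.length(xs);
    let i = ref(0);
    while (i^ < n - 1 && cumYs[i^] < u) {
      i := i^ + 1;
    };
    xs[i^];
  };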
@ -9,7 +9,7 @@ let interpolate =
 };
 
 // TODO: Make sure that shapes cannot be empty.
-let extImp = E.O.toExt("Should not be possible");
+let extImp = E.O.toExt("Tried to perform an operation on an empty XYShape.");
 
 module T = {
   type t = xyShape;

@ -17,7 +17,9 @@ module T = {
   type ts = array(xyShape);
   let xs = (t: t) => t.xs;
   let ys = (t: t) => t.ys;
+  let length = (t: t) => E.A.length(t.xs);
   let empty = {xs: [||], ys: [||]};
+  let isEmpty = (t: t) => length(t) == 0;
   let minX = (t: t) => t |> xs |> E.A.Sorted.min |> extImp;
   let maxX = (t: t) => t |> xs |> E.A.Sorted.max |> extImp;
   let firstY = (t: t) => t |> ys |> E.A.first |> extImp;

@ -31,6 +33,11 @@ module T = {
   let accumulateYs = (fn, p: t) => {
     fromArray((p.xs, E.A.accumulate(fn, p.ys)));
   };
+  let concat = (t1: t, t2: t) => {
+    let cxs = Array.concat([t1.xs, t2.xs]);
+    let cys = Array.concat([t1.ys, t2.ys]);
+    {xs: cxs, ys: cys};
+  };
   let fromZippedArray = (pairs: array((float, float))): t =>
     pairs |> Belt.Array.unzip |> fromArray;
   let equallyDividedXs = (t: t, newLength) => {
@ -136,6 +143,63 @@ module XtoY = {
     };
     n;
   };
 
+  /* Returns a between-points-interpolating function that can be used with PointwiseCombination.combine.
+     Interpolation can either be stepwise (using the value on the left) or linear. Extrapolation can be `UseZero or `UseOutermostPoints. */
+  let continuousInterpolator = (interpolation: DistTypes.interpolationStrategy, extrapolation: DistTypes.extrapolationStrategy): interpolator => {
+    switch (interpolation, extrapolation) {
+    | (`Linear, `UseZero) => (t: T.t, leftIndex: int, x: float) => {
+        if (leftIndex < 0) {
+          0.0
+        } else if (leftIndex >= T.length(t) - 1) {
+          0.0
+        } else {
+          let x1 = t.xs[leftIndex];
+          let x2 = t.xs[leftIndex + 1];
+          let y1 = t.ys[leftIndex];
+          let y2 = t.ys[leftIndex + 1];
+          let fraction = (x -. x1) /. (x2 -. x1);
+          y1 *. (1. -. fraction) +. y2 *. fraction;
+        };
+      }
+    | (`Linear, `UseOutermostPoints) => (t: T.t, leftIndex: int, x: float) => {
+        if (leftIndex < 0) {
+          t.ys[0];
+        } else if (leftIndex >= T.length(t) - 1) {
+          t.ys[T.length(t) - 1]
+        } else {
+          let x1 = t.xs[leftIndex];
+          let x2 = t.xs[leftIndex + 1];
+          let y1 = t.ys[leftIndex];
+          let y2 = t.ys[leftIndex + 1];
+          let fraction = (x -. x1) /. (x2 -. x1);
+          y1 *. (1. -. fraction) +. y2 *. fraction;
+        };
+      }
+    | (`Stepwise, `UseZero) => (t: T.t, leftIndex: int, x: float) => {
+        if (leftIndex < 0) {
+          0.0
+        } else if (leftIndex >= T.length(t) - 1) {
+          0.0
+        } else {
+          t.ys[leftIndex];
+        }
+      }
+    | (`Stepwise, `UseOutermostPoints) => (t: T.t, leftIndex: int, x: float) => {
+        if (leftIndex < 0) {
+          t.ys[0];
+        } else if (leftIndex >= T.length(t) - 1) {
+          t.ys[T.length(t) - 1]
+        } else {
+          t.ys[leftIndex];
+        }
+      }
+    }
+  };
+
+  /* Returns a between-points-interpolating function that can be used with PointwiseCombination.combine.
+     For discrete distributions, the probability density between points is zero, so we just return zero here. */
+  let discreteInterpolator: interpolator = (t: T.t, leftIndex: int, x: float) => 0.0;
 };
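The `Linear` branches above are a standard lerp between the two bracketing points: f = (x -. x1) /. (x2 -. x1), then y = y1 *. (1. -. f) +. y2 *. f. A quick worked check with made-up points: (x1, y1) = (0., 0.), (x2, y2) = (10., 5.) and x = 2.5 gives f = 0.25 and y = 0. *. 0.75 +. 5. *. 0.25 = 1.25.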
 
 module XsConversion = {

@ -154,7 +218,9 @@ module XsConversion = {
 
   let proportionByProbabilityMass =
       (newLength: int, integral: T.t, t: T.t): T.t => {
-    equallyDivideXByMass(newLength, integral) |> _replaceWithXs(_, t);
+    integral
+    |> equallyDivideXByMass(newLength) // creates a new set of xs at evenly spaced percentiles
+    |> _replaceWithXs(_, t); // linearly interpolates new ys for the new xs
   };
 };

@ -164,37 +230,90 @@ module Zipped = {
   let compareXs = ((x1, _), (x2, _)) => x1 > x2 ? 1 : 0;
   let sortByY = (t: zipped) => t |> E.A.stableSortBy(_, compareYs);
   let sortByX = (t: zipped) => t |> E.A.stableSortBy(_, compareXs);
+  let filterByX = (testFn: (float => bool), t: zipped) => t |> E.A.filter(((x, _)) => testFn(x));
 };
-module Combine = {
+module PointwiseCombination = {
-  type xsSelection =
-    | ALL_XS
-    | XS_EVENLY_DIVIDED(int);
-
-  let combine =
+  // t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
+  let combine = [%raw {| // : (float => float => float, T.t, T.t, bool) => T.t
+    // This function combines two xyShapes by looping through both of them simultaneously.
+    // It always moves on to the next smallest x, whether that's in the first or second input's xs,
+    // and interpolates the value on the other side, thus accumulating xs and ys.
+    // This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.
+
+    function(fn, interpolator, t1, t2) {
+      let t1n = t1.xs.length;
+      let t2n = t2.xs.length;
+      let outX = [];
+      let outY = [];
+      let i = -1;
+      let j = -1;
+
+      while (i <= t1n - 1 && j <= t2n - 1) {
+        let x, ya, yb;
+        if (j == t2n - 1 && i < t1n - 1 ||
+            t1.xs[i+1] < t2.xs[j+1]) { // if a has to catch up to b, or if b is already done
+          i++;
+
+          x = t1.xs[i];
+          ya = t1.ys[i];
+
+          yb = interpolator(t2, j, x);
+        } else if (i == t1n - 1 && j < t2n - 1 ||
+                   t1.xs[i+1] > t2.xs[j+1]) { // if b has to catch up to a, or if a is already done
+          j++;
+
+          x = t2.xs[j];
+          yb = t2.ys[j];
+
+          ya = interpolator(t1, i, x);
+        } else if (i < t1n - 1 && j < t2n && t1.xs[i+1] === t2.xs[j+1]) { // if they happen to be equal, move both ahead
+          i++;
+          j++;
+          x = t1.xs[i];
+          ya = t1.ys[i];
+          yb = t2.ys[j];
+        } else if (i === t1n - 1 && j === t2n - 1) {
+          // finished!
+          i = t1n;
+          j = t2n;
+          continue;
+        } else {
+          console.log("Error!", i, j);
+        }
+
+        outX.push(x);
+        outY.push(fn(ya, yb));
+      }
+
+      return {xs: outX, ys: outY};
+    }
+  |}];
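A usage sketch (illustrative values; the calling convention is `(fn, interpolator, t1, t2)` as in the raw block above):

  // Pointwise sum of two shapes, interpolating linearly between points and
  // treating values outside a shape's support as zero. Not a test from the PR.
  let t1: T.t = {xs: [|0., 1., 2.|], ys: [|1., 2., 3.|]};
  let t2: T.t = {xs: [|0.5, 1.5|], ys: [|10., 20.|]};
  let summed =
    PointwiseCombination.combine(
      (+.),
      XtoY.continuousInterpolator(`Linear, `UseZero),
      t1,
      t2,
    );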
 
+  let combineEvenXs =
     (
-      ~xToYSelection: (float, T.t) => 'a,
-      ~xsSelection=ALL_XS,
       ~fn,
+      ~xToYSelection,
+      sampleCount,
       t1: T.t,
       t2: T.t,
     ) => {
-    let allXs =
-      switch (xsSelection) {
-      | ALL_XS => Ts.allXs([|t1, t2|])
-      | XS_EVENLY_DIVIDED(sampleCount) =>
-        Ts.equallyDividedXs([|t1, t2|], sampleCount)
-      };
-
-    let allYs =
-      allXs |> E.A.fmap(x => fn(xToYSelection(x, t1), xToYSelection(x, t2)));
+    switch ((E.A.length(t1.xs), E.A.length(t2.xs))) {
+    | (0, 0) => T.empty
+    | (0, _) => t2
+    | (_, 0) => t1
+    | (_, _) => {
+        let allXs = Ts.equallyDividedXs([|t1, t2|], sampleCount);
+
+        let allYs = allXs |> E.A.fmap(x => fn(xToYSelection(x, t1), xToYSelection(x, t2)));
+
         T.fromArrays(allXs, allYs);
+      }
+    }
   };
 
-  let combineLinear = combine(~xToYSelection=XtoY.linear);
-  let combineStepwise = combine(~xToYSelection=XtoY.stepwiseIncremental);
-  let combineIfAtX = combine(~xToYSelection=XtoY.stepwiseIfAtX);
 
   // TODO: I'd bet this is pretty slow. Maybe it would be faster to intersperse Xs and Ys separately.
   let intersperse = (t1: T.t, t2: T.t) => {
     E.A.intersperse(T.zip(t1), T.zip(t2)) |> T.fromZippedArray;
@ -244,8 +363,8 @@ module Range = {
       Belt.Array.set(
         cumulativeY,
         x + 1,
-        (xs[x + 1] -. xs[x])
+        (xs[x + 1] -. xs[x]) // dx
-        *. ((ys[x] +. ys[x + 1]) /. 2.)
+        *. ((ys[x] +. ys[x + 1]) /. 2.) // (1/2) * (avgY)
         +. cumulativeY[x],
       );
       ();
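The two annotated factors implement the trapezoid rule for the running integral: cumulativeY[x + 1] = cumulativeY[x] + (xs[x + 1] - xs[x]) * (ys[x] + ys[x + 1]) / 2, i.e. each segment contributes its width times its average height.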
@ -255,8 +374,31 @@ module Range = {
 
   let derivative = mapYsBasedOnRanges(delta_y_over_delta_x);
 
-  // TODO: It would be nicer if the diff didn't change the first element, and also maybe if there were a more elegant way of doing this.
+  let stepwiseToLinear = ({xs, ys}: T.t): T.t => {
+    // adds points at the bottom of each step.
+    let length = E.A.length(xs);
+    let newXs: array(float) = Belt.Array.makeUninitializedUnsafe(2 * length);
+    let newYs: array(float) = Belt.Array.makeUninitializedUnsafe(2 * length);
+
+    Belt.Array.set(newXs, 0, xs[0] -. epsilon_float) |> ignore;
+    Belt.Array.set(newYs, 0, 0.) |> ignore;
+    Belt.Array.set(newXs, 1, xs[0]) |> ignore;
+    Belt.Array.set(newYs, 1, ys[0]) |> ignore;
+
+    for (i in 1 to E.A.length(xs) - 1) {
+      Belt.Array.set(newXs, i * 2, xs[i] -. epsilon_float) |> ignore;
+      Belt.Array.set(newYs, i * 2, ys[i - 1]) |> ignore;
+      Belt.Array.set(newXs, i * 2 + 1, xs[i]) |> ignore;
+      Belt.Array.set(newYs, i * 2 + 1, ys[i]) |> ignore;
+      ();
+    };
+
+    {xs: newXs, ys: newYs};
+  };
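As a worked example (values assumed): `stepwiseToLinear` turns the stepwise shape xs = [|0., 1.|], ys = [|5., 7.|] into xs = [|0. -. epsilon_float, 0., 1. -. epsilon_float, 1.|], ys = [|0., 5., 5., 7.|]. Each step gets a near-vertical ramp at its left edge, so the result can then be treated as piecewise linear.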
 
+  // TODO: I think this isn't needed by any functions anymore.
   let stepsToContinuous = t => {
+    // TODO: It would be nicer if the diff didn't change the first element, and also maybe if there were a more elegant way of doing this.
     let diff = T.xTotalRange(t) |> (r => r *. 0.00001);
     let items =
       switch (E.A.toRanges(Belt.Array.zip(t.xs, t.ys))) {
@ -265,7 +407,7 @@ module Range = {
       items
       |> Belt.Array.map(_, rangePointAssumingSteps)
       |> T.fromZippedArray
-      |> Combine.intersperse(t |> T.mapX(e => e +. diff)),
+      |> PointwiseCombination.intersperse(t |> T.mapX(e => e +. diff)),
     )
   | _ => Some(t)
   };
@ -287,10 +429,10 @@ let pointLogScore = (prediction, answer) =>
 };
 
 let logScorePoint = (sampleCount, t1, t2) =>
-  Combine.combine(
+  PointwiseCombination.combineEvenXs(
-    ~xsSelection=XS_EVENLY_DIVIDED(sampleCount),
-    ~xToYSelection=XtoY.linear,
     ~fn=pointLogScore,
+    ~xToYSelection=XtoY.linear,
+    sampleCount,
     t1,
     t2,
   )
@ -315,6 +457,7 @@ module Analysis = {
       0.0,
       (acc, _x, i) => {
         let areaUnderIntegral =
+          // TODO Take this switch statement out of the loop body
           switch (t.interpolation, i) {
           | (_, 0) => 0.0
           | (`Stepwise, _) =>

@ -323,6 +466,9 @@ module Analysis = {
           | (`Linear, _) =>
             let x1 = xs[i - 1];
             let x2 = xs[i];
+            if (x1 == x2) {
+              0.0
+            } else {
             let h1 = ys[i - 1];
             let h2 = ys[i];
             let b = (h1 -. h2) /. (x1 -. x2);

@ -330,6 +476,7 @@ module Analysis = {
             indefiniteIntegralLinear(x2, a, b)
             -. indefiniteIntegralLinear(x1, a, b);
+            };
           };
         acc +. areaUnderIntegral;
       },
     );

23  src/distPlus/expressionTree/ExpressionTree.re  Normal file

@ -0,0 +1,23 @@
open ExpressionTypes.ExpressionTree;

let toShape = (intendedShapeLength: int, samplingInputs, node: node) => {
  let renderResult =
    `Render(`Normalize(node))
    |> ExpressionTreeEvaluator.toLeaf({
         samplingInputs,
         intendedShapeLength,
         evaluateNode: ExpressionTreeEvaluator.toLeaf,
       });

  switch (renderResult) {
  | Ok(`RenderedDist(shape)) => Ok(shape)
  | Ok(_) => Error("Rendering failed.")
  | Error(e) => Error(e)
  };
};

let rec toString =
  fun
  | `SymbolicDist(d) => SymbolicDist.T.toString(d)
  | `RenderedDist(_) => "[shape]"
  | op => Operation.T.toString(toString, op);

318  src/distPlus/expressionTree/ExpressionTreeEvaluator.re  Normal file

@ -0,0 +1,318 @@
open ExpressionTypes;
open ExpressionTypes.ExpressionTree;

type t = node;
type tResult = node => result(node, string);

/* Given two random variables A and B, this returns the distribution
   of a new variable that is the result of the operation on A and B.
   For instance, normal(0, 1) + normal(1, 1) -> normal(1, sqrt(2)).
   In general, this is implemented via convolution. */
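As an aside on the comment above: for independent random variables A and B with densities f_A and f_B, the density of the sum is the convolution

  f_{A+B}(z) = \int_{-\infty}^{\infty} f_A(x)\, f_B(z - x)\, dx

and for independent normals the result stays normal, N(\mu_1, \sigma_1) + N(\mu_2, \sigma_2) = N(\mu_1 + \mu_2, \sqrt{\sigma_1^2 + \sigma_2^2}), which is the analytical shortcut `tryAnalyticalSimplification` reaches for below.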
module AlgebraicCombination = {
  let tryAnalyticalSimplification = (operation, t1: t, t2: t) =>
    switch (operation, t1, t2) {
    | (operation, `SymbolicDist(d1), `SymbolicDist(d2)) =>
      switch (SymbolicDist.T.tryAnalyticalSimplification(d1, d2, operation)) {
      | `AnalyticalSolution(symbolicDist) => Ok(`SymbolicDist(symbolicDist))
      | `Error(er) => Error(er)
      | `NoSolution => Ok(`AlgebraicCombination((operation, t1, t2)))
      }
    | _ => Ok(`AlgebraicCombination((operation, t1, t2)))
    };

  let combinationByRendering =
      (evaluationParams, algebraicOp, t1: node, t2: node)
      : result(node, string) => {
    E.R.merge(
      Render.ensureIsRenderedAndGetShape(evaluationParams, t1),
      Render.ensureIsRenderedAndGetShape(evaluationParams, t2),
    )
    |> E.R.fmap(((a, b)) =>
         `RenderedDist(Shape.combineAlgebraically(algebraicOp, a, b))
       );
  };

  // A rough size score used to decide between exact convolution and Monte
  // Carlo sampling: anything bigger than a small discrete shape is treated
  // as expensive to convolve exactly.
  let nodeScore: node => int =
    fun
    | `SymbolicDist(`Float(_)) => 1
    | `SymbolicDist(_) => 1000
    | `RenderedDist(Discrete(m)) => m.xyShape |> XYShape.T.length
    | `RenderedDist(Mixed(_)) => 1000
    | `RenderedDist(Continuous(_)) => 1000
    | _ => 1000;

  // Convolution cost grows with the product of the operand sizes, so fall
  // back to sampling once that product is large.
  let choose = (t1: node, t2: node) => {
    nodeScore(t1) * nodeScore(t2) > 10000 ? `Sampling : `Analytical;
  };

  let combine =
      (evaluationParams, algebraicOp, t1: node, t2: node)
      : result(node, string) => {
    E.R.merge(
      SamplingDistribution.renderIfIsNotSamplingDistribution(
        evaluationParams,
        t1,
      ),
      SamplingDistribution.renderIfIsNotSamplingDistribution(
        evaluationParams,
        t2,
      ),
    )
    |> E.R.bind(_, ((a, b)) =>
         switch (choose(a, b)) {
         | `Sampling =>
           SamplingDistribution.combineShapesUsingSampling(
             evaluationParams,
             algebraicOp,
             a,
             b,
           )
         | `Analytical =>
           combinationByRendering(evaluationParams, algebraicOp, a, b)
         }
       );
  };

  let operationToLeaf =
      (
        evaluationParams: evaluationParams,
        algebraicOp: ExpressionTypes.algebraicOperation,
        t1: t,
        t2: t,
      )
      : result(node, string) =>
    algebraicOp
    |> tryAnalyticalSimplification(_, t1, t2)
    |> E.R.bind(
         _,
         fun
         | `SymbolicDist(_) as t => Ok(t)
         | _ => combine(evaluationParams, algebraicOp, t1, t2),
       );
};
|
||||||
|
let operationToLeaf =
|
||||||
|
(evaluationParams: evaluationParams, scaleOp, t, scaleBy) => {
|
||||||
|
// scaleBy has to be a single float, otherwise we'll return an error.
|
||||||
|
let fn = Operation.Scale.toFn(scaleOp);
|
||||||
|
let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(scaleOp);
|
||||||
|
let integralCacheFn = Operation.Scale.toIntegralCacheFn(scaleOp);
|
||||||
|
let renderedShape = Render.render(evaluationParams, t);
|
||||||
|
|
||||||
|
switch (renderedShape, scaleBy) {
|
||||||
|
| (Ok(`RenderedDist(rs)), `SymbolicDist(`Float(sm))) =>
|
||||||
|
Ok(
|
||||||
|
`RenderedDist(
|
||||||
|
Shape.T.mapY(
|
||||||
|
~integralSumCacheFn=integralSumCacheFn(sm),
|
||||||
|
~integralCacheFn=integralCacheFn(sm),
|
||||||
|
~fn=fn(sm),
|
||||||
|
rs,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
| (Error(e1), _) => Error(e1)
|
||||||
|
| (_, _) => Error("Can only scale by float values.")
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
module PointwiseCombination = {
  let pointwiseAdd = (evaluationParams: evaluationParams, t1: t, t2: t) => {
    switch (
      Render.render(evaluationParams, t1),
      Render.render(evaluationParams, t2),
    ) {
    | (Ok(`RenderedDist(rs1)), Ok(`RenderedDist(rs2))) =>
      Ok(
        `RenderedDist(
          Shape.combinePointwise(
            ~integralSumCachesFn=(a, b) => Some(a +. b),
            ~integralCachesFn=
              (a, b) =>
                Some(
                  Continuous.combinePointwise(
                    ~distributionType=`CDF,
                    (+.),
                    a,
                    b,
                  ),
                ),
            (+.),
            rs1,
            rs2,
          ),
        ),
      )
    | (Error(e1), _) => Error(e1)
    | (_, Error(e2)) => Error(e2)
    | _ => Error("Pointwise combination: rendering failed.")
    };
  };

  let pointwiseMultiply = (evaluationParams: evaluationParams, t1: t, t2: t) => {
    // TODO: construct a function that we can easily sample from, to construct
    // a RenderedDist. Use the xMin and xMax of the rendered shapes to tell the sampling function where to look.
    // TODO: This should work for symbolic distributions too!
    switch (
      Render.render(evaluationParams, t1),
      Render.render(evaluationParams, t2),
    ) {
    | (Ok(`RenderedDist(rs1)), Ok(`RenderedDist(rs2))) =>
      Ok(`RenderedDist(Shape.combinePointwise(( *. ), rs1, rs2)))
    | (Error(e1), _) => Error(e1)
    | (_, Error(e2)) => Error(e2)
    | _ => Error("Pointwise combination: rendering failed.")
    };
  };

  let operationToLeaf =
      (
        evaluationParams: evaluationParams,
        pointwiseOp: pointwiseOperation,
        t1: t,
        t2: t,
      ) => {
    switch (pointwiseOp) {
    | `Add => pointwiseAdd(evaluationParams, t1, t2)
    | `Multiply => pointwiseMultiply(evaluationParams, t1, t2)
    };
  };
};
module Truncate = {
  let trySimplification = (leftCutoff, rightCutoff, t): simplificationResult => {
    switch (leftCutoff, rightCutoff, t) {
    | (None, None, t) => `Solution(t)
    | (Some(lc), Some(rc), _) when lc > rc =>
      `Error(
        "Left truncation bound must be smaller than right truncation bound.",
      )
    | (lc, rc, `SymbolicDist(`Uniform(u))) =>
      `Solution(
        `SymbolicDist(`Uniform(SymbolicDist.Uniform.truncate(lc, rc, u))),
      )
    | _ => `NoSolution
    };
  };

  let truncateAsShape =
      (evaluationParams: evaluationParams, leftCutoff, rightCutoff, t) => {
    // TODO: use named args for xMin/xMax in renderToShape; if we're lucky we can at least get the tail
    // of a distribution we otherwise wouldn't get at all
    switch (Render.ensureIsRendered(evaluationParams, t)) {
    | Ok(`RenderedDist(rs)) =>
      Ok(`RenderedDist(Shape.T.truncate(leftCutoff, rightCutoff, rs)))
    | Error(e) => Error(e)
    | _ => Error("Could not truncate distribution.")
    };
  };

  let operationToLeaf =
      (
        evaluationParams,
        leftCutoff: option(float),
        rightCutoff: option(float),
        t: node,
      )
      : result(node, string) => {
    t
    |> trySimplification(leftCutoff, rightCutoff)
    |> (
      fun
      | `Solution(t) => Ok(t)
      | `Error(e) => Error(e)
      | `NoSolution =>
        truncateAsShape(evaluationParams, leftCutoff, rightCutoff, t)
    );
  };
};
module Normalize = {
  let rec operationToLeaf = (evaluationParams, t: node): result(node, string) => {
    switch (t) {
    | `RenderedDist(s) => Ok(`RenderedDist(Shape.T.normalize(s)))
    | `SymbolicDist(_) => Ok(t)
    | _ => evaluateAndRetry(evaluationParams, operationToLeaf, t)
    };
  };
};

module FloatFromDist = {
  let rec operationToLeaf =
          (evaluationParams, distToFloatOp: distToFloatOperation, t: node)
          : result(node, string) => {
    switch (t) {
    | `SymbolicDist(s) =>
      SymbolicDist.T.operate(distToFloatOp, s)
      |> E.R.bind(_, v => Ok(`SymbolicDist(`Float(v))))
    | `RenderedDist(rs) =>
      Shape.operate(distToFloatOp, rs)
      |> (v => Ok(`SymbolicDist(`Float(v))))
    | _ =>
      t
      |> evaluateAndRetry(evaluationParams, r =>
           operationToLeaf(r, distToFloatOp)
         )
    };
  };
};

module Render = {
  let rec operationToLeaf =
          (evaluationParams: evaluationParams, t: node): result(t, string) => {
    switch (t) {
    | `SymbolicDist(d) =>
      Ok(
        `RenderedDist(
          SymbolicDist.T.toShape(evaluationParams.intendedShapeLength, d),
        ),
      )
    | `RenderedDist(_) as t => Ok(t) // already a rendered shape, we're done here
    | _ => evaluateAndRetry(evaluationParams, operationToLeaf, t)
    };
  };
};
/* This function recursively goes through the nodes of the parse tree,
   replacing each Operation node and its subtree with a Data node.
   Whenever possible, the replacement produces a new Symbolic Data node,
   but most often it will produce a RenderedDist.
   This function is used mainly to turn a parse tree into a single RenderedDist
   that can then be displayed to the user. */
let toLeaf =
    (
      evaluationParams: ExpressionTypes.ExpressionTree.evaluationParams,
      node: t,
    )
    : result(t, string) => {
  switch (node) {
  // Leaf nodes just stay leaf nodes
  | `SymbolicDist(_)
  | `RenderedDist(_) => Ok(node)
  // Operation nodes need to be turned into leaves
  | `AlgebraicCombination(algebraicOp, t1, t2) =>
    AlgebraicCombination.operationToLeaf(
      evaluationParams,
      algebraicOp,
      t1,
      t2,
    )
  | `PointwiseCombination(pointwiseOp, t1, t2) =>
    PointwiseCombination.operationToLeaf(
      evaluationParams,
      pointwiseOp,
      t1,
      t2,
    )
  | `VerticalScaling(scaleOp, t, scaleBy) =>
    VerticalScaling.operationToLeaf(evaluationParams, scaleOp, t, scaleBy)
  | `Truncate(leftCutoff, rightCutoff, t) =>
    Truncate.operationToLeaf(evaluationParams, leftCutoff, rightCutoff, t)
  | `FloatFromDist(distToFloatOp, t) =>
    FloatFromDist.operationToLeaf(evaluationParams, distToFloatOp, t)
  | `Normalize(t) => Normalize.operationToLeaf(evaluationParams, t)
  | `Render(t) => Render.operationToLeaf(evaluationParams, t)
  };
};
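A usage sketch with assumed numeric values: building a small tree by hand and reducing it to a rendered shape. `evaluateNode: toLeaf` ties the recursive knot, just as `ExpressionTree.toShape` does above.

  let tree: ExpressionTypes.ExpressionTree.node =
    `AlgebraicCombination((
      `Add,
      `SymbolicDist(`Normal({mean: 0., stdev: 1.})),
      `SymbolicDist(`Normal({mean: 1., stdev: 1.})),
    ));

  let params: ExpressionTypes.ExpressionTree.evaluationParams = {
    samplingInputs: {
      sampleCount: 10000, // assumed values, not defaults from the PR
      outputXYPoints: 1000,
      kernelWidth: None,
    },
    intendedShapeLength: 1000,
    evaluateNode: toLeaf,
  };

  let reduced = toLeaf(params, `Render(tree));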
82  src/distPlus/expressionTree/ExpressionTypes.re  Normal file

@ -0,0 +1,82 @@
type algebraicOperation = [ | `Add | `Multiply | `Subtract | `Divide];
type pointwiseOperation = [ | `Add | `Multiply];
type scaleOperation = [ | `Multiply | `Exponentiate | `Log];
type distToFloatOperation = [
  | `Pdf(float)
  | `Cdf(float)
  | `Inv(float)
  | `Mean
  | `Sample
];

module ExpressionTree = {
  type node = [
    | `SymbolicDist(SymbolicTypes.symbolicDist)
    | `RenderedDist(DistTypes.shape)
    | `AlgebraicCombination(algebraicOperation, node, node)
    | `PointwiseCombination(pointwiseOperation, node, node)
    | `VerticalScaling(scaleOperation, node, node)
    | `Render(node)
    | `Truncate(option(float), option(float), node)
    | `Normalize(node)
    | `FloatFromDist(distToFloatOperation, node)
  ];

  type samplingInputs = {
    sampleCount: int,
    outputXYPoints: int,
    kernelWidth: option(float),
  };

  type evaluationParams = {
    samplingInputs,
    intendedShapeLength: int,
    evaluateNode: (evaluationParams, node) => Belt.Result.t(node, string),
  };

  let evaluateNode = (evaluationParams: evaluationParams) =>
    evaluationParams.evaluateNode(evaluationParams);

  let evaluateAndRetry = (evaluationParams, fn, node) =>
    node
    |> evaluationParams.evaluateNode(evaluationParams)
    |> E.R.bind(_, fn(evaluationParams));

  module Render = {
    type t = node;

    let render = (evaluationParams: evaluationParams, r) =>
      `Render(r) |> evaluateNode(evaluationParams);

    let ensureIsRendered = (params, t) =>
      switch (t) {
      | `RenderedDist(_) => Ok(t)
      | _ =>
        switch (render(params, t)) {
        | Ok(`RenderedDist(r)) => Ok(`RenderedDist(r))
        | Ok(_) => Error("Did not render as requested")
        | Error(e) => Error(e)
        }
      };

    let ensureIsRenderedAndGetShape = (params, t) =>
      switch (ensureIsRendered(params, t)) {
      | Ok(`RenderedDist(r)) => Ok(r)
      | Ok(_) => Error("Did not render as requested")
      | Error(e) => Error(e)
      };

    let getShape = (item: node) =>
      switch (item) {
      | `RenderedDist(r) => Some(r)
      | _ => None
      };
  };
};

type simplificationResult = [
  | `Solution(ExpressionTree.node)
  | `Error(string)
  | `NoSolution
];

392  src/distPlus/expressionTree/MathJsParser.re  Normal file

@ -0,0 +1,392 @@
module MathJsonToMathJsAdt = {
  type arg =
    | Symbol(string)
    | Value(float)
    | Fn(fn)
    | Array(array(arg))
    | Object(Js.Dict.t(arg))
  and fn = {
    name: string,
    args: array(arg),
  };

  let rec run = (j: Js.Json.t) =>
    Json.Decode.(
      switch (field("mathjs", string, j)) {
      | "FunctionNode" =>
        let args = j |> field("args", array(run));
        let name = j |> optional(field("fn", field("name", string)));
        name |> E.O.fmap(name => Fn({name, args: args |> E.A.O.concatSomes}));
      | "OperatorNode" =>
        let args = j |> field("args", array(run));
        Some(
          Fn({
            name: j |> field("fn", string),
            args: args |> E.A.O.concatSomes,
          }),
        );
      | "ConstantNode" =>
        optional(field("value", Json.Decode.float), j)
        |> E.O.fmap(r => Value(r))
      | "ParenthesisNode" => j |> field("content", run)
      | "ObjectNode" =>
        let properties = j |> field("properties", dict(run));
        Js.Dict.entries(properties)
        |> E.A.fmap(((key, value)) => value |> E.O.fmap(v => (key, v)))
        |> E.A.O.concatSomes
        |> Js.Dict.fromArray
        |> (r => Some(Object(r)));
      | "ArrayNode" =>
        let items = field("items", array(run), j);
        Some(Array(items |> E.A.O.concatSomes));
      | "SymbolNode" => Some(Symbol(field("name", string, j)))
      | n =>
        Js.log3("Couldn't parse mathjs node", j, n);
        None;
      }
    );
};
module MathAdtToDistDst = {
  open MathJsonToMathJsAdt;

  module MathAdtCleaner = {
    let transformWithSymbol = (f: float, s: string) =>
      switch (s) {
      | "K"
      | "k" => f *. 1000.
      | "M"
      | "m" => f *. 1000000.
      | "B"
      | "b" => f *. 1000000000.
      | "T"
      | "t" => f *. 1000000000000.
      | _ => f
      };

    let rec run =
      fun
      | Fn({name: "multiply", args: [|Value(f), Symbol(s)|]}) =>
        Value(transformWithSymbol(f, s))
      | Fn({name: "unaryMinus", args: [|Value(f)|]}) => Value((-1.0) *. f)
      | Fn({name, args}) => Fn({name, args: args |> E.A.fmap(run)})
      | Array(args) => Array(args |> E.A.fmap(run))
      | Symbol(s) => Symbol(s)
      | Value(v) => Value(v)
      | Object(v) =>
        Object(
          v
          |> Js.Dict.entries
          |> E.A.fmap(((key, value)) => (key, run(value)))
          |> Js.Dict.fromArray,
        );
  };
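  Illustrative expectations for the cleaner (assumed inputs, not tests from this PR):

    // transformWithSymbol(5., "M") == 5000000.
    // run(Fn({name: "multiply", args: [|Value(5.), Symbol("M")|]}))
    //   == Value(5000000.)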
  let normal:
    array(arg) => result(ExpressionTypes.ExpressionTree.node, string) =
    fun
    | [|Value(mean), Value(stdev)|] =>
      Ok(`SymbolicDist(`Normal({mean, stdev})))
    | _ => Error("Wrong number of variables in normal distribution");

  let lognormal:
    array(arg) => result(ExpressionTypes.ExpressionTree.node, string) =
    fun
    | [|Value(mu), Value(sigma)|] =>
      Ok(`SymbolicDist(`Lognormal({mu, sigma})))
    | [|Object(o)|] => {
        let g = Js.Dict.get(o);
        switch (g("mean"), g("stdev"), g("mu"), g("sigma")) {
        | (Some(Value(mean)), Some(Value(stdev)), _, _) =>
          Ok(
            `SymbolicDist(
              SymbolicDist.Lognormal.fromMeanAndStdev(mean, stdev),
            ),
          )
        | (_, _, Some(Value(mu)), Some(Value(sigma))) =>
          Ok(`SymbolicDist(`Lognormal({mu, sigma})))
        | _ => Error("Lognormal distribution would need mean and stdev")
        };
      }
    | _ => Error("Wrong number of variables in lognormal distribution");

  let to_: array(arg) => result(ExpressionTypes.ExpressionTree.node, string) =
    fun
    | [|Value(low), Value(high)|] when low <= 0.0 && low < high => {
        Ok(`SymbolicDist(SymbolicDist.Normal.from90PercentCI(low, high)));
      }
    | [|Value(low), Value(high)|] when low < high => {
        Ok(
          `SymbolicDist(SymbolicDist.Lognormal.from90PercentCI(low, high)),
        );
      }
    | [|Value(_), Value(_)|] =>
      Error("Low value must be less than high value.")
    | _ => Error("Wrong number of variables in 'to' distribution");
  let uniform:
    array(arg) => result(ExpressionTypes.ExpressionTree.node, string) =
    fun
    | [|Value(low), Value(high)|] =>
      Ok(`SymbolicDist(`Uniform({low, high})))
    | _ => Error("Wrong number of variables in uniform distribution");

  let beta: array(arg) => result(ExpressionTypes.ExpressionTree.node, string) =
    fun
    | [|Value(alpha), Value(beta)|] =>
      Ok(`SymbolicDist(`Beta({alpha, beta})))
    | _ => Error("Wrong number of variables in beta distribution");

  let exponential:
    array(arg) => result(ExpressionTypes.ExpressionTree.node, string) =
    fun
    | [|Value(rate)|] => Ok(`SymbolicDist(`Exponential({rate: rate})))
    | _ => Error("Wrong number of variables in exponential distribution");

  let cauchy:
    array(arg) => result(ExpressionTypes.ExpressionTree.node, string) =
    fun
    | [|Value(local), Value(scale)|] =>
      Ok(`SymbolicDist(`Cauchy({local, scale})))
    | _ => Error("Wrong number of variables in cauchy distribution");

  let triangular:
    array(arg) => result(ExpressionTypes.ExpressionTree.node, string) =
    fun
    | [|Value(low), Value(medium), Value(high)|]
        when low < medium && medium < high =>
      Ok(`SymbolicDist(`Triangular({low, medium, high})))
    | [|Value(_), Value(_), Value(_)|] =>
      Error("Triangular values must be in increasing order")
    | _ => Error("Wrong number of variables in triangular distribution");
  let multiModal =
      (
        args: array(result(ExpressionTypes.ExpressionTree.node, string)),
        weights: option(array(float)),
      ) => {
    let weights = weights |> E.O.default([||]);

    /*let dists: =
      args
      |> E.A.fmap(
           fun
           | Ok(a) => a
           | Error(e) => Error(e)
         );*/

    let firstWithError = args |> Belt.Array.getBy(_, Belt.Result.isError);
    let withoutErrors = args |> E.A.fmap(E.R.toOption) |> E.A.O.concatSomes;

    switch (firstWithError) {
    | Some(Error(e)) => Error(e)
    | None when withoutErrors |> E.A.length == 0 =>
      Error("Multimodals need at least one input")
    | _ =>
      let components =
        withoutErrors
        |> E.A.fmapi((index, t) => {
             let w = weights |> E.A.get(_, index) |> E.O.default(1.0);

             `VerticalScaling((`Multiply, t, `SymbolicDist(`Float(w))));
           });

      let pointwiseSum =
        components
        |> Js.Array.sliceFrom(1)
        |> E.A.fold_left(
             (acc, x) => {`PointwiseCombination((`Add, acc, x))},
             E.A.unsafe_get(components, 0),
           );

      Ok(`Normalize(pointwiseSum));
    };
  };
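  For concreteness, the rough shape of the tree `multiModal` builds for a hypothetical input `mm(normal(0, 1), uniform(0, 10), [0.5, 0.5])`:

    // `Normalize(
    //   `PointwiseCombination((
    //     `Add,
    //     `VerticalScaling((`Multiply, `SymbolicDist(`Normal({mean: 0., stdev: 1.})), `SymbolicDist(`Float(0.5)))),
    //     `VerticalScaling((`Multiply, `SymbolicDist(`Uniform({low: 0., high: 10.})), `SymbolicDist(`Float(0.5)))),
    //   )),
    // )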
  // let arrayParser =
  //     (args: array(arg))
  //     : result(ExpressionTypes.ExpressionTree.node, string) => {
  //   let samples =
  //     args
  //     |> E.A.fmap(
  //          fun
  //          | Value(n) => Some(n)
  //          | _ => None,
  //        )
  //     |> E.A.O.concatSomes;
  //   let outputs = Samples.T.fromSamples(samples);
  //   let pdf =
  //     outputs.shape |> E.O.bind(_, Shape.T.toContinuous);
  //   let shape =
  //     pdf
  //     |> E.O.fmap(pdf => {
  //          let _pdf = Continuous.T.normalize(pdf);
  //          let cdf = Continuous.T.integral(~cache=None, _pdf);
  //          SymbolicDist.ContinuousShape.make(_pdf, cdf);
  //        });
  //   switch (shape) {
  //   | Some(s) => Ok(`SymbolicDist(`ContinuousShape(s)))
  //   | None => Error("Rendering did not work")
  //   };
  // };
  let operationParser =
      (
        name: string,
        args: array(result(ExpressionTypes.ExpressionTree.node, string)),
      ) => {
    let toOkAlgebraic = r => Ok(`AlgebraicCombination(r));
    let toOkPointwise = r => Ok(`PointwiseCombination(r));
    let toOkTruncate = r => Ok(`Truncate(r));
    let toOkFloatFromDist = r => Ok(`FloatFromDist(r));
    switch (name, args) {
    | ("add", [|Ok(l), Ok(r)|]) => toOkAlgebraic((`Add, l, r))
    | ("add", _) => Error("Addition needs two operands")
    | ("subtract", [|Ok(l), Ok(r)|]) => toOkAlgebraic((`Subtract, l, r))
    | ("subtract", _) => Error("Subtraction needs two operands")
    | ("multiply", [|Ok(l), Ok(r)|]) => toOkAlgebraic((`Multiply, l, r))
    | ("multiply", _) => Error("Multiplication needs two operands")
    | ("dotMultiply", [|Ok(l), Ok(r)|]) => toOkPointwise((`Multiply, l, r))
    | ("dotMultiply", _) =>
      Error("Dotwise multiplication needs two operands")
    | ("rightLogShift", [|Ok(l), Ok(r)|]) => toOkPointwise((`Add, l, r))
    | ("rightLogShift", _) => Error("Dotwise addition needs two operands")
    | ("divide", [|Ok(l), Ok(r)|]) => toOkAlgebraic((`Divide, l, r))
    | ("divide", _) => Error("Division needs two operands")
    | ("pow", _) => Error("Exponentiation is not yet supported.")
    | ("leftTruncate", [|Ok(d), Ok(`SymbolicDist(`Float(lc)))|]) =>
      toOkTruncate((Some(lc), None, d))
    | ("leftTruncate", _) =>
      Error("leftTruncate needs two arguments: the expression and the cutoff")
    | ("rightTruncate", [|Ok(d), Ok(`SymbolicDist(`Float(rc)))|]) =>
      toOkTruncate((None, Some(rc), d))
    | ("rightTruncate", _) =>
      Error(
        "rightTruncate needs two arguments: the expression and the cutoff",
      )
    | (
        "truncate",
        [|
          Ok(d),
          Ok(`SymbolicDist(`Float(lc))),
          Ok(`SymbolicDist(`Float(rc))),
        |],
      ) =>
      toOkTruncate((Some(lc), Some(rc), d))
    | ("truncate", _) =>
      Error("truncate needs three arguments: the expression and both cutoffs")
    | ("pdf", [|Ok(d), Ok(`SymbolicDist(`Float(v)))|]) =>
      toOkFloatFromDist((`Pdf(v), d))
    | ("cdf", [|Ok(d), Ok(`SymbolicDist(`Float(v)))|]) =>
      toOkFloatFromDist((`Cdf(v), d))
    | ("inv", [|Ok(d), Ok(`SymbolicDist(`Float(v)))|]) =>
      toOkFloatFromDist((`Inv(v), d))
    | ("mean", [|Ok(d)|]) => toOkFloatFromDist((`Mean, d))
    | ("sample", [|Ok(d)|]) => toOkFloatFromDist((`Sample, d))
    | _ => Error("This type not currently supported")
    };
  };
  let functionParser = (nodeParser, name, args) => {
    let parseArgs = () => args |> E.A.fmap(nodeParser);
    switch (name) {
    | "normal" => normal(args)
    | "lognormal" => lognormal(args)
    | "uniform" => uniform(args)
    | "beta" => beta(args)
    | "to" => to_(args)
    | "exponential" => exponential(args)
    | "cauchy" => cauchy(args)
    | "triangular" => triangular(args)
    | "mm" =>
      let weights =
        args
        |> E.A.last
        |> E.O.bind(
             _,
             fun
             | Array(values) => Some(values)
             | _ => None,
           )
        |> E.O.fmap(o =>
             o
             |> E.A.fmap(
                  fun
                  | Value(r) => Some(r)
                  | _ => None,
                )
             |> E.A.O.concatSomes
           );
      let possibleDists =
        E.O.isSome(weights)
          ? Belt.Array.slice(args, ~offset=0, ~len=E.A.length(args) - 1)
          : args;
      let dists = possibleDists |> E.A.fmap(nodeParser);
      multiModal(dists, weights);
    | "add"
    | "subtract"
    | "multiply"
    | "dotMultiply"
    | "rightLogShift"
    | "divide"
    | "pow"
    | "leftTruncate"
    | "rightTruncate"
    | "truncate"
    | "mean"
    | "inv"
    | "sample"
    | "cdf"
    | "pdf" => operationParser(name, parseArgs())
    | n => Error(n ++ "(...) is not currently supported")
    };
  };
  let rec nodeParser =
    fun
    | Value(f) => Ok(`SymbolicDist(`Float(f)))
    | Fn({name, args}) => functionParser(nodeParser, name, args)
    | _ => {
        Error("This type not currently supported");
      };

  let topLevel =
    fun
    | Value(_) as r => nodeParser(r)
    | Fn(_) as r => nodeParser(r)
    | Array(_) => Error("Array not valid as top level")
    | Symbol(_) => Error("Symbol not valid as top level")
    | Object(_) => Error("Object not valid as top level");

  let run = (r): result(ExpressionTypes.ExpressionTree.node, string) =>
    r |> MathAdtCleaner.run |> topLevel;
};
/* The MathJs parser doesn't support '.+' syntax, but we want it because it
   would make sense with '.*'. Our workaround is to change this to >>>, which is
   logShift in mathJS. We don't expect to use logShift anytime soon, so this tradeoff
   seems fine.
 */
let pointwiseToRightLogShift = Js.String.replaceByRe([%re "/\\.\\+/g"], ">>>");

let fromString = str => {
  /* We feed the user-typed string into Mathjs.parseMath,
     which returns a JSON with (hopefully) a single-element array.
     This array element is the top-level node of a nested-object tree
     representing the functions/arguments/values/etc. in the string.

     The function MathJsonToMathJsAdt then recursively unpacks this JSON into a typed data structure we can use.
     Inside of this function, MathAdtToDistDst is called whenever a distribution function is encountered.
   */
  let mathJsToJson = str |> pointwiseToRightLogShift |> Mathjs.parseMath;
  let mathJsParse =
    E.R.bind(mathJsToJson, r => {
      switch (MathJsonToMathJsAdt.run(r)) {
      | Some(r) => Ok(r)
      | None => Error("MathJsParse Error")
      }
    });

  let value = E.R.bind(mathJsParse, MathAdtToDistDst.run);
  value;
};
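A usage sketch (hypothetical input string; the result is an unevaluated expression-tree node, not yet a shape):

  let parsed: result(ExpressionTypes.ExpressionTree.node, string) =
    MathJsParser.fromString("mm(normal(5, 1), uniform(0, 10), [0.5, 0.5])");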
9  src/distPlus/expressionTree/MathjsWrapper.js  Normal file

@ -0,0 +1,9 @@

const math = require("mathjs");

function parseMath(f) {
  return JSON.parse(JSON.stringify(math.parse(f)));
}

module.exports = {
  parseMath,
};
src/distPlus/expressionTree/Operation.re
Normal file
101
src/distPlus/expressionTree/Operation.re
Normal file
|
@ -0,0 +1,101 @@
|
||||||
|
open ExpressionTypes;
|
||||||
|
|
||||||
|
+module Algebraic = {
+  type t = algebraicOperation;
+  let toFn: (t, float, float) => float =
+    fun
+    | `Add => (+.)
+    | `Subtract => (-.)
+    | `Multiply => ( *. )
+    | `Divide => (/.);
+
+  let applyFn = (t, f1, f2) => {
+    switch (t, f1, f2) {
+    | (`Divide, _, 0.) => Error({j|Cannot divide $f1 by zero.|j}) // plain strings don't interpolate; {j|...|j} does
+    | _ => Ok(toFn(t, f1, f2))
+    };
+  };
+
+  let toString =
+    fun
+    | `Add => "+"
+    | `Subtract => "-"
+    | `Multiply => "*"
+    | `Divide => "/";
+
+  let format = (a, b, c) => b ++ " " ++ toString(a) ++ " " ++ c;
+};
+
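A small usage sketch (values are illustrative):

  Operation.Algebraic.applyFn(`Divide, 5.0, 0.0);
  // => Error("Cannot divide 5 by zero.")
  Operation.Algebraic.format(`Add, "normal(5,2)", "uniform(0,1)");
  // => "normal(5,2) + uniform(0,1)"
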
+module Pointwise = {
+  type t = pointwiseOperation;
+  let toString =
+    fun
+    | `Add => "+"
+    | `Multiply => "*";
+
+  let format = (a, b, c) => b ++ " " ++ toString(a) ++ " " ++ c;
+};
+
+module DistToFloat = {
+  type t = distToFloatOperation;
+
+  let format = (operation, value) =>
+    switch (operation) {
+    | `Cdf(f) => {j|cdf(x=$f,$value)|j}
+    | `Pdf(f) => {j|pdf(x=$f,$value)|j}
+    | `Inv(f) => {j|inv(x=$f,$value)|j}
+    | `Sample => {j|sample($value)|j}
+    | `Mean => {j|mean($value)|j}
+    };
+};
+
+module Scale = {
+  type t = scaleOperation;
+  let toFn =
+    fun
+    | `Multiply => ( *. )
+    | `Exponentiate => ( ** )
+    | `Log => ((a, b) => log(a) /. log(b));
+
+  let format = (operation: t, value, scaleBy) =>
+    switch (operation) {
+    | `Multiply => {j|verticalMultiply($value, $scaleBy) |j}
+    | `Exponentiate => {j|verticalExponentiate($value, $scaleBy) |j}
+    | `Log => {j|verticalLog($value, $scaleBy) |j}
+    };
+
+  let toIntegralSumCacheFn =
+    fun
+    | `Multiply => ((a, b) => Some(a *. b))
+    | `Exponentiate => ((_, _) => None)
+    | `Log => ((_, _) => None);
+
+  let toIntegralCacheFn =
+    fun
+    | `Multiply => ((_, _) => None) // TODO: this could probably just be multiplied out (using Continuous.scaleBy)
+    | `Exponentiate => ((_, _) => None)
+    | `Log => ((_, _) => None);
+};
+
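Note that `Log scales by taking the log of the value in base scaleBy; a quick worked check (numbers illustrative):

  Operation.Scale.toFn(`Log, 8.0, 2.0); // log(8) /. log(2) == 3.0
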
+module T = {
+  let truncateToString =
+      (left: option(float), right: option(float), nodeToString) => {
+    let left = left |> E.O.dimap(Js.Float.toString, () => "-inf");
+    let right = right |> E.O.dimap(Js.Float.toString, () => "inf");
+    {j|truncate($nodeToString, $left, $right)|j};
+  };
+  let toString = nodeToString =>
+    fun
+    | `AlgebraicCombination(op, t1, t2) =>
+      Algebraic.format(op, nodeToString(t1), nodeToString(t2))
+    | `PointwiseCombination(op, t1, t2) =>
+      Pointwise.format(op, nodeToString(t1), nodeToString(t2))
+    | `VerticalScaling(scaleOp, t, scaleBy) =>
+      Scale.format(scaleOp, nodeToString(t), nodeToString(scaleBy))
+    | `Normalize(t) => "normalize(" ++ nodeToString(t) ++ ")"
+    | `FloatFromDist(floatFromDistOp, t) =>
+      DistToFloat.format(floatFromDistOp, nodeToString(t))
+    | `Truncate(lc, rc, t) => truncateToString(lc, rc, nodeToString(t))
+    | `Render(t) => nodeToString(t)
+    | _ => ""; // SymbolicDist and RenderedDist are handled in ExpressionTree.toString.
+};
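For example (an illustrative call), a one-sided truncation prints "inf" on the open side:

  Operation.T.truncateToString(Some(1.0), None, "normal(5,2)");
  // => "truncate(normal(5,2), 1, inf)"
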
src/distPlus/expressionTree/SamplingDistribution.re (new file)
@@ -0,0 +1,78 @@
+open ExpressionTypes.ExpressionTree;
+
+let isSamplingDistribution: node => bool =
+  fun
+  | `SymbolicDist(_) => true
+  | `RenderedDist(_) => true
+  | _ => false;
+
+let renderIfIsNotSamplingDistribution = (params, t): result(node, string) =>
+  !isSamplingDistribution(t)
+    ? switch (Render.render(params, t)) {
+      | Ok(r) => Ok(r)
+      | Error(e) => Error(e)
+      }
+    : Ok(t);
+
+let map = (~renderedDistFn, ~symbolicDistFn, node: node) =>
+  node
+  |> (
+    fun
+    | `RenderedDist(r) => Some(renderedDistFn(r))
+    | `SymbolicDist(s) => Some(symbolicDistFn(s))
+    | _ => None
+  );
+
+let sampleN = n =>
+  map(
+    ~renderedDistFn=Shape.sampleNRendered(n),
+    ~symbolicDistFn=SymbolicDist.T.sampleN(n),
+  );
+
+let getCombinationSamples = (n, algebraicOp, t1: node, t2: node) => {
+  switch (sampleN(n, t1), sampleN(n, t2)) {
+  | (Some(a), Some(b)) =>
+    Some(
+      Belt.Array.zip(a, b)
+      |> E.A.fmap(((a, b)) => Operation.Algebraic.toFn(algebraicOp, a, b)),
+    )
+  | _ => None
+  };
+};
+
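So an algebraic combination can fall back to Monte Carlo: draw n samples from each operand and combine them pairwise. A hedged sketch (the distributions and n are illustrative):

  let sum =
    getCombinationSamples(
      1000,
      `Add,
      `SymbolicDist(`Normal({mean: 0., stdev: 1.})),
      `SymbolicDist(`Normal({mean: 5., stdev: 2.})),
    );
  // sum: option(array(float)); the samples are distributed ~Normal(5, sqrt(5))
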
+let combineShapesUsingSampling =
+    (evaluationParams: evaluationParams, algebraicOp, t1: node, t2: node) => {
+  let i1 = renderIfIsNotSamplingDistribution(evaluationParams, t1);
+  let i2 = renderIfIsNotSamplingDistribution(evaluationParams, t2);
+  E.R.merge(i1, i2)
+  |> E.R.bind(
+       _,
+       ((a, b)) => {
+         let samples =
+           getCombinationSamples(
+             evaluationParams.samplingInputs.sampleCount,
+             algebraicOp,
+             a,
+             b,
+           );
+
+         // todo: This bottom part should probably be somewhere else.
+         let shape =
+           samples
+           |> E.O.fmap(
+                Samples.T.fromSamples(
+                  ~samplingInputs={
+                    sampleCount:
+                      Some(evaluationParams.samplingInputs.sampleCount),
+                    outputXYPoints:
+                      Some(evaluationParams.samplingInputs.outputXYPoints),
+                    kernelWidth: evaluationParams.samplingInputs.kernelWidth,
+                  },
+                ),
+              )
+           |> E.O.bind(_, r => r.shape)
+           |> E.O.toResult("No response");
+         shape |> E.R.fmap(r => `Normalize(`RenderedDist(r)));
+       },
+     );
+};

@@ -1,28 +1,14 @@
-let truncateIfShould =
-    (
-      {recommendedLength, shouldTruncate}: RenderTypes.DistPlusRenderer.inputs,
-      outputs: RenderTypes.ShapeRenderer.Combined.outputs,
-      dist,
-    ) => {
-  let willTruncate =
-    shouldTruncate
-    && RenderTypes.ShapeRenderer.Combined.methodUsed(outputs) == `Sampling;
-  willTruncate ? dist |> Distributions.DistPlus.T.truncate(recommendedLength) : dist;
-};
-
-let run =
-    (inputs: RenderTypes.DistPlusRenderer.inputs)
-    : RenderTypes.DistPlusRenderer.outputs => {
+let run = (inputs: RenderTypes.DistPlusRenderer.inputs) => {
   let toDist = shape =>
-    Distributions.DistPlus.make(
+    DistPlus.make(
       ~shape,
       ~domain=inputs.distPlusIngredients.domain,
       ~unit=inputs.distPlusIngredients.unit,
       ~guesstimatorString=Some(inputs.distPlusIngredients.guesstimatorString),
       (),
     )
-    |> Distributions.DistPlus.T.scaleToIntegralSum(~intendedSum=1.0);
-  let outputs =
+    |> DistPlus.T.normalize;
+  let output =
     ShapeRenderer.run({
       samplingInputs: inputs.samplingInputs,
       guesstimatorString: inputs.distPlusIngredients.guesstimatorString,
@@ -30,8 +16,8 @@ let run =
       length: inputs.recommendedLength,
     },
   });
-  let shape = outputs |> RenderTypes.ShapeRenderer.Combined.getShape;
-  let dist =
-    shape |> E.O.fmap(toDist) |> E.O.fmap(truncateIfShould(inputs, outputs));
-  RenderTypes.DistPlusRenderer.Outputs.make(outputs, dist);
+  output
+  |> E.R.fmap((o: RenderTypes.ShapeRenderer.Symbolic.outputs) =>
+       toDist(o.shape)
+     );
 };
@@ -43,7 +43,7 @@ module ShapeRenderer = {
   module Symbolic = {
     type inputs = {length: int};
     type outputs = {
-      graph: SymbolicDist.bigDist,
+      graph: ExpressionTypes.ExpressionTree.node,
       shape: DistTypes.shape,
     };
     let make = (graph, shape) => {graph, shape};
@@ -75,7 +75,7 @@ module ShapeRenderer = {

 module DistPlusRenderer = {
   let defaultRecommendedLength = 10000;
-  let defaultShouldTruncate = true;
+  let defaultShouldDownsample = true;
   type ingredients = {
     guesstimatorString: string,
     domain: DistTypes.domain,
@@ -85,7 +85,7 @@ module DistPlusRenderer = {
     distPlusIngredients: ingredients,
     samplingInputs: ShapeRenderer.Sampling.inputs,
     recommendedLength: int,
-    shouldTruncate: bool,
+    shouldDownsample: bool,
   };
   module Ingredients = {
     let make =
@@ -105,7 +105,7 @@ module DistPlusRenderer = {
       (
         ~samplingInputs=ShapeRenderer.Sampling.Inputs.empty,
         ~recommendedLength=defaultRecommendedLength,
-        ~shouldTruncate=defaultShouldTruncate,
+        ~shouldDownsample=defaultShouldDownsample,
         ~distPlusIngredients,
         (),
       )
@@ -113,7 +113,7 @@ module DistPlusRenderer = {
     distPlusIngredients,
     samplingInputs,
     recommendedLength,
-    shouldTruncate,
+    shouldDownsample,
   };
   type outputs = {
     shapeRenderOutputs: ShapeRenderer.Combined.outputs,
@@ -14,33 +14,26 @@ let formatString = str => {
   str |> formatMessyArray;
 };

-let runSymbolic = (guesstimatorString, length) => {
-  let str = formatString(guesstimatorString);
+let runSymbolic = (inputs: RenderTypes.ShapeRenderer.Combined.inputs) => {
+  let str = formatString(inputs.guesstimatorString);
   let graph = MathJsParser.fromString(str);
   graph
-  |> E.R.fmap(g =>
-       RenderTypes.ShapeRenderer.Symbolic.make(
+  |> E.R.bind(_, g =>
+       ExpressionTree.toShape(
+         inputs.symbolicInputs.length,
+         {
+           sampleCount:
+             inputs.samplingInputs.sampleCount |> E.O.default(10000),
+           outputXYPoints:
+             inputs.samplingInputs.outputXYPoints |> E.O.default(10000),
+           kernelWidth: inputs.samplingInputs.kernelWidth,
+         },
         g,
-         SymbolicDist.toShape(length, g),
       )
+       |> E.R.fmap(RenderTypes.ShapeRenderer.Symbolic.make(g))
     );
 };

-let run =
-    (inputs: RenderTypes.ShapeRenderer.Combined.inputs)
-    : RenderTypes.ShapeRenderer.Combined.outputs => {
-  let symbolic =
-    runSymbolic(inputs.guesstimatorString, inputs.symbolicInputs.length);
-  let sampling =
-    switch (symbolic) {
-    | Ok(_) => None
-    | Error(_) =>
-      Samples.T.fromGuesstimatorString(
-        ~guesstimatorString=inputs.guesstimatorString,
-        ~samplingInputs=inputs.samplingInputs,
-        (),
-      )
-    };
-  Js.log3("IS SOME?", symbolic |> E.R.toOption |> E.O.isSome, symbolic);
-  {symbolic: Some(symbolic), sampling};
+let run = (inputs: RenderTypes.ShapeRenderer.Combined.inputs) => {
+  runSymbolic(inputs);
 };
@@ -1,13 +0,0 @@
-[@bs.deriving abstract]
-type discrete = {
-  xs: array(float),
-  ys: array(float),
-};
-
-let jsToDistDiscrete = (d: discrete): DistTypes.discreteShape => {
-  xs: xsGet(d),
-  ys: ysGet(d),
-};
-
-[@bs.module "./GuesstimatorLibrary.js"]
-external stringToSamples: (string, int) => array(float) = "stringToSamples";
@@ -1,37 +0,0 @@
-const _ = require("lodash");
-const {
-  Guesstimator
-} = require('@foretold/guesstimator/src');
-
-const stringToSamples = (
-  text,
-  sampleCount,
-  inputs = [],
-) => {
-  const [_error, {
-    parsedInput,
-    parsedError
-  }] = Guesstimator.parse({
-    text: "=" + text
-  });
-
-  const guesstimator = new Guesstimator({
-    parsedInput
-  });
-  const {
-    values,
-    errors
-  } = guesstimator.sample(
-    sampleCount,
-    inputs,
-  );
-  if (errors.length > 0) {
-    return []
-  } else {
-    return _.filter(values, _.isFinite)
-  }
-};
-
-module.exports = {
-  stringToSamples,
-};
@@ -1,3 +1,42 @@
+module Types = {
+  let defaultSampleCount = 5000;
+  let defaultOutputXYPoints = 10000;
+
+  type inputs = {
+    sampleCount: option(int),
+    outputXYPoints: option(int),
+    kernelWidth: option(float),
+  };
+
+  type samplingStats = {
+    sampleCount: int,
+    outputXYPoints: int,
+    bandwidthXSuggested: float,
+    bandwidthUnitSuggested: float,
+    bandwidthXImplemented: float,
+    bandwidthUnitImplemented: float,
+  };
+
+  type outputs = {
+    continuousParseParams: option(samplingStats),
+    shape: option(DistTypes.shape),
+  };
+
+  let empty = {sampleCount: None, outputXYPoints: None, kernelWidth: None};
+
+  type fInputs = {
+    sampleCount: int,
+    outputXYPoints: int,
+    kernelWidth: option(float),
+  };
+
+  let toF = (i: inputs): fInputs => {
+    sampleCount: i.sampleCount |> E.O.default(defaultSampleCount),
+    outputXYPoints: i.outputXYPoints |> E.O.default(defaultOutputXYPoints),
+    kernelWidth: i.kernelWidth,
+  };
+};
+
 module JS = {
   [@bs.deriving abstract]
   type distJs = {
@@ -21,39 +60,6 @@ module KDE = {
     |> JS.samplesToContinuousPdf(_, outputXYPoints, kernelWidth)
     |> JS.jsToDist;
   };
-
-  // Note: This was an experiment, but it didn't actually work that well.
-  let inGroups = (samples, outputXYPoints, kernelWidth, ~cuttoff=0.9, ()) => {
-    let partitionAt =
-      samples
-      |> E.A.length
-      |> float_of_int
-      |> (e => e *. cuttoff)
-      |> int_of_float;
-    let part1XYPoints =
-      outputXYPoints |> float_of_int |> (e => e *. cuttoff) |> int_of_float;
-    let part2XYPoints = outputXYPoints - part1XYPoints |> Js.Math.max_int(30);
-    let part1Data =
-      samples |> Belt.Array.slice(_, ~offset=0, ~len=partitionAt);
-    let part2DataLength = (samples |> E.A.length) - partitionAt;
-    let part2Data =
-      samples
-      |> Belt.Array.slice(
-           _,
-           ~offset=(-1) * part2DataLength,
-           ~len=part2DataLength,
-         );
-    let part1 =
-      part1Data
-      |> JS.samplesToContinuousPdf(_, part1XYPoints, kernelWidth)
-      |> JS.jsToDist;
-    let part2 =
-      part2Data
-      |> JS.samplesToContinuousPdf(_, part2XYPoints, 3)
-      |> JS.jsToDist;
-    let opp = 1.0 -. cuttoff;
-    part1;
-  };
 };

 module T = {
@@ -109,22 +115,24 @@ module T = {
   let toShape =
       (
         ~samples: t,
-        ~samplingInputs: RenderTypes.ShapeRenderer.Sampling.Inputs.fInputs,
+        ~samplingInputs: Types.fInputs,
         (),
       ) => {
     Array.fast_sort(compare, samples);
     let (continuousPart, discretePart) = E.A.Sorted.Floats.split(samples);
     let length = samples |> E.A.length |> float_of_int;
-    let discrete: DistTypes.xyShape =
+    let discrete: DistTypes.discreteShape =
       discretePart
       |> E.FloatFloatMap.fmap(r => r /. length)
       |> E.FloatFloatMap.toArray
-      |> XYShape.T.fromZippedArray;
+      |> XYShape.T.fromZippedArray
+      |> Discrete.make;

     let pdf =
       continuousPart |> E.A.length > 5
         ? {
           let _suggestedXWidth = Bandwidth.nrd0(continuousPart);
+          // todo: This does some recalculating from the last step.
           let _suggestedUnitWidth =
             suggestedUnitWidth(continuousPart, samplingInputs.outputXYPoints);
           let usedWidth =
@@ -135,7 +143,7 @@ module T = {
             samplingInputs.outputXYPoints,
             usedWidth,
           );
-          let foo: RenderTypes.ShapeRenderer.Sampling.samplingStats = {
+          let foo: Types.samplingStats = {
            sampleCount: samplingInputs.sampleCount,
            outputXYPoints: samplingInputs.outputXYPoints,
            bandwidthXSuggested: _suggestedXWidth,
|
||||||
~outputXYPoints=samplingInputs.outputXYPoints,
|
~outputXYPoints=samplingInputs.outputXYPoints,
|
||||||
formatUnitWidth(usedUnitWidth),
|
formatUnitWidth(usedUnitWidth),
|
||||||
)
|
)
|
||||||
|> Distributions.Continuous.make(`Linear)
|
|> Continuous.make
|
||||||
|> (r => Some((r, foo)));
|
|> (r => Some((r, foo)));
|
||||||
}
|
}
|
||||||
: None;
|
: None;
|
||||||
let shape =
|
let shape =
|
||||||
MixedShapeBuilder.buildSimple(
|
MixedShapeBuilder.buildSimple(
|
||||||
~continuous=pdf |> E.O.fmap(fst),
|
~continuous=pdf |> E.O.fmap(fst),
|
||||||
~discrete,
|
~discrete=Some(discrete),
|
||||||
);
|
);
|
||||||
let samplesParse: RenderTypes.ShapeRenderer.Sampling.outputs = {
|
let samplesParse: Types.outputs = {
|
||||||
continuousParseParams: pdf |> E.O.fmap(snd),
|
continuousParseParams: pdf |> E.O.fmap(snd),
|
||||||
shape,
|
shape,
|
||||||
};
|
};
|
||||||
|
@ -167,33 +175,11 @@ module T = {
|
||||||
|
|
||||||
let fromSamples =
|
let fromSamples =
|
||||||
(
|
(
|
||||||
~samplingInputs=RenderTypes.ShapeRenderer.Sampling.Inputs.empty,
|
~samplingInputs=Types.empty,
|
||||||
samples,
|
samples,
|
||||||
) => {
|
) => {
|
||||||
let samplingInputs =
|
let samplingInputs =
|
||||||
RenderTypes.ShapeRenderer.Sampling.Inputs.toF(samplingInputs);
|
Types.toF(samplingInputs);
|
||||||
toShape(~samples, ~samplingInputs, ());
|
toShape(~samples, ~samplingInputs, ());
|
||||||
};
|
};
|
||||||
|
|
||||||
let fromGuesstimatorString =
|
|
||||||
(
|
|
||||||
~guesstimatorString,
|
|
||||||
~samplingInputs=RenderTypes.ShapeRenderer.Sampling.Inputs.empty,
|
|
||||||
(),
|
|
||||||
) => {
|
|
||||||
let hasValidSamples =
|
|
||||||
Guesstimator.stringToSamples(guesstimatorString, 10) |> E.A.length > 0;
|
|
||||||
let _samplingInputs =
|
|
||||||
RenderTypes.ShapeRenderer.Sampling.Inputs.toF(samplingInputs);
|
|
||||||
switch (hasValidSamples) {
|
|
||||||
| false => None
|
|
||||||
| true =>
|
|
||||||
let samples =
|
|
||||||
Guesstimator.stringToSamples(
|
|
||||||
guesstimatorString,
|
|
||||||
_samplingInputs.sampleCount,
|
|
||||||
);
|
|
||||||
Some(fromSamples(~samplingInputs, samples));
|
|
||||||
};
|
|
||||||
};
|
|
||||||
};
|
};
|
|
@@ -1,273 +0,0 @@
-// todo: rename to SymbolicParser
-
-module MathJsonToMathJsAdt = {
-  type arg =
-    | Symbol(string)
-    | Value(float)
-    | Fn(fn)
-    | Array(array(arg))
-    | Object(Js.Dict.t(arg))
-  and fn = {
-    name: string,
-    args: array(arg),
-  };
-
-  let rec run = (j: Js.Json.t) =>
-    Json.Decode.(
-      switch (field("mathjs", string, j)) {
-      | "FunctionNode" =>
-        let args = j |> field("args", array(run));
-        Some(
-          Fn({
-            name: j |> field("fn", field("name", string)),
-            args: args |> E.A.O.concatSomes,
-          }),
-        );
-      | "OperatorNode" =>
-        let args = j |> field("args", array(run));
-        Some(
-          Fn({
-            name: j |> field("fn", string),
-            args: args |> E.A.O.concatSomes,
-          }),
-        );
-      | "ConstantNode" =>
-        optional(field("value", Json.Decode.float), j)
-        |> E.O.fmap(r => Value(r))
-      | "ParenthesisNode" => j |> field("content", run)
-      | "ObjectNode" =>
-        let properties = j |> field("properties", dict(run));
-        Js.Dict.entries(properties)
-        |> E.A.fmap(((key, value)) => value |> E.O.fmap(v => (key, v)))
-        |> E.A.O.concatSomes
-        |> Js.Dict.fromArray
-        |> (r => Some(Object(r)));
-      | "ArrayNode" =>
-        let items = field("items", array(run), j);
-        Some(Array(items |> E.A.O.concatSomes));
-      | "SymbolNode" => Some(Symbol(field("name", string, j)))
-      | n =>
-        Js.log3("Couldn't parse mathjs node", j, n);
-        None;
-      }
-    );
-};
-
-module MathAdtToDistDst = {
-  open MathJsonToMathJsAdt;
-
-  module MathAdtCleaner = {
-    let transformWithSymbol = (f: float, s: string) =>
-      switch (s) {
-      | "K"
-      | "k" => f *. 1000.
-      | "M"
-      | "m" => f *. 1000000.
-      | "B"
-      | "b" => f *. 1000000000.
-      | "T"
-      | "t" => f *. 1000000000000.
-      | _ => f
-      };
-
-    let rec run =
-      fun
-      | Fn({name: "multiply", args: [|Value(f), Symbol(s)|]}) =>
-        Value(transformWithSymbol(f, s))
-      | Fn({name: "unaryMinus", args: [|Value(f)|]}) => Value((-1.0) *. f)
-      | Fn({name, args}) => Fn({name, args: args |> E.A.fmap(run)})
-      | Array(args) => Array(args |> E.A.fmap(run))
-      | Symbol(s) => Symbol(s)
-      | Value(v) => Value(v)
-      | Object(v) =>
-        Object(
-          v
-          |> Js.Dict.entries
-          |> E.A.fmap(((key, value)) => (key, run(value)))
-          |> Js.Dict.fromArray,
-        );
-  };
-
-  let normal: array(arg) => result(SymbolicDist.bigDist, string) =
-    fun
-    | [|Value(mean), Value(stdev)|] =>
-      Ok(`Simple(`Normal({mean, stdev})))
-    | _ => Error("Wrong number of variables in normal distribution");
-
-  let lognormal: array(arg) => result(SymbolicDist.bigDist, string) =
-    fun
-    | [|Value(mu), Value(sigma)|] => Ok(`Simple(`Lognormal({mu, sigma})))
-    | [|Object(o)|] => {
-        let g = Js.Dict.get(o);
-        switch (g("mean"), g("stdev"), g("mu"), g("sigma")) {
-        | (Some(Value(mean)), Some(Value(stdev)), _, _) =>
-          Ok(`Simple(SymbolicDist.Lognormal.fromMeanAndStdev(mean, stdev)))
-        | (_, _, Some(Value(mu)), Some(Value(sigma))) =>
-          Ok(`Simple(`Lognormal({mu, sigma})))
-        | _ => Error("Lognormal distribution would need mean and stdev")
-        };
-      }
-    | _ => Error("Wrong number of variables in lognormal distribution");
-
-  let to_: array(arg) => result(SymbolicDist.bigDist, string) =
-    fun
-    | [|Value(low), Value(high)|] when low <= 0.0 && low < high => {
-        Ok(`Simple(SymbolicDist.Normal.from90PercentCI(low, high)));
-      }
-    | [|Value(low), Value(high)|] when low < high => {
-        Ok(`Simple(SymbolicDist.Lognormal.from90PercentCI(low, high)));
-      }
-    | [|Value(_), Value(_)|] =>
-      Error("Low value must be less than high value.")
-    | _ => Error("Wrong number of variables in lognormal distribution");
-
-  let uniform: array(arg) => result(SymbolicDist.bigDist, string) =
-    fun
-    | [|Value(low), Value(high)|] => Ok(`Simple(`Uniform({low, high})))
-    | _ => Error("Wrong number of variables in lognormal distribution");
-
-  let beta: array(arg) => result(SymbolicDist.bigDist, string) =
-    fun
-    | [|Value(alpha), Value(beta)|] => Ok(`Simple(`Beta({alpha, beta})))
-    | _ => Error("Wrong number of variables in lognormal distribution");
-
-  let exponential: array(arg) => result(SymbolicDist.bigDist, string) =
-    fun
-    | [|Value(rate)|] => Ok(`Simple(`Exponential({rate: rate})))
-    | _ => Error("Wrong number of variables in Exponential distribution");
-
-  let cauchy: array(arg) => result(SymbolicDist.bigDist, string) =
-    fun
-    | [|Value(local), Value(scale)|] =>
-      Ok(`Simple(`Cauchy({local, scale})))
-    | _ => Error("Wrong number of variables in cauchy distribution");
-
-  let triangular: array(arg) => result(SymbolicDist.bigDist, string) =
-    fun
-    | [|Value(low), Value(medium), Value(high)|] =>
-      Ok(`Simple(`Triangular({low, medium, high})))
-    | _ => Error("Wrong number of variables in triangle distribution");
-
-  let multiModal =
-      (
-        args: array(result(SymbolicDist.bigDist, string)),
-        weights: option(array(float)),
-      ) => {
-    let weights = weights |> E.O.default([||]);
-    let dists =
-      args
-      |> E.A.fmap(
-           fun
-           | Ok(`Simple(n)) => Ok(n)
-           | Error(e) => Error(e)
-           | Ok(k) => Error(SymbolicDist.toString(k)),
-         );
-    let firstWithError = dists |> Belt.Array.getBy(_, Belt.Result.isError);
-    let withoutErrors = dists |> E.A.fmap(E.R.toOption) |> E.A.O.concatSomes;
-    switch (firstWithError) {
-    | Some(Error(e)) => Error(e)
-    | None when withoutErrors |> E.A.length == 0 =>
-      Error("Multimodals need at least one input")
-    | _ =>
-      withoutErrors
-      |> E.A.fmapi((index, item) =>
-           (item, weights |> E.A.get(_, index) |> E.O.default(1.0))
-         )
-      |> (r => Ok(`PointwiseCombination(r)))
-    };
-  };
-
-  let arrayParser = (args: array(arg)): result(SymbolicDist.bigDist, string) => {
-    let samples =
-      args
-      |> E.A.fmap(
-           fun
-           | Value(n) => Some(n)
-           | _ => None,
-         )
-      |> E.A.O.concatSomes;
-    let outputs = Samples.T.fromSamples(samples);
-    let pdf = outputs.shape |> E.O.bind(_, Distributions.Shape.T.toContinuous);
-    let shape =
-      pdf
-      |> E.O.fmap(pdf => {
-           let _pdf =
-             Distributions.Continuous.T.scaleToIntegralSum(
-               ~cache=None,
-               ~intendedSum=1.0,
-               pdf,
-             );
-           let cdf = Distributions.Continuous.T.integral(~cache=None, _pdf);
-           SymbolicDist.ContinuousShape.make(_pdf, cdf);
-         });
-    switch (shape) {
-    | Some(s) => Ok(`Simple(`ContinuousShape(s)))
-    | None => Error("Rendering did not work")
-    };
-  };
-
-  let rec functionParser = (r): result(SymbolicDist.bigDist, string) =>
-    r
-    |> (
-      fun
-      | Fn({name: "normal", args}) => normal(args)
-      | Fn({name: "lognormal", args}) => lognormal(args)
-      | Fn({name: "uniform", args}) => uniform(args)
-      | Fn({name: "beta", args}) => beta(args)
-      | Fn({name: "to", args}) => to_(args)
-      | Fn({name: "exponential", args}) => exponential(args)
-      | Fn({name: "cauchy", args}) => cauchy(args)
-      | Fn({name: "triangular", args}) => triangular(args)
-      | Value(f) => Ok(`Simple(`Float(f)))
-      | Fn({name: "mm", args}) => {
-          let weights =
-            args
-            |> E.A.last
-            |> E.O.bind(
-                 _,
-                 fun
-                 | Array(values) => Some(values)
-                 | _ => None,
-               )
-            |> E.O.fmap(o =>
-                 o
-                 |> E.A.fmap(
-                      fun
-                      | Value(r) => Some(r)
-                      | _ => None,
-                    )
-                 |> E.A.O.concatSomes
-               );
-          let possibleDists =
-            E.O.isSome(weights)
-              ? Belt.Array.slice(args, ~offset=0, ~len=E.A.length(args) - 1)
-              : args;
-          let dists = possibleDists |> E.A.fmap(functionParser);
-          multiModal(dists, weights);
-        }
-      | Fn({name}) => Error(name ++ ": function not supported")
-      | _ => {
-          Error("This type not currently supported");
-        }
-    );
-
-  let topLevel = (r): result(SymbolicDist.bigDist, string) =>
-    r
-    |> (
-      fun
-      | Fn(_) => functionParser(r)
-      | Value(r) => Ok(`Simple(`Float(r)))
-      | Array(r) => arrayParser(r)
-      | Symbol(_) => Error("Symbol not valid as top level")
-      | Object(_) => Error("Object not valid as top level")
-    );
-
-  let run = (r): result(SymbolicDist.bigDist, string) =>
-    r |> MathAdtCleaner.run |> topLevel;
-};
-
-let fromString = str => {
-  let mathJsToJson = Mathjs.parseMath(str);
-  let mathJsParse =
-    E.R.bind(mathJsToJson, r =>
-      switch (MathJsonToMathJsAdt.run(r)) {
-      | Some(r) => Ok(r)
-      | None => Error("MathJsParse Error")
-      }
-    );
-  let value = E.R.bind(mathJsParse, MathAdtToDistDst.run);
-  value;
-};
@@ -1,8 +0,0 @@
-
-const math = require("mathjs");
-
-function parseMath(f){ return JSON.parse(JSON.stringify(math.parse(f))) };
-
-module.exports = {
-  parseMath,
-};
@@ -1,102 +1,39 @@
-type normal = {
-  mean: float,
-  stdev: float,
-};
-
-type lognormal = {
-  mu: float,
-  sigma: float,
-};
-
-type uniform = {
-  low: float,
-  high: float,
-};
-
-type beta = {
-  alpha: float,
-  beta: float,
-};
-
-type exponential = {rate: float};
-
-type cauchy = {
-  local: float,
-  scale: float,
-};
-
-type triangular = {
-  low: float,
-  medium: float,
-  high: float,
-};
-
-type continuousShape = {
-  pdf: DistTypes.continuousShape,
-  cdf: DistTypes.continuousShape,
-};
-
-type contType = [ | `Continuous | `Discrete];
-
-type dist = [
-  | `Normal(normal)
-  | `Beta(beta)
-  | `Lognormal(lognormal)
-  | `Uniform(uniform)
-  | `Exponential(exponential)
-  | `Cauchy(cauchy)
-  | `Triangular(triangular)
-  | `ContinuousShape(continuousShape)
-  | `Float(float)
-];
-
-type pointwiseAdd = array((dist, float));
-
-type bigDist = [ | `Simple(dist) | `PointwiseCombination(pointwiseAdd)];
-
-module ContinuousShape = {
-  type t = continuousShape;
-  let make = (pdf, cdf): t => {pdf, cdf};
-  let pdf = (x, t: t) =>
-    Distributions.Continuous.T.xToY(x, t.pdf).continuous;
-  let inv = (p, t: t) =>
-    Distributions.Continuous.T.xToY(p, t.pdf).continuous;
-  // TODO: Fix the sampling, to have it work correctly.
-  let sample = (t: t) => 3.0;
-  let toString = t => {j|CustomContinuousShape|j};
-  let contType: contType = `Continuous;
-};
-
+open SymbolicTypes;
+
 module Exponential = {
   type t = exponential;
   let pdf = (x, t: t) => Jstat.exponential##pdf(x, t.rate);
+  let cdf = (x, t: t) => Jstat.exponential##cdf(x, t.rate);
   let inv = (p, t: t) => Jstat.exponential##inv(p, t.rate);
   let sample = (t: t) => Jstat.exponential##sample(t.rate);
+  let mean = (t: t) => Ok(Jstat.exponential##mean(t.rate));
   let toString = ({rate}: t) => {j|Exponential($rate)|j};
-  let contType: contType = `Continuous;
 };

 module Cauchy = {
   type t = cauchy;
   let pdf = (x, t: t) => Jstat.cauchy##pdf(x, t.local, t.scale);
+  let cdf = (x, t: t) => Jstat.cauchy##cdf(x, t.local, t.scale);
   let inv = (p, t: t) => Jstat.cauchy##inv(p, t.local, t.scale);
   let sample = (t: t) => Jstat.cauchy##sample(t.local, t.scale);
+  let mean = (_: t) => Error("Cauchy distributions have no mean value.");
   let toString = ({local, scale}: t) => {j|Cauchy($local, $scale)|j};
-  let contType: contType = `Continuous;
 };

 module Triangular = {
   type t = triangular;
   let pdf = (x, t: t) => Jstat.triangular##pdf(x, t.low, t.high, t.medium);
+  let cdf = (x, t: t) => Jstat.triangular##cdf(x, t.low, t.high, t.medium);
   let inv = (p, t: t) => Jstat.triangular##inv(p, t.low, t.high, t.medium);
   let sample = (t: t) => Jstat.triangular##sample(t.low, t.high, t.medium);
+  let mean = (t: t) => Ok(Jstat.triangular##mean(t.low, t.high, t.medium));
   let toString = ({low, medium, high}: t) => {j|Triangular($low, $medium, $high)|j};
-  let contType: contType = `Continuous;
 };

 module Normal = {
   type t = normal;
   let pdf = (x, t: t) => Jstat.normal##pdf(x, t.mean, t.stdev);
+  let cdf = (x, t: t) => Jstat.normal##cdf(x, t.mean, t.stdev);

   let from90PercentCI = (low, high) => {
     let mean = E.A.Floats.mean([|low, high|]);
@@ -105,26 +42,55 @@ module Normal = {
   };
   let inv = (p, t: t) => Jstat.normal##inv(p, t.mean, t.stdev);
   let sample = (t: t) => Jstat.normal##sample(t.mean, t.stdev);
+  let mean = (t: t) => Ok(Jstat.normal##mean(t.mean, t.stdev));
   let toString = ({mean, stdev}: t) => {j|Normal($mean,$stdev)|j};
-  let contType: contType = `Continuous;
+
+  let add = (n1: t, n2: t) => {
+    let mean = n1.mean +. n2.mean;
+    let stdev = sqrt(n1.stdev ** 2. +. n2.stdev ** 2.);
+    `Normal({mean, stdev});
+  };
+  let subtract = (n1: t, n2: t) => {
+    let mean = n1.mean -. n2.mean;
+    let stdev = sqrt(n1.stdev ** 2. +. n2.stdev ** 2.);
+    `Normal({mean, stdev});
+  };
+
+  // TODO: is this useful here at all? would need the integral as well ...
+  let pointwiseProduct = (n1: t, n2: t) => {
+    let mean =
+      (n1.mean *. n2.stdev ** 2. +. n2.mean *. n1.stdev ** 2.)
+      /. (n1.stdev ** 2. +. n2.stdev ** 2.);
+    let stdev = 1. /. (1. /. n1.stdev ** 2. +. 1. /. n2.stdev ** 2.);
+    `Normal({mean, stdev});
+  };
+
+  let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
+    switch (operation) {
+    | `Add => Some(add(n1, n2))
+    | `Subtract => Some(subtract(n1, n2))
+    | _ => None
+    };
 };

 module Beta = {
   type t = beta;
   let pdf = (x, t: t) => Jstat.beta##pdf(x, t.alpha, t.beta);
+  let cdf = (x, t: t) => Jstat.beta##cdf(x, t.alpha, t.beta);
   let inv = (p, t: t) => Jstat.beta##inv(p, t.alpha, t.beta);
   let sample = (t: t) => Jstat.beta##sample(t.alpha, t.beta);
+  let mean = (t: t) => Ok(Jstat.beta##mean(t.alpha, t.beta));
   let toString = ({alpha, beta}: t) => {j|Beta($alpha,$beta)|j};
-  let contType: contType = `Continuous;
 };

 module Lognormal = {
   type t = lognormal;
   let pdf = (x, t: t) => Jstat.lognormal##pdf(x, t.mu, t.sigma);
+  let cdf = (x, t: t) => Jstat.lognormal##cdf(x, t.mu, t.sigma);
   let inv = (p, t: t) => Jstat.lognormal##inv(p, t.mu, t.sigma);
+  let mean = (t: t) => Ok(Jstat.lognormal##mean(t.mu, t.sigma));
   let sample = (t: t) => Jstat.lognormal##sample(t.mu, t.sigma);
   let toString = ({mu, sigma}: t) => {j|Lognormal($mu,$sigma)|j};
-  let contType: contType = `Continuous;
   let from90PercentCI = (low, high) => {
     let logLow = Js.Math.log(low);
     let logHigh = Js.Math.log(high);
@@ -144,27 +110,51 @@ module Lognormal = {
     );
     `Lognormal({mu, sigma});
   };
+
+  // For independent lognormals, mus add/subtract and variances add.
+  let multiply = (l1, l2) => {
+    let mu = l1.mu +. l2.mu;
+    let sigma = sqrt(l1.sigma ** 2. +. l2.sigma ** 2.);
+    `Lognormal({mu, sigma});
+  };
+  let divide = (l1, l2) => {
+    let mu = l1.mu -. l2.mu;
+    let sigma = sqrt(l1.sigma ** 2. +. l2.sigma ** 2.);
+    `Lognormal({mu, sigma});
+  };
+  let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
+    switch (operation) {
+    | `Multiply => Some(multiply(n1, n2))
+    | `Divide => Some(divide(n1, n2))
+    | _ => None
+    };
 };

 module Uniform = {
   type t = uniform;
   let pdf = (x, t: t) => Jstat.uniform##pdf(x, t.low, t.high);
+  let cdf = (x, t: t) => Jstat.uniform##cdf(x, t.low, t.high);
   let inv = (p, t: t) => Jstat.uniform##inv(p, t.low, t.high);
   let sample = (t: t) => Jstat.uniform##sample(t.low, t.high);
+  let mean = (t: t) => Ok(Jstat.uniform##mean(t.low, t.high));
   let toString = ({low, high}: t) => {j|Uniform($low,$high)|j};
-  let contType: contType = `Continuous;
+  let truncate = (low, high, t: t): t => {
+    let newLow = max(E.O.default(neg_infinity, low), t.low);
+    let newHigh = min(E.O.default(infinity, high), t.high);
+    {low: newLow, high: newHigh};
+  };
 };

 module Float = {
   type t = float;
   let pdf = (x, t: t) => x == t ? 1.0 : 0.0;
+  let cdf = (x, t: t) => x >= t ? 1.0 : 0.0;
   let inv = (p, t: t) => p < t ? 0.0 : 1.0;
+  let mean = (t: t) => Ok(t);
   let sample = (t: t) => t;
   let toString = Js.Float.toString;
-  let contType: contType = `Discrete;
 };

-module GenericSimple = {
+module T = {
   let minCdfValue = 0.0001;
   let maxCdfValue = 0.9999;
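The analytic shortcuts in the hunks above follow the standard closed forms for independent distributions; a worked check (numbers are illustrative):

  Normal.add({mean: 1., stdev: 3.}, {mean: 2., stdev: 4.});
  // => `Normal({mean: 3., stdev: 5.})    since sqrt(3.^2. +. 4.^2.) == 5.
  Lognormal.multiply({mu: 0.1, sigma: 0.3}, {mu: 0.2, sigma: 0.4});
  // => `Lognormal({mu: 0.3, sigma: 0.5}) assuming independence, as noted above
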
@@ -178,20 +168,18 @@ module T = {
     | `Uniform(n) => Uniform.pdf(x, n)
     | `Beta(n) => Beta.pdf(x, n)
     | `Float(n) => Float.pdf(x, n)
-    | `ContinuousShape(n) => ContinuousShape.pdf(x, n)
     };

-  let contType = (dist: dist): contType =>
+  let cdf = (x, dist) =>
     switch (dist) {
-    | `Normal(_) => Normal.contType
-    | `Triangular(_) => Triangular.contType
-    | `Exponential(_) => Exponential.contType
-    | `Cauchy(_) => Cauchy.contType
-    | `Lognormal(_) => Lognormal.contType
-    | `Uniform(_) => Uniform.contType
-    | `Beta(_) => Beta.contType
-    | `Float(_) => Float.contType
-    | `ContinuousShape(_) => ContinuousShape.contType
+    | `Normal(n) => Normal.cdf(x, n)
+    | `Triangular(n) => Triangular.cdf(x, n)
+    | `Exponential(n) => Exponential.cdf(x, n)
+    | `Cauchy(n) => Cauchy.cdf(x, n)
+    | `Lognormal(n) => Lognormal.cdf(x, n)
+    | `Uniform(n) => Uniform.cdf(x, n)
+    | `Beta(n) => Beta.cdf(x, n)
+    | `Float(n) => Float.cdf(x, n)
     };

   let inv = (x, dist) =>
@@ -204,10 +192,9 @@ module T = {
     | `Uniform(n) => Uniform.inv(x, n)
     | `Beta(n) => Beta.inv(x, n)
     | `Float(n) => Float.inv(x, n)
-    | `ContinuousShape(n) => ContinuousShape.inv(x, n)
     };

-  let sample: dist => float =
+  let sample: symbolicDist => float =
     fun
     | `Normal(n) => Normal.sample(n)
     | `Triangular(n) => Triangular.sample(n)
@@ -216,10 +203,22 @@ module T = {
     | `Lognormal(n) => Lognormal.sample(n)
     | `Uniform(n) => Uniform.sample(n)
     | `Beta(n) => Beta.sample(n)
-    | `Float(n) => Float.sample(n)
-    | `ContinuousShape(n) => ContinuousShape.sample(n);
+    | `Float(n) => Float.sample(n);

-  let toString: dist => string =
+  let doN = (n, fn) => {
+    let items = Belt.Array.make(n, 0.0);
+    for (x in 0 to n - 1) {
+      let _ = Belt.Array.set(items, x, fn());
+      ();
+    };
+    items;
+  };
+
+  let sampleN = (n, dist) => {
+    doN(n, () => sample(dist));
+  };
+
+  let toString: symbolicDist => string =
     fun
     | `Triangular(n) => Triangular.toString(n)
     | `Exponential(n) => Exponential.toString(n)
@@ -228,10 +227,9 @@ module T = {
     | `Lognormal(n) => Lognormal.toString(n)
     | `Uniform(n) => Uniform.toString(n)
     | `Beta(n) => Beta.toString(n)
-    | `Float(n) => Float.toString(n)
-    | `ContinuousShape(n) => ContinuousShape.toString(n);
+    | `Float(n) => Float.toString(n);

-  let min: dist => float =
+  let min: symbolicDist => float =
     fun
     | `Triangular({low}) => low
     | `Exponential(n) => Exponential.inv(minCdfValue, n)
@@ -240,10 +238,9 @@ module T = {
     | `Lognormal(n) => Lognormal.inv(minCdfValue, n)
     | `Uniform({low}) => low
     | `Beta(n) => Beta.inv(minCdfValue, n)
-    | `ContinuousShape(n) => ContinuousShape.inv(minCdfValue, n)
     | `Float(n) => n;

-  let max: dist => float =
+  let max: symbolicDist => float =
     fun
     | `Triangular(n) => n.high
     | `Exponential(n) => Exponential.inv(maxCdfValue, n)
@@ -251,148 +248,82 @@ module T = {
     | `Normal(n) => Normal.inv(maxCdfValue, n)
     | `Lognormal(n) => Lognormal.inv(maxCdfValue, n)
     | `Beta(n) => Beta.inv(maxCdfValue, n)
-    | `ContinuousShape(n) => ContinuousShape.inv(maxCdfValue, n)
     | `Uniform({high}) => high
     | `Float(n) => n;

+  let mean: symbolicDist => result(float, string) =
+    fun
+    | `Triangular(n) => Triangular.mean(n)
+    | `Exponential(n) => Exponential.mean(n)
+    | `Cauchy(n) => Cauchy.mean(n)
+    | `Normal(n) => Normal.mean(n)
+    | `Lognormal(n) => Lognormal.mean(n)
+    | `Beta(n) => Beta.mean(n)
+    | `Uniform(n) => Uniform.mean(n)
+    | `Float(n) => Float.mean(n);
+
+  let operate = (distToFloatOp: ExpressionTypes.distToFloatOperation, s) =>
+    switch (distToFloatOp) {
+    | `Cdf(f) => Ok(cdf(f, s))
+    | `Pdf(f) => Ok(pdf(f, s))
+    | `Inv(f) => Ok(inv(f, s))
+    | `Sample => Ok(sample(s))
+    | `Mean => mean(s)
+    };
+
-  /* This function returns a list of x's at which to evaluate the overall distribution (for rendering).
-     This function is called separately for each individual distribution.
-
-     When called with xSelection=`Linear, this function will return (sampleCount) x's, evenly
-     distributed between the min and max of the distribution (whatever those are defined to be above).
-
-     When called with xSelection=`ByWeight, this function will distribute the x's such as to
-     match the cumulative shape of the distribution. This is slower but may give better results.
-  */
   let interpolateXs =
-      (~xSelection: [ | `Linear | `ByWeight]=`Linear, dist: dist, sampleCount) => {
+      (~xSelection: [ | `Linear | `ByWeight]=`Linear, dist: symbolicDist, n) => {
     switch (xSelection, dist) {
-    | (`Linear, _) => E.A.Floats.range(min(dist), max(dist), sampleCount)
+    | (`Linear, _) => E.A.Floats.range(min(dist), max(dist), n)
     | (`ByWeight, `Uniform(n)) =>
       // In `ByWeight mode, uniform distributions get special treatment because we need two x's
       // on either side for proper rendering (just left and right of the discontinuities).
       let dx = 0.00001 *. (n.high -. n.low);
-      [|n.low -. dx, n.low +. dx, n.high -. dx, n.high +. dx|]
+      [|n.low -. dx, n.low +. dx, n.high -. dx, n.high +. dx|];
     | (`ByWeight, _) =>
-      let ys = E.A.Floats.range(minCdfValue, maxCdfValue, sampleCount)
-      ys |> E.A.fmap(y => inv(y, dist))
+      let ys = E.A.Floats.range(minCdfValue, maxCdfValue, n);
+      ys |> E.A.fmap(y => inv(y, dist));
     };
   };

-  let toShape =
-      (~xSelection: [ | `Linear | `ByWeight]=`Linear, dist: dist, sampleCount)
-      : DistTypes.shape => {
-    switch (dist) {
-    | `ContinuousShape(n) => n.pdf |> Distributions.Continuous.T.toShape
-    | dist =>
-      let xs = interpolateXs(~xSelection, dist, sampleCount);
-      let ys = xs |> E.A.fmap(r => pdf(r, dist));
-      XYShape.T.fromArrays(xs, ys)
-      |> Distributions.Continuous.make(`Linear, _)
-      |> Distributions.Continuous.T.toShape;
-    };
-  };
-};
-
-module PointwiseAddDistributionsWeighted = {
-  type t = pointwiseAdd;
-
-  let normalizeWeights = (dists: t) => {
-    let total = dists |> E.A.fmap(snd) |> E.A.Floats.sum;
-    dists |> E.A.fmap(((a, b)) => (a, b /. total));
-  };
-
-  let pdf = (x: float, dists: t) =>
-    dists
-    |> E.A.fmap(((e, w)) => GenericSimple.pdf(x, e) *. w)
-    |> E.A.Floats.sum;
-
-  let min = (dists: t) =>
-    dists |> E.A.fmap(d => d |> fst |> GenericSimple.min) |> E.A.min;
-
-  let max = (dists: t) =>
-    dists |> E.A.fmap(d => d |> fst |> GenericSimple.max) |> E.A.max;
-
-  let discreteShape = (dists: t, sampleCount: int) => {
-    let discrete =
-      dists
-      |> E.A.fmap(((r, e)) =>
-           r
-           |> (
-             fun
-             | `Float(r) => Some((r, e))
-             | _ => None
-           )
-         )
-      |> E.A.O.concatSomes
-      |> E.A.fmap(((x, y)) =>
-           ({xs: [|x|], ys: [|y|]}: DistTypes.xyShape)
-         )
-      |> Distributions.Discrete.reduce((+.));
-    discrete;
-  };
-
-  let continuousShape = (dists: t, sampleCount: int) => {
-    let xs =
-      dists
-      |> E.A.fmap(r =>
-           r
-           |> fst
-           |> GenericSimple.interpolateXs(
-                ~xSelection=`ByWeight,
-                _,
-                sampleCount / (dists |> E.A.length),
-              )
-         )
-      |> E.A.concatMany;
-    xs |> Array.fast_sort(compare);
-    let ys = xs |> E.A.fmap(pdf(_, dists));
-    XYShape.T.fromArrays(xs, ys) |> Distributions.Continuous.make(`Linear, _);
-  };
-
-  let toShape = (dists: t, sampleCount: int) => {
-    let normalized = normalizeWeights(dists);
-    let continuous =
-      normalized
-      |> E.A.filter(((r, _)) => GenericSimple.contType(r) == `Continuous)
-      |> continuousShape(_, sampleCount);
-    let discrete =
-      normalized
-      |> E.A.filter(((r, _)) => GenericSimple.contType(r) == `Discrete)
-      |> discreteShape(_, sampleCount);
-    let shape =
-      MixedShapeBuilder.buildSimple(~continuous=Some(continuous), ~discrete);
-    shape |> E.O.toExt("");
-  };
-
-  let toString = (dists: t) => {
-    let distString =
-      dists
-      |> E.A.fmap(d => GenericSimple.toString(fst(d)))
-      |> Js.Array.joinWith(",");
-    let weights =
-      dists
-      |> E.A.fmap(d =>
-           snd(d) |> Js.Float.toPrecisionWithPrecision(~digits=2)
-         )
-      |> Js.Array.joinWith(",");
-    {j|multimodal($distString, [$weights])|j};
-  };
-};
-
-let toString = (r: bigDist) =>
-  r
-  |> (
-    fun
-    | `Simple(d) => GenericSimple.toString(d)
-    | `PointwiseCombination(d) =>
-      PointwiseAddDistributionsWeighted.toString(d)
-  );
-
-let toShape = n =>
-  fun
-  | `Simple(d) => GenericSimple.toShape(~xSelection=`ByWeight, d, n)
-  | `PointwiseCombination(d) =>
-    PointwiseAddDistributionsWeighted.toShape(d, n);
+  /* Calling e.g. "Normal.operate" returns an optional that wraps a result.
+     If the optional is None, there is no valid analytic solution. If it is Some,
+     it can still return an error if there is a serious problem,
+     like in the case of a divide by 0.
+  */
+  let tryAnalyticalSimplification =
+      (
+        d1: symbolicDist,
+        d2: symbolicDist,
+        op: ExpressionTypes.algebraicOperation,
+      )
+      : analyticalSimplificationResult =>
+    switch (d1, d2) {
+    | (`Float(v1), `Float(v2)) =>
+      switch (Operation.Algebraic.applyFn(op, v1, v2)) {
+      | Ok(r) => `AnalyticalSolution(`Float(r))
+      | Error(n) => `Error(n)
+      }
+    | (`Normal(v1), `Normal(v2)) =>
+      Normal.operate(op, v1, v2)
+      |> E.O.dimap(r => `AnalyticalSolution(r), () => `NoSolution)
+    | (`Lognormal(v1), `Lognormal(v2)) =>
+      Lognormal.operate(op, v1, v2)
+      |> E.O.dimap(r => `AnalyticalSolution(r), () => `NoSolution)
+    | _ => `NoSolution
+    };
+
+  let toShape = (sampleCount, d: symbolicDist): DistTypes.shape =>
+    switch (d) {
+    | `Float(v) =>
+      Discrete(
+        Discrete.make(~integralSumCache=Some(1.0), {xs: [|v|], ys: [|1.0|]}),
+      )
+    | _ =>
+      let xs = interpolateXs(~xSelection=`ByWeight, d, sampleCount);
+      let ys = xs |> E.A.fmap(x => pdf(x, d));
+      Continuous(
+        Continuous.make(~integralSumCache=Some(1.0), {xs, ys}),
+      );
+    };
+};
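A usage sketch of the simplification fallback (the inputs are illustrative):

  switch (SymbolicDist.T.tryAnalyticalSimplification(`Float(3.), `Float(0.), `Divide)) {
  | `AnalyticalSolution(d) => Js.log(SymbolicDist.T.toString(d))
  | `Error(e) => Js.log(e) // this case: division by zero
  | `NoSolution => Js.log("fall back to sampling")
  };
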
49
src/distPlus/symbolic/SymbolicTypes.re
Normal file
49
src/distPlus/symbolic/SymbolicTypes.re
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
type normal = {
|
||||||
|
mean: float,
|
||||||
|
stdev: float,
|
||||||
|
};
|
||||||
|
|
||||||
|
type lognormal = {
|
||||||
|
mu: float,
|
||||||
|
sigma: float,
|
||||||
|
};
|
||||||
|
|
||||||
|
type uniform = {
|
||||||
|
low: float,
|
||||||
|
high: float,
|
||||||
|
};
|
||||||
|
|
||||||
|
type beta = {
|
||||||
|
alpha: float,
|
||||||
|
beta: float,
|
||||||
|
};
|
||||||
|
|
||||||
|
type exponential = {rate: float};
|
||||||
|
|
||||||
|
type cauchy = {
|
||||||
|
local: float,
|
||||||
|
scale: float,
|
||||||
|
};
|
||||||
|
|
||||||
|
type triangular = {
|
||||||
|
low: float,
|
||||||
|
medium: float,
|
||||||
|
high: float,
|
||||||
|
};
|
||||||
|
|
||||||
|
type symbolicDist = [
|
||||||
|
| `Normal(normal)
|
||||||
|
| `Beta(beta)
|
||||||
|
| `Lognormal(lognormal)
|
||||||
|
| `Uniform(uniform)
|
||||||
|
| `Exponential(exponential)
|
||||||
|
| `Cauchy(cauchy)
|
||||||
|
| `Triangular(triangular)
|
||||||
|
| `Float(float) // Dirac delta at x. Practically useful only in the context of multimodals.
|
||||||
|
];
|
||||||
|
|
||||||
|
type analyticalSimplificationResult = [
|
||||||
|
| `AnalyticalSolution(symbolicDist)
|
||||||
|
| `Error(string)
|
||||||
|
| `NoSolution
|
||||||
|
];
|
|
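A minimal sketch of constructing and consuming these variants (illustrative only; it uses nothing beyond the types defined above):

// let dist: SymbolicTypes.symbolicDist = `Lognormal({mu: 0.0, sigma: 1.0});
//
// let describe = (r: SymbolicTypes.analyticalSimplificationResult) =>
//   switch (r) {
//   | `AnalyticalSolution(_) => "solved analytically"
//   | `Error(msg) => "failed: " ++ msg
//   | `NoSolution => "no closed form; compute numerically"
//   };
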
@@ -145,6 +145,13 @@ module R = {
  let id = e => e |> result(U.id, U.id);
  let fmap = Rationale.Result.fmap;
  let bind = Rationale.Result.bind;
+ let toExn = Belt.Result.getExn;
+ let merge = (a, b) =>
+   switch (a, b) {
+   | (Error(e), _) => Error(e)
+   | (_, Error(e)) => Error(e)
+   | (Ok(a), Ok(b)) => Ok((a, b))
+   };
  let toOption = (e: Belt.Result.t('a, 'b)) =>
    switch (e) {
    | Ok(r) => Some(r)

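`merge` pairs two results and short-circuits on failure; when both sides are errors, the left error wins, since its match arm comes first. Expected behavior (a sketch, not test output):

// E.R.merge(Ok(1), Ok("a"))         == Ok((1, "a"))
// E.R.merge(Error("bad"), Ok("a"))  == Error("bad")
// E.R.merge(Ok(1), Error("bad"))    == Error("bad")
// E.R.merge(Error("l"), Error("r")) == Error("l")
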
@@ -5,6 +5,7 @@ type normal = {
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
+ [@bs.meth] "mean": (float, float) => float,
};
type lognormal = {
  .
@@ -12,6 +13,7 @@ type lognormal = {
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
+ [@bs.meth] "mean": (float, float) => float,
};
type uniform = {
  .
@@ -19,6 +21,7 @@ type uniform = {
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
+ [@bs.meth] "mean": (float, float) => float,
};
type beta = {
  .
@@ -26,6 +29,7 @@ type beta = {
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
+ [@bs.meth] "mean": (float, float) => float,
};
type exponential = {
  .
@@ -33,6 +37,7 @@ type exponential = {
  [@bs.meth] "cdf": (float, float) => float,
  [@bs.meth] "inv": (float, float) => float,
  [@bs.meth] "sample": float => float,
+ [@bs.meth] "mean": float => float,
};
type cauchy = {
  .
@@ -47,6 +52,7 @@ type triangular = {
  [@bs.meth] "cdf": (float, float, float, float) => float,
  [@bs.meth] "inv": (float, float, float, float) => float,
  [@bs.meth] "sample": (float, float, float) => float,
+ [@bs.meth] "mean": (float, float, float) => float,
};

// Pareto doesn't have sample for some reason
@@ -61,6 +67,7 @@ type poisson = {
  [@bs.meth] "pdf": (float, float) => float,
  [@bs.meth] "cdf": (float, float) => float,
  [@bs.meth] "sample": float => float,
+ [@bs.meth] "mean": float => float,
};
type weibull = {
  .
@@ -68,6 +75,7 @@ type weibull = {
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
+ [@bs.meth] "mean": (float, float) => float,
};
type binomial = {
  .

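These added `mean` methods expose jStat's closed-form means, computed from the distribution's parameters rather than by sampling. A hedged usage sketch, assuming this module also binds distribution objects (e.g. a `normal` external on the jStat module) in the same style as the existing `pdf`/`cdf` calls:

// Js.log(normal##mean(5.0, 2.0));    // 5.0, the mean of Normal(5, 2)
// Js.log(lognormal##mean(0.0, 1.0)); // exp(0. +. 1. /. 2.) ~= 1.6487
// Js.log(exponential##mean(2.0));    // 1 / rate = 0.5
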
@@ -22,14 +22,12 @@ let propValue = (t: Prop.Value.t) => {
      RenderTypes.DistPlusRenderer.make(
        ~distPlusIngredients=r,
        ~recommendedLength=10000,
-       ~shouldTruncate=true,
+       ~shouldDownsample=true,
        (),
      )
-     |> DistPlusRenderer.run
-     |> RenderTypes.DistPlusRenderer.Outputs.distplus;
+     |> DistPlusRenderer.run;
    switch (newDistribution) {
-   | Some(distribution) =>
-     <div> <DistPlusPlot distPlus=distribution /> </div>
+   | Ok(distribution) => <div> <DistPlusPlot distPlus=distribution /> </div>
    // <input
    //   readOnly=true
    //   className="shadow appearance-none border w-1/3 rounded py-2 px-3 text-gray-700 leading-tight focus:outline-none focus:shadow-outline"
@@ -45,7 +43,7 @@ let propValue = (t: Prop.Value.t) => {
    //   className="w-1/3 border w-1/2 rounded py-2 px-3 text-gray-700 leading-tight focus:outline-none focus:shadow-outline bg-white">
    //   {"30 to infinity, 80% mass" |> ReasonReact.string}
    // </div>
-   | None => "Something went wrong" |> ReasonReact.string
+   | Error(e) => e |> ReasonReact.string
    };
  | FloatCdf(_) => <div />
  | Probability(r) =>

@@ -112,8 +112,12 @@ module Model = {
    GlobalCatastrophe.makeI(MomentRe.momentNow())
    |> RenderTypes.DistPlusRenderer.make(~distPlusIngredients=_, ())
    |> DistPlusRenderer.run
-   |> RenderTypes.DistPlusRenderer.Outputs.distplus
-   |> E.O.bind(_, Distributions.DistPlusTime.Integral.xToY(Time(dateTime)));
+   |> E.R.bind(_, r =>
+        r
+        |> DistPlusTime.Integral.xToY(Time(dateTime))
+        |> E.O.toResult("error")
+      )
+   |> E.R.toOption;
  };

  let make =

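The reworked chain keeps everything in `Result` until the last step: `E.R.bind` threads the successful render, `E.O.toResult("error")` lifts the possibly-missing integral value back into a `Result`, and `E.R.toOption` finally collapses the error branch to `None` for the caller. The same plumbing in isolation (a sketch with assumed values, using the helpers from E.re above):

// Ok(42)
// |> E.R.bind(_, v => Some(v + 1) |> E.O.toResult("error"))
// |> E.R.toOption;  // => Some(43)
//
// Error("render failed")
// |> E.R.bind(_, v => Some(v + 1) |> E.O.toResult("error"))
// |> E.R.toOption;  // => None
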