Merge pull request #60 from foretold-app/epic-refactor

Expression Tree Epic Refactor #1
This commit is contained in:
Ozzie Gooen 2020-07-09 00:12:36 +01:00 committed by GitHub
commit f9fa76245e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 1750 additions and 1809 deletions

View File

@ -3,413 +3,413 @@ open Expect;
let shape: DistTypes.xyShape = {xs: [|1., 4., 8.|], ys: [|8., 9., 2.|]};
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () =>
expect(item1) |> toEqual(item2)
)
: test(str, () =>
expect(item1) |> toEqual(item2)
);
// let makeTest = (~only=false, str, item1, item2) =>
// only
// ? Only.test(str, () =>
// expect(item1) |> toEqual(item2)
// )
// : test(str, () =>
// expect(item1) |> toEqual(item2)
// );
let makeTestCloseEquality = (~only=false, str, item1, item2, ~digits) =>
only
? Only.test(str, () =>
expect(item1) |> toBeSoCloseTo(item2, ~digits)
)
: test(str, () =>
expect(item1) |> toBeSoCloseTo(item2, ~digits)
);
// let makeTestCloseEquality = (~only=false, str, item1, item2, ~digits) =>
// only
// ? Only.test(str, () =>
// expect(item1) |> toBeSoCloseTo(item2, ~digits)
// )
// : test(str, () =>
// expect(item1) |> toBeSoCloseTo(item2, ~digits)
// );
describe("Shape", () => {
describe("Continuous", () => {
open Distributions.Continuous;
let continuous = make(`Linear, shape, None);
makeTest("minX", T.minX(continuous), 1.0);
makeTest("maxX", T.maxX(continuous), 8.0);
makeTest(
"mapY",
T.mapY(r => r *. 2.0, continuous) |> getShape |> (r => r.ys),
[|16., 18.0, 4.0|],
);
describe("xToY", () => {
describe("when Linear", () => {
makeTest(
"at 4.0",
T.xToY(4., continuous),
{continuous: 9.0, discrete: 0.0},
);
// Note: This below is weird to me, I'm not sure if it's what we want really.
makeTest(
"at 0.0",
T.xToY(0., continuous),
{continuous: 8.0, discrete: 0.0},
);
makeTest(
"at 5.0",
T.xToY(5., continuous),
{continuous: 7.25, discrete: 0.0},
);
makeTest(
"at 10.0",
T.xToY(10., continuous),
{continuous: 2.0, discrete: 0.0},
);
});
describe("when Stepwise", () => {
let continuous = make(`Stepwise, shape, None);
makeTest(
"at 4.0",
T.xToY(4., continuous),
{continuous: 9.0, discrete: 0.0},
);
makeTest(
"at 0.0",
T.xToY(0., continuous),
{continuous: 0.0, discrete: 0.0},
);
makeTest(
"at 5.0",
T.xToY(5., continuous),
{continuous: 9.0, discrete: 0.0},
);
makeTest(
"at 10.0",
T.xToY(10., continuous),
{continuous: 2.0, discrete: 0.0},
);
});
});
makeTest(
"integral",
T.Integral.get(~cache=None, continuous) |> getShape,
{xs: [|1.0, 4.0, 8.0|], ys: [|0.0, 25.5, 47.5|]},
);
makeTest(
"toLinear",
{
let continuous =
make(`Stepwise, {xs: [|1., 4., 8.|], ys: [|0.1, 5., 1.0|]}, None);
continuous |> toLinear |> E.O.fmap(getShape);
},
Some({
xs: [|1.00007, 1.00007, 4.0, 4.00007, 8.0, 8.00007|],
ys: [|0.0, 0.1, 0.1, 5.0, 5.0, 1.0|],
}),
);
makeTest(
"toLinear",
{
let continuous = make(`Stepwise, {xs: [|0.0|], ys: [|0.3|]}, None);
continuous |> toLinear |> E.O.fmap(getShape);
},
Some({xs: [|0.0|], ys: [|0.3|]}),
);
makeTest(
"integralXToY",
T.Integral.xToY(~cache=None, 0.0, continuous),
0.0,
);
makeTest(
"integralXToY",
T.Integral.xToY(~cache=None, 2.0, continuous),
8.5,
);
makeTest(
"integralXToY",
T.Integral.xToY(~cache=None, 100.0, continuous),
47.5,
);
makeTest(
"integralEndY",
continuous
|> T.normalize //scaleToIntegralSum(~intendedSum=1.0)
|> T.Integral.sum(~cache=None),
1.0,
);
});
// describe("Shape", () => {
// describe("Continuous", () => {
// open Continuous;
// let continuous = make(`Linear, shape, None);
// makeTest("minX", T.minX(continuous), 1.0);
// makeTest("maxX", T.maxX(continuous), 8.0);
// makeTest(
// "mapY",
// T.mapY(r => r *. 2.0, continuous) |> getShape |> (r => r.ys),
// [|16., 18.0, 4.0|],
// );
// describe("xToY", () => {
// describe("when Linear", () => {
// makeTest(
// "at 4.0",
// T.xToY(4., continuous),
// {continuous: 9.0, discrete: 0.0},
// );
// // Note: This below is weird to me, I'm not sure if it's what we want really.
// makeTest(
// "at 0.0",
// T.xToY(0., continuous),
// {continuous: 8.0, discrete: 0.0},
// );
// makeTest(
// "at 5.0",
// T.xToY(5., continuous),
// {continuous: 7.25, discrete: 0.0},
// );
// makeTest(
// "at 10.0",
// T.xToY(10., continuous),
// {continuous: 2.0, discrete: 0.0},
// );
// });
// describe("when Stepwise", () => {
// let continuous = make(`Stepwise, shape, None);
// makeTest(
// "at 4.0",
// T.xToY(4., continuous),
// {continuous: 9.0, discrete: 0.0},
// );
// makeTest(
// "at 0.0",
// T.xToY(0., continuous),
// {continuous: 0.0, discrete: 0.0},
// );
// makeTest(
// "at 5.0",
// T.xToY(5., continuous),
// {continuous: 9.0, discrete: 0.0},
// );
// makeTest(
// "at 10.0",
// T.xToY(10., continuous),
// {continuous: 2.0, discrete: 0.0},
// );
// });
// });
// makeTest(
// "integral",
// T.Integral.get(~cache=None, continuous) |> getShape,
// {xs: [|1.0, 4.0, 8.0|], ys: [|0.0, 25.5, 47.5|]},
// );
// makeTest(
// "toLinear",
// {
// let continuous =
// make(`Stepwise, {xs: [|1., 4., 8.|], ys: [|0.1, 5., 1.0|]}, None);
// continuous |> toLinear |> E.O.fmap(getShape);
// },
// Some({
// xs: [|1.00007, 1.00007, 4.0, 4.00007, 8.0, 8.00007|],
// ys: [|0.0, 0.1, 0.1, 5.0, 5.0, 1.0|],
// }),
// );
// makeTest(
// "toLinear",
// {
// let continuous = make(`Stepwise, {xs: [|0.0|], ys: [|0.3|]}, None);
// continuous |> toLinear |> E.O.fmap(getShape);
// },
// Some({xs: [|0.0|], ys: [|0.3|]}),
// );
// makeTest(
// "integralXToY",
// T.Integral.xToY(~cache=None, 0.0, continuous),
// 0.0,
// );
// makeTest(
// "integralXToY",
// T.Integral.xToY(~cache=None, 2.0, continuous),
// 8.5,
// );
// makeTest(
// "integralXToY",
// T.Integral.xToY(~cache=None, 100.0, continuous),
// 47.5,
// );
// makeTest(
// "integralEndY",
// continuous
// |> T.normalize //scaleToIntegralSum(~intendedSum=1.0)
// |> T.Integral.sum(~cache=None),
// 1.0,
// );
// });
describe("Discrete", () => {
open Distributions.Discrete;
let shape: DistTypes.xyShape = {
xs: [|1., 4., 8.|],
ys: [|0.3, 0.5, 0.2|],
};
let discrete = make(shape, None);
makeTest("minX", T.minX(discrete), 1.0);
makeTest("maxX", T.maxX(discrete), 8.0);
makeTest(
"mapY",
T.mapY(r => r *. 2.0, discrete) |> (r => getShape(r).ys),
[|0.6, 1.0, 0.4|],
);
makeTest(
"xToY at 4.0",
T.xToY(4., discrete),
{discrete: 0.5, continuous: 0.0},
);
makeTest(
"xToY at 0.0",
T.xToY(0., discrete),
{discrete: 0.0, continuous: 0.0},
);
makeTest(
"xToY at 5.0",
T.xToY(5., discrete),
{discrete: 0.0, continuous: 0.0},
);
makeTest(
"scaleBy",
scaleBy(~scale=4.0, discrete),
make({xs: [|1., 4., 8.|], ys: [|1.2, 2.0, 0.8|]}, None),
);
makeTest(
"normalize, then scale by 4.0",
discrete
|> T.normalize
|> scaleBy(~scale=4.0),
make({xs: [|1., 4., 8.|], ys: [|1.2, 2.0, 0.8|]}, None),
);
makeTest(
"scaleToIntegralSum: back and forth",
discrete
|> T.normalize
|> scaleBy(~scale=4.0)
|> T.normalize,
discrete,
);
makeTest(
"integral",
T.Integral.get(~cache=None, discrete),
Distributions.Continuous.make(
`Stepwise,
{xs: [|1., 4., 8.|], ys: [|0.3, 0.8, 1.0|]},
None
),
);
makeTest(
"integral with 1 element",
T.Integral.get(~cache=None, Distributions.Discrete.make({xs: [|0.0|], ys: [|1.0|]}, None)),
Distributions.Continuous.make(`Stepwise, {xs: [|0.0|], ys: [|1.0|]}, None),
);
makeTest(
"integralXToY",
T.Integral.xToY(~cache=None, 6.0, discrete),
0.9,
);
makeTest("integralEndY", T.Integral.sum(~cache=None, discrete), 1.0);
makeTest("mean", T.mean(discrete), 3.9);
makeTestCloseEquality(
"variance",
T.variance(discrete),
5.89,
~digits=7,
);
});
// describe("Discrete", () => {
// open Discrete;
// let shape: DistTypes.xyShape = {
// xs: [|1., 4., 8.|],
// ys: [|0.3, 0.5, 0.2|],
// };
// let discrete = make(shape, None);
// makeTest("minX", T.minX(discrete), 1.0);
// makeTest("maxX", T.maxX(discrete), 8.0);
// makeTest(
// "mapY",
// T.mapY(r => r *. 2.0, discrete) |> (r => getShape(r).ys),
// [|0.6, 1.0, 0.4|],
// );
// makeTest(
// "xToY at 4.0",
// T.xToY(4., discrete),
// {discrete: 0.5, continuous: 0.0},
// );
// makeTest(
// "xToY at 0.0",
// T.xToY(0., discrete),
// {discrete: 0.0, continuous: 0.0},
// );
// makeTest(
// "xToY at 5.0",
// T.xToY(5., discrete),
// {discrete: 0.0, continuous: 0.0},
// );
// makeTest(
// "scaleBy",
// scaleBy(~scale=4.0, discrete),
// make({xs: [|1., 4., 8.|], ys: [|1.2, 2.0, 0.8|]}, None),
// );
// makeTest(
// "normalize, then scale by 4.0",
// discrete
// |> T.normalize
// |> scaleBy(~scale=4.0),
// make({xs: [|1., 4., 8.|], ys: [|1.2, 2.0, 0.8|]}, None),
// );
// makeTest(
// "scaleToIntegralSum: back and forth",
// discrete
// |> T.normalize
// |> scaleBy(~scale=4.0)
// |> T.normalize,
// discrete,
// );
// makeTest(
// "integral",
// T.Integral.get(~cache=None, discrete),
// Continuous.make(
// `Stepwise,
// {xs: [|1., 4., 8.|], ys: [|0.3, 0.8, 1.0|]},
// None
// ),
// );
// makeTest(
// "integral with 1 element",
// T.Integral.get(~cache=None, Discrete.make({xs: [|0.0|], ys: [|1.0|]}, None)),
// Continuous.make(`Stepwise, {xs: [|0.0|], ys: [|1.0|]}, None),
// );
// makeTest(
// "integralXToY",
// T.Integral.xToY(~cache=None, 6.0, discrete),
// 0.9,
// );
// makeTest("integralEndY", T.Integral.sum(~cache=None, discrete), 1.0);
// makeTest("mean", T.mean(discrete), 3.9);
// makeTestCloseEquality(
// "variance",
// T.variance(discrete),
// 5.89,
// ~digits=7,
// );
// });
describe("Mixed", () => {
open Distributions.Mixed;
let discreteShape: DistTypes.xyShape = {
xs: [|1., 4., 8.|],
ys: [|0.3, 0.5, 0.2|],
};
let discrete = Distributions.Discrete.make(discreteShape, None);
let continuous =
Distributions.Continuous.make(
`Linear,
{xs: [|3., 7., 14.|], ys: [|0.058, 0.082, 0.124|]},
None
)
|> Distributions.Continuous.T.normalize; //scaleToIntegralSum(~intendedSum=1.0);
let mixed = Distributions.Mixed.make(
~continuous,
~discrete,
);
makeTest("minX", T.minX(mixed), 1.0);
makeTest("maxX", T.maxX(mixed), 14.0);
makeTest(
"mapY",
T.mapY(r => r *. 2.0, mixed),
Distributions.Mixed.make(
~continuous=
Distributions.Continuous.make(
`Linear,
{
xs: [|3., 7., 14.|],
ys: [|
0.11588411588411589,
0.16383616383616384,
0.24775224775224775,
|],
},
None
),
~discrete=Distributions.Discrete.make({xs: [|1., 4., 8.|], ys: [|0.6, 1.0, 0.4|]}, None)
),
);
makeTest(
"xToY at 4.0",
T.xToY(4., mixed),
{discrete: 0.25, continuous: 0.03196803196803197},
);
makeTest(
"xToY at 0.0",
T.xToY(0., mixed),
{discrete: 0.0, continuous: 0.028971028971028972},
);
makeTest(
"xToY at 5.0",
T.xToY(7., mixed),
{discrete: 0.0, continuous: 0.04095904095904096},
);
makeTest("integralEndY", T.Integral.sum(~cache=None, mixed), 1.0);
makeTest(
"scaleBy",
Distributions.Mixed.scaleBy(~scale=2.0, mixed),
Distributions.Mixed.make(
~continuous=
Distributions.Continuous.make(
`Linear,
{
xs: [|3., 7., 14.|],
ys: [|
0.11588411588411589,
0.16383616383616384,
0.24775224775224775,
|],
},
None
),
~discrete=Distributions.Discrete.make({xs: [|1., 4., 8.|], ys: [|0.6, 1.0, 0.4|]}, None),
),
);
makeTest(
"integral",
T.Integral.get(~cache=None, mixed),
Distributions.Continuous.make(
`Linear,
{
xs: [|1.00007, 1.00007, 3., 4., 4.00007, 7., 8., 8.00007, 14.|],
ys: [|
0.0,
0.0,
0.15,
0.18496503496503497,
0.4349674825174825,
0.5398601398601399,
0.5913086913086913,
0.6913122927072927,
1.0,
|],
},
None,
),
);
});
// describe("Mixed", () => {
// open Distributions.Mixed;
// let discreteShape: DistTypes.xyShape = {
// xs: [|1., 4., 8.|],
// ys: [|0.3, 0.5, 0.2|],
// };
// let discrete = Discrete.make(discreteShape, None);
// let continuous =
// Continuous.make(
// `Linear,
// {xs: [|3., 7., 14.|], ys: [|0.058, 0.082, 0.124|]},
// None
// )
// |> Continuous.T.normalize; //scaleToIntegralSum(~intendedSum=1.0);
// let mixed = Mixed.make(
// ~continuous,
// ~discrete,
// );
// makeTest("minX", T.minX(mixed), 1.0);
// makeTest("maxX", T.maxX(mixed), 14.0);
// makeTest(
// "mapY",
// T.mapY(r => r *. 2.0, mixed),
// Mixed.make(
// ~continuous=
// Continuous.make(
// `Linear,
// {
// xs: [|3., 7., 14.|],
// ys: [|
// 0.11588411588411589,
// 0.16383616383616384,
// 0.24775224775224775,
// |],
// },
// None
// ),
// ~discrete=Discrete.make({xs: [|1., 4., 8.|], ys: [|0.6, 1.0, 0.4|]}, None)
// ),
// );
// makeTest(
// "xToY at 4.0",
// T.xToY(4., mixed),
// {discrete: 0.25, continuous: 0.03196803196803197},
// );
// makeTest(
// "xToY at 0.0",
// T.xToY(0., mixed),
// {discrete: 0.0, continuous: 0.028971028971028972},
// );
// makeTest(
// "xToY at 5.0",
// T.xToY(7., mixed),
// {discrete: 0.0, continuous: 0.04095904095904096},
// );
// makeTest("integralEndY", T.Integral.sum(~cache=None, mixed), 1.0);
// makeTest(
// "scaleBy",
// Mixed.scaleBy(~scale=2.0, mixed),
// Mixed.make(
// ~continuous=
// Continuous.make(
// `Linear,
// {
// xs: [|3., 7., 14.|],
// ys: [|
// 0.11588411588411589,
// 0.16383616383616384,
// 0.24775224775224775,
// |],
// },
// None
// ),
// ~discrete=Discrete.make({xs: [|1., 4., 8.|], ys: [|0.6, 1.0, 0.4|]}, None),
// ),
// );
// makeTest(
// "integral",
// T.Integral.get(~cache=None, mixed),
// Continuous.make(
// `Linear,
// {
// xs: [|1.00007, 1.00007, 3., 4., 4.00007, 7., 8., 8.00007, 14.|],
// ys: [|
// 0.0,
// 0.0,
// 0.15,
// 0.18496503496503497,
// 0.4349674825174825,
// 0.5398601398601399,
// 0.5913086913086913,
// 0.6913122927072927,
// 1.0,
// |],
// },
// None,
// ),
// );
// });
describe("Distplus", () => {
open Distributions.DistPlus;
let discreteShape: DistTypes.xyShape = {
xs: [|1., 4., 8.|],
ys: [|0.3, 0.5, 0.2|],
};
let discrete = Distributions.Discrete.make(discreteShape, None);
let continuous =
Distributions.Continuous.make(
`Linear,
{xs: [|3., 7., 14.|], ys: [|0.058, 0.082, 0.124|]},
None
)
|> Distributions.Continuous.T.normalize; //scaleToIntegralSum(~intendedSum=1.0);
let mixed =
Distributions.Mixed.make(
~continuous,
~discrete,
);
let distPlus =
Distributions.DistPlus.make(
~shape=Mixed(mixed),
~guesstimatorString=None,
(),
);
makeTest("minX", T.minX(distPlus), 1.0);
makeTest("maxX", T.maxX(distPlus), 14.0);
makeTest(
"xToY at 4.0",
T.xToY(4., distPlus),
{discrete: 0.25, continuous: 0.03196803196803197},
);
makeTest(
"xToY at 0.0",
T.xToY(0., distPlus),
{discrete: 0.0, continuous: 0.028971028971028972},
);
makeTest(
"xToY at 5.0",
T.xToY(7., distPlus),
{discrete: 0.0, continuous: 0.04095904095904096},
);
makeTest("integralEndY", T.Integral.sum(~cache=None, distPlus), 1.0);
makeTest(
"integral",
T.Integral.get(~cache=None, distPlus) |> T.toContinuous,
Some(
Distributions.Continuous.make(
`Linear,
{
xs: [|1.00007, 1.00007, 3., 4., 4.00007, 7., 8., 8.00007, 14.|],
ys: [|
0.0,
0.0,
0.15,
0.18496503496503497,
0.4349674825174825,
0.5398601398601399,
0.5913086913086913,
0.6913122927072927,
1.0,
|],
},
None,
),
),
);
});
// describe("Distplus", () => {
// open DistPlus;
// let discreteShape: DistTypes.xyShape = {
// xs: [|1., 4., 8.|],
// ys: [|0.3, 0.5, 0.2|],
// };
// let discrete = Discrete.make(discreteShape, None);
// let continuous =
// Continuous.make(
// `Linear,
// {xs: [|3., 7., 14.|], ys: [|0.058, 0.082, 0.124|]},
// None
// )
// |> Continuous.T.normalize; //scaleToIntegralSum(~intendedSum=1.0);
// let mixed =
// Mixed.make(
// ~continuous,
// ~discrete,
// );
// let distPlus =
// DistPlus.make(
// ~shape=Mixed(mixed),
// ~guesstimatorString=None,
// (),
// );
// makeTest("minX", T.minX(distPlus), 1.0);
// makeTest("maxX", T.maxX(distPlus), 14.0);
// makeTest(
// "xToY at 4.0",
// T.xToY(4., distPlus),
// {discrete: 0.25, continuous: 0.03196803196803197},
// );
// makeTest(
// "xToY at 0.0",
// T.xToY(0., distPlus),
// {discrete: 0.0, continuous: 0.028971028971028972},
// );
// makeTest(
// "xToY at 5.0",
// T.xToY(7., distPlus),
// {discrete: 0.0, continuous: 0.04095904095904096},
// );
// makeTest("integralEndY", T.Integral.sum(~cache=None, distPlus), 1.0);
// makeTest(
// "integral",
// T.Integral.get(~cache=None, distPlus) |> T.toContinuous,
// Some(
// Continuous.make(
// `Linear,
// {
// xs: [|1.00007, 1.00007, 3., 4., 4.00007, 7., 8., 8.00007, 14.|],
// ys: [|
// 0.0,
// 0.0,
// 0.15,
// 0.18496503496503497,
// 0.4349674825174825,
// 0.5398601398601399,
// 0.5913086913086913,
// 0.6913122927072927,
// 1.0,
// |],
// },
// None,
// ),
// ),
// );
// });
describe("Shape", () => {
let mean = 10.0;
let stdev = 4.0;
let variance = stdev ** 2.0;
let numSamples = 10000;
open Distributions.Shape;
let normal: SymbolicTypes.symbolicDist = `Normal({mean, stdev});
let normalShape = ExpressionTree.toShape(numSamples, `SymbolicDist(normal));
let lognormal = SymbolicDist.Lognormal.fromMeanAndStdev(mean, stdev);
let lognormalShape = ExpressionTree.toShape(numSamples, `SymbolicDist(lognormal));
// describe("Shape", () => {
// let mean = 10.0;
// let stdev = 4.0;
// let variance = stdev ** 2.0;
// let numSamples = 10000;
// open Distributions.Shape;
// let normal: SymbolicTypes.symbolicDist = `Normal({mean, stdev});
// let normalShape = ExpressionTree.toShape(numSamples, `SymbolicDist(normal));
// let lognormal = SymbolicDist.Lognormal.fromMeanAndStdev(mean, stdev);
// let lognormalShape = ExpressionTree.toShape(numSamples, `SymbolicDist(lognormal));
makeTestCloseEquality(
"Mean of a normal",
T.mean(normalShape),
mean,
~digits=2,
);
makeTestCloseEquality(
"Variance of a normal",
T.variance(normalShape),
variance,
~digits=1,
);
makeTestCloseEquality(
"Mean of a lognormal",
T.mean(lognormalShape),
mean,
~digits=2,
);
makeTestCloseEquality(
"Variance of a lognormal",
T.variance(lognormalShape),
variance,
~digits=0,
);
});
});
// makeTestCloseEquality(
// "Mean of a normal",
// T.mean(normalShape),
// mean,
// ~digits=2,
// );
// makeTestCloseEquality(
// "Variance of a normal",
// T.variance(normalShape),
// variance,
// ~digits=1,
// );
// makeTestCloseEquality(
// "Mean of a lognormal",
// T.mean(lognormalShape),
// mean,
// ~digits=2,
// );
// makeTestCloseEquality(
// "Variance of a lognormal",
// T.variance(lognormalShape),
// variance,
// ~digits=0,
// );
// });
// });

View File

@ -1 +1 @@
let entries = EntryTypes.[Continuous.entry,ExpressionTreeExamples.entry];
let entries = EntryTypes.[Continuous2.entry,ExpressionTreeExamples.entry];

View File

@ -41,17 +41,17 @@ module DemoDist = {
? "Nothing to show" |> R.ste
: {
let distPlus =
Distributions.DistPlus.make(
DistPlus.make(
~shape=
Continuous(
Distributions.Continuous.make(`Linear, {xs, ys}, None),
Continuous.make(`Linear, {xs, ys}, None),
),
~domain=Complete,
~unit=UnspecifiedDistribution,
~guesstimatorString=None,
(),
)
|> Distributions.DistPlus.T.normalize;
|> DistPlus.T.normalize;
<DistPlusPlot distPlus />;
};
<Antd.Card title={"Distribution" |> R.ste}>

View File

@ -51,14 +51,14 @@ module DemoDist = {
shape
|> E.O.fmap(shape => {
let distPlus =
Distributions.DistPlus.make(
DistPlus.make(
~shape,
~domain=Complete,
~unit=UnspecifiedDistribution,
~guesstimatorString=None,
(),
)
|> Distributions.DistPlus.T.normalize;
|> DistPlus.T.normalize;
<DistPlusPlot distPlus />;
})
|> E.O.default(ReasonReact.null);

View File

@ -291,8 +291,8 @@ module Draw = {
/*
let continuousShape =
Convert.canvasShapeToContinuousShape(~canvasShape, ~canvasElement);
let mean = Distributions.Continuous.T.mean(continuousShape);
let variance = Distributions.Continuous.T.variance(continuousShape);
let mean = Continuous.T.mean(continuousShape);
let variance = Continuous.T.variance(continuousShape);
let meanLocation =
Convert.findClosestInOrderedArrayDangerously(mean, canvasShape.xValues);
let meanLocationCanvasX = canvasShape.ws[meanLocation];
@ -394,7 +394,7 @@ module Draw = {
switch (normalShape) {
| Mixed(_) => {xs: [||], ys: [||]}
| Discrete(_) => {xs: [||], ys: [||]}
| Continuous(m) => Distributions.Continuous.getShape(m)
| Continuous(m) => Continuous.getShape(m)
};
/* // To use a lognormal instead:
@ -405,7 +405,7 @@ module Draw = {
switch (lognormalShape) {
| Mixed(_) => {xs: [||], ys: [||]}
| Discrete(_) => {xs: [||], ys: [||]}
| Continuous(m) => Distributions.Continuous.getShape(m)
| Continuous(m) => Continuous.getShape(m)
};
*/
@ -669,11 +669,11 @@ module State = {
/* create a cdf from a pdf */
let _pdf =
Distributions.Continuous.T.normalize(
Continuous.T.normalize(
pdf,
);
let cdf = Distributions.Continuous.T.integral(~cache=None, _pdf);
let cdf = Continuous.T.integral(~cache=None, _pdf);
let xs = [||];
let ys = [||];
for (i in 1 to 999) {

View File

@ -37,27 +37,27 @@ let table = (distPlus, x) => {
</td>
<td className="px-4 py-2 border ">
{distPlus
|> Distributions.DistPlus.T.xToY(x)
|> DistPlus.T.xToY(x)
|> DistTypes.MixedPoint.toDiscreteValue
|> Js.Float.toPrecisionWithPrecision(_, ~digits=7)
|> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> Distributions.DistPlus.T.xToY(x)
|> DistPlus.T.xToY(x)
|> DistTypes.MixedPoint.toContinuousValue
|> Js.Float.toPrecisionWithPrecision(_, ~digits=7)
|> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> Distributions.DistPlus.T.Integral.xToY(~cache=None, x)
|> DistPlus.T.Integral.xToY(~cache=None, x)
|> E.Float.with2DigitsPrecision
|> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> Distributions.DistPlus.T.Integral.sum(~cache=None)
|> DistPlus.T.Integral.sum(~cache=None)
|> E.Float.with2DigitsPrecision
|> ReasonReact.string}
</td>
@ -85,9 +85,9 @@ let table = (distPlus, x) => {
<tr>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.toContinuous
|> DistPlus.T.toContinuous
|> E.O.fmap(
Distributions.Continuous.T.Integral.sum(~cache=None),
Continuous.T.Integral.sum(~cache=None),
)
|> E.O.fmap(E.Float.with2DigitsPrecision)
|> E.O.default("")
@ -95,9 +95,9 @@ let table = (distPlus, x) => {
</td>
<td className="px-4 py-2 border ">
{distPlus
|> Distributions.DistPlus.T.normalizedToContinuous
|> DistPlus.T.normalizedToContinuous
|> E.O.fmap(
Distributions.Continuous.T.Integral.sum(~cache=None),
Continuous.T.Integral.sum(~cache=None),
)
|> E.O.fmap(E.Float.with2DigitsPrecision)
|> E.O.default("")
@ -105,16 +105,16 @@ let table = (distPlus, x) => {
</td>
<td className="px-4 py-2 border ">
{distPlus
|> Distributions.DistPlus.T.toDiscrete
|> E.O.fmap(Distributions.Discrete.T.Integral.sum(~cache=None))
|> DistPlus.T.toDiscrete
|> E.O.fmap(Discrete.T.Integral.sum(~cache=None))
|> E.O.fmap(E.Float.with2DigitsPrecision)
|> E.O.default("")
|> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> Distributions.DistPlus.T.normalizedToDiscrete
|> E.O.fmap(Distributions.Discrete.T.Integral.sum(~cache=None))
|> DistPlus.T.normalizedToDiscrete
|> E.O.fmap(Discrete.T.Integral.sum(~cache=None))
|> E.O.fmap(E.Float.with2DigitsPrecision)
|> E.O.default("")
|> ReasonReact.string}
@ -143,42 +143,42 @@ let percentiles = distPlus => {
<tr>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.01)
|> DistPlus.T.Integral.yToX(~cache=None, 0.01)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.05)
|> DistPlus.T.Integral.yToX(~cache=None, 0.05)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.25)
|> DistPlus.T.Integral.yToX(~cache=None, 0.25)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.5)
|> DistPlus.T.Integral.yToX(~cache=None, 0.5)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.75)
|> DistPlus.T.Integral.yToX(~cache=None, 0.75)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.95)
|> DistPlus.T.Integral.yToX(~cache=None, 0.95)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.99)
|> DistPlus.T.Integral.yToX(~cache=None, 0.99)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.99999)
|> DistPlus.T.Integral.yToX(~cache=None, 0.99999)
|> showFloat}
</td>
</tr>
@ -197,13 +197,13 @@ let percentiles = distPlus => {
<tbody>
<tr>
<td className="px-4 py-2 border">
{distPlus |> Distributions.DistPlus.T.mean |> showFloat}
{distPlus |> DistPlus.T.mean |> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus |> Distributions.DistPlus.T.variance |> (r => r ** 0.5) |> showFloat}
{distPlus |> DistPlus.T.variance |> (r => r ** 0.5) |> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus |> Distributions.DistPlus.T.variance |> showFloat}
{distPlus |> DistPlus.T.variance |> showFloat}
</td>
</tr>
</tbody>
@ -224,19 +224,19 @@ let adjustBoth = discreteProbabilityMassFraction => {
module DistPlusChart = {
[@react.component]
let make = (~distPlus: DistTypes.distPlus, ~config: chartConfig, ~onHover) => {
open Distributions.DistPlus;
let discrete = distPlus |> T.normalizedToDiscrete |> E.O.fmap(Distributions.Discrete.getShape);
open DistPlus;
let discrete = distPlus |> T.normalizedToDiscrete |> E.O.fmap(Discrete.getShape);
let continuous =
distPlus
|> T.normalizedToContinuous
|> E.O.fmap(Distributions.Continuous.getShape);
|> E.O.fmap(Continuous.getShape);
let range = T.xTotalRange(distPlus);
// // We subtract a bit from the range to make sure that it fits. Maybe this should be done in d3 instead.
// let minX =
// switch (
// distPlus
// |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.0001),
// |> DistPlus.T.Integral.yToX(~cache=None, 0.0001),
// range,
// ) {
// | (min, Some(range)) => Some(min -. range *. 0.001)
@ -244,16 +244,16 @@ module DistPlusChart = {
// };
let minX = {
distPlus |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.00001);
distPlus |> DistPlus.T.Integral.yToX(~cache=None, 0.00001);
};
let maxX = {
distPlus |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.99);
distPlus |> DistPlus.T.Integral.yToX(~cache=None, 0.99);
};
let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
let discreteProbabilityMassFraction =
distPlus |> Distributions.DistPlus.T.toDiscreteProbabilityMassFraction;
distPlus |> DistPlus.T.toDiscreteProbabilityMassFraction;
let (yMaxDiscreteDomainFactor, yMaxContinuousDomainFactor) =
adjustBoth(discreteProbabilityMassFraction);
<DistributionPlot
@ -276,18 +276,18 @@ module DistPlusChart = {
module IntegralChart = {
[@react.component]
let make = (~distPlus: DistTypes.distPlus, ~config: chartConfig, ~onHover) => {
open Distributions.DistPlus;
open DistPlus;
let integral = distPlus.integralCache;
let continuous =
integral
|> Distributions.Continuous.toLinear
|> E.O.fmap(Distributions.Continuous.getShape);
|> Continuous.toLinear
|> E.O.fmap(Continuous.getShape);
let minX = {
distPlus |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.00001);
distPlus |> DistPlus.T.Integral.yToX(~cache=None, 0.00001);
};
let maxX = {
distPlus |> Distributions.DistPlus.T.Integral.yToX(~cache=None, 0.99);
distPlus |> DistPlus.T.Integral.yToX(~cache=None, 0.99);
};
let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
<DistributionPlot

View File

@ -0,0 +1,275 @@
open Distributions;
type t = DistTypes.continuousShape;
let getShape = (t: t) => t.xyShape;
let interpolation = (t: t) => t.interpolation;
let make = (interpolation, xyShape, knownIntegralSum): t => {
xyShape,
interpolation,
knownIntegralSum,
};
let shapeMap = (fn, {xyShape, interpolation, knownIntegralSum}: t): t => {
xyShape: fn(xyShape),
interpolation,
knownIntegralSum,
};
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY;
let oShapeMap =
(fn, {xyShape, interpolation, knownIntegralSum}: t)
: option(DistTypes.continuousShape) =>
fn(xyShape) |> E.O.fmap(make(interpolation, _, knownIntegralSum));
let empty: DistTypes.continuousShape = {
xyShape: XYShape.T.empty,
interpolation: `Linear,
knownIntegralSum: Some(0.0),
};
let combinePointwise =
(
~knownIntegralSumsFn,
fn: (float, float) => float,
t1: DistTypes.continuousShape,
t2: DistTypes.continuousShape,
)
: DistTypes.continuousShape => {
// If we're adding the distributions, and we know the total of each, then we
// can just sum them up. Otherwise, all bets are off.
let combinedIntegralSum =
Common.combineIntegralSums(
knownIntegralSumsFn,
t1.knownIntegralSum,
t2.knownIntegralSum,
);
make(
`Linear,
XYShape.PointwiseCombination.combineLinear(
~fn=(+.),
t1.xyShape,
t2.xyShape,
),
combinedIntegralSum,
);
};
let toLinear = (t: t): option(t) => {
switch (t) {
| {interpolation: `Stepwise, xyShape, knownIntegralSum} =>
xyShape
|> XYShape.Range.stepsToContinuous
|> E.O.fmap(make(`Linear, _, knownIntegralSum))
| {interpolation: `Linear} => Some(t)
};
};
let shapeFn = (fn, t: t) => t |> getShape |> fn;
let updateKnownIntegralSum = (knownIntegralSum, t: t): t => {
...t,
knownIntegralSum,
};
let reduce =
(
~knownIntegralSumsFn: (float, float) => option(float)=(_, _) => None,
fn,
continuousShapes,
) =>
continuousShapes
|> E.A.fold_left(combinePointwise(~knownIntegralSumsFn, fn), empty);
let mapY = (~knownIntegralSumFn=_ => None, fn, t: t) => {
let u = E.O.bind(_, knownIntegralSumFn);
let yMapFn = shapeMap(XYShape.T.mapY(fn));
t |> yMapFn |> updateKnownIntegralSum(u(t.knownIntegralSum));
};
let scaleBy = (~scale=1.0, t: t): t => {
t
|> mapY((r: float) => r *. scale)
|> updateKnownIntegralSum(
E.O.bind(t.knownIntegralSum, v => Some(scale *. v)),
);
};
module T =
Dist({
type t = DistTypes.continuousShape;
type integral = DistTypes.continuousShape;
let minX = shapeFn(XYShape.T.minX);
let maxX = shapeFn(XYShape.T.maxX);
let mapY = mapY;
let toDiscreteProbabilityMassFraction = _ => 0.0;
let toShape = (t: t): DistTypes.shape => Continuous(t);
let xToY = (f, {interpolation, xyShape}: t) => {
(
switch (interpolation) {
| `Stepwise =>
xyShape |> XYShape.XtoY.stepwiseIncremental(f) |> E.O.default(0.0)
| `Linear => xyShape |> XYShape.XtoY.linear(f)
}
)
|> DistTypes.MixedPoint.makeContinuous;
};
let truncate =
(leftCutoff: option(float), rightCutoff: option(float), t: t) => {
let lc = E.O.default(neg_infinity, leftCutoff);
let rc = E.O.default(infinity, rightCutoff);
let truncatedZippedPairs =
t
|> getShape
|> XYShape.T.zip
|> XYShape.Zipped.filterByX(x => x >= lc && x <= rc);
let eps = (t |> getShape |> XYShape.T.xTotalRange) *. 0.0001;
let leftNewPoint =
leftCutoff |> E.O.dimap(lc => [|(lc -. eps, 0.)|], _ => [||]);
let rightNewPoint =
rightCutoff |> E.O.dimap(rc => [|(rc +. eps, 0.)|], _ => [||]);
let truncatedZippedPairsWithNewPoints =
E.A.concatMany([|leftNewPoint, truncatedZippedPairs, rightNewPoint|]);
let truncatedShape =
XYShape.T.fromZippedArray(truncatedZippedPairsWithNewPoints);
make(`Linear, truncatedShape, None);
};
// TODO: This should work with stepwise plots.
let integral = (~cache, t) =>
if (t |> getShape |> XYShape.T.length > 0) {
switch (cache) {
| Some(cache) => cache
| None =>
t
|> getShape
|> XYShape.Range.integrateWithTriangles
|> E.O.toExt("This should not have happened")
|> make(`Linear, _, None)
};
} else {
make(`Linear, {xs: [|neg_infinity|], ys: [|0.0|]}, None);
};
let downsample = (~cache=None, length, t): t =>
t
|> shapeMap(
XYShape.XsConversion.proportionByProbabilityMass(
length,
integral(~cache, t).xyShape,
),
);
let integralEndY = (~cache, t: t) =>
t.knownIntegralSum |> E.O.default(t |> integral(~cache) |> lastY);
let integralXtoY = (~cache, f, t: t) =>
t |> integral(~cache) |> shapeFn(XYShape.XtoY.linear(f));
let integralYtoX = (~cache, f, t: t) =>
t |> integral(~cache) |> shapeFn(XYShape.YtoX.linear(f));
let toContinuous = t => Some(t);
let toDiscrete = _ => None;
let normalize = (t: t): t => {
t
|> scaleBy(~scale=1. /. integralEndY(~cache=None, t))
|> updateKnownIntegralSum(Some(1.0));
};
let normalizedToContinuous = t => Some(t |> normalize);
let normalizedToDiscrete = _ => None;
let mean = (t: t) => {
let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0;
let indefiniteIntegralLinear = (p, a, b) =>
a *. p ** 2.0 /. 2.0 +. b *. p ** 3.0 /. 3.0;
XYShape.Analysis.integrateContinuousShape(
~indefiniteIntegralStepwise,
~indefiniteIntegralLinear,
t,
);
};
let variance = (t: t): float =>
XYShape.Analysis.getVarianceDangerously(
t,
mean,
XYShape.Analysis.getMeanOfSquaresContinuousShape,
);
});
/* This simply creates multiple copies of the continuous distribution, scaled and shifted according to
each discrete data point, and then adds them all together. */
// NOTE(review): the ~downsample flag is accepted but never read in this
// function — confirm whether it was meant to downsample t1/t2 first.
let combineAlgebraicallyWithDiscrete =
(
~downsample=false,
op: ExpressionTypes.algebraicOperation,
t1: t,
t2: DistTypes.discreteShape,
) => {
let t1s = t1 |> getShape;
let t2s = t2.xyShape; // would like to use Discrete.getShape here, but current file structure doesn't allow for that
let t1n = t1s |> XYShape.T.length;
let t2n = t2s |> XYShape.T.length;
// fn combines one continuous x with one discrete x (e.g. +, -, *, /).
let fn = Operation.Algebraic.toFn(op);
let outXYShapes: array(array((float, float))) =
Belt.Array.makeUninitializedUnsafe(t2n);
for (j in 0 to t2n - 1) {
// for each one of the discrete points
// create a new distribution, as long as the original continuous one
let dxyShape: array((float, float)) =
Belt.Array.makeUninitializedUnsafe(t1n);
for (i in 0 to t1n - 1) {
// Shift each continuous x by the discrete x; weight each y by the
// discrete point's mass.
let _ =
Belt.Array.set(
dxyShape,
i,
(fn(t1s.xs[i], t2s.xs[j]), t1s.ys[i] *. t2s.ys[j]),
);
();
};
let _ = Belt.Array.set(outXYShapes, j, dxyShape);
();
};
// Integral sums multiply under convolution (when both are known).
let combinedIntegralSum =
Common.combineIntegralSums(
(a, b) => Some(a *. b),
t1.knownIntegralSum,
t2.knownIntegralSum,
);
outXYShapes
|> E.A.fmap(s => {
let xyShape = XYShape.T.fromZippedArray(s);
make(`Linear, xyShape, None);
})
|> reduce((+.))
|> updateKnownIntegralSum(combinedIntegralSum);
};
// Algebraic combination (convolution-style) of two continuous shapes.
// NOTE(review): the ~downsample flag is accepted but never read here —
// confirm whether downsampling was meant to happen before combining.
let combineAlgebraically =
(~downsample=false, op: ExpressionTypes.algebraicOperation, t1: t, t2: t) => {
let s1 = t1 |> getShape;
let s2 = t2 |> getShape;
let t1n = s1 |> XYShape.T.length;
let t2n = s2 |> XYShape.T.length;
// Combining with an empty shape yields the empty shape.
if (t1n == 0 || t2n == 0) {
empty;
} else {
let combinedShape =
AlgebraicShapeCombination.combineShapesContinuousContinuous(op, s1, s2);
// Integral sums multiply under convolution (when both are known).
let combinedIntegralSum =
Common.combineIntegralSums(
(a, b) => Some(a *. b),
t1.knownIntegralSum,
t2.knownIntegralSum,
);
// return a new Continuous distribution
make(`Linear, combinedShape, combinedIntegralSum);
};
};

View File

@ -0,0 +1,210 @@
open Distributions;
// A discrete distribution: point masses at each x, with an optional cached
// total-mass value (knownIntegralSum).
type t = DistTypes.discreteShape;
let make = (xyShape, knownIntegralSum): t => {xyShape, knownIntegralSum};
// Apply fn to the xyShape, preserving the cached integral sum.
// NOTE: only safe when fn does not change total mass.
let shapeMap = (fn, {xyShape, knownIntegralSum}: t): t => {
xyShape: fn(xyShape),
knownIntegralSum,
};
let getShape = (t: t) => t.xyShape;
// Like shapeMap, but for fns that may fail (return option).
let oShapeMap = (fn, {xyShape, knownIntegralSum}: t): option(t) =>
fn(xyShape) |> E.O.fmap(make(_, knownIntegralSum));
// The empty distribution has zero mass by construction.
let empty: t = {xyShape: XYShape.T.empty, knownIntegralSum: Some(0.0)};
let shapeFn = (fn, t: t) => t |> getShape |> fn;
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY;
// Pointwise combination of two discrete shapes: ys are combined with fn at
// each x present in either shape (missing side contributes 0).
// ~knownIntegralSumsFn combines the two cached sums when both are known
// (e.g. addition of sums for pointwise (+.)); returning None discards them.
let combinePointwise =
(
~knownIntegralSumsFn,
fn,
t1: DistTypes.discreteShape,
t2: DistTypes.discreteShape,
)
: DistTypes.discreteShape => {
let combinedIntegralSum =
Common.combineIntegralSums(
knownIntegralSumsFn,
t1.knownIntegralSum,
t2.knownIntegralSum,
);
make(
XYShape.PointwiseCombination.combine(
~xsSelection=ALL_XS,
~xToYSelection=XYShape.XtoY.stepwiseIfAtX,
~fn=(a, b) => fn(E.O.default(0.0, a), E.O.default(0.0, b)), // stepwiseIfAtX returns option(float), so this fn needs to handle None
t1.xyShape,
t2.xyShape,
),
combinedIntegralSum,
);
};
// Fold combinePointwise over an array of shapes, starting from empty.
let reduce =
(~knownIntegralSumsFn=(_, _) => None, fn, discreteShapes)
: DistTypes.discreteShape =>
discreteShapes
|> E.A.fold_left(combinePointwise(~knownIntegralSumsFn, fn), empty);
// Replace the cached total-mass value.
let updateKnownIntegralSum = (knownIntegralSum, t: t): t => {
...t,
knownIntegralSum,
};
/* This multiplies all of the data points together and creates a new discrete distribution from the results.
Data points at the same xs get added together. It may be a good idea to downsample t1 and t2 before and/or the result after. */
let combineAlgebraically =
(op: ExpressionTypes.algebraicOperation, t1: t, t2: t) => {
let t1s = t1 |> getShape;
let t2s = t2 |> getShape;
let t1n = t1s |> XYShape.T.length;
let t2n = t2s |> XYShape.T.length;
// Integral sums multiply under convolution (when both are known).
let combinedIntegralSum =
Common.combineIntegralSums(
(s1, s2) => Some(s1 *. s2),
t1.knownIntegralSum,
t2.knownIntegralSum,
);
let fn = Operation.Algebraic.toFn(op);
// Accumulate mass per combined x in a mutable map, so that collisions
// (different (i, j) pairs mapping to the same x) sum their masses.
let xToYMap = E.FloatFloatMap.empty();
for (i in 0 to t1n - 1) {
for (j in 0 to t2n - 1) {
let x = fn(t1s.xs[i], t2s.xs[j]);
let cv = xToYMap |> E.FloatFloatMap.get(x) |> E.O.default(0.);
let my = t1s.ys[i] *. t2s.ys[j];
let _ = Belt.MutableMap.set(xToYMap, x, cv +. my);
();
};
};
// The map's entries come out unordered; sort by x before building a shape.
let rxys = xToYMap |> E.FloatFloatMap.toArray |> XYShape.Zipped.sortByX;
let combinedShape = XYShape.T.fromZippedArray(rxys);
make(combinedShape, combinedIntegralSum);
};
// Pipe all ys through fn. ~knownIntegralSumFn may derive the new cached sum
// from the old one (only valid for linear fns); by default it's discarded.
let mapY = (~knownIntegralSumFn=previousKnownIntegralSum => None, fn, t: t) => {
let u = E.O.bind(_, knownIntegralSumFn);
let yMapFn = shapeMap(XYShape.T.mapY(fn));
t |> yMapFn |> updateKnownIntegralSum(u(t.knownIntegralSum));
};
// Multiply all ys by ~scale; the cached sum scales by the same factor.
let scaleBy = (~scale=1.0, t: t): t => {
t
|> mapY((r: float) => r *. scale)
|> updateKnownIntegralSum(
E.O.bind(t.knownIntegralSum, v => Some(scale *. v)),
);
};
// Dist interface implementation for discrete shapes.
module T =
  Dist({
    type t = DistTypes.discreteShape;
    type integral = DistTypes.continuousShape;
    // The integral of a discrete distribution is its stepwise CDF:
    // a running sum of the ys, wrapped in a `Stepwise continuous shape.
    // A supplied ~cache is trusted and returned untouched.
    let integral = (~cache, t) =>
      if (t |> getShape |> XYShape.T.length > 0) {
        switch (cache) {
        | Some(c) => c
        | None =>
          Continuous.make(
            `Stepwise,
            XYShape.T.accumulateYs((+.), getShape(t)),
            None,
          )
        };
      } else {
        // An empty shape integrates to a single zero point.
        Continuous.make(
          `Stepwise,
          {xs: [|neg_infinity|], ys: [|0.0|]},
          None,
        );
      };
    // Total mass: the cached knownIntegralSum when present, otherwise the
    // last y of the computed integral.
    let integralEndY = (~cache, t: t) =>
      t.knownIntegralSum
      |> E.O.default(t |> integral(~cache) |> Continuous.lastY);
    let minX = shapeFn(XYShape.T.minX);
    let maxX = shapeFn(XYShape.T.maxX);
    // A purely discrete shape is 100% discrete mass.
    let toDiscreteProbabilityMassFraction = _ => 1.0;
    let mapY = mapY;
    let toShape = (t: t): DistTypes.shape => Discrete(t);
    let toContinuous = _ => None;
    let toDiscrete = t => Some(t);
    // Rescale so the total mass is 1, recording that in the cached sum.
    let normalize = (t: t): t => {
      t
      |> scaleBy(~scale=1. /. integralEndY(~cache=None, t))
      |> updateKnownIntegralSum(Some(1.0));
    };
    let normalizedToContinuous = _ => None;
    let normalizedToDiscrete = t => Some(t); // TODO: this should be normalized!
    let downsample = (~cache=None, i, t: t): t => {
      // It's not clear how to downsample a set of discrete points in a meaningful way.
      // The best we can do is to clip off the smallest values.
      let currentLength = t |> getShape |> XYShape.T.length;
      if (i < currentLength && i >= 1 && currentLength > 1) {
        let clippedShape =
          t
          |> getShape
          |> XYShape.T.zip
          |> XYShape.Zipped.sortByY
          |> Belt.Array.reverse
          |> Belt.Array.slice(_, ~offset=0, ~len=i)
          |> XYShape.Zipped.sortByX
          |> XYShape.T.fromZippedArray;
        make(clippedShape, None); // if someone needs the sum, they'll have to recompute it
      } else {
        t;
      };
    };
    // Keep only the points inside [leftCutoff, rightCutoff].
    // BUG FIX: this previously used `||`, which is vacuously true for every x
    // (x >= neg_infinity always holds), so truncation never removed anything.
    // Both bounds must hold simultaneously, hence `&&`.
    let truncate =
        (leftCutoff: option(float), rightCutoff: option(float), t: t): t => {
      let truncatedShape =
        t
        |> getShape
        |> XYShape.T.zip
        |> XYShape.Zipped.filterByX(x =>
             x >= E.O.default(neg_infinity, leftCutoff)
             && x <= E.O.default(infinity, rightCutoff)
           )
        |> XYShape.T.fromZippedArray;
      make(truncatedShape, None);
    };
    // Point mass at exactly x = f (0.0 when no point sits there).
    let xToY = (f, t) =>
      t
      |> getShape
      |> XYShape.XtoY.stepwiseIfAtX(f)
      |> E.O.default(0.0)
      |> DistTypes.MixedPoint.makeDiscrete;
    let integralXtoY = (~cache, f, t) =>
      t |> integral(~cache) |> Continuous.getShape |> XYShape.XtoY.linear(f);
    let integralYtoX = (~cache, f, t) =>
      t |> integral(~cache) |> Continuous.getShape |> XYShape.YtoX.linear(f);
    // Mean of a discrete distribution: the mass-weighted sum of xs.
    let mean = (t: t): float => {
      let s = getShape(t);
      E.A.reducei(s.xs, 0.0, (acc, x, i) => acc +. x *. s.ys[i]);
    };
    // Var(X) = E[X^2] - E[X]^2, via the mean of the squared-x shape.
    let variance = (t: t): float => {
      let getMeanOfSquares = t =>
        t |> shapeMap(XYShape.Analysis.squareXYShape) |> mean;
      XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares);
    };
  });

View File

@ -0,0 +1,151 @@
open DistTypes;
// A shape plus its metadata: domain, unit, cached integral, and the source
// guesstimator string that produced it.
type t = DistTypes.distPlus;
let shapeIntegral = shape =>
Shape.T.Integral.get(~cache=None, shape);
// Build a distPlus, eagerly computing and caching the shape's integral.
let make =
(
~shape,
~guesstimatorString,
~domain=Complete,
~unit=UnspecifiedDistribution,
(),
)
: t => {
let integral = shapeIntegral(shape);
{shape, domain, integralCache: integral, unit, guesstimatorString};
};
// Record update: each field falls back to the existing value when omitted.
// NOTE: ~shape alone does NOT refresh integralCache — use updateShape for that.
let update =
(
~shape=?,
~integralCache=?,
~domain=?,
~unit=?,
~guesstimatorString=?,
t: t,
) => {
shape: E.O.default(t.shape, shape),
integralCache: E.O.default(t.integralCache, integralCache),
domain: E.O.default(t.domain, domain),
unit: E.O.default(t.unit, unit),
guesstimatorString: E.O.default(t.guesstimatorString, guesstimatorString),
};
// Replace the shape AND recompute its integral cache, keeping them in sync.
let updateShape = (shape, t) => {
let integralCache = shapeIntegral(shape);
update(~shape, ~integralCache, t);
};
let domainIncludedProbabilityMass = (t: t) =>
Domain.includedProbabilityMass(t.domain);
// Scale a probability value by the fraction of mass the domain includes.
let domainIncludedProbabilityMassAdjustment = (t: t, f) =>
f *. Domain.includedProbabilityMass(t.domain);
let toShape = ({shape, _}: t) => shape;
let shapeFn = (fn, {shape}: t) => fn(shape);
// Dist interface implementation for distPlus; mostly delegates to Shape.T,
// applying domain-mass adjustments to probability outputs.
module T =
Distributions.Dist({
type t = DistTypes.distPlus;
type integral = DistTypes.distPlus;
let toShape = toShape;
let toContinuous = shapeFn(Shape.T.toContinuous);
let toDiscrete = shapeFn(Shape.T.toDiscrete);
let normalize = (t: t): t => {
let normalizedShape = t |> toShape |> Shape.T.normalize;
t |> updateShape(normalizedShape);
// TODO: also adjust for domainIncludedProbabilityMass here.
};
let truncate = (leftCutoff, rightCutoff, t: t): t => {
let truncatedShape =
t
|> toShape
|> Shape.T.truncate(leftCutoff, rightCutoff);
t |> updateShape(truncatedShape);
};
// Normalized continuous component, with ys scaled down by the domain's
// included probability mass.
let normalizedToContinuous = (t: t) => {
t
|> toShape
|> Shape.T.normalizedToContinuous
|> E.O.fmap(
Continuous.T.mapY(
domainIncludedProbabilityMassAdjustment(t),
),
);
};
// Same as above, for the discrete component.
let normalizedToDiscrete = (t: t) => {
t
|> toShape
|> Shape.T.normalizedToDiscrete
|> E.O.fmap(
Discrete.T.mapY(
domainIncludedProbabilityMassAdjustment(t),
),
);
};
let xToY = (f, t: t) =>
t
|> toShape
|> Shape.T.xToY(f)
|> MixedPoint.fmap(domainIncludedProbabilityMassAdjustment(t));
let minX = shapeFn(Shape.T.minX);
let maxX = shapeFn(Shape.T.maxX);
let toDiscreteProbabilityMassFraction =
shapeFn(Shape.T.toDiscreteProbabilityMassFraction);
// This bit is kind of awkward, could probably use rethinking.
// The "integral" of a distPlus is the same distPlus with its shape swapped
// for the cached integral; ~cache is ignored.
let integral = (~cache, t: t) =>
updateShape(Continuous(t.integralCache), t);
let downsample = (~cache=None, i, t): t =>
updateShape(t |> toShape |> Shape.T.downsample(i), t);
// todo: adjust for limit, maybe?
let mapY =
(
~knownIntegralSumFn=previousIntegralSum => None,
fn,
{shape, _} as t: t,
)
: t =>
Shape.T.mapY(~knownIntegralSumFn, fn, shape)
|> updateShape(_, t);
// get the total of everything
let integralEndY = (~cache as _, t: t) => {
Shape.T.Integral.sum(
~cache=Some(t.integralCache),
toShape(t),
);
};
// TODO: Fix this below, obviously. Adjust for limits
let integralXtoY = (~cache as _, f, t: t) => {
Shape.T.Integral.xToY(
~cache=Some(t.integralCache),
f,
toShape(t),
)
|> domainIncludedProbabilityMassAdjustment(t);
};
// TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
let integralYtoX = (~cache as _, f, t: t) => {
Shape.T.Integral.yToX(~cache=None, f, toShape(t));
};
let mean = (t: t) => {
Shape.T.mean(t.shape);
};
let variance = (t: t) => Shape.T.variance(t.shape);
});

View File

@ -0,0 +1,28 @@
open DistTypes;
// Time-aware queries over a distPlus whose x axis is a time vector.
type t = DistTypes.distPlus;
let unitToJson = ({unit}: t) => unit |> DistTypes.DistributionUnit.toJson;
// The time vector, if this distribution is over time; None otherwise.
let timeVector = ({unit}: t) =>
switch (unit) {
| TimeDistribution(timeVector) => Some(timeVector)
| UnspecifiedDistribution => None
};
// Convert a time point into the distribution's x coordinate.
// Returns None when the distribution has no time vector.
let timeInVectorToX = (f: TimeTypes.timeInVector, t: t) => {
let timeVector = t |> timeVector;
timeVector |> E.O.fmap(TimeTypes.RelativeTimePoint.toXValue(_, f));
};
// Evaluate the distribution at a time point (pdf/mass lookup).
let xToY = (f: TimeTypes.timeInVector, t: t) => {
timeInVectorToX(f, t) |> E.O.fmap(DistPlus.T.xToY(_, t));
};
module Integral = {
include DistPlus.T.Integral;
// CDF at a time point; None when the distribution has no time vector.
let xToY = (f: TimeTypes.timeInVector, t: t) => {
timeInVectorToX(f, t)
|> E.O.fmap(x => DistPlus.T.Integral.xToY(~cache=None, x, t));
};
};

View File

@ -28,7 +28,6 @@ type discreteShape = {
type mixedShape = {
continuous: continuousShape,
discrete: discreteShape,
// discreteProbabilityMassFraction: float,
};
type shapeMonad('a, 'b, 'c) =

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,307 @@
open Distributions;
// A mixed distribution: one continuous and one discrete component.
type t = DistTypes.mixedShape;

// Construct a mixed shape from its two components.
let make = (~continuous, ~discrete): t => {continuous, discrete};

// Total number of points across both component shapes.
let totalLength = (t: t): int =>
  XYShape.T.length(Continuous.getShape(t.continuous))
  + XYShape.T.length(Discrete.getShape(t.discrete));

// Scale the ys of both components by the same factor.
let scaleBy = (~scale=1.0, {discrete, continuous}: t): t =>
  make(
    ~discrete=Discrete.scaleBy(~scale, discrete),
    ~continuous=Continuous.scaleBy(~scale, continuous),
  );

let toContinuous = ({continuous}: t) => Some(continuous);
let toDiscrete = ({discrete}: t) => Some(discrete);

// Combine two mixed shapes pointwise: discrete with discrete, continuous
// with continuous, then reassemble.
let combinePointwise = (~knownIntegralSumsFn, fn, t1: t, t2: t) => {
  let pair = [|t1, t2|];
  let reducedDiscrete =
    pair
    |> E.A.fmap(toDiscrete)
    |> E.A.O.concatSomes
    |> Discrete.reduce(~knownIntegralSumsFn, fn);
  let reducedContinuous =
    pair
    |> E.A.fmap(toContinuous)
    |> E.A.O.concatSomes
    |> Continuous.reduce(~knownIntegralSumsFn, fn);
  make(~discrete=reducedDiscrete, ~continuous=reducedContinuous);
};
// Dist interface implementation for mixed shapes: delegates to the discrete
// and continuous components and recombines, weighting by each side's mass.
module T =
  Dist({
    type t = DistTypes.mixedShape;
    type integral = DistTypes.continuousShape;
    let minX = ({continuous, discrete}: t) => {
      min(Continuous.T.minX(continuous), Discrete.T.minX(discrete));
    };
    let maxX = ({continuous, discrete}: t) =>
      max(Continuous.T.maxX(continuous), Discrete.T.maxX(discrete));
    let toShape = (t: t): DistTypes.shape => Mixed(t);
    let toContinuous = toContinuous;
    let toDiscrete = toDiscrete;
    // Truncate both components independently to [leftCutoff, rightCutoff].
    let truncate =
        (
          leftCutoff: option(float),
          rightCutoff: option(float),
          {discrete, continuous}: t,
        ) => {
      let truncatedContinuous =
        Continuous.T.truncate(leftCutoff, rightCutoff, continuous);
      let truncatedDiscrete =
        Discrete.T.truncate(leftCutoff, rightCutoff, discrete);
      make(~discrete=truncatedDiscrete, ~continuous=truncatedContinuous);
    };
    // Normalize so the combined mass is 1, with each component keeping its
    // relative share of the total.
    let normalize = (t: t): t => {
      let continuousIntegralSum =
        Continuous.T.Integral.sum(~cache=None, t.continuous);
      let discreteIntegralSum =
        Discrete.T.Integral.sum(~cache=None, t.discrete);
      let totalIntegralSum = continuousIntegralSum +. discreteIntegralSum;
      let newContinuousSum = continuousIntegralSum /. totalIntegralSum;
      let newDiscreteSum = discreteIntegralSum /. totalIntegralSum;
      let normalizedContinuous =
        t.continuous
        |> Continuous.scaleBy(~scale=1. /. newContinuousSum)
        |> Continuous.updateKnownIntegralSum(Some(newContinuousSum));
      let normalizedDiscrete =
        t.discrete
        |> Discrete.scaleBy(~scale=1. /. newDiscreteSum)
        |> Discrete.updateKnownIntegralSum(Some(newDiscreteSum));
      make(~continuous=normalizedContinuous, ~discrete=normalizedDiscrete);
    };
    let xToY = (x, t: t) => {
      // This evaluates the mixedShape at x, interpolating if necessary.
      // Note that we normalize entire mixedShape first.
      let {continuous, discrete}: t = normalize(t);
      let c = Continuous.T.xToY(x, continuous);
      let d = Discrete.T.xToY(x, discrete);
      DistTypes.MixedPoint.add(c, d); // "add" here just combines the two values into a single MixedPoint.
    };
    let toDiscreteProbabilityMassFraction = ({discrete, continuous}: t) => {
      let discreteIntegralSum =
        Discrete.T.Integral.sum(~cache=None, discrete);
      let continuousIntegralSum =
        Continuous.T.Integral.sum(~cache=None, continuous);
      let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
      discreteIntegralSum /. totalIntegralSum;
    };
    let downsample = (~cache=None, count, {discrete, continuous}: t): t => {
      // We will need to distribute the new xs fairly between the discrete and continuous shapes.
      // The easiest way to do this is to simply go by the previous probability masses.
      // The cache really isn't helpful here, because we would need two separate caches
      let discreteIntegralSum =
        Discrete.T.Integral.sum(~cache=None, discrete);
      let continuousIntegralSum =
        Continuous.T.Integral.sum(~cache=None, continuous);
      let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
      // TODO: figure out what to do when the totalIntegralSum is zero.
      let downsampledDiscrete =
        Discrete.T.downsample(
          int_of_float(
            float_of_int(count) *. (discreteIntegralSum /. totalIntegralSum),
          ),
          discrete,
        );
      let downsampledContinuous =
        Continuous.T.downsample(
          int_of_float(
            float_of_int(count) *. (continuousIntegralSum /. totalIntegralSum),
          ),
          continuous,
        );
      {discrete: downsampledDiscrete, continuous: downsampledContinuous};
    };
    let normalizedToContinuous = (t: t) => Some(normalize(t).continuous);
    let normalizedToDiscrete = (t: t) =>
      Some(normalize(t).discrete);
    let integral = (~cache, {continuous, discrete}: t) => {
      switch (cache) {
      | Some(cache) => cache
      | None =>
        // note: if the underlying shapes aren't normalized, then these integrals won't be either!
        let continuousIntegral =
          Continuous.T.Integral.get(~cache=None, continuous);
        let discreteIntegral = Discrete.T.Integral.get(~cache=None, discrete);
        Continuous.make(
          `Linear,
          XYShape.PointwiseCombination.combineLinear(
            ~fn=(+.),
            Continuous.getShape(continuousIntegral),
            Continuous.getShape(discreteIntegral),
          ),
          None,
        );
      };
    };
    let integralEndY = (~cache, t: t) => {
      integral(~cache, t) |> Continuous.lastY;
    };
    let integralXtoY = (~cache, f, t) => {
      t |> integral(~cache) |> Continuous.getShape |> XYShape.XtoY.linear(f);
    };
    let integralYtoX = (~cache, f, t) => {
      t |> integral(~cache) |> Continuous.getShape |> XYShape.YtoX.linear(f);
    };
    // This pipes all ys (continuous and discrete) through fn.
    // If mapY is a linear operation, we might be able to update the knownIntegralSums as well;
    // if not, they'll be set to None.
    let mapY =
        (
          ~knownIntegralSumFn=previousIntegralSum => None,
          fn,
          {discrete, continuous}: t,
        )
        : t => {
      let u = E.O.bind(_, knownIntegralSumFn);
      let yMappedDiscrete =
        discrete
        |> Discrete.T.mapY(fn)
        |> Discrete.updateKnownIntegralSum(u(discrete.knownIntegralSum));
      let yMappedContinuous =
        continuous
        |> Continuous.T.mapY(fn)
        |> Continuous.updateKnownIntegralSum(u(continuous.knownIntegralSum));
      // BUG FIX: the continuous component was previously re-mapped without
      // the updated knownIntegralSum (yMappedContinuous was computed but
      // never used). Use the already-mapped value for both components.
      {
        discrete: yMappedDiscrete,
        continuous: yMappedContinuous,
      };
    };
    // Combined mean: the mass-weighted average of the component means.
    let mean = ({discrete, continuous}: t): float => {
      let discreteMean = Discrete.T.mean(discrete);
      let continuousMean = Continuous.T.mean(continuous);
      // the combined mean is the weighted sum of the two:
      let discreteIntegralSum =
        Discrete.T.Integral.sum(~cache=None, discrete);
      let continuousIntegralSum =
        Continuous.T.Integral.sum(~cache=None, continuous);
      let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
      (
        discreteMean
        *. discreteIntegralSum
        +. continuousMean
        *. continuousIntegralSum
      )
      /. totalIntegralSum;
    };
    let variance = ({discrete, continuous} as t: t): float => {
      // the combined mean is the weighted sum of the two:
      let discreteIntegralSum =
        Discrete.T.Integral.sum(~cache=None, discrete);
      let continuousIntegralSum =
        Continuous.T.Integral.sum(~cache=None, continuous);
      let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum;
      let getMeanOfSquares = ({discrete, continuous}: t) => {
        let discreteMean =
          discrete
          |> Discrete.shapeMap(XYShape.Analysis.squareXYShape)
          |> Discrete.T.mean;
        let continuousMean =
          continuous |> XYShape.Analysis.getMeanOfSquaresContinuousShape;
        (
          discreteMean
          *. discreteIntegralSum
          +. continuousMean
          *. continuousIntegralSum
        )
        /. totalIntegralSum;
      };
      // Purely discrete / purely continuous shapes short-circuit to the
      // component's own variance.
      switch (discreteIntegralSum /. totalIntegralSum) {
      | 1.0 => Discrete.T.variance(discrete)
      | 0.0 => Continuous.T.variance(continuous)
      | _ =>
        XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
      };
    };
  });
// Algebraic combination of two mixed shapes: all four component pairings are
// combined and re-aggregated into one mixed shape.
let combineAlgebraically =
(~downsample=false, op: ExpressionTypes.algebraicOperation, t1: t, t2: t)
: t => {
// Discrete convolution can cause a huge increase in the number of samples,
// so we'll first downsample.
// An alternative (to be explored in the future) may be to first perform the full convolution and then to downsample the result;
// to use non-uniform fast Fourier transforms (for addition only), add web workers or gpu.js, etc. ...
let downsampleIfTooLarge = (t: t) => {
let sqtl = sqrt(float_of_int(totalLength(t)));
sqtl > 10. && downsample ? T.downsample(int_of_float(sqtl), t) : t;
};
let t1d = downsampleIfTooLarge(t1);
let t2d = downsampleIfTooLarge(t2);
// continuous (*) continuous => continuous, but also
// discrete (*) continuous => continuous (and vice versa). We have to take care of all combos and then combine them:
let ccConvResult =
Continuous.combineAlgebraically(
~downsample=false,
op,
t1d.continuous,
t2d.continuous,
);
let dcConvResult =
Continuous.combineAlgebraicallyWithDiscrete(
~downsample=false,
op,
t2d.continuous,
t1d.discrete,
);
let cdConvResult =
Continuous.combineAlgebraicallyWithDiscrete(
~downsample=false,
op,
t1d.continuous,
t2d.discrete,
);
// All three continuous results are summed pointwise into one shape.
let continuousConvResult =
Continuous.reduce((+.), [|ccConvResult, dcConvResult, cdConvResult|]);
// ... finally, discrete (*) discrete => discrete, obviously:
let discreteConvResult =
Discrete.combineAlgebraically(op, t1d.discrete, t2d.discrete);
{discrete: discreteConvResult, continuous: continuousConvResult};
};

View File

@ -9,25 +9,25 @@ type assumptions = {
};
let buildSimple = (~continuous: option(DistTypes.continuousShape), ~discrete: option(DistTypes.discreteShape)): option(DistTypes.shape) => {
let continuous = continuous |> E.O.default(Distributions.Continuous.make(`Linear, {xs: [||], ys: [||]}, Some(0.0)));
let discrete = discrete |> E.O.default(Distributions.Discrete.make({xs: [||], ys: [||]}, Some(0.0)));
let continuous = continuous |> E.O.default(Continuous.make(`Linear, {xs: [||], ys: [||]}, Some(0.0)));
let discrete = discrete |> E.O.default(Discrete.make({xs: [||], ys: [||]}, Some(0.0)));
let cLength =
continuous
|> Distributions.Continuous.getShape
|> Continuous.getShape
|> XYShape.T.xs
|> E.A.length;
let dLength = discrete |> Distributions.Discrete.getShape |> XYShape.T.xs |> E.A.length;
let dLength = discrete |> Discrete.getShape |> XYShape.T.xs |> E.A.length;
switch (cLength, dLength) {
| (0 | 1, 0) => None
| (0 | 1, _) => Some(Discrete(discrete))
| (_, 0) => Some(Continuous(continuous))
| (_, _) =>
let discreteProbabilityMassFraction =
Distributions.Discrete.T.Integral.sum(~cache=None, discrete);
let discrete = Distributions.Discrete.T.normalize(discrete);
let continuous = Distributions.Continuous.T.normalize(continuous);
Discrete.T.Integral.sum(~cache=None, discrete);
let discrete = Discrete.T.normalize(discrete);
let continuous = Continuous.T.normalize(continuous);
let mixedDist =
Distributions.Mixed.make(
Mixed.make(
~continuous,
~discrete
);

View File

@ -0,0 +1,209 @@
open Distributions;
// A shape is one of: Mixed, Discrete, or Continuous.
type t = DistTypes.shape;

// Dispatch to the matching function of the triple for this variant.
let mapToAll = ((mixedFn, discreteFn, continuousFn), t: t) =>
  switch (t) {
  | Mixed(m) => mixedFn(m)
  | Discrete(d) => discreteFn(d)
  | Continuous(c) => continuousFn(c)
  };

// Like mapToAll, but re-wrap the result in the same constructor.
let fmap = ((mixedFn, discreteFn, continuousFn), t: t): t =>
  switch (t) {
  | Mixed(m) => Mixed(mixedFn(m))
  | Discrete(d) => Discrete(discreteFn(d))
  | Continuous(c) => Continuous(continuousFn(c))
  };

// Promote any shape to a mixed shape, padding the missing side with empty.
let toMixed = (t: t) =>
  switch (t) {
  | Mixed(m) => m
  | Discrete(d) => Mixed.make(~discrete=d, ~continuous=Continuous.empty)
  | Continuous(c) => Mixed.make(~discrete=Discrete.empty, ~continuous=c)
  };
// Algebraic combination of two shapes. Like-kinded shapes stay in kind;
// any other pairing is first promoted to Mixed via toMixed.
let combineAlgebraically =
(op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t => {
switch (t1, t2) {
| (Continuous(m1), Continuous(m2)) =>
DistTypes.Continuous(
Continuous.combineAlgebraically(~downsample=true, op, m1, m2),
)
| (Discrete(m1), Discrete(m2)) =>
DistTypes.Discrete(Discrete.combineAlgebraically(op, m1, m2))
| (m1, m2) =>
DistTypes.Mixed(
Mixed.combineAlgebraically(
~downsample=true,
op,
toMixed(m1),
toMixed(m2),
),
)
};
};
// Pointwise combination of two shapes. Like-kinded shapes stay in kind;
// any other pairing is first promoted to Mixed via toMixed.
let combinePointwise =
(~knownIntegralSumsFn=(_, _) => None, fn, t1: t, t2: t) =>
switch (t1, t2) {
| (Continuous(m1), Continuous(m2)) =>
DistTypes.Continuous(
Continuous.combinePointwise(~knownIntegralSumsFn, fn, m1, m2),
)
| (Discrete(m1), Discrete(m2)) =>
DistTypes.Discrete(
Discrete.combinePointwise(~knownIntegralSumsFn, fn, m1, m2),
)
| (m1, m2) =>
DistTypes.Mixed(
Mixed.combinePointwise(
~knownIntegralSumsFn,
fn,
toMixed(m1),
toMixed(m2),
),
)
};
// TODO: implement these functions
// All three are unimplemented stubs that ignore their inputs and return 0.0.
let pdf = (f: float, t: t): float => {
0.0;
};
let inv = (f: float, t: t): float => {
0.0;
};
let sample = (t: t): float => {
0.0;
};
// Dist interface implementation for shapes: dispatches each operation to the
// matching Mixed/Discrete/Continuous implementation.
// CLEANUP: removed three dead bindings that were silently shadowed by the
// later definitions in this module (`toContinuous = t => None`,
// `toDiscrete = t => None`, and a `toDiscreteProbabilityMassFraction`
// stub returning 0.0); the surviving definitions are the ones below.
module T =
  Dist({
    type t = DistTypes.shape;
    type integral = DistTypes.continuousShape;
    let xToY = (f: float) =>
      mapToAll((
        Mixed.T.xToY(f),
        Discrete.T.xToY(f),
        Continuous.T.xToY(f),
      ));
    let toShape = (t: t) => t;
    let downsample = (~cache=None, i, t) =>
      fmap(
        (
          Mixed.T.downsample(i),
          Discrete.T.downsample(i),
          Continuous.T.downsample(i),
        ),
        t,
      );
    let truncate = (leftCutoff, rightCutoff, t): t =>
      fmap(
        (
          Mixed.T.truncate(leftCutoff, rightCutoff),
          Discrete.T.truncate(leftCutoff, rightCutoff),
          Continuous.T.truncate(leftCutoff, rightCutoff),
        ),
        t,
      );
    let normalize =
      fmap((Mixed.T.normalize, Discrete.T.normalize, Continuous.T.normalize));
    let toContinuous =
      mapToAll((
        Mixed.T.toContinuous,
        Discrete.T.toContinuous,
        Continuous.T.toContinuous,
      ));
    let toDiscrete =
      mapToAll((
        Mixed.T.toDiscrete,
        Discrete.T.toDiscrete,
        Continuous.T.toDiscrete,
      ));
    let toDiscreteProbabilityMassFraction =
      mapToAll((
        Mixed.T.toDiscreteProbabilityMassFraction,
        Discrete.T.toDiscreteProbabilityMassFraction,
        Continuous.T.toDiscreteProbabilityMassFraction,
      ));
    let normalizedToDiscrete =
      mapToAll((
        Mixed.T.normalizedToDiscrete,
        Discrete.T.normalizedToDiscrete,
        Continuous.T.normalizedToDiscrete,
      ));
    let normalizedToContinuous =
      mapToAll((
        Mixed.T.normalizedToContinuous,
        Discrete.T.normalizedToContinuous,
        Continuous.T.normalizedToContinuous,
      ));
    let minX = mapToAll((Mixed.T.minX, Discrete.T.minX, Continuous.T.minX));
    // NOTE(review): ~cache is accepted but not forwarded here (each branch
    // passes ~cache=None) — confirm whether the cache should propagate.
    let integral = (~cache) =>
      mapToAll((
        Mixed.T.Integral.get(~cache=None),
        Discrete.T.Integral.get(~cache=None),
        Continuous.T.Integral.get(~cache=None),
      ));
    // NOTE(review): only the Discrete branch forwards ~cache; the other two
    // ignore it — confirm whether this asymmetry is intended.
    let integralEndY = (~cache) =>
      mapToAll((
        Mixed.T.Integral.sum(~cache=None),
        Discrete.T.Integral.sum(~cache),
        Continuous.T.Integral.sum(~cache=None),
      ));
    let integralXtoY = (~cache, f) => {
      mapToAll((
        Mixed.T.Integral.xToY(~cache, f),
        Discrete.T.Integral.xToY(~cache, f),
        Continuous.T.Integral.xToY(~cache, f),
      ));
    };
    let integralYtoX = (~cache, f) => {
      mapToAll((
        Mixed.T.Integral.yToX(~cache, f),
        Discrete.T.Integral.yToX(~cache, f),
        Continuous.T.Integral.yToX(~cache, f),
      ));
    };
    let maxX = mapToAll((Mixed.T.maxX, Discrete.T.maxX, Continuous.T.maxX));
    let mapY = (~knownIntegralSumFn=previousIntegralSum => None, fn) =>
      fmap((
        Mixed.T.mapY(~knownIntegralSumFn, fn),
        Discrete.T.mapY(~knownIntegralSumFn, fn),
        Continuous.T.mapY(~knownIntegralSumFn, fn),
      ));
    let mean = (t: t): float =>
      switch (t) {
      | Mixed(m) => Mixed.T.mean(m)
      | Discrete(m) => Discrete.T.mean(m)
      | Continuous(m) => Continuous.T.mean(m)
      };
    let variance = (t: t): float =>
      switch (t) {
      | Mixed(m) => Mixed.T.variance(m)
      | Discrete(m) => Discrete.T.variance(m)
      | Continuous(m) => Continuous.T.variance(m)
      };
  });
// Dispatch a dist-to-float operation to the matching implementation.
// Note: `Pdf, `Inv and `Sample currently hit the unimplemented stubs above.
let operate = (distToFloatOp: ExpressionTypes.distToFloatOperation, s) =>
  switch (distToFloatOp) {
  | `Mean => T.mean(s)
  | `Sample => sample(s)
  | `Pdf(x) => pdf(x, s)
  | `Inv(x) => inv(x, s)
  };

View File

@ -3,13 +3,13 @@ open ExpressionTypes.ExpressionTree;
let toShape = (sampleCount: int, node: node) => {
let renderResult =
`Render(`Normalize(node))
|> ExpressionTreeEvaluator.toLeaf({sampleCount: sampleCount});
|> ExpressionTreeEvaluator.toLeaf({sampleCount: sampleCount, evaluateNode: ExpressionTreeEvaluator.toLeaf});
switch (renderResult) {
| Ok(`RenderedDist(rs)) =>
// todo: Why is this here? It converts a mixed shape to a mixed shape.
let continuous = Distributions.Shape.T.toContinuous(rs);
let discrete = Distributions.Shape.T.toDiscrete(rs);
let continuous = Shape.T.toContinuous(rs);
let discrete = Shape.T.toDiscrete(rs);
let shape = MixedShapeBuilder.buildSimple(~continuous, ~discrete);
shape |> E.O.toExt("Could not build final shape.");
| Ok(_) => E.O.toExn("Rendering failed.", None)

View File

@ -22,13 +22,14 @@ module AlgebraicCombination = {
| _ => Ok(`AlgebraicCombination((operation, t1, t2)))
};
let combineAsShapes = (toLeaf, renderParams, algebraicOp, t1, t2) => {
let renderShape = r => toLeaf(renderParams, `Render(r));
let combineAsShapes =
(evaluationParams: evaluationParams, algebraicOp, t1, t2) => {
let renderShape = render(evaluationParams);
switch (renderShape(t1), renderShape(t2)) {
| (Ok(`RenderedDist(s1)), Ok(`RenderedDist(s2))) =>
Ok(
`RenderedDist(
Distributions.Shape.combineAlgebraically(algebraicOp, s1, s2),
Shape.combineAlgebraically(algebraicOp, s1, s2),
),
)
| (Error(e1), _) => Error(e1)
@ -39,8 +40,7 @@ module AlgebraicCombination = {
let operationToLeaf =
(
toLeaf,
renderParams: renderParams,
evaluationParams: evaluationParams,
algebraicOp: ExpressionTypes.algebraicOperation,
t1: t,
t2: t,
@ -52,22 +52,23 @@ module AlgebraicCombination = {
_,
fun
| `SymbolicDist(d) as t => Ok(t)
| _ => combineAsShapes(toLeaf, renderParams, algebraicOp, t1, t2),
| _ => combineAsShapes(evaluationParams, algebraicOp, t1, t2),
);
};
module VerticalScaling = {
let operationToLeaf = (toLeaf, renderParams, scaleOp, t, scaleBy) => {
let operationToLeaf =
(evaluationParams: evaluationParams, scaleOp, t, scaleBy) => {
// scaleBy has to be a single float, otherwise we'll return an error.
let fn = Operation.Scale.toFn(scaleOp);
let knownIntegralSumFn = Operation.Scale.toKnownIntegralSumFn(scaleOp);
let renderedShape = toLeaf(renderParams, `Render(t));
let renderedShape = render(evaluationParams, t);
switch (renderedShape, scaleBy) {
| (Ok(`RenderedDist(rs)), `SymbolicDist(`Float(sm))) =>
Ok(
`RenderedDist(
Distributions.Shape.T.mapY(
Shape.T.mapY(
~knownIntegralSumFn=knownIntegralSumFn(sm),
fn(sm),
rs,
@ -81,13 +82,12 @@ module VerticalScaling = {
};
module PointwiseCombination = {
let pointwiseAdd = (toLeaf, renderParams, t1, t2) => {
let renderShape = r => toLeaf(renderParams, `Render(r));
switch (renderShape(t1), renderShape(t2)) {
let pointwiseAdd = (evaluationParams: evaluationParams, t1, t2) => {
switch (render(evaluationParams, t1), render(evaluationParams, t2)) {
| (Ok(`RenderedDist(rs1)), Ok(`RenderedDist(rs2))) =>
Ok(
`RenderedDist(
Distributions.Shape.combinePointwise(
Shape.combinePointwise(
~knownIntegralSumsFn=(a, b) => Some(a +. b),
(+.),
rs1,
@ -101,7 +101,7 @@ module PointwiseCombination = {
};
};
let pointwiseMultiply = (toLeaf, renderParams, t1, t2) => {
let pointwiseMultiply = (evaluationParams: evaluationParams, t1, t2) => {
// TODO: construct a function that we can easily sample from, to construct
// a RenderedDist. Use the xMin and xMax of the rendered shapes to tell the sampling function where to look.
Error(
@ -109,10 +109,11 @@ module PointwiseCombination = {
);
};
let operationToLeaf = (toLeaf, renderParams, pointwiseOp, t1, t2) => {
let operationToLeaf =
(evaluationParams: evaluationParams, pointwiseOp, t1, t2) => {
switch (pointwiseOp) {
| `Add => pointwiseAdd(toLeaf, renderParams, t1, t2)
| `Multiply => pointwiseMultiply(toLeaf, renderParams, t1, t2)
| `Add => pointwiseAdd(evaluationParams, t1, t2)
| `Multiply => pointwiseMultiply(evaluationParams, t1, t2)
};
};
};
@ -133,24 +134,23 @@ module Truncate = {
};
};
let truncateAsShape = (toLeaf, renderParams, leftCutoff, rightCutoff, t) => {
let truncateAsShape =
(evaluationParams: evaluationParams, leftCutoff, rightCutoff, t) => {
// TODO: use named args in renderToShape; if we're lucky we can at least get the tail
// of a distribution we otherwise wouldn't get at all
let renderedShape = toLeaf(renderParams, `Render(t));
switch (renderedShape) {
switch (render(evaluationParams, t)) {
| Ok(`RenderedDist(rs)) =>
let truncatedShape =
rs |> Distributions.Shape.T.truncate(leftCutoff, rightCutoff);
rs |> Shape.T.truncate(leftCutoff, rightCutoff);
Ok(`RenderedDist(truncatedShape));
| Error(e1) => Error(e1)
| Error(e) => Error(e)
| _ => Error("Could not truncate distribution.")
};
};
let operationToLeaf =
(
toLeaf,
renderParams,
evaluationParams,
leftCutoff: option(float),
rightCutoff: option(float),
t: node,
@ -163,62 +163,54 @@ module Truncate = {
| `Solution(t) => Ok(t)
| `Error(e) => Error(e)
| `NoSolution =>
truncateAsShape(toLeaf, renderParams, leftCutoff, rightCutoff, t)
truncateAsShape(evaluationParams, leftCutoff, rightCutoff, t)
);
};
};
module Normalize = {
let rec operationToLeaf =
(toLeaf, renderParams, t: node): result(node, string) => {
let rec operationToLeaf = (evaluationParams, t: node): result(node, string) => {
switch (t) {
| `RenderedDist(s) =>
Ok(`RenderedDist(Distributions.Shape.T.normalize(s)))
Ok(`RenderedDist(Shape.T.normalize(s)))
| `SymbolicDist(_) => Ok(t)
| _ =>
t
|> toLeaf(renderParams)
|> E.R.bind(_, operationToLeaf(toLeaf, renderParams))
| _ => evaluateAndRetry(evaluationParams, operationToLeaf, t)
};
};
};
module FloatFromDist = {
let symbolicToLeaf = (distToFloatOp: distToFloatOperation, s) => {
SymbolicDist.T.operate(distToFloatOp, s)
|> E.R.bind(_, v => Ok(`SymbolicDist(`Float(v))));
};
let renderedToLeaf =
(distToFloatOp: distToFloatOperation, rs: DistTypes.shape)
: result(node, string) => {
Distributions.Shape.operate(distToFloatOp, rs)
|> (v => Ok(`SymbolicDist(`Float(v))));
};
let rec operationToLeaf =
(toLeaf, renderParams, distToFloatOp: distToFloatOperation, t: node)
(evaluationParams, distToFloatOp: distToFloatOperation, t: node)
: result(node, string) => {
switch (t) {
| `SymbolicDist(s) => symbolicToLeaf(distToFloatOp, s)
| `RenderedDist(rs) => renderedToLeaf(distToFloatOp, rs)
| `SymbolicDist(s) =>
SymbolicDist.T.operate(distToFloatOp, s)
|> E.R.bind(_, v => Ok(`SymbolicDist(`Float(v))))
| `RenderedDist(rs) =>
Shape.operate(distToFloatOp, rs)
|> (v => Ok(`SymbolicDist(`Float(v))))
| _ =>
t
|> toLeaf(renderParams)
|> E.R.bind(_, operationToLeaf(toLeaf, renderParams, distToFloatOp))
|> evaluateAndRetry(evaluationParams, r =>
operationToLeaf(r, distToFloatOp)
)
};
};
};
module Render = {
let rec operationToLeaf =
(toLeaf, renderParams, t: node): result(t, string) => {
(evaluationParams: evaluationParams, t: node): result(t, string) => {
switch (t) {
| `SymbolicDist(d) =>
Ok(`RenderedDist(SymbolicDist.T.toShape(renderParams.sampleCount, d)))
Ok(
`RenderedDist(
SymbolicDist.T.toShape(evaluationParams.sampleCount, d),
),
)
| `RenderedDist(_) as t => Ok(t) // already a rendered shape, we're done here
| _ =>
t
|> toLeaf(renderParams)
|> E.R.bind(_, operationToLeaf(toLeaf, renderParams))
| _ => evaluateAndRetry(evaluationParams, operationToLeaf, t)
};
};
};
@ -229,35 +221,38 @@ module Render = {
but most often it will produce a RenderedDist.
This function is used mainly to turn a parse tree into a single RenderedDist
that can then be displayed to the user. */
let rec toLeaf = (renderParams, node: t): result(t, string) => {
let toLeaf =
(
evaluationParams: ExpressionTypes.ExpressionTree.evaluationParams,
node: t,
)
: result(t, string) => {
switch (node) {
// Leaf nodes just stay leaf nodes
| `SymbolicDist(_)
| `RenderedDist(_) => Ok(node)
// Operations need to be turned into leaves
// Operations nevaluationParamsd to be turned into leaves
| `AlgebraicCombination(algebraicOp, t1, t2) =>
AlgebraicCombination.operationToLeaf(
toLeaf,
renderParams,
evaluationParams,
algebraicOp,
t1,
t2,
)
| `PointwiseCombination(pointwiseOp, t1, t2) =>
PointwiseCombination.operationToLeaf(
toLeaf,
renderParams,
evaluationParams,
pointwiseOp,
t1,
t2,
)
| `VerticalScaling(scaleOp, t, scaleBy) =>
VerticalScaling.operationToLeaf(toLeaf, renderParams, scaleOp, t, scaleBy)
VerticalScaling.operationToLeaf(evaluationParams, scaleOp, t, scaleBy)
| `Truncate(leftCutoff, rightCutoff, t) =>
Truncate.operationToLeaf(toLeaf, renderParams, leftCutoff, rightCutoff, t)
Truncate.operationToLeaf(evaluationParams, leftCutoff, rightCutoff, t)
| `FloatFromDist(distToFloatOp, t) =>
FloatFromDist.operationToLeaf(toLeaf, renderParams, distToFloatOp, t)
| `Normalize(t) => Normalize.operationToLeaf(toLeaf, renderParams, t)
| `Render(t) => Render.operationToLeaf(toLeaf, renderParams, t)
FloatFromDist.operationToLeaf(evaluationParams, distToFloatOp, t)
| `Normalize(t) => Normalize.operationToLeaf(evaluationParams, t)
| `Render(t) => Render.operationToLeaf(evaluationParams, t)
};
};

View File

@ -5,10 +5,8 @@ type distToFloatOperation = [ | `Pdf(float) | `Inv(float) | `Mean | `Sample];
module ExpressionTree = {
type node = [
// leaf nodes:
| `SymbolicDist(SymbolicTypes.symbolicDist)
| `RenderedDist(DistTypes.shape)
// operations:
| `AlgebraicCombination(algebraicOperation, node, node)
| `PointwiseCombination(pointwiseOperation, node, node)
| `VerticalScaling(scaleOperation, node, node)
@ -17,6 +15,20 @@ module ExpressionTree = {
| `Normalize(node)
| `FloatFromDist(distToFloatOperation, node)
];
type evaluationParams = {
sampleCount: int,
evaluateNode: (evaluationParams, node) => Belt.Result.t(node, string),
};
let evaluateNode = (evaluationParams: evaluationParams) =>
evaluationParams.evaluateNode(evaluationParams);
let render = (evaluationParams: evaluationParams, r) =>
evaluateNode(evaluationParams, `Render(r));
let evaluateAndRetry = (evaluationParams, fn, node) =>
node |> evaluationParams.evaluateNode(evaluationParams) |> E.R.bind(_, fn(evaluationParams));
};
type simplificationResult = [

View File

@ -204,32 +204,32 @@ module MathAdtToDistDst = {
};
};
let arrayParser =
(args: array(arg))
: result(ExpressionTypes.ExpressionTree.node, string) => {
let samples =
args
|> E.A.fmap(
fun
| Value(n) => Some(n)
| _ => None,
)
|> E.A.O.concatSomes;
let outputs = Samples.T.fromSamples(samples);
let pdf =
outputs.shape |> E.O.bind(_, Distributions.Shape.T.toContinuous);
let shape =
pdf
|> E.O.fmap(pdf => {
let _pdf = Distributions.Continuous.T.normalize(pdf);
let cdf = Distributions.Continuous.T.integral(~cache=None, _pdf);
SymbolicDist.ContinuousShape.make(_pdf, cdf);
});
switch (shape) {
| Some(s) => Ok(`SymbolicDist(`ContinuousShape(s)))
| None => Error("Rendering did not work")
};
};
// let arrayParser =
// (args: array(arg))
// : result(ExpressionTypes.ExpressionTree.node, string) => {
// let samples =
// args
// |> E.A.fmap(
// fun
// | Value(n) => Some(n)
// | _ => None,
// )
// |> E.A.O.concatSomes;
// let outputs = Samples.T.fromSamples(samples);
// let pdf =
// outputs.shape |> E.O.bind(_, Shape.T.toContinuous);
// let shape =
// pdf
// |> E.O.fmap(pdf => {
// let _pdf = Continuous.T.normalize(pdf);
// let cdf = Continuous.T.integral(~cache=None, _pdf);
// SymbolicDist.ContinuousShape.make(_pdf, cdf);
// });
// switch (shape) {
// | Some(s) => Ok(`SymbolicDist(`ContinuousShape(s)))
// | None => Error("Rendering did not work")
// };
// };
let operationParser =
(
@ -335,9 +335,9 @@ module MathAdtToDistDst = {
let topLevel =
fun
| Array(r) => arrayParser(r)
| Value(_) as r => nodeParser(r)
| Fn(_) as r => nodeParser(r)
| Array(_) => Error("Array not valid as top level")
| Symbol(_) => Error("Symbol not valid as top level")
| Object(_) => Error("Object not valid as top level");

View File

@ -7,21 +7,21 @@ let downsampleIfShould =
let willDownsample =
shouldDownsample
&& RenderTypes.ShapeRenderer.Combined.methodUsed(outputs) == `Sampling;
willDownsample ? dist |> Distributions.DistPlus.T.downsample(recommendedLength) : dist;
willDownsample ? dist |> DistPlus.T.downsample(recommendedLength) : dist;
};
let run =
(inputs: RenderTypes.DistPlusRenderer.inputs)
: RenderTypes.DistPlusRenderer.outputs => {
let toDist = shape =>
Distributions.DistPlus.make(
DistPlus.make(
~shape,
~domain=inputs.distPlusIngredients.domain,
~unit=inputs.distPlusIngredients.unit,
~guesstimatorString=Some(inputs.distPlusIngredients.guesstimatorString),
(),
)
|> Distributions.DistPlus.T.normalize;
|> DistPlus.T.normalize;
let outputs =
ShapeRenderer.run({
samplingInputs: inputs.samplingInputs,

View File

@ -120,7 +120,7 @@ module T = {
|> E.FloatFloatMap.fmap(r => r /. length)
|> E.FloatFloatMap.toArray
|> XYShape.T.fromZippedArray
|> Distributions.Discrete.make(_, None);
|> Discrete.make(_, None);
let pdf =
continuousPart |> E.A.length > 5
@ -150,7 +150,7 @@ module T = {
~outputXYPoints=samplingInputs.outputXYPoints,
formatUnitWidth(usedUnitWidth),
)
|> Distributions.Continuous.make(`Linear, _, None)
|> Continuous.make(`Linear, _, None)
|> (r => Some((r, foo)));
}
: None;

View File

@ -1,20 +1,5 @@
open SymbolicTypes;
module ContinuousShape = {
type t = continuousShape;
let make = (pdf, cdf): t => {pdf, cdf};
let pdf = (x, t: t) =>
Distributions.Continuous.T.xToY(x, t.pdf).continuous;
// TODO: pdf and inv are currently the same, this seems broken.
let inv = (p, t: t) =>
Distributions.Continuous.T.xToY(p, t.pdf).continuous;
// TODO: Fix the sampling, to have it work correctly.
let sample = (t: t) => 3.0;
// TODO: Fix the mean, to have it work correctly.
let mean = (t: t) => Ok(0.0);
let toString = t => {j|CustomContinuousShape|j};
};
module Exponential = {
type t = exponential;
let pdf = (x, t: t) => Jstat.exponential##pdf(x, t.rate);
@ -170,7 +155,6 @@ module T = {
| `Uniform(n) => Uniform.pdf(x, n)
| `Beta(n) => Beta.pdf(x, n)
| `Float(n) => Float.pdf(x, n)
| `ContinuousShape(n) => ContinuousShape.pdf(x, n)
};
let inv = (x, dist) =>
@ -183,7 +167,6 @@ module T = {
| `Uniform(n) => Uniform.inv(x, n)
| `Beta(n) => Beta.inv(x, n)
| `Float(n) => Float.inv(x, n)
| `ContinuousShape(n) => ContinuousShape.inv(x, n)
};
let sample: symbolicDist => float =
@ -196,7 +179,6 @@ module T = {
| `Uniform(n) => Uniform.sample(n)
| `Beta(n) => Beta.sample(n)
| `Float(n) => Float.sample(n)
| `ContinuousShape(n) => ContinuousShape.sample(n);
let toString: symbolicDist => string =
fun
@ -208,7 +190,6 @@ module T = {
| `Uniform(n) => Uniform.toString(n)
| `Beta(n) => Beta.toString(n)
| `Float(n) => Float.toString(n)
| `ContinuousShape(n) => ContinuousShape.toString(n);
let min: symbolicDist => float =
fun
@ -219,7 +200,6 @@ module T = {
| `Lognormal(n) => Lognormal.inv(minCdfValue, n)
| `Uniform({low}) => low
| `Beta(n) => Beta.inv(minCdfValue, n)
| `ContinuousShape(n) => ContinuousShape.inv(minCdfValue, n)
| `Float(n) => n;
let max: symbolicDist => float =
@ -230,7 +210,6 @@ module T = {
| `Normal(n) => Normal.inv(maxCdfValue, n)
| `Lognormal(n) => Lognormal.inv(maxCdfValue, n)
| `Beta(n) => Beta.inv(maxCdfValue, n)
| `ContinuousShape(n) => ContinuousShape.inv(maxCdfValue, n)
| `Uniform({high}) => high
| `Float(n) => n;
@ -242,7 +221,6 @@ module T = {
| `Normal(n) => Normal.mean(n)
| `Lognormal(n) => Lognormal.mean(n)
| `Beta(n) => Beta.mean(n)
| `ContinuousShape(n) => ContinuousShape.mean(n)
| `Uniform(n) => Uniform.mean(n)
| `Float(n) => Float.mean(n);
@ -300,13 +278,13 @@ module T = {
switch (d) {
| `Float(v) =>
Discrete(
Distributions.Discrete.make({xs: [|v|], ys: [|1.0|]}, Some(1.0)),
Discrete.make({xs: [|v|], ys: [|1.0|]}, Some(1.0)),
)
| _ =>
let xs = interpolateXs(~xSelection=`ByWeight, d, sampleCount);
let ys = xs |> E.A.fmap(x => pdf(x, d));
Continuous(
Distributions.Continuous.make(`Linear, {xs, ys}, Some(1.0)),
Continuous.make(`Linear, {xs, ys}, Some(1.0)),
);
};
};

View File

@ -31,11 +31,6 @@ type triangular = {
high: float,
};
type continuousShape = {
pdf: DistTypes.continuousShape,
cdf: DistTypes.continuousShape,
};
type symbolicDist = [
| `Normal(normal)
| `Beta(beta)
@ -44,7 +39,6 @@ type symbolicDist = [
| `Exponential(exponential)
| `Cauchy(cauchy)
| `Triangular(triangular)
| `ContinuousShape(continuousShape)
| `Float(float) // Dirac delta at x. Practically useful only in the context of multimodals.
];

View File

@ -113,7 +113,7 @@ module Model = {
|> RenderTypes.DistPlusRenderer.make(~distPlusIngredients=_, ())
|> DistPlusRenderer.run
|> RenderTypes.DistPlusRenderer.Outputs.distplus
|> E.O.bind(_, Distributions.DistPlusTime.Integral.xToY(Time(dateTime)));
|> E.O.bind(_, DistPlusTime.Integral.xToY(Time(dateTime)));
};
let make =