Merge pull request #232 from quantified-uncertainty/testing-discipline-algebraic-operations
Testing discipline: algebraic operations
commit bd10a0bbf8

@@ -18,11 +18,9 @@ let {
   triangularDist,
   exponentialDist,
 } = module(GenericDist_Fixtures)
-let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
 
-let {toFloat, toDist, toString, toError} = module(DistributionOperation.Output)
+let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Output)
 let {run} = module(DistributionOperation)
-let {fmap} = module(DistributionOperation.Output)
 let run = run(~env)
 let outputMap = fmap(~env)
 let toExt: option<'a> => 'a = E.O.toExt(

@@ -11,3 +11,4 @@ let triangularDist: GenericDist_Types.genericDist = Symbolic(
 )
 let exponentialDist: GenericDist_Types.genericDist = Symbolic(#Exponential({rate: 2.0}))
 let uniformDist: GenericDist_Types.genericDist = Symbolic(#Uniform({low: 9.0, high: 10.0}))
+let floatDist: GenericDist_Types.genericDist = Symbolic(#Float(1e1))

@@ -0,0 +1,368 @@
+/*
+This file is like a half measure between one-off unit tests and proper invariant validation.
+As such, I'm not that excited about it, though it does provide some structure and will alarm us
+when things substantially change.
+Also, there are some open comments in https://github.com/quantified-uncertainty/squiggle/pull/232 that haven't been addressed.
+*/
+
+open Jest
+open Expect
+open TestHelpers
+
+let {
+  normalDist5, // mean=5, stdev=2
+  normalDist10, // mean=10, stdev=2
+  normalDist20, // mean=20, stdev=2
+  normalDist, // mean=5; stdev=2
+  uniformDist, // low=9; high=10
+  betaDist, // alpha=2; beta=5
+  lognormalDist, // mu=0; sigma=1
+  cauchyDist, // local=1; scale=1
+  triangularDist, // low=1; medium=2; high=3;
+  exponentialDist, // rate=2
+} = module(GenericDist_Fixtures)
+
+let {
+  algebraicAdd,
+  algebraicMultiply,
+  algebraicDivide,
+  algebraicSubtract,
+  algebraicLogarithm,
+  algebraicPower,
+} = module(DistributionOperation.Constructors)
+
+let algebraicAdd = algebraicAdd(~env)
+let algebraicMultiply = algebraicMultiply(~env)
+let algebraicDivide = algebraicDivide(~env)
+let algebraicSubtract = algebraicSubtract(~env)
+let algebraicLogarithm = algebraicLogarithm(~env)
+let algebraicPower = algebraicPower(~env)
+
+describe("(Algebraic) addition of distributions", () => {
+  describe("mean", () => {
+    test("normal(mean=5) + normal(mean=20)", () => {
+      normalDist5
+      ->algebraicAdd(normalDist20)
+      ->E.R2.fmap(GenericDist_Types.Constructors.UsingDists.mean)
+      ->E.R2.fmap(run)
+      ->E.R2.fmap(toFloat)
+      ->E.R.toExn
+      ->expect
+      ->toBe(Some(2.5e1))
+    })
+
+    test("uniform(low=9, high=10) + beta(alpha=2, beta=5)", () => {
+      // let uniformMean = (9.0 +. 10.0) /. 2.0
+      // let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
+      let received =
+        uniformDist
+        ->algebraicAdd(betaDist)
+        ->E.R2.fmap(GenericDist_Types.Constructors.UsingDists.mean)
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toExn
+      switch received {
+      | None => "algebraicAdd has"->expect->toBe("failed")
+      // This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
+      // sometimes it works with ~digits=2.
+      | Some(x) => x->expect->toBeSoCloseTo(0.01927225696028752, ~digits=1) // (uniformMean +. betaMean)
+      }
+    })
+    test("beta(alpha=2, beta=5) + uniform(low=9, high=10)", () => {
+      // let uniformMean = (9.0 +. 10.0) /. 2.0
+      // let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
+      let received =
+        betaDist
+        ->algebraicAdd(uniformDist)
+        ->E.R2.fmap(GenericDist_Types.Constructors.UsingDists.mean)
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toExn
+      switch received {
+      | None => "algebraicAdd has"->expect->toBe("failed")
+      // This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
+      // sometimes it works with ~digits=2.
+      | Some(x) => x->expect->toBeSoCloseTo(0.019275414920485248, ~digits=1) // (uniformMean +. betaMean)
+      }
+    })
+  })
+  describe("pdf", () => {
+    // TEST IS WRONG. SEE STDEV ADDITION EXPRESSION.
+    testAll(
+      "(normal(mean=5) + normal(mean=5)).pdf (imprecise)",
+      list{8e0, 1e1, 1.2e1, 1.4e1},
+      x => {
+        let received =
+          normalDist10 // this should be normal(10, sqrt(8))
+          ->Ok
+          ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, x))
+          ->E.R2.fmap(run)
+          ->E.R2.fmap(toFloat)
+          ->E.R.toOption
+          ->E.O.flatten
+        let calculated =
+          normalDist5
+          ->algebraicAdd(normalDist5)
+          ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, x))
+          ->E.R2.fmap(run)
+          ->E.R2.fmap(toFloat)
+          ->E.R.toOption
+          ->E.O.flatten
+
+        switch received {
+        | None =>
+          "this branch occurs when the dispatch to Jstat on trusted input fails."
+          ->expect
+          ->toBe("never")
+        | Some(x) =>
+          switch calculated {
+          | None => "algebraicAdd has"->expect->toBe("failed")
+          | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
+          }
+        }
+      },
+    )
+    test("(normal(mean=10) + normal(mean=10)).pdf(1.9e1)", () => {
+      let received =
+        normalDist20
+        ->Ok
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, 1.9e1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+      let calculated =
+        normalDist10
+        ->algebraicAdd(normalDist10)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, 1.9e1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+      switch received {
+      | None =>
+        "this branch occurs when the dispatch to Jstat on trusted input fails."
+        ->expect
+        ->toBe("never")
+      | Some(x) =>
+        switch calculated {
+        | None => "algebraicAdd has"->expect->toBe("failed")
+        | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
+        }
+      }
+    })
+    test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)", () => {
+      let received =
+        uniformDist
+        ->algebraicAdd(betaDist)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, 1e1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toExn
+      switch received {
+      | None => "algebraicAdd has"->expect->toBe("failed")
+      // This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
+      // sometimes it works with ~digits=4.
+      | Some(x) => x->expect->toBeSoCloseTo(0.001978994877226945, ~digits=3)
+      }
+    })
+    test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)", () => {
+      let received =
+        betaDist
+        ->algebraicAdd(uniformDist)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, 1e1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toExn
+      switch received {
+      | None => "algebraicAdd has"->expect->toBe("failed")
+      // This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
+      // sometimes it works with ~digits=4.
+      | Some(x) => x->expect->toBeSoCloseTo(0.001978994877226945, ~digits=3)
+      }
+    })
+  })
+  describe("cdf", () => {
+    testAll("(normal(mean=5) + normal(mean=5)).cdf (imprecise)", list{6e0, 8e0, 1e1, 1.2e1}, x => {
+      let received =
+        normalDist10
+        ->Ok
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, x))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+      let calculated =
+        normalDist5
+        ->algebraicAdd(normalDist5)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, x))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+
+      switch received {
+      | None =>
+        "this branch occurs when the dispatch to Jstat on trusted input fails."
+        ->expect
+        ->toBe("never")
+      | Some(x) =>
+        switch calculated {
+        | None => "algebraicAdd has"->expect->toBe("failed")
+        | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
+        }
+      }
+    })
+    test("(normal(mean=10) + normal(mean=10)).cdf(1.25e1)", () => {
+      let received =
+        normalDist20
+        ->Ok
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, 1.25e1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+      let calculated =
+        normalDist10
+        ->algebraicAdd(normalDist10)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, 1.25e1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+      switch received {
+      | None =>
+        "this branch occurs when the dispatch to Jstat on trusted input fails."
+        ->expect
+        ->toBe("never")
+      | Some(x) =>
+        switch calculated {
+        | None => "algebraicAdd has"->expect->toBe("failed")
+        | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
+        }
+      }
+    })
+    test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)", () => {
+      let received =
+        uniformDist
+        ->algebraicAdd(betaDist)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, 1e1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toExn
+      switch received {
+      | None => "algebraicAdd has"->expect->toBe("failed")
+      // This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
+      // sometimes it works with ~digits=4.
+      | Some(x) => x->expect->toBeSoCloseTo(0.0013961779932477507, ~digits=3)
+      }
+    })
+    test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)", () => {
+      let received =
+        betaDist
+        ->algebraicAdd(uniformDist)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, 1e1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toExn
+      switch received {
+      | None => "algebraicAdd has"->expect->toBe("failed")
+      // This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
+      // sometimes it works with ~digits=4.
+      | Some(x) => x->expect->toBeSoCloseTo(0.001388898111625753, ~digits=3)
+      }
+    })
+  })
+
+  describe("inv", () => {
+    testAll("(normal(mean=5) + normal(mean=5)).inv (imprecise)", list{5e-2, 4.2e-3, 9e-3}, x => {
+      let received =
+        normalDist10
+        ->Ok
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, x))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+      let calculated =
+        normalDist5
+        ->algebraicAdd(normalDist5)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, x))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+
+      switch received {
+      | None =>
+        "this branch occurs when the dispatch to Jstat on trusted input fails."
+        ->expect
+        ->toBe("never")
+      | Some(x) =>
+        switch calculated {
+        | None => "algebraicAdd has"->expect->toBe("failed")
+        | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
+        }
+      }
+    })
+    test("(normal(mean=10) + normal(mean=10)).inv(1e-1)", () => {
+      let received =
+        normalDist20
+        ->Ok
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, 1e-1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+      let calculated =
+        normalDist10
+        ->algebraicAdd(normalDist10)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, 1e-1))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toOption
+        ->E.O.flatten
+      switch received {
+      | None =>
+        "this branch occurs when the dispatch to Jstat on trusted input fails."
+        ->expect
+        ->toBe("never")
+      | Some(x) =>
+        switch calculated {
+        | None => "algebraicAdd has"->expect->toBe("failed")
+        | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
+        }
+      }
+    })
+    test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)", () => {
+      let received =
+        uniformDist
+        ->algebraicAdd(betaDist)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, 2e-2))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toExn
+      switch received {
+      | None => "algebraicAdd has"->expect->toBe("failed")
+      // This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
+      // sometimes it works with ~digits=2.
+      | Some(x) => x->expect->toBeSoCloseTo(10.927078217530806, ~digits=0)
+      }
+    })
+    test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)", () => {
+      let received =
+        betaDist
+        ->algebraicAdd(uniformDist)
+        ->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, 2e-2))
+        ->E.R2.fmap(run)
+        ->E.R2.fmap(toFloat)
+        ->E.R.toExn
+      switch received {
+      | None => "algebraicAdd has"->expect->toBe("failed")
+      // This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
+      // sometimes it works with ~digits=2.
+      | Some(x) => x->expect->toBeSoCloseTo(10.915396627014363, ~digits=0)
+      }
+    })
+  })
+})

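For reference, the identity the pdf/cdf/inv comparisons above lean on, and which the in-file comments flag as only approximately respected (the reference distribution normalDist10 has stdev 2 rather than sqrt(8)): for independent normal distributions,

    X \sim N(\mu_1, \sigma_1^2),\quad Y \sim N(\mu_2, \sigma_2^2) \implies X + Y \sim N(\mu_1 + \mu_2,\ \sigma_1^2 + \sigma_2^2)

so normal(mean=5, stdev=2) + normal(mean=5, stdev=2) should be normal(10, sqrt(8) ≈ 2.83), while the means always add exactly (5 + 20 = 25 in the first mean test).
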
@@ -0,0 +1,140 @@
+/*
+This is the most basic file in our invariants family of tests.
+
+See document in https://github.com/quantified-uncertainty/squiggle/pull/238 for details
+
+Note: digits parameter should be higher than -4.
+*/
+
+open Jest
+open Expect
+open TestHelpers
+
+let {
+  algebraicAdd,
+  algebraicMultiply,
+  algebraicDivide,
+  algebraicSubtract,
+  algebraicLogarithm,
+  algebraicPower,
+} = module(DistributionOperation.Constructors)
+
+let algebraicAdd = algebraicAdd(~env)
+let algebraicMultiply = algebraicMultiply(~env)
+let algebraicDivide = algebraicDivide(~env)
+let algebraicSubtract = algebraicSubtract(~env)
+let algebraicLogarithm = algebraicLogarithm(~env)
+let algebraicPower = algebraicPower(~env)
+
+describe("Mean", () => {
+  let digits = -4
+
+  let mean = GenericDist_Types.Constructors.UsingDists.mean
+
+  let runMean: result<DistributionTypes.genericDist, DistributionTypes.error> => float = distR => {
+    distR
+    ->E.R2.fmap(mean)
+    ->E.R2.fmap(run)
+    ->E.R2.fmap(toFloat)
+    ->E.R.toExn
+    ->E.O2.toExn("Shouldn't see this because we trust testcase input")
+  }
+
+  let impossiblePath: string => assertion = algebraicOp =>
+    `${algebraicOp} has`->expect->toEqual("failed")
+
+  let distributions = list{
+    normalMake(0.0, 1e0),
+    betaMake(2e0, 4e0),
+    exponentialMake(1.234e0),
+    uniformMake(7e0, 1e1),
+    // cauchyMake(1e0, 1e0),
+    lognormalMake(1e0, 1e0),
+    triangularMake(1e0, 1e1, 5e1),
+    Ok(floatMake(1e1)),
+  }
+  let combinations = E.L.combinations2(distributions)
+  let zipDistsDists = E.L.zip(distributions, distributions)
+
+  let testOperationMean = (
+    distOp: (DistributionTypes.genericDist, DistributionTypes.genericDist) => result<DistributionTypes.genericDist, DistributionTypes.error>,
+    description: string,
+    floatOp: (float, float) => float,
+    dist1': result<SymbolicDistTypes.symbolicDist, string>,
+    dist2': result<SymbolicDistTypes.symbolicDist, string>
+  ) => {
+    let dist1 = dist1'->E.R2.fmap(x=>DistributionTypes.Symbolic(x))->E.R2.fmap2(s=>DistributionTypes.Other(s))
+    let dist2 = dist2'->E.R2.fmap(x=>DistributionTypes.Symbolic(x))->E.R2.fmap2(s=>DistributionTypes.Other(s))
+    let received =
+      E.R.liftJoin2(distOp, dist1, dist2)
+      ->E.R2.fmap(mean)
+      ->E.R2.fmap(run)
+      ->E.R2.fmap(toFloat)
+    let expected = floatOp(runMean(dist1), runMean(dist2))
+    switch received {
+    | Error(err) => impossiblePath(description)
+    | Ok(x) =>
+      switch x {
+      | None => impossiblePath(description)
+      | Some(x) => x->expect->toBeSoCloseTo(expected, ~digits)
+      }
+    }
+  }
+
+  describe("addition", () => {
+    let testAdditionMean = testOperationMean(algebraicAdd, "algebraicAdd", \"+.")
+
+    testAll("homogeneous addition", zipDistsDists, dists => {
+      let (dist1, dist2) = dists
+      testAdditionMean(dist1, dist2)
+    })
+
+    testAll("heterogeneous addition (1)", combinations, dists => {
+      let (dist1, dist2) = dists
+      testAdditionMean(dist1, dist2)
+    })
+
+    testAll("heterogeneous addition (commuted of 1 (or; 2))", combinations, dists => {
+      let (dist1, dist2) = dists
+      testAdditionMean(dist2, dist1)
+    })
+  })
+
+  describe("subtraction", () => {
+    let testSubtractionMean = testOperationMean(algebraicSubtract, "algebraicSubtract", \"-.")
+
+    testAll("homogeneous subtraction", zipDistsDists, dists => {
+      let (dist1, dist2) = dists
+      testSubtractionMean(dist1, dist2)
+    })
+
+    testAll("heterogeneous subtraction (1)", combinations, dists => {
+      let (dist1, dist2) = dists
+      testSubtractionMean(dist1, dist2)
+    })
+
+    testAll("heterogeneous subtraction (commuted of 1 (or; 2))", combinations, dists => {
+      let (dist1, dist2) = dists
+      testSubtractionMean(dist2, dist1)
+    })
+  })
+
+  describe("multiplication", () => {
+    let testMultiplicationMean = testOperationMean(algebraicMultiply, "algebraicMultiply", \"*.")
+
+    testAll("homogeneous subtraction", zipDistsDists, dists => {
+      let (dist1, dist2) = dists
+      testMultiplicationMean(dist1, dist2)
+    })
+
+    testAll("heterogeneoous subtraction (1)", combinations, dists => {
+      let (dist1, dist2) = dists
+      testMultiplicationMean(dist1, dist2)
+    })
+
+    testAll("heterogeneoous subtraction (commuted of 1 (or; 2))", combinations, dists => {
+      let (dist1, dist2) = dists
+      testMultiplicationMean(dist2, dist1)
+    })
+  })
+})

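The invariants exercised by this file are the standard mean identities, under the assumption that the two operands are combined as independent distributions (which is how the algebraic operations sample them):

    E[X + Y] = E[X] + E[Y] \qquad E[X - Y] = E[X] - E[Y] \qquad E[XY] = E[X]\,E[Y] \ \text{(independent } X, Y\text{)}

The first two hold for any pair of random variables; the product rule is the one that needs independence.
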
@@ -2,14 +2,6 @@ open Jest
 open Expect
 open TestHelpers
 
-// TODO: use Normal.make (etc.), but preferably after the new validation dispatch is in.
-let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
-let mkBeta = (alpha, beta) => GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
-let mkExponential = rate => GenericDist_Types.Symbolic(#Exponential({rate: rate}))
-let mkUniform = (low, high) => GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
-let mkCauchy = (local, scale) => GenericDist_Types.Symbolic(#Cauchy({local: local, scale: scale}))
-let mkLognormal = (mu, sigma) => GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
-
 describe("mixture", () => {
   testAll(
     "fair mean of two normal distributions",

@@ -3,7 +3,7 @@ open Expect
 open TestHelpers
 
 // TODO: use Normal.make (but preferably after teh new validation dispatch is in)
-let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
+let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
 
 describe("(Symbolic) normalize", () => {
   testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {

@@ -28,16 +28,16 @@ describe("(Symbolic) mean", () => {
 
   testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Exponential({rate: rate}))),
+      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
     )
     meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
   })
 
   test("of a cauchy distribution", () => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
+      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
     )
-    meanValue->unpackFloat->expect->toBeCloseTo(2.01868297874546)
+    meanValue->unpackFloat->expect->toBeSoCloseTo(1.0098094001641797, ~digits=5)
     //-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
   })
 

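A note on why the Cauchy assertion above is pinned to whatever the sampler happens to return: the Cauchy density has no finite mean, since

    f(x) = \frac{1}{\pi\gamma\left(1 + \left(\frac{x - x_0}{\gamma}\right)^2\right)}, \qquad \int |x|\,f(x)\,dx = \infty

which is what the commented-out GenDistError(Other("Cauchy distributions may have no mean value.")) expectation alludes to.
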
@@ -49,7 +49,7 @@ describe("(Symbolic) mean", () => {
       let meanValue = run(
         FromDist(
           ToFloat(#Mean),
-          GenericDist_Types.Symbolic(#Triangular({low: low, medium: medium, high: high})),
+          DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
         ),
       )
       meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/

@@ -63,7 +63,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (alpha, beta) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))),
+        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
     },

@@ -72,7 +72,7 @@ describe("(Symbolic) mean", () => {
   // TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
   test("of beta(0, 0)", () => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
+      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
     )
     meanValue->unpackFloat->expect->ExpectJs.toBeFalsy
   })

@@ -83,7 +83,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (mu, sigma) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
+        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
     },

@@ -95,14 +95,14 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (low, high) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))),
+        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
     },
   )
 
   test("of a float", () => {
-    let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Float(7.7))))
+    let meanValue = run(FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
     meanValue->unpackFloat->expect->toBeCloseTo(7.7)
   })
 })

@@ -11,17 +11,33 @@ let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Ou
 let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)
 
 let env: DistributionOperation.env = {
-  sampleCount: 100,
-  xyPointLength: 100,
+  sampleCount: 10000,
+  xyPointLength: 1000,
 }
 
 let run = DistributionOperation.run(~env)
 let outputMap = fmap(~env)
 let unreachableInTestFileMessage = "Should be impossible to reach (This error is in test file)"
 let toExtFloat: option<float> => float = E.O.toExt(unreachableInTestFileMessage)
-let toExtDist: option<GenericDist_Types.genericDist> => GenericDist_Types.genericDist = E.O.toExt(
+let toExtDist: option<DistributionTypes.genericDist> => DistributionTypes.genericDist = E.O.toExt(
   unreachableInTestFileMessage,
 )
 // let toExt: option<'a> => 'a = E.O.toExt(unreachableInTestFileMessage)
 let unpackFloat = x => x->toFloat->toExtFloat
 let unpackDist = y => y->toDist->toExtDist
+
+let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
+let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))
+let mkExponential = rate => DistributionTypes.Symbolic(#Exponential({rate: rate}))
+let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low: low, high: high}))
+let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local: local, scale: scale}))
+let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
+
+let normalMake = SymbolicDist.Normal.make
+let betaMake = SymbolicDist.Beta.make
+let exponentialMake = SymbolicDist.Exponential.make
+let uniformMake = SymbolicDist.Uniform.make
+let cauchyMake = SymbolicDist.Cauchy.make
+let lognormalMake = SymbolicDist.Lognormal.make
+let triangularMake = SymbolicDist.Triangular.make
+let floatMake = SymbolicDist.Float.make

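The larger env mainly buys precision for the sampling-based assertions in the tests above: the standard error of a Monte Carlo mean estimate scales roughly as

    \mathrm{SE} \approx \sigma / \sqrt{n}

so raising sampleCount from 100 to 10000 tightens those estimates by about a factor of 10, at a corresponding cost in test runtime.
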
packages/squiggle-lang/__tests__/Utility_test.res (new file, 10 lines)

@@ -0,0 +1,10 @@
+open Jest
+open Expect
+
+describe("E.L.combinations2", () => {
+  test("size three", () => {
+    E.L.combinations2(list{"alice", "bob", "eve"})
+    ->expect
+    ->toEqual(list{("alice", "bob"), ("alice", "eve"), ("bob", "eve")})
+  })
+})

@@ -10,6 +10,7 @@
     "test:reducer": "jest --testPathPattern '.*__tests__/Reducer.*'",
     "test": "jest",
     "test:watch": "jest --watchAll",
+    "test:quick": "jest --modulePathIgnorePatterns=__tests__/Distributions/Invariants/*",
     "coverage": "rm -f *.coverage; yarn clean; BISECT_ENABLE=yes yarn build; yarn test; bisect-ppx-report html",
     "coverage:ci": "yarn clean; BISECT_ENABLE=yes yarn build; yarn test; bisect-ppx-report send-to Codecov",
     "lint:rescript": "./lint.sh",

@@ -1,6 +1,6 @@
 type functionCallInfo = GenericDist_Types.Operation.genericFunctionCallInfo
-type genericDist = GenericDist_Types.genericDist
-type error = GenericDist_Types.error
+type genericDist = DistributionTypes.genericDist
+type error = DistributionTypes.error
 
 // TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.
 

@@ -1,12 +1,15 @@
+@genType
 type genericDist =
   | PointSet(PointSetTypes.pointSetDist)
-  | SampleSet(array<float>)
+  | SampleSet(SampleSetDist.t)
   | Symbolic(SymbolicDistTypes.symbolicDist)
 
+@genType
 type error =
   | NotYetImplemented
   | Unreachable
   | DistributionVerticalShiftIsInvalid
+  | ArgumentError(string)
   | Other(string)
 
 module Operation = {

@@ -1,6 +1,6 @@
 //TODO: multimodal, add interface, test somehow, track performance, refactor sampleSet, refactor ASTEvaluator.res.
-type t = GenericDist_Types.genericDist
-type error = GenericDist_Types.error
+type t = DistributionTypes.genericDist
+type error = DistributionTypes.error
 type toPointSetFn = t => result<PointSetTypes.pointSetDist, error>
 type toSampleSetFn = t => result<SampleSetDist.t, error>
 type scaleMultiplyFn = (t, float) => result<t, error>

|
@ -115,7 +115,7 @@ module Truncate = {
|
||||||
| Some(r) => Ok(r)
|
| Some(r) => Ok(r)
|
||||||
| None =>
|
| None =>
|
||||||
toPointSetFn(t)->E.R2.fmap(t => {
|
toPointSetFn(t)->E.R2.fmap(t => {
|
||||||
GenericDist_Types.PointSet(PointSetDist.T.truncate(leftCutoff, rightCutoff, t))
|
DistributionTypes.PointSet(PointSetDist.T.truncate(leftCutoff, rightCutoff, t))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -168,7 +168,7 @@ module AlgebraicCombination = {
     ->E.R.bind(((t1, t2)) => {
       SampleSetDist.map2(~fn, ~t1, ~t2)->GenericDist_Types.Error.resultStringToResultError
     })
-    ->E.R2.fmap(r => GenericDist_Types.SampleSet(r))
+    ->E.R2.fmap(r => DistributionTypes.SampleSet(r))
   }
 
   //I'm (Ozzie) really just guessing here, very little idea what's best

@@ -206,7 +206,7 @@ module AlgebraicCombination = {
       arithmeticOperation,
       t1,
       t2,
-    )->E.R2.fmap(r => GenericDist_Types.PointSet(r))
+    )->E.R2.fmap(r => DistributionTypes.PointSet(r))
   }
 }
 }

@@ -229,7 +229,7 @@ let pointwiseCombination = (
       t2,
     )
   )
-  ->E.R2.fmap(r => GenericDist_Types.PointSet(r))
+  ->E.R2.fmap(r => DistributionTypes.PointSet(r))
 }
 
 let pointwiseCombinationFloat = (

@@ -239,7 +239,7 @@ let pointwiseCombinationFloat = (
   ~float: float,
 ): result<t, error> => {
   let m = switch arithmeticOperation {
-  | #Add | #Subtract => Error(GenericDist_Types.DistributionVerticalShiftIsInvalid)
+  | #Add | #Subtract => Error(DistributionTypes.DistributionVerticalShiftIsInvalid)
   | (#Multiply | #Divide | #Power | #Logarithm) as arithmeticOperation =>
     toPointSetFn(t)->E.R2.fmap(t => {
       //TODO: Move to PointSet codebase

@@ -254,7 +254,7 @@ let pointwiseCombinationFloat = (
       )
     })
   }
-  m->E.R2.fmap(r => GenericDist_Types.PointSet(r))
+  m->E.R2.fmap(r => DistributionTypes.PointSet(r))
 }
 
 //Note: The result should always cumulatively sum to 1. This would be good to test.

@@ -265,7 +265,7 @@ let mixture = (
   ~pointwiseAddFn: pointwiseAddFn,
 ) => {
   if E.A.length(values) == 0 {
-    Error(GenericDist_Types.Other("Mixture error: mixture must have at least 1 element"))
+    Error(DistributionTypes.Other("Mixture error: mixture must have at least 1 element"))
   } else {
     let totalWeight = values->E.A2.fmap(E.Tuple2.second)->E.A.Floats.sum
     let properlyWeightedValues =

@@ -1,15 +1,6 @@
-type genericDist =
-  | PointSet(PointSetTypes.pointSetDist)
-  | SampleSet(SampleSetDist.t)
-  | Symbolic(SymbolicDistTypes.symbolicDist)
+type genericDist = DistributionTypes.genericDist
 
 @genType
-type error =
-  | NotYetImplemented
-  | Unreachable
-  | DistributionVerticalShiftIsInvalid
-  | ArgumentError(string)
-  | Other(string)
+type error = DistributionTypes.error
 
 @genType
 module Error = {

@@ -23,6 +14,7 @@ module Error = {
     | NotYetImplemented => "Not Yet Implemented"
     | Unreachable => "Unreachable"
     | DistributionVerticalShiftIsInvalid => "Distribution Vertical Shift Is Invalid"
+    | ArgumentError(x) => `Argument Error: ${x}`
     | Other(s) => s
     }
 }

@@ -141,6 +141,8 @@ module Lognormal = {
   }
   let divide = (l1, l2) => {
     let mu = l1.mu -. l2.mu
+    // We believe the ratiands will have covariance zero.
+    // See here https://stats.stackexchange.com/questions/21735/what-are-the-mean-and-variance-of-the-ratio-of-two-lognormal-variables for details
     let sigma = l1.sigma +. l2.sigma
     #Lognormal({mu: mu, sigma: sigma})
   }

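The identity behind divide, under the zero-covariance assumption the new comment states: if X \sim \mathrm{Lognormal}(\mu_1, \sigma_1) and Y \sim \mathrm{Lognormal}(\mu_2, \sigma_2) are independent, then

    \ln(X/Y) = \ln X - \ln Y \sim N(\mu_1 - \mu_2,\ \sigma_1^2 + \sigma_2^2)

i.e. the log-means subtract and the log-variances add.
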
@@ -11,10 +11,10 @@ The below few seem to work fine. In the future there's definitely more work to d
 type samplingParams = DistributionOperation.env
 
 @genType
-type genericDist = GenericDist_Types.genericDist
+type genericDist = DistributionTypes.genericDist
 
 @genType
-type distributionError = GenericDist_Types.error
+type distributionError = DistributionTypes.error
 
 @genType
 type resultDist = result<genericDist, distributionError>

@@ -59,8 +59,9 @@ module O = {
   let toExn = Rationale.Option.toExn
   let some = Rationale.Option.some
   let firstSome = Rationale.Option.firstSome
-  let toExt = Rationale.Option.toExn
+  let toExt = Rationale.Option.toExn // wanna flag this-- looks like a typo but `Rationale.OptiontoExt` doesn't exist.
   let flatApply = (fn, b) => Rationale.Option.apply(fn, Some(b)) |> Rationale.Option.flatten
+  let flatten = Rationale.Option.flatten
 
   let toBool = opt =>
     switch opt {

@@ -103,6 +104,7 @@ module O2 = {
   let toExn = (a, b) => O.toExn(b, a)
   let fmap = (a, b) => O.fmap(b, a)
   let toResult = (a, b) => O.toResult(b, a)
+  let bind = (a, b) => O.bind(b, a)
 }
 
 /* Functions */

@@ -176,6 +178,31 @@ module R = {
 
   let errorIfCondition = (errorCondition, errorMessage, r) =>
     errorCondition(r) ? Error(errorMessage) : Ok(r)
+
+  let ap = Rationale.Result.ap
+  let ap' = (r, a) =>
+    switch r {
+    | Ok(f) => fmap(f, a)
+    | Error(err) => Error(err)
+    }
+  // (a1 -> a2 -> r) -> m a1 -> m a2 -> m r // not in Rationale
+  let liftM2: (('a, 'b) => 'c, result<'a, 'd>, result<'b, 'd>) => result<'c, 'd> = (op, xR, yR) => {
+    ap'(fmap(op, xR), yR)
+  }
+
+  let liftJoin2: (('a, 'b) => result<'c, 'd>, result<'a, 'd>, result<'b, 'd>) => result<'c, 'd> = (
+    op,
+    xR,
+    yR,
+  ) => {
+    bind(liftM2(op, xR, yR), x => x)
+  }
+
+  let fmap2 = (f, r) =>
+    switch r {
+    | Ok(r) => r->Ok
+    | Error(x) => x->f->Error
+    }
 }
 
 module R2 = {

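A minimal usage sketch of the two lifters added above (hypothetical values, not part of this commit): liftM2 lifts a plain binary function over two results, while liftJoin2 lifts a binary function that itself returns a result and flattens the nesting.

    // liftM2: apply a plain binary function inside two Ok values.
    let sum = E.R.liftM2(\"+.", Ok(1.0), Ok(2.0)) // Ok(3.0)
    // liftJoin2: the operation itself may fail, so the nested result is joined.
    let safeDivide = (x, y) => y == 0.0 ? Error("division by zero") : Ok(x /. y)
    let quotient = E.R.liftJoin2(safeDivide, Ok(1.0), Ok(2.0)) // Ok(0.5)

This is the shape the invariants test relies on, where liftJoin2 combines two result-wrapped distributions with an algebraic operation that can itself error.
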
@@ -188,6 +215,12 @@ module R2 = {
     | Ok(r) => Ok(r)
     | Error(e) => map(e)
     }
+
+  let fmap2 = (xR, f) =>
+    switch xR {
+    | Ok(x) => x->Ok
+    | Error(x) => x->f->Error
+    }
 }
 
 let safe_fn_of_string = (fn, s: string): option<'a> =>

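R2.fmap2 is the pipe-first counterpart for mapping only the error channel; a small sketch with hypothetical values:

    let ok = Ok(1.0)->E.R2.fmap2(msg => "context: " ++ msg) // Ok(1.0), untouched
    let err = Error("bad input")->E.R2.fmap2(msg => "context: " ++ msg) // Error("context: bad input")

Ok values pass through unchanged; only the Error payload is transformed.
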
@@ -258,6 +291,29 @@ module L = {
   let update = Rationale.RList.update
   let iter = List.iter
   let findIndex = Rationale.RList.findIndex
+  let headSafe = Belt.List.head
+  let tailSafe = Belt.List.tail
+  let headExn = Belt.List.headExn
+  let tailExn = Belt.List.tailExn
+  let zip = Belt.List.zip
+
+  let combinations2: list<'a> => list<('a, 'a)> = xs => {
+    let rec loop: ('a, list<'a>) => list<('a, 'a)> = (x', xs') => {
+      let n = length(xs')
+      if n == 0 {
+        list{}
+      } else {
+        let combs = fmap(y => (x', y), xs')
+        let hd = headExn(xs')
+        let tl = tailExn(xs')
+        concat(list{combs, loop(hd, tl)})
+      }
+    }
+    switch (headSafe(xs), tailSafe(xs)) {
+    | (Some(x'), Some(xs')) => loop(x', xs')
+    | (_, _) => list{}
+    }
+  }
 }
 
 /* A for Array */

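combinations2 enumerates each unordered pair exactly once, so a list of n elements yields

    \binom{n}{2} = \frac{n(n-1)}{2}

pairs: the three names in Utility_test.res above give 3 pairs, and the 7-entry distributions list in the invariants file gives 21 pairs per heterogeneous testAll.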