Merge pull request #192 from QURIresearch/testing-discipline-two

symbolic dists tests (april 6)
Commit 67bcd7052b by Quinn, 2022-04-07 20:59:37 -04:00 (committed by GitHub)
19 changed files with 339 additions and 120 deletions

View File

@@ -13,6 +13,9 @@ Other:
yarn start # listens to files and recompiles at every mutation
yarn test
yarn test:watch # keeps an active session and runs all tests at every mutation
+# where o := open in osx and o := xdg-open in linux,
+yarn coverage; o _coverage/index.html # produces coverage report and opens it in browser
```
# TODO: clean up this README.md

View File

@@ -6,9 +6,8 @@ let env: DistributionOperation.env = {
  xyPointLength: 100,
}
-let normalDist5: GenericDist_Types.genericDist = Symbolic(#Normal({mean: 5.0, stdev: 2.0}))
-let normalDist10: GenericDist_Types.genericDist = Symbolic(#Normal({mean: 10.0, stdev: 2.0}))
-let normalDist20: GenericDist_Types.genericDist = Symbolic(#Normal({mean: 20.0, stdev: 2.0}))
+let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
+let normalDist5: GenericDist_Types.genericDist = mkNormal(5.0, 2.0)
let uniformDist: GenericDist_Types.genericDist = Symbolic(#Uniform({low: 9.0, high: 10.0}))
let {toFloat, toDist, toString, toError} = module(DistributionOperation.Output)
@@ -20,31 +19,6 @@ let toExt: option<'a> => 'a = E.O.toExt(
  "Should be impossible to reach (This error is in test file)",
)
-describe("normalize", () => {
-  test("has no impact on normal dist", () => {
-    let result = run(FromDist(ToDist(Normalize), normalDist5))
-    expect(result)->toEqual(Dist(normalDist5))
-  })
-})
-describe("mean", () => {
-  test("for a normal distribution", () => {
-    let result = DistributionOperation.run(~env, FromDist(ToFloat(#Mean), normalDist5))
-    expect(result)->toEqual(Float(5.0))
-  })
-})
-describe("mixture", () => {
-  test("on two normal distributions", () => {
-    let result =
-      run(Mixture([(normalDist10, 0.5), (normalDist20, 0.5)]))
-      ->outputMap(FromDist(ToFloat(#Mean)))
-      ->toFloat
-      ->toExt
-    expect(result)->toBeCloseTo(15.28)
-  })
-})
describe("toPointSet", () => {
  test("on symbolic normal distribution", () => {
    let result =
@@ -52,7 +26,7 @@ describe("toPointSet", () => {
      ->outputMap(FromDist(ToFloat(#Mean)))
      ->toFloat
      ->toExt
-    expect(result)->toBeCloseTo(5.09)
+    expect(result)->toBeSoCloseTo(5.0, ~digits=0)
  })
  test("on sample set distribution with under 4 points", () => {
@@ -63,7 +37,7 @@ describe("toPointSet", () => {
    expect(result)->toEqual(GenDistError(Other("Converting sampleSet to pointSet failed")))
  })
-  Skip.test("on sample set", () => {
+  test("on sample set", () => {
    let result =
      run(FromDist(ToDist(ToPointSet), normalDist5))
      ->outputMap(FromDist(ToDist(ToSampleSet(1000))))
@@ -71,6 +45,6 @@ describe("toPointSet", () => {
      ->outputMap(FromDist(ToFloat(#Mean)))
      ->toFloat
      ->toExt
-    expect(result)->toBeCloseTo(5.09)
+    expect(result)->toBeSoCloseTo(5.0, ~digits=-1)
  })
})
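Note on the matcher change above: `toBeCloseTo` is swapped for `toBeSoCloseTo(expected, ~digits=...)`. Assuming it follows Jest's `toBeCloseTo(expected, numDigits)` convention (pass iff `|received - expected| < 0.5 * 10 ** -digits`), `~digits=0` tolerates ±0.5 and `~digits=-1` tolerates ±5, which is why a sampled mean near 5.0 is accepted in both rewritten tests. A minimal illustrative sketch, not part of the diff:

```rescript
open Jest
open Expect

// Illustration only: with ~digits=0 the assumed tolerance is 0.5,
// so 5.3 counts as "so close to" 5.0, while 5.6 would not.
test("toBeSoCloseTo tolerance sketch", () => expect(5.3)->toBeSoCloseTo(5.0, ~digits=0))
```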

View File

@@ -0,0 +1,70 @@
open Jest
open Expect
open TestHelpers
// TODO: use Normal.make (etc.), but preferably after the new validation dispatch is in.
let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
let mkBeta = (alpha, beta) => GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
let mkExponential = rate => GenericDist_Types.Symbolic(#Exponential({rate: rate}))
let mkUniform = (low, high) => GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
let mkCauchy = (local, scale) => GenericDist_Types.Symbolic(#Cauchy({local: local, scale: scale}))
let mkLognormal = (mu, sigma) => GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
describe("mixture", () => {
testAll("fair mean of two normal distributions", list{(0.0, 1e2), (-1e1, -1e-4), (-1e1, 1e2), (-1e1, 1e1)}, tup => { // should be property
let (mean1, mean2) = tup
let meanValue = {
run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))
-> outputMap(FromDist(ToFloat(#Mean)))
}
meanValue -> unpackFloat -> expect -> toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
})
testAll(
"weighted mean of a beta and an exponential",
// This would not survive property testing; it was easy for me to find cases that NaN'd out.
list{((128.0, 1.0), 2.0), ((2e-1, 64.0), 16.0), ((1e0, 1e0), 64.0)},
tup => {
let ((alpha, beta), rate) = tup
let betaWeight = 0.25
let exponentialWeight = 0.75
let meanValue = {
run(Mixture(
[
(mkBeta(alpha, beta), betaWeight),
(mkExponential(rate), exponentialWeight)
]
)) -> outputMap(FromDist(ToFloat(#Mean)))
}
let betaMean = 1.0 /. (1.0 +. beta /. alpha)
let exponentialMean = 1.0 /. rate
meanValue
-> unpackFloat
-> expect
-> toBeSoCloseTo(
betaWeight *. betaMean +. exponentialWeight *. exponentialMean,
~digits=-1
)
}
)
testAll(
"weighted mean of lognormal and uniform",
// Would not survive property tests: very easy to find cases that NaN out.
list{((-1e2,1e1), (2e0,1e0)), ((-1e-16,1e-16), (1e-8,1e0)), ((0.0,1e0), (1e0,1e-2))},
tup => {
let ((low, high), (mu, sigma)) = tup
let uniformWeight = 0.6
let lognormalWeight = 0.4
let meanValue = {
run(Mixture([(mkUniform(low, high), uniformWeight), (mkLognormal(mu, sigma), lognormalWeight)]))
-> outputMap(FromDist(ToFloat(#Mean)))
}
let uniformMean = (low +. high) /. 2.0
let lognormalMean = Js.Math.exp(mu +. sigma ** 2.0 /. 2.0) // lognormal mean is exp(mu + sigma^2 / 2)
meanValue
-> unpackFloat
-> expect
-> toBeSoCloseTo(uniformWeight *. uniformMean +. lognormalWeight *. lognormalMean, ~digits=-1)
}
)
})
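For reference, the expected values in these mixture tests follow from the mean of a mixture being the weight-averaged mean of its components, together with the standard closed-form component means (stated here as a sanity check, not as part of the diff; note the lognormal mean involves an exponential):

```latex
\mathbb{E}[X_{\mathrm{mix}}] = \sum_i w_i\,\mathbb{E}[X_i], \qquad
\mathbb{E}[\mathrm{Beta}(\alpha,\beta)] = \frac{\alpha}{\alpha+\beta} = \frac{1}{1+\beta/\alpha}, \qquad
\mathbb{E}[\mathrm{Exp}(\lambda)] = \frac{1}{\lambda}, \\
\mathbb{E}[\mathrm{Uniform}(a,b)] = \frac{a+b}{2}, \qquad
\mathbb{E}[\mathrm{Lognormal}(\mu,\sigma)] = e^{\mu + \sigma^{2}/2}.
```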

View File

@@ -0,0 +1,41 @@
open Jest
open TestHelpers
describe("Continuous and discrete splits", () => {
makeTest(
"splits (1)",
SampleSet.Internals.T.splitContinuousAndDiscrete([1.432, 1.33455, 2.0]),
([1.432, 1.33455, 2.0], E.FloatFloatMap.empty()),
)
makeTest(
"splits (2)",
SampleSet.Internals.T.splitContinuousAndDiscrete([
1.432,
1.33455,
2.0,
2.0,
2.0,
2.0,
]) |> (((c, disc)) => (c, disc |> E.FloatFloatMap.toArray)),
([1.432, 1.33455], [(2.0, 4.0)]),
)
let makeDuplicatedArray = count => {
let arr = Belt.Array.range(1, count) |> E.A.fmap(float_of_int)
let sorted = arr |> Belt.SortArray.stableSortBy(_, compare)
E.A.concatMany([sorted, sorted, sorted, sorted]) |> Belt.SortArray.stableSortBy(_, compare)
}
let (_, discrete1) = SampleSet.Internals.T.splitContinuousAndDiscrete(
makeDuplicatedArray(10),
)
let toArr1 = discrete1 |> E.FloatFloatMap.toArray
makeTest("splitMedium at count=10", toArr1 |> Belt.Array.length, 10)
let (_c, discrete2) = SampleSet.Internals.T.splitContinuousAndDiscrete(
makeDuplicatedArray(500),
)
let toArr2 = discrete2 |> E.FloatFloatMap.toArray
makeTest("splitMedium at count=500", toArr2 |> Belt.Array.length, 500)
})
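Why the expected discrete counts are 10 and 500: every value in `makeDuplicatedArray(count)` occurs exactly four times, so all `count` distinct values should be classified as discrete and, following the pattern of the "splits (2)" test, nothing should remain in the continuous part. A hypothetical spot-check at count=3, written against the same helpers and only valid inside the same `describe` block; it is not part of the diff:

```rescript
// Hypothetical: every value appears 4 times, so the discrete map
// should contain all 3 distinct values with a count of 4.0 each.
makeTest(
  "splits a fully-duplicated array into discrete mass only",
  SampleSet.Internals.T.splitContinuousAndDiscrete(makeDuplicatedArray(3))
    |> (((c, disc)) => (c, disc |> E.FloatFloatMap.toArray)),
  ([], [(1.0, 4.0), (2.0, 4.0), (3.0, 4.0)]),
)
```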

View File

@@ -0,0 +1,161 @@
open Jest
open Expect
open TestHelpers
// TODO: use Normal.make (but preferably after the new validation dispatch is in)
let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
describe("(Symbolic) normalize", () => {
testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
let normalValue = mkNormal(mean, 2.0)
let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
normalizedValue
-> unpackDist
-> expect
-> toEqual(normalValue)
})
})
describe("(Symbolic) mean", () => {
testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))
-> unpackFloat
-> expect
-> toBeCloseTo(mean)
})
Skip.test("of normal(0, -1) (it NaNs out)", () => {
run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))
-> unpackFloat
-> expect
-> ExpectJs.toBeFalsy
})
test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))
-> unpackFloat
-> expect
-> toBeCloseTo(0.0)
})
testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Exponential({rate: rate}))))
meanValue -> unpackFloat -> expect -> toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
})
test("of a cauchy distribution", () => {
let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo(2.01868297874546)
//-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
})
testAll("of triangular distributions", list{(1.0,2.0,3.0), (-1e7,-1e-7,1e-7), (-1e-7,1e0,1e7), (-1e-16,0.0,1e-16)}, tup => {
let (low, medium, high) = tup
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Triangular({low: low, medium: medium, high: high}))
))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
})
// TODO: nonpositive inputs are SUPPOSED to crash.
testAll("of beta distributions", list{(1e-4, 6.4e1), (1.28e2, 1e0), (1e-16, 1e-16), (1e16, 1e16), (-1e4, 1e1), (1e1, -1e4)}, tup => {
let (alpha, beta) = tup
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo(1.0 /. (1.0 +. (beta /. alpha))) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
})
// TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
test("of beta(0, 0)", () => {
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))
))
meanValue
-> unpackFloat
-> expect
-> ExpectJs.toBeFalsy
})
testAll("of lognormal distributions", list{(2.0, 4.0), (1e-7, 1e-2), (-1e6, 10.0), (1e3, -1e2), (-1e8, -1e4), (1e2, 1e-5)}, tup => {
let (mu, sigma) = tup
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0 )) // https://brilliant.org/wiki/log-normal-distribution/
})
testAll("of uniform distributions", list{(1e-5, 12.345), (-1e4, 1e4), (-1e16, -1e2), (5.3e3, 9e9)}, tup => {
let (low, high) = tup
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
})
test("of a float", () => {
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Float(7.7))
))
meanValue -> unpackFloat -> expect -> toBeCloseTo(7.7)
})
})
describe("Normal distribution with sparklines", () => {
let parameterWiseAdditionPdf = (n1: SymbolicDistTypes.normal, n2: SymbolicDistTypes.normal) => {
let normalDistAtSumMeanConstr = SymbolicDist.Normal.add(n1, n2)
let normalDistAtSumMean: SymbolicDistTypes.normal = switch normalDistAtSumMeanConstr {
| #Normal(params) => params
}
x => SymbolicDist.Normal.pdf(x, normalDistAtSumMean)
}
let normalDistAtMean5: SymbolicDistTypes.normal = {mean: 5.0, stdev: 2.0}
let normalDistAtMean10: SymbolicDistTypes.normal = {mean: 10.0, stdev: 2.0}
let range20Float = E.A.rangeFloat(0, 20) // [0.0,1.0,2.0,3.0,4.0,...19.0,]
test("mean=5 pdf", () => {
let pdfNormalDistAtMean5 = x => SymbolicDist.Normal.pdf(x, normalDistAtMean5)
let sparklineMean5 = fnImage(pdfNormalDistAtMean5, range20Float)
Sparklines.create(sparklineMean5, ())
-> expect
-> toEqual(`▁▂▃▅███▅▃▂▁▁▁▁▁▁▁▁▁▁▁`)
})
test("parameter-wise addition of two normal distributions", () => {
let sparklineMean15 = normalDistAtMean5 -> parameterWiseAdditionPdf(normalDistAtMean10) -> fnImage(range20Float)
Sparklines.create(sparklineMean15, ())
-> expect
-> toEqual(`▁▁▁▁▁▁▁▁▁▁▂▃▅▇███▇▅▃▂`)
})
test("mean=10 cdf", () => {
let cdfNormalDistAtMean10 = x => SymbolicDist.Normal.cdf(x, normalDistAtMean10)
let sparklineMean10 = fnImage(cdfNormalDistAtMean10, range20Float)
Sparklines.create(sparklineMean10, ())
-> expect
-> toEqual(`▁▁▁▁▁▁▁▁▂▃▅▆▇████████`)
})
})
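The parameter-wise addition test leans on the standard fact that the sum of two independent normals is again normal, with means and variances adding:

```latex
X \sim \mathcal{N}(\mu_1, \sigma_1^2),\; Y \sim \mathcal{N}(\mu_2, \sigma_2^2)
\;\Longrightarrow\; X + Y \sim \mathcal{N}(\mu_1 + \mu_2,\; \sigma_1^2 + \sigma_2^2).
```

With means 5 and 10 (stdev 2 each), the summed density peaks around x = 15, which is what the right-shifted sparkline in the second test encodes.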

View File

@@ -1,47 +0,0 @@
open Jest
open Expect
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () => expect(item1) -> toEqual(item2))
: test(str, () => expect(item1) -> toEqual(item2))
describe("Lodash", () =>
describe("Lodash", () => {
makeTest(
"split",
SampleSet.Internals.T.splitContinuousAndDiscrete([1.432, 1.33455, 2.0]),
([1.432, 1.33455, 2.0], E.FloatFloatMap.empty()),
)
makeTest(
"split",
SampleSet.Internals.T.splitContinuousAndDiscrete([
1.432,
1.33455,
2.0,
2.0,
2.0,
2.0,
]) |> (((c, disc)) => (c, disc |> E.FloatFloatMap.toArray)),
([1.432, 1.33455], [(2.0, 4.0)]),
)
let makeDuplicatedArray = count => {
let arr = Belt.Array.range(1, count) |> E.A.fmap(float_of_int)
let sorted = arr |> Belt.SortArray.stableSortBy(_, compare)
E.A.concatMany([sorted, sorted, sorted, sorted]) |> Belt.SortArray.stableSortBy(_, compare)
}
let (_, discrete) = SampleSet.Internals.T.splitContinuousAndDiscrete(
makeDuplicatedArray(10),
)
let toArr = discrete |> E.FloatFloatMap.toArray
makeTest("splitMedium", toArr |> Belt.Array.length, 10)
let (_c, discrete) = SampleSet.Internals.T.splitContinuousAndDiscrete(
makeDuplicatedArray(500),
)
let toArr = discrete |> E.FloatFloatMap.toArray
makeTest("splitMedium", toArr |> Belt.Array.length, 500)
})
)

View File

@@ -1,33 +0,0 @@
open Jest
open Expect
open Js.Array
open SymbolicDist
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () => expect(item1) -> toEqual(item2))
: test(str, () => expect(item1) -> toEqual(item2))
let pdfImage = (thePdf, inps) => map(thePdf, inps)
let parameterWiseAdditionHelper = (n1: SymbolicDistTypes.normal, n2: SymbolicDistTypes.normal) => {
let normalDistAtSumMeanConstr = Normal.add(n1, n2)
let normalDistAtSumMean: SymbolicDistTypes.normal = switch normalDistAtSumMeanConstr {
| #Normal(params) => params
}
x => Normal.pdf(x, normalDistAtSumMean)
}
describe("Normal distribution with sparklines", () => {
let normalDistAtMean5: SymbolicDistTypes.normal = {mean: 5.0, stdev: 2.0}
let normalDistAtMean10: SymbolicDistTypes.normal = {mean: 10.0, stdev: 2.0}
let range20Float = E.A.rangeFloat(0, 20) // [0.0,1.0,2.0,3.0,4.0,...19.0,]
let pdfNormalDistAtMean5 = x => Normal.pdf(x, normalDistAtMean5)
let sparklineMean5 = pdfImage(pdfNormalDistAtMean5, range20Float)
makeTest("mean=5", Sparklines.create(sparklineMean5, ()), `▁▂▃▅███▅▃▂▁▁▁▁▁▁▁▁▁▁▁`)
let sparklineMean15 = normalDistAtMean5 -> parameterWiseAdditionHelper(normalDistAtMean10) -> pdfImage(range20Float)
makeTest("parameter-wise addition of two normal distributions", Sparklines.create(sparklineMean15, ()), `▁▁▁▁▁▁▁▁▁▁▂▃▅▇███▇▅▃▂`)
})

View File

@@ -0,0 +1,26 @@
open Jest
open Expect
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () => expect(item1) -> toEqual(item2))
: test(str, () => expect(item1) -> toEqual(item2))
let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Output)
let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)
let env: DistributionOperation.env = {
sampleCount: 100,
xyPointLength: 100,
}
let run = DistributionOperation.run(~env)
let outputMap = fmap(~env)
let unreachableInTestFileMessage = "Should be impossible to reach (This error is in test file)"
let toExtFloat: option<float> => float = E.O.toExt(unreachableInTestFileMessage)
let toExtDist: option<GenericDist_Types.genericDist> => GenericDist_Types.genericDist = E.O.toExt(unreachableInTestFileMessage)
// let toExt: option<'a> => 'a = E.O.toExt(unreachableInTestFileMessage)
let unpackFloat = x => x -> toFloat -> toExtFloat
let unpackDist = y => y -> toDist -> toExtDist
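Taken together, these helpers let a test collapse into one pipeline: `run` an operation, unpack the output, then assert. A small usage sketch (illustrative only; it mirrors the `#Float(7.7)` mean test above rather than adding new coverage):

```rescript
open Jest
open Expect
open TestHelpers

test("helpers compose into a one-line assertion", () =>
  run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Float(7.7))))
  -> unpackFloat
  -> expect
  -> toBeCloseTo(7.7)
)
```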

View File

@@ -47,7 +47,7 @@
    [
      "../../node_modules/bisect_ppx/ppx",
      "--exclude-files",
-      ".*_Test\\.res$$"
+      ".*_test\\.res$$"
    ]
  ]
}

View File

@@ -5,4 +5,7 @@ module.exports = {
  setupFilesAfterEnv: [
    "<rootdir>/../../node_modules/bisect_ppx/src/runtime/js/jest.bs.js"
  ],
+  testPathIgnorePatterns: [
+    "__tests__/TestHelpers.bs.js"
+  ],
};

View File

@@ -10,7 +10,7 @@
    "test:reducer": "jest --testPathPattern '.*__tests__/Reducer.*'",
    "test": "jest",
    "test:watch": "jest --watchAll",
-    "coverage": "rm -f *.coverage; yarn clean; BISECT_ENABLE=yes yarn build; yarn test; ./node_modules/.bin/bisect-ppx-report html",
+    "coverage": "rm -f *.coverage; yarn clean; BISECT_ENABLE=yes yarn build; yarn test; bisect-ppx-report html",
    "all": "yarn build && yarn bundle && yarn test"
  },
  "keywords": [

View File

@@ -10,10 +10,10 @@ type env = {
}
type outputType =
-  | Dist(GenericDist_Types.genericDist)
+  | Dist(genericDist)
  | Float(float)
  | String(string)
-  | GenDistError(GenericDist_Types.error)
+  | GenDistError(error)
/*
We're going to add another function to this module later, so first define a

View File

@@ -1,6 +1,6 @@
type genericDist =
  | PointSet(PointSetTypes.pointSetDist)
-  | SampleSet(array<float>)
+  | SampleSet(SampleSet.t)
  | Symbolic(SymbolicDistTypes.symbolicDist)
type error =

View File

@@ -1,3 +1,5 @@
+type t = array<float>
// TODO: Refactor to raise correct error when not enough samples
module Internals = {

View File

@@ -55,7 +55,7 @@ module Exponential = {
        rate: rate,
      }),
    )
-    : Error("Exponential distributions mean must be larger than 0")
+    : Error("Exponential distributions rate must be larger than 0.")
  let pdf = (x, t: t) => Jstat.Exponential.pdf(x, t.rate)
  let cdf = (x, t: t) => Jstat.Exponential.cdf(x, t.rate)
  let inv = (p, t: t) => Jstat.Exponential.inv(p, t.rate)
@@ -71,7 +71,7 @@ module Cauchy = {
  let cdf = (x, t: t) => Jstat.Cauchy.cdf(x, t.local, t.scale)
  let inv = (p, t: t) => Jstat.Cauchy.inv(p, t.local, t.scale)
  let sample = (t: t) => Jstat.Cauchy.sample(t.local, t.scale)
-  let mean = (_: t) => Error("Cauchy distributions have no mean value.")
+  let mean = (_: t) => Error("Cauchy distributions may have no mean value.")
  let toString = ({local, scale}: t) => j`Cauchy($local, $scale)`
}
@@ -80,8 +80,8 @@ module Triangular = {
  let make = (low, medium, high): result<symbolicDist, string> =>
    low < medium && medium < high
      ? Ok(#Triangular({low: low, medium: medium, high: high}))
-      : Error("Triangular values must be increasing order")
+      : Error("Triangular values must be increasing order.")
-  let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium)
+  let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
  let cdf = (x, t: t) => Jstat.Triangular.cdf(x, t.low, t.high, t.medium)
  let inv = (p, t: t) => Jstat.Triangular.inv(p, t.low, t.high, t.medium)
  let sample = (t: t) => Jstat.Triangular.sample(t.low, t.high, t.medium)

View File

@@ -8434,6 +8434,13 @@ extglob@^2.0.4:
    snapdragon "^0.8.1"
    to-regex "^3.0.1"
+fast-check@^2.17.0:
+  version "2.24.0"
+  resolved "https://registry.yarnpkg.com/fast-check/-/fast-check-2.24.0.tgz#39f85586862108a4de6394c5196ebcf8b76b6c8b"
+  integrity sha512-iNXbN90lbabaCUfnW5jyXYPwMJLFYl09eJDkXA9ZoidFlBK63gNRvcKxv+8D1OJ1kIYjwBef4bO/K3qesUeWLQ==
+  dependencies:
+    pure-rand "^5.0.1"
fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3, fast-deep-equal@~3.1.3:
  version "3.1.3"
  resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz"
@@ -13566,6 +13573,11 @@ pure-color@^1.2.0:
  resolved "https://registry.yarnpkg.com/pure-color/-/pure-color-1.3.0.tgz#1fe064fb0ac851f0de61320a8bf796836422f33e"
  integrity sha1-H+Bk+wrIUfDeYTIKi/eWg2Qi8z4=
+pure-rand@^5.0.1:
+  version "5.0.1"
+  resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-5.0.1.tgz#97a287b4b4960b2a3448c0932bf28f2405cac51d"
+  integrity sha512-ksWccjmXOHU2gJBnH0cK1lSYdvSZ0zLoCMSz/nTGh6hDvCSgcRxDyIcOBD6KNxFz3xhMPm/T267Tbe2JRymKEQ==
q@^1.1.2:
  version "1.5.1"
  resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
@@ -14754,6 +14766,13 @@ requires-port@^1.0.0:
  resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
  integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=
+rescript-fast-check@^1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/rescript-fast-check/-/rescript-fast-check-1.1.1.tgz#ef153cb01254b2f01a738faf85b73327d423d5e1"
+  integrity sha512-wxeW0TsL/prkRvEYGbhEiLaLKmYJaECyDcKEWh65hDqP2i76lARzVW3QmYujSYK4OnjAC70dln3X6UC/2m2Huw==
+  dependencies:
+    fast-check "^2.17.0"
rescript@^9.1.4:
  version "9.1.4"
  resolved "https://registry.npmjs.org/rescript/-/rescript-9.1.4.tgz"