Merge pull request #589 from quantified-uncertainty/scoring-cleanup-three
Scoring cleanup
commit 606cbd8859
@@ -1,7 +1,7 @@
 open Jest
 open Expect
 
-let env: DistributionOperation.env = {
+let env: GenericDist.env = {
 sampleCount: 100,
 xyPointLength: 100,
 }
@@ -34,7 +34,7 @@ describe("sparkline", () => {
 expected: DistributionOperation.outputType,
 ) => {
 test(name, () => {
-let result = DistributionOperation.run(~env, FromDist(ToString(ToSparkline(20)), dist))
+let result = DistributionOperation.run(~env, FromDist(#ToString(ToSparkline(20)), dist))
 expect(result)->toEqual(expected)
 })
 }
@@ -81,8 +81,8 @@ describe("sparkline", () => {
 describe("toPointSet", () => {
 test("on symbolic normal distribution", () => {
 let result =
-run(FromDist(ToDist(ToPointSet), normalDist5))
-->outputMap(FromDist(ToFloat(#Mean)))
+run(FromDist(#ToDist(ToPointSet), normalDist5))
+->outputMap(FromDist(#ToFloat(#Mean)))
 ->toFloat
 ->toExt
 expect(result)->toBeSoCloseTo(5.0, ~digits=0)
@@ -90,10 +90,10 @@ describe("toPointSet", () => {
 
 test("on sample set", () => {
 let result =
-run(FromDist(ToDist(ToPointSet), normalDist5))
-->outputMap(FromDist(ToDist(ToSampleSet(1000))))
-->outputMap(FromDist(ToDist(ToPointSet)))
-->outputMap(FromDist(ToFloat(#Mean)))
+run(FromDist(#ToDist(ToPointSet), normalDist5))
+->outputMap(FromDist(#ToDist(ToSampleSet(1000))))
+->outputMap(FromDist(#ToDist(ToPointSet)))
+->outputMap(FromDist(#ToFloat(#Mean)))
 ->toFloat
 ->toExt
 expect(result)->toBeSoCloseTo(5.0, ~digits=-1)
@@ -19,7 +19,6 @@ exception MixtureFailed
 let float1 = 1.0
 let float2 = 2.0
 let float3 = 3.0
-let {mkDelta} = module(TestHelpers)
-let point1 = mkDelta(float1)
-let point2 = mkDelta(float2)
-let point3 = mkDelta(float3)
+let point1 = TestHelpers.mkDelta(float1)
+let point2 = TestHelpers.mkDelta(float2)
+let point3 = TestHelpers.mkDelta(float3)
@@ -11,7 +11,7 @@ describe("mixture", () => {
 let (mean1, mean2) = tup
 let meanValue = {
 run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))->outputMap(
-FromDist(ToFloat(#Mean)),
+FromDist(#ToFloat(#Mean)),
 )
 }
 meanValue->unpackFloat->expect->toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
@@ -28,7 +28,7 @@ describe("mixture", () => {
 let meanValue = {
 run(
 Mixture([(mkBeta(alpha, beta), betaWeight), (mkExponential(rate), exponentialWeight)]),
-)->outputMap(FromDist(ToFloat(#Mean)))
+)->outputMap(FromDist(#ToFloat(#Mean)))
 }
 let betaMean = 1.0 /. (1.0 +. beta /. alpha)
 let exponentialMean = 1.0 /. rate
@@ -52,7 +52,7 @@ describe("mixture", () => {
 (mkUniform(low, high), uniformWeight),
 (mkLognormal(mu, sigma), lognormalWeight),
 ]),
-)->outputMap(FromDist(ToFloat(#Mean)))
+)->outputMap(FromDist(#ToFloat(#Mean)))
 }
 let uniformMean = (low +. high) /. 2.0
 let lognormalMean = mu +. sigma ** 2.0 /. 2.0
@@ -3,6 +3,7 @@ open Expect
 open TestHelpers
 open GenericDist_Fixtures
 
+let klDivergence = DistributionOperation.Constructors.LogScore.distEstimateDistAnswer(~env)
 // integral from low to high of 1 / (high - low) log(normal(mean, stdev)(x) / (1 / (high - low))) dx
 let klNormalUniform = (mean, stdev, low, high): float =>
 -.Js.Math.log((high -. low) /. Js.Math.sqrt(2.0 *. MagicNumbers.Math.pi *. stdev ** 2.0)) +.
@@ -11,8 +12,6 @@ let klNormalUniform = (mean, stdev, low, high): float =>
 (mean ** 2.0 -. (high +. low) *. mean +. (low ** 2.0 +. high *. low +. high ** 2.0) /. 3.0)
 
 describe("klDivergence: continuous -> continuous -> float", () => {
-let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
-
 let testUniform = (lowAnswer, highAnswer, lowPrediction, highPrediction) => {
 test("of two uniforms is equal to the analytic expression", () => {
 let answer =
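For reference, a worked form of the analytic value used in these uniform-vs-normal tests; this is a derivation sketch added here for clarity, not text from the diff (it reads the uniform as the answer and the normal as the estimate):

$$
\int_{l}^{h}\frac{1}{h-l}\,\log\frac{1/(h-l)}{\mathcal{N}(x;\mu,\sigma)}\,dx
\;=\; \log\frac{\sigma\sqrt{2\pi}}{h-l}
\;+\; \frac{1}{2\sigma^{2}}\Big(\mu^{2}-(h+l)\mu+\frac{l^{2}+lh+h^{2}}{3}\Big),
$$

using $\mathbb{E}_{U}[x]=(l+h)/2$ and $\mathbb{E}_{U}[x^{2}]=(l^{2}+lh+h^{2})/3$. The log term is the `-.Js.Math.log(...)` line in `klNormalUniform` above, and the quadratic factor is the context line at the top of this hunk (scaled by $1/(2\sigma^{2})$).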
@@ -58,7 +57,7 @@ describe("klDivergence: continuous -> continuous -> float", () => {
 let kl = E.R.liftJoin2(klDivergence, prediction, answer)
 
 switch kl {
-| Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=3)
+| Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=2)
 | Error(err) => {
 Js.Console.log(DistributionTypes.Error.toString(err))
 raise(KlFailed)
@@ -82,7 +81,6 @@ describe("klDivergence: continuous -> continuous -> float", () => {
 })
 
 describe("klDivergence: discrete -> discrete -> float", () => {
-let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
 let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
 let a' = [(point1, 1e0), (point2, 1e0)]->mixture->run
 let b' = [(point1, 1e0), (point2, 1e0), (point3, 1e0)]->mixture->run
@@ -117,7 +115,6 @@ describe("klDivergence: discrete -> discrete -> float", () => {
 })
 
 describe("klDivergence: mixed -> mixed -> float", () => {
-let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
 let mixture' = a => DistributionTypes.DistributionOperation.Mixture(a)
 let mixture = a => {
 let dist' = a->mixture'->run
@@ -189,15 +186,15 @@ describe("combineAlongSupportOfSecondArgument0", () => {
 uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
 s,
 ))
-let answerWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), answer)
-let predictionWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), prediction)
+let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
+let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)
 
 let interpolator = XYShape.XtoY.continuousInterpolator(#Stepwise, #UseZero)
-let integrand = PointSetDist_Scoring.KLDivergence.integrand
+let integrand = PointSetDist_Scoring.WithDistAnswer.integrand
 
 let result = switch (answerWrapped, predictionWrapped) {
 | (Ok(Dist(PointSet(Continuous(a)))), Ok(Dist(PointSet(Continuous(b))))) =>
-Some(combineAlongSupportOfSecondArgument(integrand, interpolator, a.xyShape, b.xyShape))
+Some(combineAlongSupportOfSecondArgument(interpolator, integrand, a.xyShape, b.xyShape))
 | _ => None
 }
 result
@@ -0,0 +1,68 @@
+open Jest
+open Expect
+open TestHelpers
+open GenericDist_Fixtures
+exception ScoreFailed
+
+describe("WithScalarAnswer: discrete -> scalar -> score", () => {
+let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
+let pointA = mkDelta(3.0)
+let pointB = mkDelta(2.0)
+let pointC = mkDelta(1.0)
+let pointD = mkDelta(0.0)
+
+test("score: agrees with analytical answer when finite", () => {
+let prediction' = [(pointA, 0.25), (pointB, 0.25), (pointC, 0.25), (pointD, 0.25)]->mixture->run
+let prediction = switch prediction' {
+| Dist(PointSet(p)) => p
+| _ => raise(MixtureFailed)
+}
+
+let answer = 2.0 // So this is: assigning 100% probability to 2.0
+let result = PointSetDist_Scoring.WithScalarAnswer.score(~estimate=prediction, ~answer)
+switch result {
+| Ok(x) => x->expect->toEqual(-.Js.Math.log(0.25 /. 1.0))
+| _ => raise(ScoreFailed)
+}
+})
+
+test("score: agrees with analytical answer when finite", () => {
+let prediction' = [(pointA, 0.75), (pointB, 0.25)]->mixture->run
+let prediction = switch prediction' {
+| Dist(PointSet(p)) => p
+| _ => raise(MixtureFailed)
+}
+let answer = 3.0 // So this is: assigning 100% probability to 2.0
+let result = PointSetDist_Scoring.WithScalarAnswer.score(~estimate=prediction, ~answer)
+switch result {
+| Ok(x) => x->expect->toEqual(-.Js.Math.log(0.75 /. 1.0))
+| _ => raise(ScoreFailed)
+}
+})
+
+test("scoreWithPrior: agrees with analytical answer when finite", () => {
+let prior' = [(pointA, 0.5), (pointB, 0.5)]->mixture->run
+let prediction' = [(pointA, 0.75), (pointB, 0.25)]->mixture->run
+
+let prediction = switch prediction' {
+| Dist(PointSet(p)) => p
+| _ => raise(MixtureFailed)
+}
+
+let prior = switch prior' {
+| Dist(PointSet(p)) => p
+| _ => raise(MixtureFailed)
+}
+
+let answer = 3.0 // So this is: assigning 100% probability to 2.0
+let result = PointSetDist_Scoring.WithScalarAnswer.scoreWithPrior(
+~estimate=prediction,
+~answer,
+~prior,
+)
+switch result {
+| Ok(x) => x->expect->toEqual(-.Js.Math.log(0.75 /. 1.0) -. -.Js.Math.log(0.5 /. 1.0))
+| _ => raise(ScoreFailed)
+}
+})
+})
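A note on the arithmetic behind these expectations (my reading of the tests, not text from the diff): for a discrete estimate $\hat{p}$ scored against a scalar answer $a$ treated as a point mass, and a prior $q$,

$$
\mathrm{score}(\hat{p},a) = -\log\hat{p}(a),
\qquad
\mathrm{scoreWithPrior}(\hat{p},a,q) = -\log\hat{p}(a) - \big({-\log q(a)}\big).
$$

So the first test expects $-\log 0.25$ (the estimate puts 0.25 on the answer 2.0), the second expects $-\log 0.75$, and the `scoreWithPrior` test expects $-\log 0.75 - (-\log 0.5)$, which is exactly what the `Js.Math.log` expressions above encode.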
@@ -8,34 +8,34 @@ let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean,
 describe("(Symbolic) normalize", () => {
 testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
 let normalValue = mkNormal(mean, 2.0)
-let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
+let normalizedValue = run(FromDist(#ToDist(Normalize), normalValue))
 normalizedValue->unpackDist->expect->toEqual(normalValue)
 })
 })
 
 describe("(Symbolic) mean", () => {
 testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
-run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
+run(FromDist(#ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
 })
 
 Skip.test("of normal(0, -1) (it NaNs out)", () => {
-run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
+run(FromDist(#ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
 })
 
 test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
-run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
+run(FromDist(#ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
 })
 
 testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
 let meanValue = run(
-FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
+FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
 )
 meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
 })
 
 test("of a cauchy distribution", () => {
 let meanValue = run(
-FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
+FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
 )
 meanValue->unpackFloat->expect->toBeSoCloseTo(1.0098094001641797, ~digits=5)
 //-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
@@ -48,7 +48,7 @@ describe("(Symbolic) mean", () => {
 let (low, medium, high) = tup
 let meanValue = run(
 FromDist(
-ToFloat(#Mean),
+#ToFloat(#Mean),
 DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
 ),
 )
@@ -63,7 +63,7 @@ describe("(Symbolic) mean", () => {
 tup => {
 let (alpha, beta) = tup
 let meanValue = run(
-FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
+FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
 )
 meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
 },
@@ -72,7 +72,7 @@ describe("(Symbolic) mean", () => {
 // TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
 test("of beta(0, 0)", () => {
 let meanValue = run(
-FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
+FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
 )
 meanValue->unpackFloat->expect->ExpectJs.toBeFalsy
 })
@@ -85,7 +85,7 @@ describe("(Symbolic) mean", () => {
 let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
 let meanValue =
 betaDistribution->E.R2.fmap(d =>
-run(FromDist(ToFloat(#Mean), d->DistributionTypes.Symbolic))
+run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic))
 )
 switch meanValue {
 | Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)
@@ -100,7 +100,7 @@ describe("(Symbolic) mean", () => {
 tup => {
 let (mu, sigma) = tup
 let meanValue = run(
-FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
+FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
 )
 meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
 },
@@ -112,14 +112,14 @@ describe("(Symbolic) mean", () => {
 tup => {
 let (low, high) = tup
 let meanValue = run(
-FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
+FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
 )
 meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
 },
 )
 
 test("of a float", () => {
-let meanValue = run(FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
+let meanValue = run(FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
 meanValue->unpackFloat->expect->toBeCloseTo(7.7)
 })
 })
@@ -29,7 +29,7 @@ let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Ou
 
 let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)
 
-let env: DistributionOperation.env = {
+let env: GenericDist.env = {
 sampleCount: MagicNumbers.Environment.defaultSampleCount,
 xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
 }
@@ -4,12 +4,9 @@ type error = DistributionTypes.error
 
 // TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.
 
-type env = {
-sampleCount: int,
-xyPointLength: int,
-}
+type env = GenericDist.env
 
-let defaultEnv = {
+let defaultEnv: env = {
 sampleCount: MagicNumbers.Environment.defaultSampleCount,
 xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
 }
@@ -93,7 +90,7 @@ module OutputLocal = {
 }
 }
 
-let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
+let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
 let {sampleCount, xyPointLength} = env
 
 let reCall = (~env=env, ~functionCallInfo=functionCallInfo, ()) => {
@@ -101,14 +98,14 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
 }
 
 let toPointSetFn = r => {
-switch reCall(~functionCallInfo=FromDist(ToDist(ToPointSet), r), ()) {
+switch reCall(~functionCallInfo=FromDist(#ToDist(ToPointSet), r), ()) {
 | Dist(PointSet(p)) => Ok(p)
 | e => Error(OutputLocal.toErrorOrUnreachable(e))
 }
 }
 
 let toSampleSetFn = r => {
-switch reCall(~functionCallInfo=FromDist(ToDist(ToSampleSet(sampleCount)), r), ()) {
+switch reCall(~functionCallInfo=FromDist(#ToDist(ToSampleSet(sampleCount)), r), ()) {
 | Dist(SampleSet(p)) => Ok(p)
 | e => Error(OutputLocal.toErrorOrUnreachable(e))
 }
@@ -116,13 +113,13 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
 
 let scaleMultiply = (r, weight) =>
 reCall(
-~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
+~functionCallInfo=FromDist(#ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
 (),
 )->OutputLocal.toDistR
 
 let pointwiseAdd = (r1, r2) =>
 reCall(
-~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
+~functionCallInfo=FromDist(#ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
 (),
 )->OutputLocal.toDistR
 
@@ -131,49 +128,40 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
 dist: genericDist,
 ): outputType => {
 let response = switch subFnName {
-| ToFloat(distToFloatOperation) =>
+| #ToFloat(distToFloatOperation) =>
 GenericDist.toFloatOperation(dist, ~toPointSetFn, ~distToFloatOperation)
 ->E.R2.fmap(r => Float(r))
 ->OutputLocal.fromResult
-| ToString(ToString) => dist->GenericDist.toString->String
-| ToString(ToSparkline(bucketCount)) =>
+| #ToString(ToString) => dist->GenericDist.toString->String
+| #ToString(ToSparkline(bucketCount)) =>
 GenericDist.toSparkline(dist, ~sampleCount, ~bucketCount, ())
 ->E.R2.fmap(r => String(r))
 ->OutputLocal.fromResult
-| ToDist(Inspect) => {
+| #ToDist(Inspect) => {
 Js.log2("Console log requested: ", dist)
 Dist(dist)
 }
-| ToDist(Normalize) => dist->GenericDist.normalize->Dist
-| ToScore(KLDivergence(t2)) =>
-GenericDist.Score.klDivergence(dist, t2, ~toPointSetFn)
-->E.R2.fmap(r => Float(r))
+| #ToDist(Normalize) => dist->GenericDist.normalize->Dist
+| #ToScore(LogScore(answer, prior)) =>
+GenericDist.Score.logScore(~estimate=dist, ~answer, ~prior, ~env)
+->E.R2.fmap(s => Float(s))
 ->OutputLocal.fromResult
-| ToScore(LogScore(answer, prior)) =>
-GenericDist.Score.logScoreWithPointResolution(
-~prediction=dist,
-~answer,
-~prior,
-~toPointSetFn,
-)
-->E.R2.fmap(r => Float(r))
-->OutputLocal.fromResult
-| ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
-| ToDist(Truncate(leftCutoff, rightCutoff)) =>
+| #ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
+| #ToDist(Truncate(leftCutoff, rightCutoff)) =>
 GenericDist.truncate(~toPointSetFn, ~leftCutoff, ~rightCutoff, dist, ())
 ->E.R2.fmap(r => Dist(r))
 ->OutputLocal.fromResult
-| ToDist(ToSampleSet(n)) =>
+| #ToDist(ToSampleSet(n)) =>
 dist
 ->GenericDist.toSampleSetDist(n)
 ->E.R2.fmap(r => Dist(SampleSet(r)))
 ->OutputLocal.fromResult
-| ToDist(ToPointSet) =>
+| #ToDist(ToPointSet) =>
 dist
 ->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
 ->E.R2.fmap(r => Dist(PointSet(r)))
 ->OutputLocal.fromResult
-| ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
+| #ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
 dist
 ->GenericDist.pointwiseCombinationFloat(
 ~toPointSetFn,
@@ -182,23 +170,23 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
 )
 ->E.R2.fmap(r => Dist(r))
 ->OutputLocal.fromResult
-| ToDist(Scale(#Multiply, f)) =>
+| #ToDist(Scale(#Multiply, f)) =>
 dist
 ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Multiply, ~f)
 ->E.R2.fmap(r => Dist(r))
 ->OutputLocal.fromResult
-| ToDist(Scale(#Logarithm, f)) =>
+| #ToDist(Scale(#Logarithm, f)) =>
 dist
 ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Logarithm, ~f)
 ->E.R2.fmap(r => Dist(r))
 ->OutputLocal.fromResult
-| ToDist(Scale(#Power, f)) =>
+| #ToDist(Scale(#Power, f)) =>
 dist
 ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Power, ~f)
 ->E.R2.fmap(r => Dist(r))
 ->OutputLocal.fromResult
-| ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
-| ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
+| #ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
+| #ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
 dist
 ->GenericDist.algebraicCombination(
 ~strategy,
@@ -209,12 +197,12 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
 )
 ->E.R2.fmap(r => Dist(r))
 ->OutputLocal.fromResult
-| ToDistCombination(Pointwise, algebraicCombination, #Dist(t2)) =>
+| #ToDistCombination(Pointwise, algebraicCombination, #Dist(t2)) =>
 dist
 ->GenericDist.pointwiseCombination(~toPointSetFn, ~algebraicCombination, ~t2)
 ->E.R2.fmap(r => Dist(r))
 ->OutputLocal.fromResult
-| ToDistCombination(Pointwise, algebraicCombination, #Float(f)) =>
+| #ToDistCombination(Pointwise, algebraicCombination, #Float(f)) =>
 dist
 ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination, ~f)
 ->E.R2.fmap(r => Dist(r))
@@ -225,8 +213,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
 
 switch functionCallInfo {
 | FromDist(subFnName, dist) => fromDistFn(subFnName, dist)
-| FromFloat(subFnName, float) =>
-reCall(~functionCallInfo=FromDist(subFnName, GenericDist.fromFloat(float)), ())
+| FromFloat(subFnName, x) => reCall(~functionCallInfo=FromFloat(subFnName, x), ())
 | Mixture(dists) =>
 dists
 ->GenericDist.mixture(~scaleMultiplyFn=scaleMultiply, ~pointwiseAddFn=pointwiseAdd)
@@ -278,13 +265,16 @@ module Constructors = {
 let pdf = (~env, dist, f) => C.pdf(dist, f)->run(~env)->toFloatR
 let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
 let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
-let klDivergence = (~env, dist1, dist2) => C.klDivergence(dist1, dist2)->run(~env)->toFloatR
-let logScoreWithPointResolution = (
-~env,
-~prediction: DistributionTypes.genericDist,
-~answer: float,
-~prior: option<DistributionTypes.genericDist>,
-) => C.logScoreWithPointResolution(~prediction, ~answer, ~prior)->run(~env)->toFloatR
+module LogScore = {
+let distEstimateDistAnswer = (~env, estimate, answer) =>
+C.LogScore.distEstimateDistAnswer(estimate, answer)->run(~env)->toFloatR
+let distEstimateDistAnswerWithPrior = (~env, estimate, answer, prior) =>
+C.LogScore.distEstimateDistAnswerWithPrior(estimate, answer, prior)->run(~env)->toFloatR
+let distEstimateScalarAnswer = (~env, estimate, answer) =>
+C.LogScore.distEstimateScalarAnswer(estimate, answer)->run(~env)->toFloatR
+let distEstimateScalarAnswerWithPrior = (~env, estimate, answer, prior) =>
+C.LogScore.distEstimateScalarAnswerWithPrior(estimate, answer, prior)->run(~env)->toFloatR
+}
 let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
 let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
 let fromSamples = (~env, xs) => C.fromSamples(xs)->run(~env)->toDistR
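A minimal usage sketch of the reshaped constructor API (not part of the diff; it reuses the `env` record and the `normalDist5` fixture from the test files earlier in this commit, and every other name here is an assumption):

// Hedged sketch only: call sites mirroring the tests above.
let env: GenericDist.env = {sampleCount: 100, xyPointLength: 100}

// Dist estimate scored against a dist answer (replaces the old klDivergence constructor):
let distScore =
  DistributionOperation.Constructors.LogScore.distEstimateDistAnswer(~env, normalDist5, normalDist5)

// Dist estimate scored against a scalar answer (replaces logScoreWithPointResolution):
let scalarScore =
  DistributionOperation.Constructors.LogScore.distEstimateScalarAnswer(~env, normalDist5, 5.0)
// Both return result<float, error>, like the other Constructors helpers.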
@@ -1,11 +1,5 @@
 @genType
-type env = {
-sampleCount: int,
-xyPointLength: int,
-}
-
-@genType
-let defaultEnv: env
+let defaultEnv: GenericDist.env
 
 open DistributionTypes
 
@@ -19,15 +13,18 @@ type outputType =
 | GenDistError(error)
 
 @genType
-let run: (~env: env, DistributionTypes.DistributionOperation.genericFunctionCallInfo) => outputType
+let run: (
+~env: GenericDist.env,
+DistributionTypes.DistributionOperation.genericFunctionCallInfo,
+) => outputType
 let runFromDist: (
-~env: env,
+~env: GenericDist.env,
 ~functionCallInfo: DistributionTypes.DistributionOperation.fromDist,
 genericDist,
 ) => outputType
 let runFromFloat: (
-~env: env,
-~functionCallInfo: DistributionTypes.DistributionOperation.fromDist,
+~env: GenericDist.env,
+~functionCallInfo: DistributionTypes.DistributionOperation.fromFloat,
 float,
 ) => outputType
 
@@ -42,79 +39,147 @@ module Output: {
 let toBool: t => option<bool>
 let toBoolR: t => result<bool, error>
 let toError: t => option<error>
-let fmap: (~env: env, t, DistributionTypes.DistributionOperation.singleParamaterFunction) => t
+let fmap: (
+~env: GenericDist.env,
+t,
+DistributionTypes.DistributionOperation.singleParamaterFunction,
+) => t
 }
 
 module Constructors: {
 @genType
-let mean: (~env: env, genericDist) => result<float, error>
+let mean: (~env: GenericDist.env, genericDist) => result<float, error>
 @genType
-let stdev: (~env: env, genericDist) => result<float, error>
+let stdev: (~env: GenericDist.env, genericDist) => result<float, error>
 @genType
-let variance: (~env: env, genericDist) => result<float, error>
+let variance: (~env: GenericDist.env, genericDist) => result<float, error>
 @genType
-let sample: (~env: env, genericDist) => result<float, error>
+let sample: (~env: GenericDist.env, genericDist) => result<float, error>
 @genType
-let cdf: (~env: env, genericDist, float) => result<float, error>
+let cdf: (~env: GenericDist.env, genericDist, float) => result<float, error>
 @genType
-let inv: (~env: env, genericDist, float) => result<float, error>
+let inv: (~env: GenericDist.env, genericDist, float) => result<float, error>
 @genType
-let pdf: (~env: env, genericDist, float) => result<float, error>
+let pdf: (~env: GenericDist.env, genericDist, float) => result<float, error>
 @genType
-let normalize: (~env: env, genericDist) => result<genericDist, error>
+let normalize: (~env: GenericDist.env, genericDist) => result<genericDist, error>
 @genType
-let isNormalized: (~env: env, genericDist) => result<bool, error>
+let isNormalized: (~env: GenericDist.env, genericDist) => result<bool, error>
+module LogScore: {
 @genType
-let klDivergence: (~env: env, genericDist, genericDist) => result<float, error>
-@genType
-let logScoreWithPointResolution: (
-~env: env,
-~prediction: genericDist,
-~answer: float,
-~prior: option<genericDist>,
+let distEstimateDistAnswer: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
 ) => result<float, error>
 @genType
-let toPointSet: (~env: env, genericDist) => result<genericDist, error>
+let distEstimateDistAnswerWithPrior: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+genericDist,
+) => result<float, error>
 @genType
-let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
+let distEstimateScalarAnswer: (
+~env: GenericDist.env,
+genericDist,
+float,
+) => result<float, error>
 @genType
-let fromSamples: (~env: env, SampleSetDist.t) => result<genericDist, error>
+let distEstimateScalarAnswerWithPrior: (
+~env: GenericDist.env,
+genericDist,
+float,
+genericDist,
+) => result<float, error>
+}
 @genType
-let truncate: (~env: env, genericDist, option<float>, option<float>) => result<genericDist, error>
+let toPointSet: (~env: GenericDist.env, genericDist) => result<genericDist, error>
 @genType
-let inspect: (~env: env, genericDist) => result<genericDist, error>
+let toSampleSet: (~env: GenericDist.env, genericDist, int) => result<genericDist, error>
 @genType
-let toString: (~env: env, genericDist) => result<string, error>
+let fromSamples: (~env: GenericDist.env, SampleSetDist.t) => result<genericDist, error>
 @genType
-let toSparkline: (~env: env, genericDist, int) => result<string, error>
+let truncate: (
+~env: GenericDist.env,
+genericDist,
+option<float>,
+option<float>,
+) => result<genericDist, error>
 @genType
-let algebraicAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let inspect: (~env: GenericDist.env, genericDist) => result<genericDist, error>
 @genType
-let algebraicMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let toString: (~env: GenericDist.env, genericDist) => result<string, error>
 @genType
-let algebraicDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let toSparkline: (~env: GenericDist.env, genericDist, int) => result<string, error>
 @genType
-let algebraicSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let algebraicAdd: (~env: GenericDist.env, genericDist, genericDist) => result<genericDist, error>
 @genType
-let algebraicLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let algebraicMultiply: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
 @genType
-let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let algebraicDivide: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
 @genType
-let scaleLogarithm: (~env: env, genericDist, float) => result<genericDist, error>
+let algebraicSubtract: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
 @genType
-let scaleMultiply: (~env: env, genericDist, float) => result<genericDist, error>
+let algebraicLogarithm: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
 @genType
-let scalePower: (~env: env, genericDist, float) => result<genericDist, error>
+let algebraicPower: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
 @genType
-let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let scaleLogarithm: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
 @genType
-let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let scaleMultiply: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
 @genType
-let pointwiseDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let scalePower: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
 @genType
-let pointwiseSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let pointwiseAdd: (~env: GenericDist.env, genericDist, genericDist) => result<genericDist, error>
 @genType
-let pointwiseLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let pointwiseMultiply: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
 @genType
-let pointwisePower: (~env: env, genericDist, genericDist) => result<genericDist, error>
+let pointwiseDivide: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
+@genType
+let pointwiseSubtract: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
+@genType
+let pointwiseLogarithm: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
+@genType
+let pointwisePower: (
+~env: GenericDist.env,
+genericDist,
+genericDist,
+) => result<genericDist, error>
 }
@@ -98,61 +98,86 @@ module DistributionOperation = {
 | ToString
 | ToSparkline(int)
 
-type toScore = KLDivergence(genericDist) | LogScore(float, option<genericDist>)
+type genericDistOrScalar = Score_Dist(genericDist) | Score_Scalar(float)
 
-type fromDist =
-| ToFloat(toFloat)
-| ToDist(toDist)
-| ToScore(toScore)
-| ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
-| ToString(toString)
-| ToBool(toBool)
+type toScore = LogScore(genericDistOrScalar, option<genericDist>)
+
+type fromFloat = [
+| #ToFloat(toFloat)
+| #ToDist(toDist)
+| #ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
+| #ToString(toString)
+| #ToBool(toBool)
+]
+
+type fromDist = [
+| fromFloat
+| #ToScore(toScore)
+]
 
 type singleParamaterFunction =
 | FromDist(fromDist)
-| FromFloat(fromDist)
+| FromFloat(fromFloat)
 
 type genericFunctionCallInfo =
 | FromDist(fromDist, genericDist)
-| FromFloat(fromDist, float)
+| FromFloat(fromFloat, float)
 | FromSamples(array<float>)
 | Mixture(array<(genericDist, float)>)
 
-let distCallToString = (distFunction: fromDist): string =>
-switch distFunction {
-| ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
-| ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
-| ToFloat(#Mean) => `mean`
-| ToFloat(#Min) => `min`
-| ToFloat(#Max) => `max`
-| ToFloat(#Stdev) => `stdev`
-| ToFloat(#Variance) => `variance`
-| ToFloat(#Mode) => `mode`
-| ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
-| ToFloat(#Sample) => `sample`
-| ToFloat(#IntegralSum) => `integralSum`
-| ToScore(KLDivergence(_)) => `klDivergence`
-| ToScore(LogScore(x, _)) => `logScore against ${E.Float.toFixed(x)}`
-| ToDist(Normalize) => `normalize`
-| ToDist(ToPointSet) => `toPointSet`
-| ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
-| ToDist(Truncate(_, _)) => `truncate`
-| ToDist(Inspect) => `inspect`
-| ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
-| ToDist(Scale(#Multiply, r)) => `scaleMultiply(${E.Float.toFixed(r)})`
-| ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
-| ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
+let floatCallToString = (floatFunction: fromFloat): string =>
+switch floatFunction {
+| #ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
+| #ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
+| #ToFloat(#Mean) => `mean`
+| #ToFloat(#Min) => `min`
+| #ToFloat(#Max) => `max`
+| #ToFloat(#Stdev) => `stdev`
+| #ToFloat(#Variance) => `variance`
+| #ToFloat(#Mode) => `mode`
+| #ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
+| #ToFloat(#Sample) => `sample`
+| #ToFloat(#IntegralSum) => `integralSum`
+| #ToDist(Normalize) => `normalize`
+| #ToDist(ToPointSet) => `toPointSet`
+| #ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
+| #ToDist(Truncate(_, _)) => `truncate`
+| #ToDist(Inspect) => `inspect`
+| #ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
+| #ToDist(Scale(#Multiply, r)) => `scaleMultiply(${E.Float.toFixed(r)})`
+| #ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
+| #ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
 `scaleLogWithThreshold(${E.Float.toFixed(r)}, epsilon=${E.Float.toFixed(eps)})`
-| ToString(ToString) => `toString`
-| ToString(ToSparkline(n)) => `sparkline(${E.I.toString(n)})`
-| ToBool(IsNormalized) => `isNormalized`
-| ToDistCombination(Algebraic(_), _, _) => `algebraic`
-| ToDistCombination(Pointwise, _, _) => `pointwise`
+| #ToString(ToString) => `toString`
+| #ToString(ToSparkline(n)) => `sparkline(${E.I.toString(n)})`
+| #ToBool(IsNormalized) => `isNormalized`
+| #ToDistCombination(Algebraic(_), _, _) => `algebraic`
+| #ToDistCombination(Pointwise, _, _) => `pointwise`
+}
+
+let distCallToString = (
+distFunction: [
+| #ToFloat(toFloat)
+| #ToDist(toDist)
+| #ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
+| #ToString(toString)
+| #ToBool(toBool)
+| #ToScore(toScore)
+],
+): string =>
+switch distFunction {
+| #ToScore(_) => `logScore`
+| #ToFloat(x) => floatCallToString(#ToFloat(x))
+| #ToDist(x) => floatCallToString(#ToDist(x))
+| #ToString(x) => floatCallToString(#ToString(x))
+| #ToBool(x) => floatCallToString(#ToBool(x))
+| #ToDistCombination(x, y, z) => floatCallToString(#ToDistCombination(x, y, z))
 }
 
 let toString = (d: genericFunctionCallInfo): string =>
 switch d {
-| FromDist(f, _) | FromFloat(f, _) => distCallToString(f)
+| FromDist(f, _) => distCallToString(f)
+| FromFloat(f, _) => floatCallToString(f)
 | Mixture(_) => `mixture`
 | FromSamples(_) => `fromSamples`
 }
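The move from an ordinary variant to the polymorphic variants `fromFloat`/`fromDist` is what lets `distCallToString` delegate to `floatCallToString` above: a polymorphic-variant literal can inhabit any type that lists its tag, which is why the same `#ToFloat(...)` and `#ToDist(...)` tags appear in both. A two-line sketch (not part of the diff):

// The same literal typechecks at both the narrower and the wider type.
let asFloatCall: DistributionTypes.DistributionOperation.fromFloat = #ToFloat(#Mean)
let asDistCall: DistributionTypes.DistributionOperation.fromDist = #ToFloat(#Mean)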
@@ -162,80 +187,93 @@ module Constructors = {
 
 module UsingDists = {
 @genType
-let mean = (dist): t => FromDist(ToFloat(#Mean), dist)
-let stdev = (dist): t => FromDist(ToFloat(#Stdev), dist)
-let variance = (dist): t => FromDist(ToFloat(#Variance), dist)
-let sample = (dist): t => FromDist(ToFloat(#Sample), dist)
-let cdf = (dist, x): t => FromDist(ToFloat(#Cdf(x)), dist)
-let inv = (dist, x): t => FromDist(ToFloat(#Inv(x)), dist)
-let pdf = (dist, x): t => FromDist(ToFloat(#Pdf(x)), dist)
-let normalize = (dist): t => FromDist(ToDist(Normalize), dist)
-let isNormalized = (dist): t => FromDist(ToBool(IsNormalized), dist)
-let toPointSet = (dist): t => FromDist(ToDist(ToPointSet), dist)
-let toSampleSet = (dist, r): t => FromDist(ToDist(ToSampleSet(r)), dist)
+let mean = (dist): t => FromDist(#ToFloat(#Mean), dist)
+let stdev = (dist): t => FromDist(#ToFloat(#Stdev), dist)
+let variance = (dist): t => FromDist(#ToFloat(#Variance), dist)
+let sample = (dist): t => FromDist(#ToFloat(#Sample), dist)
+let cdf = (dist, x): t => FromDist(#ToFloat(#Cdf(x)), dist)
+let inv = (dist, x): t => FromDist(#ToFloat(#Inv(x)), dist)
+let pdf = (dist, x): t => FromDist(#ToFloat(#Pdf(x)), dist)
+let normalize = (dist): t => FromDist(#ToDist(Normalize), dist)
+let isNormalized = (dist): t => FromDist(#ToBool(IsNormalized), dist)
+let toPointSet = (dist): t => FromDist(#ToDist(ToPointSet), dist)
+let toSampleSet = (dist, r): t => FromDist(#ToDist(ToSampleSet(r)), dist)
 let fromSamples = (xs): t => FromSamples(xs)
-let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
-let inspect = (dist): t => FromDist(ToDist(Inspect), dist)
-let klDivergence = (dist1, dist2): t => FromDist(ToScore(KLDivergence(dist2)), dist1)
-let logScoreWithPointResolution = (~prediction, ~answer, ~prior): t => FromDist(
-ToScore(LogScore(answer, prior)),
-prediction,
+let truncate = (dist, left, right): t => FromDist(#ToDist(Truncate(left, right)), dist)
+let inspect = (dist): t => FromDist(#ToDist(Inspect), dist)
+module LogScore = {
+let distEstimateDistAnswer = (estimate, answer): t => FromDist(
+#ToScore(LogScore(Score_Dist(answer), None)),
+estimate,
 )
-let scaleMultiply = (dist, n): t => FromDist(ToDist(Scale(#Multiply, n)), dist)
-let scalePower = (dist, n): t => FromDist(ToDist(Scale(#Power, n)), dist)
-let scaleLogarithm = (dist, n): t => FromDist(ToDist(Scale(#Logarithm, n)), dist)
+let distEstimateDistAnswerWithPrior = (estimate, answer, prior): t => FromDist(
+#ToScore(LogScore(Score_Dist(answer), Some(prior))),
+estimate,
+)
+let distEstimateScalarAnswer = (estimate, answer): t => FromDist(
+#ToScore(LogScore(Score_Scalar(answer), None)),
+estimate,
+)
+let distEstimateScalarAnswerWithPrior = (estimate, answer, prior): t => FromDist(
+#ToScore(LogScore(Score_Scalar(answer), Some(prior))),
+estimate,
+)
+}
+let scaleMultiply = (dist, n): t => FromDist(#ToDist(Scale(#Multiply, n)), dist)
+let scalePower = (dist, n): t => FromDist(#ToDist(Scale(#Power, n)), dist)
+let scaleLogarithm = (dist, n): t => FromDist(#ToDist(Scale(#Logarithm, n)), dist)
 let scaleLogarithmWithThreshold = (dist, n, eps): t => FromDist(
-ToDist(Scale(#LogarithmWithThreshold(eps), n)),
+#ToDist(Scale(#LogarithmWithThreshold(eps), n)),
 dist,
 )
-let toString = (dist): t => FromDist(ToString(ToString), dist)
-let toSparkline = (dist, n): t => FromDist(ToString(ToSparkline(n)), dist)
+let toString = (dist): t => FromDist(#ToString(ToString), dist)
+let toSparkline = (dist, n): t => FromDist(#ToString(ToSparkline(n)), dist)
 let algebraicAdd = (dist1, dist2: genericDist): t => FromDist(
-ToDistCombination(Algebraic(AsDefault), #Add, #Dist(dist2)),
+#ToDistCombination(Algebraic(AsDefault), #Add, #Dist(dist2)),
 dist1,
 )
 let algebraicMultiply = (dist1, dist2): t => FromDist(
-ToDistCombination(Algebraic(AsDefault), #Multiply, #Dist(dist2)),
+#ToDistCombination(Algebraic(AsDefault), #Multiply, #Dist(dist2)),
 dist1,
 )
 let algebraicDivide = (dist1, dist2): t => FromDist(
-ToDistCombination(Algebraic(AsDefault), #Divide, #Dist(dist2)),
+#ToDistCombination(Algebraic(AsDefault), #Divide, #Dist(dist2)),
 dist1,
 )
 let algebraicSubtract = (dist1, dist2): t => FromDist(
-ToDistCombination(Algebraic(AsDefault), #Subtract, #Dist(dist2)),
+#ToDistCombination(Algebraic(AsDefault), #Subtract, #Dist(dist2)),
 dist1,
 )
 let algebraicLogarithm = (dist1, dist2): t => FromDist(
-ToDistCombination(Algebraic(AsDefault), #Logarithm, #Dist(dist2)),
+#ToDistCombination(Algebraic(AsDefault), #Logarithm, #Dist(dist2)),
 dist1,
 )
 let algebraicPower = (dist1, dist2): t => FromDist(
-ToDistCombination(Algebraic(AsDefault), #Power, #Dist(dist2)),
+#ToDistCombination(Algebraic(AsDefault), #Power, #Dist(dist2)),
 dist1,
 )
 let pointwiseAdd = (dist1, dist2): t => FromDist(
-ToDistCombination(Pointwise, #Add, #Dist(dist2)),
+#ToDistCombination(Pointwise, #Add, #Dist(dist2)),
 dist1,
 )
 let pointwiseMultiply = (dist1, dist2): t => FromDist(
-ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
+#ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
 dist1,
 )
 let pointwiseDivide = (dist1, dist2): t => FromDist(
-ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
+#ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
 dist1,
 )
 let pointwiseSubtract = (dist1, dist2): t => FromDist(
-ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
+#ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
 dist1,
 )
 let pointwiseLogarithm = (dist1, dist2): t => FromDist(
-ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
+#ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
 dist1,
 )
 let pointwisePower = (dist1, dist2): t => FromDist(
-ToDistCombination(Pointwise, #Power, #Dist(dist2)),
+#ToDistCombination(Pointwise, #Power, #Dist(dist2)),
 dist1,
 )
 }
@ -6,6 +6,11 @@ type toSampleSetFn = t => result<SampleSetDist.t, error>
|
||||||
type scaleMultiplyFn = (t, float) => result<t, error>
|
type scaleMultiplyFn = (t, float) => result<t, error>
|
||||||
type pointwiseAddFn = (t, t) => result<t, error>
|
type pointwiseAddFn = (t, t) => result<t, error>
|
||||||
|
|
||||||
|
type env = {
|
||||||
|
sampleCount: int,
|
||||||
|
xyPointLength: int,
|
||||||
|
}
|
||||||
|
|
||||||
let isPointSet = (t: t) =>
|
let isPointSet = (t: t) =>
|
||||||
switch t {
|
switch t {
|
||||||
| PointSet(_) => true
|
| PointSet(_) => true
|
||||||
|
@ -61,46 +66,6 @@ let integralEndY = (t: t): float =>
|
||||||
|
|
||||||
let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7
|
let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7
|
||||||
|
|
||||||
module Score = {
|
|
||||||
let klDivergence = (prediction, answer, ~toPointSetFn: toPointSetFn): result<float, error> => {
|
|
||||||
let pointSets = E.R.merge(toPointSetFn(prediction), toPointSetFn(answer))
|
|
||||||
pointSets |> E.R2.bind(((predi, ans)) =>
|
|
||||||
PointSetDist.T.klDivergence(predi, ans)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let logScoreWithPointResolution = (
|
|
||||||
~prediction: DistributionTypes.genericDist,
|
|
||||||
~answer: float,
|
|
||||||
~prior: option<DistributionTypes.genericDist>,
|
|
||||||
~toPointSetFn: toPointSetFn,
|
|
||||||
): result<float, error> => {
|
|
||||||
switch prior {
|
|
||||||
| Some(prior') =>
|
|
||||||
E.R.merge(toPointSetFn(prior'), toPointSetFn(prediction))->E.R.bind(((
|
|
||||||
prior'',
|
|
||||||
prediction'',
|
|
||||||
)) =>
|
|
||||||
PointSetDist.T.logScoreWithPointResolution(
|
|
||||||
~prediction=prediction'',
|
|
||||||
~answer,
|
|
||||||
~prior=prior''->Some,
|
|
||||||
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
|
||||||
)
|
|
||||||
| None =>
|
|
||||||
prediction
|
|
||||||
->toPointSetFn
|
|
||||||
->E.R.bind(x =>
|
|
||||||
PointSetDist.T.logScoreWithPointResolution(
|
|
||||||
~prediction=x,
|
|
||||||
~answer,
|
|
||||||
~prior=None,
|
|
||||||
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let toFloatOperation = (
|
let toFloatOperation = (
|
||||||
t,
|
t,
|
||||||
~toPointSetFn: toPointSetFn,
|
~toPointSetFn: toPointSetFn,
|
||||||
|
@ -171,6 +136,70 @@ let toPointSet = (
|
||||||
}
|
}
|
||||||
}
|
}
|

module Score = {
type genericDistOrScalar = DistributionTypes.DistributionOperation.genericDistOrScalar

let argsMake = (~esti: t, ~answ: genericDistOrScalar, ~prior: option<t>, ~env: env): result<
PointSetDist_Scoring.scoreArgs,
error,
> => {
let toPointSetFn = t =>
toPointSet(
t,
~xyPointLength=env.xyPointLength,
~sampleCount=env.sampleCount,
~xSelection=#ByWeight,
(),
)
let prior': option<result<PointSetTypes.pointSetDist, error>> = switch prior {
| None => None
| Some(d) => toPointSetFn(d)->Some
}
let twoDists = (~toPointSetFn, esti': t, answ': t): result<
(PointSetTypes.pointSetDist, PointSetTypes.pointSetDist),
error,
> => E.R.merge(toPointSetFn(esti'), toPointSetFn(answ'))
switch (esti, answ, prior') {
| (esti', Score_Dist(answ'), None) =>
twoDists(~toPointSetFn, esti', answ')->E.R2.fmap(((esti'', answ'')) =>
{estimate: esti'', answer: answ'', prior: None}->PointSetDist_Scoring.DistAnswer
)
| (esti', Score_Dist(answ'), Some(Ok(prior''))) =>
twoDists(~toPointSetFn, esti', answ')->E.R2.fmap(((esti'', answ'')) =>
{
estimate: esti'',
answer: answ'',
prior: Some(prior''),
}->PointSetDist_Scoring.DistAnswer
)
| (esti', Score_Scalar(answ'), None) =>
toPointSetFn(esti')->E.R2.fmap(esti'' =>
{
estimate: esti'',
answer: answ',
prior: None,
}->PointSetDist_Scoring.ScalarAnswer
)
| (esti', Score_Scalar(answ'), Some(Ok(prior''))) =>
toPointSetFn(esti')->E.R2.fmap(esti'' =>
{
estimate: esti'',
answer: answ',
prior: Some(prior''),
}->PointSetDist_Scoring.ScalarAnswer
)
| (_, _, Some(Error(err))) => err->Error
}
}

let logScore = (~estimate: t, ~answer: genericDistOrScalar, ~prior: option<t>, ~env: env): result<
float,
error,
> =>
argsMake(~esti=estimate, ~answ=answer, ~prior, ~env)->E.R.bind(x =>
x->PointSetDist.logScore->E.R2.errMap(y => DistributionTypes.OperationError(y))
)
}
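// Editor's illustrative sketch (not part of this commit): assuming `estimate`, `answer`,
// and `prior` are DistributionTypes.genericDist values already in scope, the new API above
// would be exercised roughly as follows. `Score_Dist` / `Score_Scalar` are the
// genericDistOrScalar constructors matched in argsMake; depending on the surrounding opens
// they may need to be qualified as DistributionTypes.DistributionOperation.Score_Dist, etc.
// let env: GenericDist.env = {sampleCount: 1000, xyPointLength: 1000}
// let distScore = GenericDist.Score.logScore(~estimate, ~answer=Score_Dist(answer), ~prior=Some(prior), ~env)
// let pointScore = GenericDist.Score.logScore(~estimate, ~answer=Score_Scalar(4.5), ~prior=None, ~env)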
/*
|
/*
|
||||||
PointSetDist.toSparkline calls "downsampleEquallyOverX", which downsamples it to n=bucketCount.
|
PointSetDist.toSparkline calls "downsampleEquallyOverX", which downsamples it to n=bucketCount.
|
||||||
It first needs a pointSetDist, so we convert to a pointSetDist. In this process we want the
|
It first needs a pointSetDist, so we convert to a pointSetDist. In this process we want the
|
@ -5,6 +5,9 @@ type toSampleSetFn = t => result<SampleSetDist.t, error>
type scaleMultiplyFn = (t, float) => result<t, error>
type scaleMultiplyFn = (t, float) => result<t, error>
type pointwiseAddFn = (t, t) => result<t, error>
type pointwiseAddFn = (t, t) => result<t, error>

@genType
type env = {sampleCount: int, xyPointLength: int}

let sampleN: (t, int) => array<float>
let sampleN: (t, int) => array<float>
let sample: t => float
let sample: t => float
|
@ -25,12 +28,11 @@ let toFloatOperation: (
) => result<float, error>
) => result<float, error>

module Score: {
module Score: {
let klDivergence: (t, t, ~toPointSetFn: toPointSetFn) => result<float, error>
let logScore: (
let logScoreWithPointResolution: (
~estimate: t,
~prediction: t,
~answer: DistributionTypes.DistributionOperation.genericDistOrScalar,
~answer: float,
~prior: option<t>,
~prior: option<t>,
~toPointSetFn: toPointSetFn,
~env: env,
) => result<float, error>
) => result<float, error>
}
}
|
|
|
@ -120,7 +120,7 @@ let combinePointwise = (
|
||||||
|
|
||||||
let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation)
|
let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation)
|
||||||
|
|
||||||
combiner(fn, interpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
|
combiner(interpolator, fn, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
|
||||||
make(~integralSumCache=combinedIntegralSum, x)
|
make(~integralSumCache=combinedIntegralSum, x)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
@ -270,20 +270,6 @@ module T = Dist({
|
||||||
}
|
}
|
||||||
let variance = (t: t): float =>
|
let variance = (t: t): float =>
|
||||||
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
|
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
|
||||||
|
|
||||||
let klDivergence = (prediction: t, answer: t) => {
|
|
||||||
let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
|
|
||||||
PointSetDist_Scoring.KLDivergence.integrand,
|
|
||||||
prediction.xyShape,
|
|
||||||
answer.xyShape,
|
|
||||||
)
|
|
||||||
newShape->E.R2.fmap(x => x->make->integralEndY)
|
|
||||||
}
|
|
||||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
|
||||||
let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
|
|
||||||
let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
|
|
||||||
PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
||||||
let isNormalized = (t: t): bool => {
|
let isNormalized = (t: t): bool => {
|
||||||
|
|
|
@ -49,7 +49,7 @@ let combinePointwise = (
|
||||||
// TODO: does it ever make sense to pointwise combine the integrals here?
|
// TODO: does it ever make sense to pointwise combine the integrals here?
|
||||||
// It could be done for pointwise additions, but is that ever needed?
|
// It could be done for pointwise additions, but is that ever needed?
|
||||||
|
|
||||||
combiner(fn, XYShape.XtoY.discreteInterpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
|
combiner(XYShape.XtoY.discreteInterpolator, fn, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
|
||||||
}
|
}
|
||||||
|
|
||||||
let reduce = (
|
let reduce = (
|
||||||
|
@ -222,15 +222,4 @@ module T = Dist({
|
||||||
let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
|
let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
|
||||||
XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
||||||
}
|
}
|
||||||
|
|
||||||
let klDivergence = (prediction: t, answer: t) => {
|
|
||||||
combinePointwise(
|
|
||||||
~fn=PointSetDist_Scoring.KLDivergence.integrand,
|
|
||||||
prediction,
|
|
||||||
answer,
|
|
||||||
)->E.R2.fmap(integralEndY)
|
|
||||||
}
|
|
||||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
|
||||||
Error(Operation.NotYetImplemented)
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
|
@ -33,12 +33,6 @@ module type dist = {
|
||||||
|
|
||||||
let mean: t => float
|
let mean: t => float
|
||||||
let variance: t => float
|
let variance: t => float
|
||||||
let klDivergence: (t, t) => result<float, Operation.Error.t>
|
|
||||||
let logScoreWithPointResolution: (
|
|
||||||
~prediction: t,
|
|
||||||
~answer: float,
|
|
||||||
~prior: option<t>,
|
|
||||||
) => result<float, Operation.Error.t>
|
|
||||||
}
|
}
|
||||||
|
|
||||||
module Dist = (T: dist) => {
|
module Dist = (T: dist) => {
|
||||||
|
@ -61,9 +55,6 @@ module Dist = (T: dist) => {
|
||||||
let mean = T.mean
|
let mean = T.mean
|
||||||
let variance = T.variance
|
let variance = T.variance
|
||||||
let integralEndY = T.integralEndY
|
let integralEndY = T.integralEndY
|
||||||
let klDivergence = T.klDivergence
|
|
||||||
let logScoreWithPointResolution = T.logScoreWithPointResolution
|
|
||||||
|
|
||||||
let updateIntegralCache = T.updateIntegralCache
|
let updateIntegralCache = T.updateIntegralCache
|
||||||
|
|
||||||
module Integral = {
|
module Integral = {
|
||||||
|
|
|
@ -302,15 +302,6 @@ module T = Dist({
|
||||||
| _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
| _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let klDivergence = (prediction: t, answer: t) => {
|
|
||||||
let klDiscretePart = Discrete.T.klDivergence(prediction.discrete, answer.discrete)
|
|
||||||
let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
|
|
||||||
E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
|
|
||||||
}
|
|
||||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
|
||||||
Error(Operation.NotYetImplemented)
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
||||||
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
|
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
|
||||||
|
|
|
@ -66,6 +66,7 @@ let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t
|
||||||
}
|
}
|
||||||
|
|
||||||
let combinePointwise = (
|
let combinePointwise = (
|
||||||
|
~combiner=XYShape.PointwiseCombination.combine,
|
||||||
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
|
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
|
||||||
~integralCachesFn: (
|
~integralCachesFn: (
|
||||||
PointSetTypes.continuousShape,
|
PointSetTypes.continuousShape,
|
||||||
|
@ -78,6 +79,7 @@ let combinePointwise = (
|
||||||
switch (t1, t2) {
|
switch (t1, t2) {
|
||||||
| (Continuous(m1), Continuous(m2)) =>
|
| (Continuous(m1), Continuous(m2)) =>
|
||||||
Continuous.combinePointwise(
|
Continuous.combinePointwise(
|
||||||
|
~combiner,
|
||||||
~integralSumCachesFn,
|
~integralSumCachesFn,
|
||||||
fn,
|
fn,
|
||||||
m1,
|
m1,
|
||||||
|
@ -85,6 +87,7 @@ let combinePointwise = (
|
||||||
)->E.R2.fmap(x => PointSetTypes.Continuous(x))
|
)->E.R2.fmap(x => PointSetTypes.Continuous(x))
|
||||||
| (Discrete(m1), Discrete(m2)) =>
|
| (Discrete(m1), Discrete(m2)) =>
|
||||||
Discrete.combinePointwise(
|
Discrete.combinePointwise(
|
||||||
|
~combiner,
|
||||||
~integralSumCachesFn,
|
~integralSumCachesFn,
|
||||||
~fn,
|
~fn,
|
||||||
m1,
|
m1,
|
||||||
|
@ -195,25 +198,16 @@ module T = Dist({
|
||||||
| Discrete(m) => Discrete.T.variance(m)
|
| Discrete(m) => Discrete.T.variance(m)
|
||||||
| Continuous(m) => Continuous.T.variance(m)
|
| Continuous(m) => Continuous.T.variance(m)
|
||||||
}
|
}
|
||||||
|
|
||||||
let klDivergence = (prediction: t, answer: t) =>
|
|
||||||
switch (prediction, answer) {
|
|
||||||
| (Continuous(t1), Continuous(t2)) => Continuous.T.klDivergence(t1, t2)
|
|
||||||
| (Discrete(t1), Discrete(t2)) => Discrete.T.klDivergence(t1, t2)
|
|
||||||
| (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
|
|
||||||
}
|
|
||||||
|
|
||||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
|
||||||
switch (prior, prediction) {
|
|
||||||
| (Some(Continuous(t1)), Continuous(t2)) =>
|
|
||||||
Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=t1->Some)
|
|
||||||
| (None, Continuous(t2)) =>
|
|
||||||
Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=None)
|
|
||||||
| _ => Error(Operation.NotYetImplemented)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
||||||
|
let logScore = (args: PointSetDist_Scoring.scoreArgs): result<float, Operation.Error.t> =>
|
||||||
|
PointSetDist_Scoring.logScore(
|
||||||
|
args,
|
||||||
|
~combineFn=combinePointwise,
|
||||||
|
~integrateFn=T.Integral.sum,
|
||||||
|
~toMixedFn=toMixed,
|
||||||
|
)
|
||||||
|
|
||||||
let pdf = (f: float, t: t) => {
|
let pdf = (f: float, t: t) => {
|
||||||
let mixedPoint: PointSetTypes.mixedPoint = T.xToY(f, t)
|
let mixedPoint: PointSetTypes.mixedPoint = T.xToY(f, t)
|
||||||
mixedPoint.continuous +. mixedPoint.discrete
|
mixedPoint.continuous +. mixedPoint.discrete
|
@ -1,46 +1,149 @@
module KLDivergence = {
type pointSetDist = PointSetTypes.pointSetDist
let logFn = Js.Math.log // base e
let integrand = (predictionElement: float, answerElement: float): result<
type scalar = float
type score = float
type abstractScoreArgs<'a, 'b> = {estimate: 'a, answer: 'b, prior: option<'a>}
type scoreArgs =
| DistAnswer(abstractScoreArgs<pointSetDist, pointSetDist>)
| ScalarAnswer(abstractScoreArgs<pointSetDist, scalar>)

let logFn = Js.Math.log // base e
let minusScaledLogOfQuotient = (~esti, ~answ): result<float, Operation.Error.t> => {
let quot = esti /. answ
quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answ *. logFn(quot))
}

module WithDistAnswer = {
// The Kullback-Leibler divergence
let integrand = (estimateElement: float, answerElement: float): result<
float,
float,
Operation.Error.t,
Operation.Error.t,
> =>
> =>
// We decided that negative infinity, not an error at answerElement = 0.0, is a desirable value.
// We decided that 0.0, not an error at answerElement = 0.0, is a desirable value.
if answerElement == 0.0 {
if answerElement == 0.0 {
Ok(0.0)
Ok(0.0)
} else if predictionElement == 0.0 {
} else if estimateElement == 0.0 {
Ok(infinity)
Ok(infinity)
} else {
} else {
let quot = predictionElement /. answerElement
minusScaledLogOfQuotient(~esti=estimateElement, ~answ=answerElement)
quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answerElement *. logFn(quot))
}

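// Editor's note (illustrative, not part of this commit): per point, the integrand above is
// -answ * log(esti / answ) = answ * log(answ / esti), i.e. the pointwise Kullback-Leibler
// term of KL(answer || estimate); integrating it over the support gives the full divergence.
// For example, at a point where esti = 0.5 and answ = 0.25 the contribution is
// -0.25 *. Js.Math.log(0.5 /. 0.25) ~= -0.173, matching 0.25 * ln(0.25 / 0.5).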
|
let sum = (
|
||||||
|
~estimate: pointSetDist,
|
||||||
|
~answer: pointSetDist,
|
||||||
|
~combineFn,
|
||||||
|
~integrateFn,
|
||||||
|
~toMixedFn,
|
||||||
|
): result<score, Operation.Error.t> => {
|
||||||
|
let combineAndIntegrate = (estimate, answer) =>
|
||||||
|
combineFn(integrand, estimate, answer)->E.R2.fmap(integrateFn)
|
||||||
|
|
||||||
|
let getMixedSums = (estimate: pointSetDist, answer: pointSetDist) => {
|
||||||
|
let esti = estimate->toMixedFn
|
||||||
|
let answ = answer->toMixedFn
|
||||||
|
switch (
|
||||||
|
Mixed.T.toContinuous(esti),
|
||||||
|
Mixed.T.toDiscrete(esti),
|
||||||
|
Mixed.T.toContinuous(answ),
|
||||||
|
Mixed.T.toDiscrete(answ),
|
||||||
|
) {
|
||||||
|
| (
|
||||||
|
Some(estiContinuousPart),
|
||||||
|
Some(estiDiscretePart),
|
||||||
|
Some(answContinuousPart),
|
||||||
|
Some(answDiscretePart),
|
||||||
|
) =>
|
||||||
|
E.R.merge(
|
||||||
|
combineAndIntegrate(
|
||||||
|
PointSetTypes.Discrete(estiDiscretePart),
|
||||||
|
PointSetTypes.Discrete(answDiscretePart),
|
||||||
|
),
|
||||||
|
combineAndIntegrate(Continuous(estiContinuousPart), Continuous(answContinuousPart)),
|
||||||
|
)
|
||||||
|
| (_, _, _, _) => `unreachable state`->Operation.Other->Error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (estimate, answer) {
|
||||||
|
| (Continuous(_), Continuous(_))
|
||||||
|
| (Discrete(_), Discrete(_)) =>
|
||||||
|
combineAndIntegrate(estimate, answer)
|
||||||
|
| (_, _) =>
|
||||||
|
getMixedSums(estimate, answer)->E.R2.fmap(((discretePart, continuousPart)) =>
|
||||||
|
discretePart +. continuousPart
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let sumWithPrior = (
|
||||||
|
~estimate: pointSetDist,
|
||||||
|
~answer: pointSetDist,
|
||||||
|
~prior: pointSetDist,
|
||||||
|
~combineFn,
|
||||||
|
~integrateFn,
|
||||||
|
~toMixedFn,
|
||||||
|
): result<score, Operation.Error.t> => {
|
||||||
|
let kl1 = sum(~estimate, ~answer, ~combineFn, ~integrateFn, ~toMixedFn)
|
||||||
|
let kl2 = sum(~estimate=prior, ~answer, ~combineFn, ~integrateFn, ~toMixedFn)
|
||||||
|
E.R.merge(kl1, kl2)->E.R2.fmap(((kl1', kl2')) => kl1' -. kl2')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module LogScoreWithPointResolution = {
|
module WithScalarAnswer = {
|
||||||
let logFn = Js.Math.log
|
let sum = (mp: PointSetTypes.MixedPoint.t): float => mp.continuous +. mp.discrete
|
||||||
let score = (
|
let score = (~estimate: pointSetDist, ~answer: scalar): result<score, Operation.Error.t> => {
|
||||||
~priorPdf: option<float => float>,
|
let _score = (~estimatePdf: float => option<float>, ~answer: float): result<
|
||||||
~predictionPdf: float => float,
|
score,
|
||||||
~answer: float,
|
Operation.Error.t,
|
||||||
): result<float, Operation.Error.t> => {
|
> => {
|
||||||
let numerator = answer->predictionPdf
|
let density = answer->estimatePdf
|
||||||
if numerator < 0.0 {
|
switch density {
|
||||||
|
| None => Operation.PdfInvalidError->Error
|
||||||
|
| Some(density') =>
|
||||||
|
if density' < 0.0 {
|
||||||
Operation.PdfInvalidError->Error
|
Operation.PdfInvalidError->Error
|
||||||
} else if numerator == 0.0 {
|
} else if density' == 0.0 {
|
||||||
infinity->Ok
|
infinity->Ok
|
||||||
} else {
|
} else {
|
||||||
-.(
|
density'->logFn->(x => -.x)->Ok
|
||||||
switch priorPdf {
|
|
||||||
| None => numerator->logFn
|
|
||||||
| Some(f) => {
|
|
||||||
let priorDensityOfAnswer = f(answer)
|
|
||||||
if priorDensityOfAnswer == 0.0 {
|
|
||||||
neg_infinity
|
|
||||||
} else {
|
|
||||||
(numerator /. priorDensityOfAnswer)->logFn
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
)->Ok
|
|
||||||
|
let estimatePdf = x =>
|
||||||
|
switch estimate {
|
||||||
|
| Continuous(esti) => Continuous.T.xToY(x, esti)->sum->Some
|
||||||
|
| Discrete(esti) => Discrete.T.xToY(x, esti)->sum->Some
|
||||||
|
| Mixed(_) => None
|
||||||
}
|
}
|
||||||
|
_score(~estimatePdf, ~answer)
|
||||||
|
}
|
||||||
|
|
||||||
|
let scoreWithPrior = (~estimate: pointSetDist, ~answer: scalar, ~prior: pointSetDist): result<
|
||||||
|
score,
|
||||||
|
Operation.Error.t,
|
||||||
|
> => {
|
||||||
|
E.R.merge(score(~estimate, ~answer), score(~estimate=prior, ~answer))->E.R2.fmap(((s1, s2)) =>
|
||||||
|
s1 -. s2
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|

let twoGenericDistsToTwoPointSetDists = (~toPointSetFn, estimate, answer): result<
(pointSetDist, pointSetDist),
'e,
> => E.R.merge(toPointSetFn(estimate, ()), toPointSetFn(answer, ()))

let logScore = (args: scoreArgs, ~combineFn, ~integrateFn, ~toMixedFn): result<
score,
Operation.Error.t,
> =>
switch args {
| DistAnswer({estimate, answer, prior: None}) =>
WithDistAnswer.sum(~estimate, ~answer, ~integrateFn, ~combineFn, ~toMixedFn)
| DistAnswer({estimate, answer, prior: Some(prior)}) =>
WithDistAnswer.sumWithPrior(~estimate, ~answer, ~prior, ~integrateFn, ~combineFn, ~toMixedFn)
| ScalarAnswer({estimate, answer, prior: None}) => WithScalarAnswer.score(~estimate, ~answer)
| ScalarAnswer({estimate, answer, prior: Some(prior)}) =>
WithScalarAnswer.scoreWithPrior(~estimate, ~answer, ~prior)
}
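// Editor's note (illustrative, not part of this commit): the ~combineFn, ~integrateFn and
// ~toMixedFn parameters are injected by the caller; in PointSetDist.res above, logScore is
// wired up with ~combineFn=combinePointwise, ~integrateFn=T.Integral.sum, ~toMixedFn=toMixed.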
@ -8,6 +8,7 @@ type rec frType =
|
||||||
| FRTypeNumber
|
| FRTypeNumber
|
||||||
| FRTypeNumeric
|
| FRTypeNumeric
|
||||||
| FRTypeDistOrNumber
|
| FRTypeDistOrNumber
|
||||||
|
| FRTypeDist
|
||||||
| FRTypeLambda
|
| FRTypeLambda
|
||||||
| FRTypeRecord(frTypeRecord)
|
| FRTypeRecord(frTypeRecord)
|
||||||
| FRTypeDict(frType)
|
| FRTypeDict(frType)
|
||||||
|
@ -41,7 +42,7 @@ and frValueDistOrNumber = FRValueNumber(float) | FRValueDist(DistributionTypes.g
|
||||||
type fnDefinition = {
|
type fnDefinition = {
|
||||||
name: string,
|
name: string,
|
||||||
inputs: array<frType>,
|
inputs: array<frType>,
|
||||||
run: (array<frValue>, DistributionOperation.env) => result<internalExpressionValue, string>,
|
run: (array<frValue>, GenericDist.env) => result<internalExpressionValue, string>,
|
||||||
}
|
}
|
||||||
|
|
||||||
type function = {
|
type function = {
|
||||||
|
@ -60,6 +61,7 @@ module FRType = {
|
||||||
switch t {
|
switch t {
|
||||||
| FRTypeNumber => "number"
|
| FRTypeNumber => "number"
|
||||||
| FRTypeNumeric => "numeric"
|
| FRTypeNumeric => "numeric"
|
||||||
|
| FRTypeDist => "distribution"
|
||||||
| FRTypeDistOrNumber => "distribution|number"
|
| FRTypeDistOrNumber => "distribution|number"
|
||||||
| FRTypeRecord(r) => {
|
| FRTypeRecord(r) => {
|
||||||
let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
|
let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
|
||||||
|
@ -98,6 +100,7 @@ module FRType = {
|
||||||
| (FRTypeDistOrNumber, IEvDistribution(Symbolic(#Float(f)))) =>
|
| (FRTypeDistOrNumber, IEvDistribution(Symbolic(#Float(f)))) =>
|
||||||
Some(FRValueDistOrNumber(FRValueNumber(f)))
|
Some(FRValueDistOrNumber(FRValueNumber(f)))
|
||||||
| (FRTypeDistOrNumber, IEvDistribution(f)) => Some(FRValueDistOrNumber(FRValueDist(f)))
|
| (FRTypeDistOrNumber, IEvDistribution(f)) => Some(FRValueDistOrNumber(FRValueDist(f)))
|
||||||
|
| (FRTypeDist, IEvDistribution(f)) => Some(FRValueDist(f))
|
||||||
| (FRTypeNumeric, IEvNumber(f)) => Some(FRValueNumber(f))
|
| (FRTypeNumeric, IEvNumber(f)) => Some(FRValueNumber(f))
|
||||||
| (FRTypeNumeric, IEvDistribution(Symbolic(#Float(f)))) => Some(FRValueNumber(f))
|
| (FRTypeNumeric, IEvDistribution(Symbolic(#Float(f)))) => Some(FRValueNumber(f))
|
||||||
| (FRTypeLambda, IEvLambda(f)) => Some(FRValueLambda(f))
|
| (FRTypeLambda, IEvLambda(f)) => Some(FRValueLambda(f))
|
||||||
|
@ -319,7 +322,7 @@ module FnDefinition = {
|
||||||
t.name ++ `(${inputs})`
|
t.name ++ `(${inputs})`
|
||||||
}
|
}
|
||||||
|
|
||||||
let run = (t: t, args: array<internalExpressionValue>, env: DistributionOperation.env) => {
|
let run = (t: t, args: array<internalExpressionValue>, env: GenericDist.env) => {
|
||||||
let argValues = FRType.matchWithExpressionValueArray(t.inputs, args)
|
let argValues = FRType.matchWithExpressionValueArray(t.inputs, args)
|
||||||
switch argValues {
|
switch argValues {
|
||||||
| Some(values) => t.run(values, env)
|
| Some(values) => t.run(values, env)
|
||||||
|
@ -374,7 +377,7 @@ module Registry = {
|
||||||
~registry: registry,
|
~registry: registry,
|
||||||
~fnName: string,
|
~fnName: string,
|
||||||
~args: array<internalExpressionValue>,
|
~args: array<internalExpressionValue>,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
) => {
|
) => {
|
||||||
let matchToDef = m => Matcher.Registry.matchToDef(registry, m)
|
let matchToDef = m => Matcher.Registry.matchToDef(registry, m)
|
||||||
//Js.log(toSimple(registry))
|
//Js.log(toSimple(registry))
|
||||||
|
|
|
@ -27,6 +27,12 @@ module Prepare = {
|
||||||
| _ => Error(impossibleError)
|
| _ => Error(impossibleError)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let threeArgs = (inputs: ts): result<ts, err> =>
|
||||||
|
switch inputs {
|
||||||
|
| [FRValueRecord([(_, n1), (_, n2), (_, n3)])] => Ok([n1, n2, n3])
|
||||||
|
| _ => Error(impossibleError)
|
||||||
|
}
|
||||||
|
|
||||||
let toArgs = (inputs: ts): result<ts, err> =>
|
let toArgs = (inputs: ts): result<ts, err> =>
|
||||||
switch inputs {
|
switch inputs {
|
||||||
| [FRValueRecord(args)] => args->E.A2.fmap(((_, b)) => b)->Ok
|
| [FRValueRecord(args)] => args->E.A2.fmap(((_, b)) => b)->Ok
|
||||||
|
@ -57,6 +63,16 @@ module Prepare = {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let twoDist = (values: ts): result<
|
||||||
|
(DistributionTypes.genericDist, DistributionTypes.genericDist),
|
||||||
|
err,
|
||||||
|
> => {
|
||||||
|
switch values {
|
||||||
|
| [FRValueDist(a1), FRValueDist(a2)] => Ok(a1, a2)
|
||||||
|
| _ => Error(impossibleError)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let twoNumbers = (values: ts): result<(float, float), err> => {
|
let twoNumbers = (values: ts): result<(float, float), err> => {
|
||||||
switch values {
|
switch values {
|
||||||
| [FRValueNumber(a1), FRValueNumber(a2)] => Ok(a1, a2)
|
| [FRValueNumber(a1), FRValueNumber(a2)] => Ok(a1, a2)
|
||||||
|
@ -81,6 +97,11 @@ module Prepare = {
|
||||||
module Record = {
|
module Record = {
|
||||||
let twoDistOrNumber = (values: ts): result<(frValueDistOrNumber, frValueDistOrNumber), err> =>
|
let twoDistOrNumber = (values: ts): result<(frValueDistOrNumber, frValueDistOrNumber), err> =>
|
||||||
values->ToValueArray.Record.twoArgs->E.R.bind(twoDistOrNumber)
|
values->ToValueArray.Record.twoArgs->E.R.bind(twoDistOrNumber)
|
||||||
|
|
||||||
|
let twoDist = (values: ts): result<
|
||||||
|
(DistributionTypes.genericDist, DistributionTypes.genericDist),
|
||||||
|
err,
|
||||||
|
> => values->ToValueArray.Record.twoArgs->E.R.bind(twoDist)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -128,8 +149,7 @@ module Prepare = {
|
||||||
module Process = {
|
module Process = {
|
||||||
module DistOrNumberToDist = {
|
module DistOrNumberToDist = {
|
||||||
module Helpers = {
|
module Helpers = {
|
||||||
let toSampleSet = (r, env: DistributionOperation.env) =>
|
let toSampleSet = (r, env: GenericDist.env) => GenericDist.toSampleSetDist(r, env.sampleCount)
|
||||||
GenericDist.toSampleSetDist(r, env.sampleCount)
|
|
||||||
|
|
||||||
let mapFnResult = r =>
|
let mapFnResult = r =>
|
||||||
switch r {
|
switch r {
|
||||||
|
@ -166,7 +186,7 @@ module Process = {
|
||||||
let oneValue = (
|
let oneValue = (
|
||||||
~fn: float => result<DistributionTypes.genericDist, string>,
|
~fn: float => result<DistributionTypes.genericDist, string>,
|
||||||
~value: frValueDistOrNumber,
|
~value: frValueDistOrNumber,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
): result<DistributionTypes.genericDist, string> => {
|
): result<DistributionTypes.genericDist, string> => {
|
||||||
switch value {
|
switch value {
|
||||||
| FRValueNumber(a1) => fn(a1)
|
| FRValueNumber(a1) => fn(a1)
|
||||||
|
@ -179,7 +199,7 @@ module Process = {
|
||||||
let twoValues = (
|
let twoValues = (
|
||||||
~fn: ((float, float)) => result<DistributionTypes.genericDist, string>,
|
~fn: ((float, float)) => result<DistributionTypes.genericDist, string>,
|
||||||
~values: (frValueDistOrNumber, frValueDistOrNumber),
|
~values: (frValueDistOrNumber, frValueDistOrNumber),
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
): result<DistributionTypes.genericDist, string> => {
|
): result<DistributionTypes.genericDist, string> => {
|
||||||
switch values {
|
switch values {
|
||||||
| (FRValueNumber(a1), FRValueNumber(a2)) => fn((a1, a2))
|
| (FRValueNumber(a1), FRValueNumber(a2)) => fn((a1, a2))
|
||||||
|
|
|
@ -49,7 +49,7 @@ let inputsTodist = (inputs: array<FunctionRegistry_Core.frValue>, makeDist) => {
|
||||||
expressionValue
|
expressionValue
|
||||||
}
|
}
|
||||||
|
|
||||||
let registry = [
|
let registryStart = [
|
||||||
Function.make(
|
Function.make(
|
||||||
~name="toContinuousPointSet",
|
~name="toContinuousPointSet",
|
||||||
~definitions=[
|
~definitions=[
|
||||||
|
@ -510,3 +510,67 @@ to(5,10)
|
||||||
(),
|
(),
|
||||||
),
|
),
|
||||||
]
|
]
|

let runScoring = (estimate, answer, prior, env) => {
GenericDist.Score.logScore(~estimate, ~answer, ~prior, ~env)
->E.R2.fmap(FunctionRegistry_Helpers.Wrappers.evNumber)
->E.R2.errMap(DistributionTypes.Error.toString)
}

let scoreFunctions = [
Function.make(
~name="Score",
~definitions=[
FnDefinition.make(
~name="logScore",
~inputs=[
FRTypeRecord([
("estimate", FRTypeDist),
("answer", FRTypeDistOrNumber),
("prior", FRTypeDist),
]),
],
~run=(inputs, env) => {
switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.threeArgs(inputs) {
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueDist(d)), FRValueDist(prior)]) =>
runScoring(estimate, Score_Dist(d), Some(prior), env)
| Ok([
FRValueDist(estimate),
FRValueDistOrNumber(FRValueNumber(d)),
FRValueDist(prior),
]) =>
runScoring(estimate, Score_Scalar(d), Some(prior), env)
| Error(e) => Error(e)
| _ => Error(FunctionRegistry_Helpers.impossibleError)
}
},
),
FnDefinition.make(
~name="logScore",
~inputs=[FRTypeRecord([("estimate", FRTypeDist), ("answer", FRTypeDistOrNumber)])],
~run=(inputs, env) => {
switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.twoArgs(inputs) {
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueDist(d))]) =>
runScoring(estimate, Score_Dist(d), None, env)
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueNumber(d))]) =>
runScoring(estimate, Score_Scalar(d), None, env)
| Error(e) => Error(e)
| _ => Error(FunctionRegistry_Helpers.impossibleError)
}
},
),
FnDefinition.make(~name="klDivergence", ~inputs=[FRTypeDist, FRTypeDist], ~run=(
inputs,
env,
) => {
switch inputs {
| [FRValueDist(estimate), FRValueDist(d)] => runScoring(estimate, Score_Dist(d), None, env)
| _ => Error(FunctionRegistry_Helpers.impossibleError)
}
}),
],
(),
),
]

let registry = E.A.append(registryStart, scoreFunctions)
@ -1,7 +1,7 @@
|
||||||
module IEV = ReducerInterface_InternalExpressionValue
|
module IEV = ReducerInterface_InternalExpressionValue
|
||||||
type internalExpressionValue = IEV.t
|
type internalExpressionValue = IEV.t
|
||||||
|
|
||||||
let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
|
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
|
||||||
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
||||||
> => {
|
> => {
|
||||||
switch call {
|
switch call {
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
module IEV = ReducerInterface_InternalExpressionValue
|
module IEV = ReducerInterface_InternalExpressionValue
|
||||||
type internalExpressionValue = IEV.t
|
type internalExpressionValue = IEV.t
|
||||||
|
|
||||||
let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
|
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
|
||||||
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
||||||
> => {
|
> => {
|
||||||
switch call {
|
switch call {
|
||||||
|
|
|
@ -86,7 +86,7 @@ let toStringResult = x =>
|
||||||
}
|
}
|
||||||
|
|
||||||
@genType
|
@genType
|
||||||
type environment = DistributionOperation.env
|
type environment = GenericDist.env
|
||||||
|
|
||||||
@genType
|
@genType
|
||||||
let defaultEnvironment: environment = DistributionOperation.defaultEnv
|
let defaultEnvironment: environment = DistributionOperation.defaultEnv
|
||||||
|
|
|
@ -32,50 +32,38 @@ module Helpers = {
|
||||||
let toFloatFn = (
|
let toFloatFn = (
|
||||||
fnCall: DistributionTypes.DistributionOperation.toFloat,
|
fnCall: DistributionTypes.DistributionOperation.toFloat,
|
||||||
dist: DistributionTypes.genericDist,
|
dist: DistributionTypes.genericDist,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
) => {
|
) => {
|
||||||
FromDist(DistributionTypes.DistributionOperation.ToFloat(fnCall), dist)
|
FromDist(#ToFloat(fnCall), dist)->DistributionOperation.run(~env)->Some
|
||||||
->DistributionOperation.run(~env)
|
|
||||||
->Some
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let toStringFn = (
|
let toStringFn = (
|
||||||
fnCall: DistributionTypes.DistributionOperation.toString,
|
fnCall: DistributionTypes.DistributionOperation.toString,
|
||||||
dist: DistributionTypes.genericDist,
|
dist: DistributionTypes.genericDist,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
) => {
|
) => {
|
||||||
FromDist(DistributionTypes.DistributionOperation.ToString(fnCall), dist)
|
FromDist(#ToString(fnCall), dist)->DistributionOperation.run(~env)->Some
|
||||||
->DistributionOperation.run(~env)
|
|
||||||
->Some
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let toBoolFn = (
|
let toBoolFn = (
|
||||||
fnCall: DistributionTypes.DistributionOperation.toBool,
|
fnCall: DistributionTypes.DistributionOperation.toBool,
|
||||||
dist: DistributionTypes.genericDist,
|
dist: DistributionTypes.genericDist,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
) => {
|
) => {
|
||||||
FromDist(DistributionTypes.DistributionOperation.ToBool(fnCall), dist)
|
FromDist(#ToBool(fnCall), dist)->DistributionOperation.run(~env)->Some
|
||||||
->DistributionOperation.run(~env)
|
|
||||||
->Some
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let toDistFn = (
|
let toDistFn = (
|
||||||
fnCall: DistributionTypes.DistributionOperation.toDist,
|
fnCall: DistributionTypes.DistributionOperation.toDist,
|
||||||
dist,
|
dist,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
) => {
|
) => {
|
||||||
FromDist(DistributionTypes.DistributionOperation.ToDist(fnCall), dist)
|
FromDist(#ToDist(fnCall), dist)->DistributionOperation.run(~env)->Some
|
||||||
->DistributionOperation.run(~env)
|
|
||||||
->Some
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: DistributionOperation.env) => {
|
let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: GenericDist.env) => {
|
||||||
FromDist(
|
FromDist(
|
||||||
DistributionTypes.DistributionOperation.ToDistCombination(
|
#ToDistCombination(direction, arithmeticMap(arithmetic), #Dist(dist2)),
|
||||||
direction,
|
|
||||||
arithmeticMap(arithmetic),
|
|
||||||
#Dist(dist2),
|
|
||||||
),
|
|
||||||
dist1,
|
dist1,
|
||||||
)->DistributionOperation.run(~env)
|
)->DistributionOperation.run(~env)
|
||||||
}
|
}
|
||||||
|
@ -109,7 +97,7 @@ module Helpers = {
|
||||||
let mixtureWithGivenWeights = (
|
let mixtureWithGivenWeights = (
|
||||||
distributions: array<DistributionTypes.genericDist>,
|
distributions: array<DistributionTypes.genericDist>,
|
||||||
weights: array<float>,
|
weights: array<float>,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
): DistributionOperation.outputType =>
|
): DistributionOperation.outputType =>
|
||||||
E.A.length(distributions) == E.A.length(weights)
|
E.A.length(distributions) == E.A.length(weights)
|
||||||
? Mixture(Belt.Array.zip(distributions, weights))->DistributionOperation.run(~env)
|
? Mixture(Belt.Array.zip(distributions, weights))->DistributionOperation.run(~env)
|
||||||
|
@ -119,7 +107,7 @@ module Helpers = {
|
||||||
|
|
||||||
let mixtureWithDefaultWeights = (
|
let mixtureWithDefaultWeights = (
|
||||||
distributions: array<DistributionTypes.genericDist>,
|
distributions: array<DistributionTypes.genericDist>,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
): DistributionOperation.outputType => {
|
): DistributionOperation.outputType => {
|
||||||
let length = E.A.length(distributions)
|
let length = E.A.length(distributions)
|
||||||
let weights = Belt.Array.make(length, 1.0 /. Belt.Int.toFloat(length))
|
let weights = Belt.Array.make(length, 1.0 /. Belt.Int.toFloat(length))
|
||||||
|
@ -128,7 +116,7 @@ module Helpers = {
|
||||||
|
|
||||||
let mixture = (
|
let mixture = (
|
||||||
args: array<internalExpressionValue>,
|
args: array<internalExpressionValue>,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
): DistributionOperation.outputType => {
|
): DistributionOperation.outputType => {
|
||||||
let error = (err: string): DistributionOperation.outputType =>
|
let error = (err: string): DistributionOperation.outputType =>
|
||||||
err->DistributionTypes.ArgumentError->GenDistError
|
err->DistributionTypes.ArgumentError->GenDistError
|
||||||
|
@ -167,20 +155,6 @@ module Helpers = {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let klDivergenceWithPrior = (
|
|
||||||
prediction: DistributionTypes.genericDist,
|
|
||||||
answer: DistributionTypes.genericDist,
|
|
||||||
prior: DistributionTypes.genericDist,
|
|
||||||
env: DistributionOperation.env,
|
|
||||||
) => {
|
|
||||||
let term1 = DistributionOperation.Constructors.klDivergence(~env, prediction, answer)
|
|
||||||
let term2 = DistributionOperation.Constructors.klDivergence(~env, prior, answer)
|
|
||||||
switch E.R.merge(term1, term2)->E.R2.fmap(((a, b)) => a -. b) {
|
|
||||||
| Ok(x) => x->DistributionOperation.Float->Some
|
|
||||||
| Error(_) => None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
module SymbolicConstructors = {
|
module SymbolicConstructors = {
|
||||||
|
@ -199,7 +173,7 @@ module SymbolicConstructors = {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperation.env): option<
|
let dispatchToGenericOutput = (call: IEV.functionCall, env: GenericDist.env): option<
|
||||||
DistributionOperation.outputType,
|
DistributionOperation.outputType,
|
||||||
> => {
|
> => {
|
||||||
let (fnName, args) = call
|
let (fnName, args) = call
|
||||||
|
@ -239,35 +213,6 @@ let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperatio
|
||||||
~env,
|
~env,
|
||||||
)->Some
|
)->Some
|
||||||
| ("normalize", [IEvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist, ~env)
|
| ("normalize", [IEvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist, ~env)
|
||||||
| ("klDivergence", [IEvDistribution(prediction), IEvDistribution(answer)]) =>
|
|
||||||
Some(DistributionOperation.run(FromDist(ToScore(KLDivergence(answer)), prediction), ~env))
|
|
||||||
| (
|
|
||||||
"klDivergence",
|
|
||||||
[IEvDistribution(prediction), IEvDistribution(answer), IEvDistribution(prior)],
|
|
||||||
) =>
|
|
||||||
Helpers.klDivergenceWithPrior(prediction, answer, prior, env)
|
|
||||||
| (
|
|
||||||
"logScoreWithPointAnswer",
|
|
||||||
[IEvDistribution(prediction), IEvNumber(answer), IEvDistribution(prior)],
|
|
||||||
)
|
|
||||||
| (
|
|
||||||
"logScoreWithPointAnswer",
|
|
||||||
[
|
|
||||||
IEvDistribution(prediction),
|
|
||||||
IEvDistribution(Symbolic(#Float(answer))),
|
|
||||||
IEvDistribution(prior),
|
|
||||||
],
|
|
||||||
) =>
|
|
||||||
DistributionOperation.run(
|
|
||||||
FromDist(ToScore(LogScore(answer, prior->Some)), prediction),
|
|
||||||
~env,
|
|
||||||
)->Some
|
|
||||||
| ("logScoreWithPointAnswer", [IEvDistribution(prediction), IEvNumber(answer)])
|
|
||||||
| (
|
|
||||||
"logScoreWithPointAnswer",
|
|
||||||
[IEvDistribution(prediction), IEvDistribution(Symbolic(#Float(answer)))],
|
|
||||||
) =>
|
|
||||||
DistributionOperation.run(FromDist(ToScore(LogScore(answer, None)), prediction), ~env)->Some
|
|
||||||
| ("isNormalized", [IEvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist, ~env)
|
| ("isNormalized", [IEvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist, ~env)
|
||||||
| ("toPointSet", [IEvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist, ~env)
|
| ("toPointSet", [IEvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist, ~env)
|
||||||
| ("scaleLog", [IEvDistribution(dist)]) =>
|
| ("scaleLog", [IEvDistribution(dist)]) =>
|
||||||
|
|
|
@ -24,7 +24,7 @@ module ScientificUnit = {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
|
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
|
||||||
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
||||||
> => {
|
> => {
|
||||||
switch call {
|
switch call {
|
||||||
|
|
|
@ -8,7 +8,7 @@ The below few seem to work fine. In the future there's definitely more work to d
|
||||||
*/
|
*/
|
||||||
|
|
||||||
@genType
|
@genType
|
||||||
type samplingParams = DistributionOperation.env
|
type samplingParams = GenericDist.env
|
||||||
|
|
||||||
@genType
|
@genType
|
||||||
type genericDist = DistributionTypes.genericDist
|
type genericDist = DistributionTypes.genericDist
|
||||||
|
|
|
@ -547,6 +547,7 @@ module A = {
|
||||||
let init = Array.init
|
let init = Array.init
|
||||||
let reduce = Belt.Array.reduce
|
let reduce = Belt.Array.reduce
|
||||||
let reducei = Belt.Array.reduceWithIndex
|
let reducei = Belt.Array.reduceWithIndex
|
||||||
|
let some = Belt.Array.some
|
||||||
let isEmpty = r => length(r) < 1
|
let isEmpty = r => length(r) < 1
|
||||||
let stableSortBy = Belt.SortArray.stableSortBy
|
let stableSortBy = Belt.SortArray.stableSortBy
|
||||||
let toNoneIfEmpty = r => isEmpty(r) ? None : Some(r)
|
let toNoneIfEmpty = r => isEmpty(r) ? None : Some(r)
|
||||||
|
|
|
@ -327,8 +327,8 @@ module Zipped = {
|
||||||
module PointwiseCombination = {
|
module PointwiseCombination = {
|
||||||
// t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
|
// t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
|
||||||
let combine: (
|
let combine: (
|
||||||
(float, float) => result<float, Operation.Error.t>,
|
|
||||||
interpolator,
|
interpolator,
|
||||||
|
(float, float) => result<float, Operation.Error.t>,
|
||||||
T.t,
|
T.t,
|
||||||
T.t,
|
T.t,
|
||||||
) => result<T.t, Operation.Error.t> = %raw(`
|
) => result<T.t, Operation.Error.t> = %raw(`
|
||||||
|
@ -337,7 +337,7 @@ module PointwiseCombination = {
|
||||||
// and interpolates the value on the other side, thus accumulating xs and ys.
|
// and interpolates the value on the other side, thus accumulating xs and ys.
|
||||||
// This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.
|
// This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.
|
||||||
|
|
||||||
function(fn, interpolator, t1, t2) {
|
function(interpolator, fn, t1, t2) {
|
||||||
let t1n = t1.xs.length;
|
let t1n = t1.xs.length;
|
||||||
let t2n = t2.xs.length;
|
let t2n = t2.xs.length;
|
||||||
let outX = [];
|
let outX = [];
|
||||||
|
@ -399,11 +399,11 @@ module PointwiseCombination = {
|
||||||
This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
|
This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
|
||||||
*/
|
*/
|
||||||
let combineAlongSupportOfSecondArgument0: (
|
let combineAlongSupportOfSecondArgument0: (
|
||||||
(float, float) => result<float, Operation.Error.t>,
|
|
||||||
interpolator,
|
interpolator,
|
||||||
|
(float, float) => result<float, Operation.Error.t>,
|
||||||
T.t,
|
T.t,
|
||||||
T.t,
|
T.t,
|
||||||
) => result<T.t, Operation.Error.t> = (fn, interpolator, t1, t2) => {
|
) => result<T.t, Operation.Error.t> = (interpolator, fn, t1, t2) => {
|
||||||
let newYs = []
|
let newYs = []
|
||||||
let newXs = []
|
let newXs = []
|
||||||
let (l1, l2) = (E.A.length(t1.xs), E.A.length(t2.xs))
|
let (l1, l2) = (E.A.length(t1.xs), E.A.length(t2.xs))
|
||||||
|
@ -496,29 +496,9 @@ module PointwiseCombination = {
|
||||||
let newYs = E.A.fmap(x => XtoY.linear(x, t), newXs)
|
let newYs = E.A.fmap(x => XtoY.linear(x, t), newXs)
|
||||||
{xs: newXs, ys: newYs}
|
{xs: newXs, ys: newYs}
|
||||||
}
|
}
|
||||||
// This function is used for klDivergence
|
|
||||||
let combineAlongSupportOfSecondArgument: (
|
|
||||||
(float, float) => result<float, Operation.Error.t>,
|
|
||||||
T.t,
|
|
||||||
T.t,
|
|
||||||
) => result<T.t, Operation.Error.t> = (fn, prediction, answer) => {
|
|
||||||
let combineWithFn = (answerX: float, i: int) => {
|
|
||||||
let answerY = answer.ys[i]
|
|
||||||
let predictionY = XtoY.linear(answerX, prediction)
|
|
||||||
fn(predictionY, answerY)
|
|
||||||
}
|
|
||||||
let newYsWithError = Js.Array.mapi((x, i) => combineWithFn(x, i), answer.xs)
|
|
||||||
let newYsOrError = E.A.R.firstErrorOrOpen(newYsWithError)
|
|
||||||
let result = switch newYsOrError {
|
|
||||||
| Ok(a) => Ok({xs: answer.xs, ys: a})
|
|
||||||
| Error(b) => Error(b)
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
let addCombine = (interpolator: interpolator, t1: T.t, t2: T.t): T.t =>
|
let addCombine = (interpolator: interpolator, t1: T.t, t2: T.t): T.t =>
|
||||||
combine((a, b) => Ok(a +. b), interpolator, t1, t2)->E.R.toExn(
|
combine(interpolator, (a, b) => Ok(a +. b), t1, t2)->E.R.toExn(
|
||||||
"Add operation should never fail",
|
"Add operation should never fail",
|
||||||
_,
|
_,
|
||||||
)
|
)
|
||||||