More substantial CR; more named args
Value: [1e-6 to 1e-2]
parent 1d2bb556de
commit 9e7319ed57

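The diff below threads named arguments for `logScoreWithPointResolution` through the whole chain, from the `Constructors` wrappers down to the point-set implementations, and renames the error returned for a bad pdf value. As a rough sketch of the calling convention this enables (the module path, binding names, and values here are illustrative assumptions, not taken from the diff):

// Hypothetical call site; assumes `env` and `predictionDist` are in scope.
let score = DistributionOperation.Constructors.logScoreWithPointResolution(
  ~env,
  ~prediction=predictionDist,
  ~answer=2.5,
  ~prior=None,
) // result<float, error>
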
@@ -150,7 +150,12 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
       ->E.R2.fmap(r => Float(r))
       ->OutputLocal.fromResult
     | ToScore(LogScore(answer, prior)) =>
-      GenericDist.Score.logScoreWithPointResolution(dist, answer, prior, ~toPointSetFn)
+      GenericDist.Score.logScoreWithPointResolution(
+        ~prediction=dist,
+        ~answer,
+        ~prior,
+        ~toPointSetFn,
+      )
       ->E.R2.fmap(r => Float(r))
       ->OutputLocal.fromResult
     | ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool

@@ -267,8 +272,12 @@ module Constructors = {
   let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
   let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
   let klDivergence = (~env, dist1, dist2) => C.klDivergence(dist1, dist2)->run(~env)->toFloatR
-  let logScoreWithPointResolution = (~env, prediction, answer, prior) =>
-    C.logScoreWithPointResolution(prediction, answer, prior)->run(~env)->toFloatR
+  let logScoreWithPointResolution = (
+    ~env,
+    ~prediction: DistributionTypes.genericDist,
+    ~answer: float,
+    ~prior: option<DistributionTypes.genericDist>,
+  ) => C.logScoreWithPointResolution(~prediction, ~answer, ~prior)->run(~env)->toFloatR
   let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
   let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
   let fromSamples = (~env, xs) => C.fromSamples(xs)->run(~env)->toDistR

@@ -65,9 +65,9 @@ module Constructors: {
   @genType
   let logScoreWithPointResolution: (
     ~env: env,
-    genericDist,
-    float,
-    option<genericDist>,
+    ~prediction: genericDist,
+    ~answer: float,
+    ~prior: option<genericDist>,
   ) => result<float, error>
   @genType
   let toPointSet: (~env: env, genericDist) => result<genericDist, error>

@@ -162,9 +162,9 @@ module Constructors = {
   let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
   let inspect = (dist): t => FromDist(ToDist(Inspect), dist)
   let klDivergence = (dist1, dist2): t => FromDist(ToScore(KLDivergence(dist2)), dist1)
-  let logScoreWithPointResolution = (prior, prediction, answer): t => FromDist(
-    ToScore(LogScore(prediction, answer)),
-    prior,
+  let logScoreWithPointResolution = (~prediction, ~answer, ~prior): t => FromDist(
+    ToScore(LogScore(answer, prior)),
+    prediction,
   )
   let scalePower = (dist, n): t => FromDist(ToDist(Scale(#Power, n)), dist)
   let scaleLogarithm = (dist, n): t => FromDist(ToDist(Scale(#Logarithm, n)), dist)

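Besides the switch to named arguments, the hunk above also reorders the payload: the distribution wrapped by `FromDist` is now the prediction, and `LogScore` carries the answer plus the optional prior, matching the `| ToScore(LogScore(answer, prior))` pattern in the first hunk. A minimal sketch of constructing such an operation value (binding names are hypothetical):

// Hypothetical usage of the constructor above; `predictionDist` is assumed in scope.
let op = Constructors.logScoreWithPointResolution(
  ~prediction=predictionDist,
  ~answer=2.5,
  ~prior=None,
)
// op is FromDist(ToScore(LogScore(2.5, None)), predictionDist)
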
@@ -68,18 +68,21 @@ module Score = {
   }

   let logScoreWithPointResolution = (
-    prediction,
-    answer,
-    prior,
+    ~prediction: DistributionTypes.genericDist,
+    ~answer: float,
+    ~prior: option<DistributionTypes.genericDist>,
     ~toPointSetFn: toPointSetFn,
   ): result<float, error> => {
     switch prior {
     | Some(prior') =>
-      E.R.merge(toPointSetFn(prior'), toPointSetFn(prediction))->E.R.bind(((a, b)) =>
+      E.R.merge(toPointSetFn(prior'), toPointSetFn(prediction))->E.R.bind(((
+        prior'',
+        prediction'',
+      )) =>
         PointSetDist.T.logScoreWithPointResolution(
-          b,
-          answer,
-          a->Some,
+          ~prediction=prediction'',
+          ~answer,
+          ~prior=prior''->Some,
         )->E.R2.errMap(x => DistributionTypes.OperationError(x))
       )
     | None =>

@@ -87,9 +90,9 @@ module Score = {
       ->toPointSetFn
       ->E.R.bind(x =>
         PointSetDist.T.logScoreWithPointResolution(
-          x,
-          answer,
-          None,
+          ~prediction=x,
+          ~answer,
+          ~prior=None,
         )->E.R2.errMap(x => DistributionTypes.OperationError(x))
       )
   }

@@ -26,9 +26,9 @@ let toFloatOperation: (
 module Score: {
   let klDivergence: (t, t, ~toPointSetFn: toPointSetFn) => result<float, error>
   let logScoreWithPointResolution: (
-    t,
-    float,
-    option<t>,
+    ~prediction: t,
+    ~answer: float,
+    ~prior: option<t>,
     ~toPointSetFn: toPointSetFn,
   ) => result<float, error>
 }

@@ -279,7 +279,7 @@ module T = Dist({
     )
     newShape->E.R2.fmap(x => x->make->integralEndY)
   }
-  let logScoreWithPointResolution = (prediction: t, answer: float, prior: option<t>) => {
+  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
     let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
     let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
     PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)

@@ -229,7 +229,7 @@ module T = Dist({
       answer,
     )->E.R2.fmap(integralEndY)
   }
-  let logScoreWithPointResolution = (prediction: t, answer: float, prior: option<t>) => {
+  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
     Error(Operation.NotYetImplemented)
   }
 })

@@ -34,7 +34,11 @@ module type dist = {
   let mean: t => float
   let variance: t => float
   let klDivergence: (t, t) => result<float, Operation.Error.t>
-  let logScoreWithPointResolution: (t, float, option<t>) => result<float, Operation.Error.t>
+  let logScoreWithPointResolution: (
+    ~prediction: t,
+    ~answer: float,
+    ~prior: option<t>,
+  ) => result<float, Operation.Error.t>
 }

 module Dist = (T: dist) => {

@@ -306,7 +306,7 @@ module T = Dist({
     let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
     E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
   }
-  let logScoreWithPointResolution = (prediction: t, answer: float, prior: option<t>) => {
+  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
     Error(Operation.NotYetImplemented)
   }
 })

@@ -203,11 +203,12 @@ module T = Dist({
     | (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
     }

-  let logScoreWithPointResolution = (prediction: t, answer: float, prior: option<t>) => {
+  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
     switch (prior, prediction) {
     | (Some(Continuous(t1)), Continuous(t2)) =>
-      Continuous.T.logScoreWithPointResolution(t2, answer, t1->Some)
-    | (None, Continuous(t2)) => Continuous.T.logScoreWithPointResolution(t2, answer, None)
+      Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=t1->Some)
+    | (None, Continuous(t2)) =>
+      Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=None)
     | _ => Error(Operation.NotYetImplemented)
     }
   }

@@ -24,7 +24,7 @@ module LogScoreWithPointResolution = {
   ): result<float, Operation.Error.t> => {
     let numerator = answer->predictionPdf
     if numerator < 0.0 {
-      Operation.ComplexNumberError->Error
+      Operation.PdfInvalidError->Error
     } else if numerator == 0.0 {
       infinity->Ok
     } else {

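For context on the rename above (see also the `Operation` hunks at the end of the diff): a negative value from `predictionPdf` at the answer point means the pdf itself is invalid, not that an unreachable branch was hit, while a zero value produces an infinitely bad score. A standalone sketch of that guard, with hypothetical names and a placeholder where the real scoring formula would go:

// Illustrative guard only; not the library's actual scoring code.
let guardPdfValue = (pdfValue: float): result<float, string> =>
  if pdfValue < 0.0 {
    Error("pdf is invalid") // corresponds to PdfInvalidError
  } else if pdfValue == 0.0 {
    Ok(infinity) // zero density at the answer: infinitely bad log score
  } else {
    Ok(pdfValue) // placeholder; the real code derives the log score from this value
  }
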
@@ -162,24 +162,6 @@ module Helpers = {
       }
     }
   }
-  let constructNonNormalizedPointSet = (
-    ~supportOf: DistributionTypes.genericDist,
-    fn: float => float,
-    env: DistributionOperation.env,
-  ): DistributionTypes.genericDist => {
-    let cdf = x => toFloatFn(#Cdf(x), supportOf, ~env)
-    let leftEndpoint = cdf(MagicNumbers.Epsilon.ten)
-    let rightEndpoint = cdf(1.0 -. MagicNumbers.Epsilon.ten)
-    let xs = switch (leftEndpoint, rightEndpoint) {
-    | (Some(Float(a)), Some(Float(b))) =>
-      E.A.Floats.range(a, b, MagicNumbers.Environment.defaultXYPointLength)
-    | _ => []
-    }
-    {xs: xs, ys: E.A.fmap(fn, xs)}
-    ->Continuous.make
-    ->PointSetTypes.Continuous
-    ->DistributionTypes.PointSet
-  }

   let klDivergenceWithPrior = (
     prediction: DistributionTypes.genericDist,

@@ -620,6 +620,7 @@ module A = {
     | Some(o) => o
     | None => []
     }
+  // Returns `None` if there are no non-`None` elements
   let rec arrSomeToSomeArr = (optionals: array<option<'a>>): option<array<'a>> => {
     let optionals' = optionals->Belt.List.fromArray
     switch optionals' {

@@ -631,17 +632,7 @@ module A = {
       }
     }
   }
-  let rec firstSome = (optionals: array<option<'a>>): option<'a> => {
-    let optionals' = optionals->Belt.List.fromArray
-    switch optionals' {
-    | list{} => None
-    | list{x, ...xs} =>
-      switch x {
-      | Some(_) => x
-      | None => xs->Belt.List.toArray->firstSome
-      }
-    }
-  }
+  let firstSome = x => Belt.Array.getBy(x, O.isSome)
 }

 module R = {

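A note on the simplification above: `Belt.Array.getBy(xs, pred)` returns the first element of `xs` that satisfies `pred`, so, assuming `O.isSome` is the usual option predicate, the new `firstSome` yields the first non-`None` element, wrapped once more by `getBy`'s own option. A hedged usage sketch:

// Illustrative only; relies on Belt.Array.getBy returning the first matching element.
// firstSome([None, Some(1), Some(2)]) == Some(Some(1))
// firstSome([None, None]) == None
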
@@ -55,7 +55,7 @@ type operationError =
   | ComplexNumberError
   | InfinityError
   | NegativeInfinityError
-  | LogicallyInconsistentPathwayError
+  | PdfInvalidError
   | NotYetImplemented // should be removed when `klDivergence` for mixed and discrete is implemented.

 @genType

@@ -69,7 +69,7 @@ module Error = {
     | ComplexNumberError => "Operation returned complex result"
     | InfinityError => "Operation returned positive infinity"
     | NegativeInfinityError => "Operation returned negative infinity"
-    | LogicallyInconsistentPathwayError => "This pathway should have been logically unreachable"
+    | PdfInvalidError => "This Pdf is invalid"
     | NotYetImplemented => "This pathway is not yet implemented"
     }
   }