commit bdbb86aa9e
parent b4a1137019

logScore on records now interprets almost every which way we're interested in
Value: [1e-3 to 9e-1]
@@ -148,8 +148,8 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
     GenericDist.Score.klDivergence(dist, t2, ~toPointSetFn)
     ->E.R2.fmap(r => Float(r))
     ->OutputLocal.fromResult
-  | ToScore(LogScore(prediction, answer)) =>
-    GenericDist.Score.logScoreWithPointResolution(Some(dist), prediction, answer, ~toPointSetFn)
+  | ToScore(LogScore(answer, prior)) =>
+    GenericDist.Score.logScoreWithPointResolution(dist, answer, prior, ~toPointSetFn)
     ->E.R2.fmap(r => Float(r))
     ->OutputLocal.fromResult
   | ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
@@ -266,8 +266,8 @@ module Constructors = {
   let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
   let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
   let klDivergence = (~env, dist1, dist2) => C.klDivergence(dist1, dist2)->run(~env)->toFloatR
-  let logScore = (~env, prior, prediction, answer) =>
-    C.logScoreWithPointResolution(prior, prediction, answer)->run(~env)->toFloatR
+  let logScoreWithPointResolution = (~env, prediction, answer, prior) =>
+    C.logScoreWithPointResolution(prediction, answer, prior)->run(~env)->toFloatR
   let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
   let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
   let fromSamples = (~env, xs) => C.fromSamples(xs)->run(~env)->toDistR
@@ -62,7 +62,12 @@ module Constructors: {
   @genType
   let klDivergence: (~env: env, genericDist, genericDist) => result<float, error>
   @genType
-  let logScore: (~env: env, genericDist, genericDist, float) => result<float, error>
+  let logScoreWithPointResolution: (
+    ~env: env,
+    genericDist,
+    float,
+    option<genericDist>,
+  ) => result<float, error>
   @genType
   let toPointSet: (~env: env, genericDist) => result<genericDist, error>
   @genType
@@ -91,7 +91,7 @@ module DistributionOperation = {
     | ToString
     | ToSparkline(int)

-  type toScore = KLDivergence(genericDist) | LogScore(genericDist, float)
+  type toScore = KLDivergence(genericDist) | LogScore(float, option<genericDist>)

   type fromDist =
     | ToFloat(toFloat)
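The LogScore payload is now the resolved answer point plus an optional prior; the prediction rides along as the FromDist distribution. A hedged sketch of constructing the two forms (variable names assumed; compare the dispatch hunk near the end of the diff):

    // Hypothetical values: `prediction` and `prior` are genericDist values
    // already in scope, 4.5 is the point at which the question resolved.
    let withPrior = FromDist(ToScore(LogScore(4.5, Some(prior))), prediction)
    let noPrior = FromDist(ToScore(LogScore(4.5, None)), prediction)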
@@ -120,7 +120,7 @@ module DistributionOperation = {
   | ToFloat(#Sample) => `sample`
   | ToFloat(#IntegralSum) => `integralSum`
   | ToScore(KLDivergence(_)) => `klDivergence`
-  | ToScore(LogScore(_, x)) => `logScore against ${E.Float.toFixed(x)}`
+  | ToScore(LogScore(x, _)) => `logScore against ${E.Float.toFixed(x)}`
   | ToDist(Normalize) => `normalize`
   | ToDist(ToPointSet) => `toPointSet`
   | ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
@@ -68,18 +68,18 @@ module Score = {
   }

   let logScoreWithPointResolution = (
-    prior,
     prediction,
     answer,
+    prior,
     ~toPointSetFn: toPointSetFn,
   ): result<float, error> => {
     switch prior {
     | Some(prior') =>
       E.R.merge(toPointSetFn(prior'), toPointSetFn(prediction))->E.R.bind(((a, b)) =>
         PointSetDist.T.logScoreWithPointResolution(
-          a->Some,
           b,
           answer,
+          a->Some,
         )->E.R2.errMap(x => DistributionTypes.OperationError(x))
       )
     | None =>
@@ -87,9 +87,9 @@ module Score = {
       ->toPointSetFn
       ->E.R.bind(x =>
         PointSetDist.T.logScoreWithPointResolution(
-          None,
           x,
           answer,
+          None,
         )->E.R2.errMap(x => DistributionTypes.OperationError(x))
       )
   }
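Both branches delegate the arithmetic to PointSetDist.T.logScoreWithPointResolution, which is backed by PointSetDist_Scoring (not shown in this diff). A standalone sketch under the common convention score = -log(prediction(answer) / prior(answer)), falling back to -log(prediction(answer)) when no prior is given; the repo's actual sign convention and error handling may differ:

    // Minimal sketch, not the repo's implementation.
    let logScoreSketch = (
      ~predictionPdf: float => float,
      ~answer: float,
      ~priorPdf: option<float => float>,
    ): float =>
      switch priorPdf {
      | Some(q) => -.Js.Math.log(predictionPdf(answer) /. q(answer))
      | None => -.Js.Math.log(predictionPdf(answer))
      }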
@@ -26,9 +26,9 @@ let toFloatOperation: (
 module Score: {
   let klDivergence: (t, t, ~toPointSetFn: toPointSetFn) => result<float, error>
   let logScoreWithPointResolution: (
-    option<t>,
     t,
     float,
+    option<t>,
     ~toPointSetFn: toPointSetFn,
   ) => result<float, error>
 }
@@ -279,7 +279,7 @@ module T = Dist({
     )
     newShape->E.R2.fmap(x => x->make->integralEndY)
   }
-  let logScoreWithPointResolution = (prior: option<t>, prediction: t, answer: float) => {
+  let logScoreWithPointResolution = (prediction: t, answer: float, prior: option<t>) => {
     let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
     let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
     PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)
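priorPdf and predictionPdf here are pdf lookups over the point set's xyShape, evaluated via XYShape.XtoY.linear. A self-contained sketch of such a lookup, assuming strictly increasing xs and zero density outside the support (the real XYShape API may differ):

    // Hedged sketch: linearly interpolate a pdf stored as sorted (x, y) points.
    let linearPdf = (x: float, xs: array<float>, ys: array<float>): float => {
      let n = Js.Array2.length(xs)
      if n < 2 || x < xs[0] || x > xs[n - 1] {
        0.0
      } else {
        // advance to the first knot at or past the query point
        let i = ref(1)
        while xs[i.contents] < x {
          i := i.contents + 1
        }
        let (x0, x1) = (xs[i.contents - 1], xs[i.contents])
        let (y0, y1) = (ys[i.contents - 1], ys[i.contents])
        y0 +. (y1 -. y0) *. (x -. x0) /. (x1 -. x0)
      }
    }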
@@ -229,7 +229,7 @@ module T = Dist({
       answer,
     )->E.R2.fmap(integralEndY)
   }
-  let logScoreWithPointResolution = (prior: option<t>, prediction: t, answer: float) => {
+  let logScoreWithPointResolution = (prediction: t, answer: float, prior: option<t>) => {
     Error(Operation.NotYetImplemented)
   }
 })
@@ -34,7 +34,7 @@ module type dist = {
   let mean: t => float
   let variance: t => float
   let klDivergence: (t, t) => result<float, Operation.Error.t>
-  let logScoreWithPointResolution: (option<t>, t, float) => result<float, Operation.Error.t>
+  let logScoreWithPointResolution: (t, float, option<t>) => result<float, Operation.Error.t>
 }

 module Dist = (T: dist) => {
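Because this signature sits in the dist module type, every implementation fed to the Dist functor (the Continuous, Mixed, and PointSetDist hunks around this one) has to flip its argument order in the same commit, or the functor application stops compiling. A toy illustration with hypothetical minimal types:

    module type DistToy = {
      type t
      let logScoreWithPointResolution: (t, float, option<t>) => result<float, string>
    }

    // Any implementation still using the old (option<t>, t, float) order now
    // fails to satisfy the signature at functor application time.
    module MakeToy = (T: DistToy) => {
      let score = T.logScoreWithPointResolution
    }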
@@ -306,7 +306,7 @@ module T = Dist({
     let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
     E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
   }
-  let logScoreWithPointResolution = (prior: option<t>, prediction: t, answer: float) => {
+  let logScoreWithPointResolution = (prediction: t, answer: float, prior: option<t>) => {
     Error(Operation.NotYetImplemented)
   }
 })
@@ -203,11 +203,11 @@ module T = Dist({
     | (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
     }

-  let logScoreWithPointResolution = (prior: option<t>, prediction: t, answer: float) => {
+  let logScoreWithPointResolution = (prediction: t, answer: float, prior: option<t>) => {
     switch (prior, prediction) {
     | (Some(Continuous(t1)), Continuous(t2)) =>
-      Continuous.T.logScoreWithPointResolution(t1->Some, t2, answer)
-    | (None, Continuous(t2)) => Continuous.T.logScoreWithPointResolution(None, t2, answer)
+      Continuous.T.logScoreWithPointResolution(t2, answer, t1->Some)
+    | (None, Continuous(t2)) => Continuous.T.logScoreWithPointResolution(t2, answer, None)
     | _ => Error(Operation.NotYetImplemented)
     }
   }
@@ -251,27 +251,34 @@ let rec dispatchToGenericOutput = (call: ExpressionValue.functionCall, _environment
   | ("normalize", [EvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist)
   | ("klDivergence", [EvDistribution(a), EvDistribution(b)]) =>
     Some(runGenericOperation(FromDist(ToScore(KLDivergence(b)), a)))
-  | ("logScore", [EvDistribution(prior), EvDistribution(prediction), EvNumber(answer)])
   | (
-      "logScore",
-      [EvDistribution(prior), EvDistribution(prediction), EvDistribution(Symbolic(#Float(answer)))],
+      "logScoreWithPointResolution",
+      [EvDistribution(prediction), EvNumber(answer), EvDistribution(prior)],
+    )
+  | (
+      "logScoreWithPointResolution",
+      [EvDistribution(prediction), EvDistribution(Symbolic(#Float(answer))), EvDistribution(prior)],
     ) =>
-    runGenericOperation(FromDist(ToScore(LogScore(prediction, answer)), prior))->Some
-  | ("logScore", [EvRecord(r)]) =>
-    recurRecordArgs("logScore", ["prior", "prediction", "answer"], r, _environment)
-  | ("increment", [EvNumber(x)]) => (x +. 1.0)->DistributionOperation.Float->Some
-  | ("increment", [EvRecord(r)]) => recurRecordArgs("increment", ["incrementee"], r, _environment)
-  | ("logScoreAgainstImproperPrior", [EvDistribution(prediction), EvNumber(answer)])
+    runGenericOperation(FromDist(ToScore(LogScore(answer, prior->Some)), prediction))->Some
+  | ("logScoreWithPointResolution", [EvDistribution(prediction), EvNumber(answer)])
   | (
-      "logScoreAgainstImproperPrior",
+      "logScoreWithPointResolution",
      [EvDistribution(prediction), EvDistribution(Symbolic(#Float(answer)))],
    ) =>
-    runGenericOperation(
-      FromDist(
-        ToScore(LogScore(prediction, answer)),
-        Helpers.constructNonNormalizedPointSet(~supportOf=prediction, _ => 1.0),
+    runGenericOperation(FromDist(ToScore(LogScore(answer, None)), prediction))->Some
+  | ("logScore", [EvRecord(r)]) =>
+    [
+      recurRecordArgs(
+        "logScoreWithPointResolution",
+        ["estimate", "answer", "prior"],
+        r,
+        _environment,
       ),
-    )->Some
+      recurRecordArgs("klDivergence", ["estimate", "answer"], r, _environment),
+      recurRecordArgs("logScoreWithPointResolution", ["estimate", "answer"], r, _environment),
+    ]->E.A.O.firstSome
+  | ("increment", [EvNumber(x)]) => (x +. 1.0)->DistributionOperation.Float->Some // this tests recurRecordArgs function
+  | ("increment", [EvRecord(r)]) => recurRecordArgs("increment", ["incrementee"], r, _environment)
   | ("isNormalized", [EvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist)
   | ("toPointSet", [EvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist)
   | ("scaleLog", [EvDistribution(dist)]) =>
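The record form of logScore now tries three argument shapes in order: {estimate, answer, prior}, then {estimate, answer} re-dispatched as klDivergence (a distribution answer), then {estimate, answer} re-dispatched as logScoreWithPointResolution with no prior (a number answer); E.A.O.firstSome keeps the first attempt that matches. recurRecordArgs, defined elsewhere in this file, appears to turn named record fields back into a positional call. Note the array literal is eager: all three attempts run before firstSome picks one, where a lazy chain of switches would short-circuit. A toy of just the selection step:

    // Stand-ins for the three dispatch attempts; only firstSome's selection
    // behavior is illustrated here, not the real recurRecordArgs calls.
    let attempts = [None, Some("klDivergence"), Some("logScoreWithPointResolution")]
    // firstSome(attempts) == Some("klDivergence"): first Some in array order wins.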
@@ -631,6 +631,17 @@ module A = {
       }
     }
   }
+  let rec firstSome = (optionals: array<option<'a>>): option<'a> => {
+    let optionals' = optionals->Belt.List.fromArray
+    switch optionals' {
+    | list{} => None
+    | list{x, ...xs} =>
+      switch x {
+      | Some(_) => x
+      | None => xs->Belt.List.toArray->firstSome
+      }
+    }
+  }
 }

 module R = {
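A standalone check of the new helper's semantics, restated outside the E module so it runs on its own: the first Some in array order wins, and an empty array yields None.

    let rec firstSome = (optionals: array<option<'a>>): option<'a> =>
      switch optionals->Belt.List.fromArray {
      | list{} => None
      | list{Some(x), ..._} => Some(x)
      | list{None, ...xs} => xs->Belt.List.toArray->firstSome
      }

    Js.log(firstSome([None, Some(2), Some(3)])) // logs 2, i.e. Some(2)
    Js.log(firstSome(([]: array<option<int>>))) // logs undefined, i.e. None

Converting array to list and back on each recursive step is quadratic, but the candidate arrays in the dispatch above have at most three elements.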