intermediate commit

Value: [1e-10 to 1e-5]
Quinn Dougherty 2022-05-23 12:07:03 -04:00
parent bafcb4f7b8
commit 3a56d6fca4
6 changed files with 73 additions and 66 deletions

View File

@@ -271,19 +271,19 @@ module T = Dist({
   let variance = (t: t): float =>
     XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
-  let klDivergence = (prediction: t, answer: t) => {
-    let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
-      PointSetDist_Scoring.KLDivergence.integrand,
-      prediction.xyShape,
-      answer.xyShape,
-    )
-    newShape->E.R2.fmap(x => x->make->integralEndY)
-  }
-  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
-    let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
-    let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
-    PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)
-  }
+  // let klDivergence = (prediction: t, answer: t) => {
+  //   let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
+  //     PointSetDist_Scoring.KLDivergence.integrand,
+  //     prediction.xyShape,
+  //     answer.xyShape,
+  //   )
+  //   newShape->E.R2.fmap(x => x->make->integralEndY)
+  // }
+  // let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
+  //   let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
+  //   let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
+  //   PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)
+  // }
 })
 let isNormalized = (t: t): bool => {
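For reference, the klDivergence being commented out above computes the usual Kullback-Leibler divergence of the prediction from the answer: the pointwise integrand from PointSetDist_Scoring.KLDivergence is combined along the support of the second argument (the answer) and then integrated. A sketch of the intended quantity, with names matching the arguments above:

\[
  \mathrm{KL}(\mathrm{answer} \,\|\, \mathrm{prediction})
    = \int \mathrm{answer}(x)\,\log\frac{\mathrm{answer}(x)}{\mathrm{prediction}(x)}\,dx
\]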

View File

@@ -222,14 +222,14 @@ module T = Dist({
     XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
   }
-  let klDivergence = (prediction: t, answer: t) => {
-    combinePointwise(
-      ~fn=PointSetDist_Scoring.KLDivergence.integrand,
-      prediction,
-      answer,
-    )->E.R2.fmap(integralEndY)
-  }
-  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
-    Error(Operation.NotYetImplemented)
-  }
+  // let klDivergence = (prediction: t, answer: t) => {
+  //   combinePointwise(
+  //     ~fn=PointSetDist_Scoring.KLDivergence.integrand,
+  //     prediction,
+  //     answer,
+  //   )->E.R2.fmap(integralEndY)
+  // }
+  // let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
+  //   Error(Operation.NotYetImplemented)
+  // }
 })

View File

@@ -33,12 +33,12 @@ module type dist = {
   let mean: t => float
   let variance: t => float
-  let klDivergence: (t, t) => result<float, Operation.Error.t>
-  let logScoreWithPointResolution: (
-    ~prediction: t,
-    ~answer: float,
-    ~prior: option<t>,
-  ) => result<float, Operation.Error.t>
+  // let klDivergence: (t, t) => result<float, Operation.Error.t>
+  // let logScoreWithPointResolution: (
+  //   ~prediction: t,
+  //   ~answer: float,
+  //   ~prior: option<t>,
+  // ) => result<float, Operation.Error.t>
 }
 module Dist = (T: dist) => {
@@ -61,8 +61,8 @@ module Dist = (T: dist) => {
   let mean = T.mean
   let variance = T.variance
   let integralEndY = T.integralEndY
-  let klDivergence = T.klDivergence
-  let logScoreWithPointResolution = T.logScoreWithPointResolution
+  // let klDivergence = T.klDivergence
+  // let logScoreWithPointResolution = T.logScoreWithPointResolution
   let updateIntegralCache = T.updateIntegralCache

View File

@@ -301,14 +301,14 @@ module T = Dist({
     }
   }
-  let klDivergence = (prediction: t, answer: t) => {
-    let klDiscretePart = Discrete.T.klDivergence(prediction.discrete, answer.discrete)
-    let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
-    E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
-  }
-  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
-    Error(Operation.NotYetImplemented)
-  }
+  // let klDivergence = (prediction: t, answer: t) => {
+  //   let klDiscretePart = Discrete.T.klDivergence(prediction.discrete, answer.discrete)
+  //   let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
+  //   E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
+  // }
+  // let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
+  //   Error(Operation.NotYetImplemented)
+  // }
 })
 let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {

View File

@@ -196,24 +196,27 @@ module T = Dist({
     | Continuous(m) => Continuous.T.variance(m)
     }
-  let klDivergence = (prediction: t, answer: t) =>
-    switch (prediction, answer) {
-    | (Continuous(t1), Continuous(t2)) => Continuous.T.klDivergence(t1, t2)
-    | (Discrete(t1), Discrete(t2)) => Discrete.T.klDivergence(t1, t2)
-    | (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
-    }
-  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
-    switch (prior, prediction) {
-    | (Some(Continuous(t1)), Continuous(t2)) =>
-      Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=t1->Some)
-    | (None, Continuous(t2)) =>
-      Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=None)
-    | _ => Error(Operation.NotYetImplemented)
-    }
-  }
+  // let klDivergence = (prediction: t, answer: t) =>
+  //   switch (prediction, answer) {
+  //   | (Continuous(t1), Continuous(t2)) => Continuous.T.klDivergence(t1, t2)
+  //   | (Discrete(t1), Discrete(t2)) => Discrete.T.klDivergence(t1, t2)
+  //   | (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
+  //   }
+  //
+  // let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
+  //   switch (prior, prediction) {
+  //   | (Some(Continuous(t1)), Continuous(t2)) =>
+  //     Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=t1->Some)
+  //   | (None, Continuous(t2)) =>
+  //     Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=None)
+  //   | _ => Error(Operation.NotYetImplemented)
+  //   }
+  // }
 })
+let logScore = (args: PointSetDist_Scoring.scoreArgs): result<float, Operation.Error.t> =>
+  PointSetDist_Scoring.logScore(args)
 let pdf = (f: float, t: t) => {
   let mixedPoint: PointSetTypes.mixedPoint = T.xToY(f, t)
   mixedPoint.continuous +. mixedPoint.discrete

View File

@@ -1,4 +1,4 @@
-type t = PointSetDist.pointSetDist
+type t = PointSetTypes.pointSetDist
 type continuousShape = PointSetTypes.continuousShape
 type discreteShape = PointSetTypes.discreteShape
 type mixedShape = PointSetTypes.mixedShape
@@ -30,16 +30,16 @@ module WithDistAnswer = {
     minusScaledLogOfQuot(~esti=estimateElement, ~answ=answerElement)
   }
-  let sum = (~estimate: t, ~answer: t, ~integrateFn) =>
-    PointSetDist.combinePointwise(integrand, estimate, answer)->E.R2.fmap(integrateFn)
-  let sumWithPrior = (~estimate: t, ~answer: t, ~prior: t, ~integrateFn): result<
+  let sum = (~estimate: t, ~answer: t, ~combineFn, ~integrateFn) =>
+    combineFn(integrand, estimate, answer)->E.R2.fmap(integrateFn)
+  let sumWithPrior = (~estimate: t, ~answer: t, ~prior: t, ~combineFn, ~integrateFn): result<
     float,
     Operation.Error.t,
   > => {
-    let kl1 = sum(~estimate, ~answer, ~integrateFn)
-    let kl2 = sum(~estimate=prior, ~answer, ~integrateFn)
-    E.R.merge(kl1, kl2)->E.R2.fmap(((k1', k2')) => kl1' -. kl2')
+    let kl1 = sum(~estimate, ~answer, ~combineFn, ~integrateFn)
+    let kl2 = sum(~estimate=prior, ~answer, ~combineFn, ~integrateFn)
+    E.R.merge(kl1, kl2)->E.R2.fmap(((kl1', kl2')) => kl1' -. kl2')
   }
 }
@@ -69,11 +69,15 @@ module WithScalarAnswer = {
       minusScaledLogOfQuot(~esti=numerator, ~answ=priorDensityOfAnswer)
     }
   }
-  let score = (~estimate: t, ~answer: t): result<float, Operation.Error.t> => {
-    let estimatePdf = x => XYShape.XtoY.linear(x, estimate.xyShape)
+  let score = (~estimate: t, ~answer: t, ~mapper): result<float, Operation.Error.t> => {
+    let pdf = (shape, ~x) => XYShape.XtoY.linear(x, shape.xyShape)
+    let estimatePdf = mapper((x => pdf(~x), x => pdf(~x), x => pdf(~x)))
     score'(~estimatePdf, ~answer)
   }
-  let scoreWithPrior = (~estimate: t, ~answer: t, ~prior: t): result<float, Operation.Error.t> => {
+  let scoreWithPrior = (~estimate: t, ~answer: t, ~prior: t, ~mapper): result<
+    float,
+    Operation.Error.t,
+  > => {
     let estimatePdf = x => XYShape.XtoY.linear(x, estimate.xyShape)
     let priorPdf = x => XYShape.XtoY.linear(x, prior.xyShape)
     scoreWithPrior'(~estimatePdf, ~answer, ~priorPdf)
@@ -100,7 +104,7 @@ module TwoScalars = {
   }
 }
-let logScore = (args: scoreArgs, ~integrateFn): result<float, Operation.Error.t> =>
+let logScore = (args: scoreArgs, ~combineFn, ~integrateFn): result<float, Operation.Error.t> =>
   switch args {
   | DistEstimateDistAnswer({estimate, answer, prior: None}) =>
     WithDistAnswer.sum(~estimate, ~answer, ~integrateFn)
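Taken together, this last file swaps the hard-coded PointSetDist.combinePointwise call for ~combineFn and ~integrateFn hooks that callers of logScore now supply. A minimal usage sketch, written as if it sat inside PointSetDist_Scoring; the concrete functions passed for the two hooks are assumptions about the eventual wiring (the PointSetDist.logScore wrapper added above does not forward them yet), not something this commit establishes:

// Hypothetical caller of the new signature; the hook arguments are assumptions.
let exampleLogScore = (estimate: t, answer: t) =>
  logScore(
    DistEstimateDistAnswer({estimate, answer, prior: None}),
    ~combineFn=PointSetDist.combinePointwise, // the pointwise combiner that sum used to hard-code
    ~integrateFn=PointSetDist.T.integralEndY, // total mass of the combined shape; module path assumed
  )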