Merge pull request #589 from quantified-uncertainty/scoring-cleanup-three
Scoring cleanup
This commit is contained in commit 606cbd8859.
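For reference, a minimal usage sketch of the scoring API after this cleanup. This is not part of the commit: the module paths, constructors, and signatures are the ones introduced in the diff below, while the env values and the example distributions are illustrative only.

// Sketch only: assumes the post-cleanup scoring API shown in this PR.
let env: GenericDist.env = {sampleCount: 10000, xyPointLength: 1000}

let estimate = DistributionTypes.Symbolic(#Normal({mean: 5.0, stdev: 2.0}))
let answer = DistributionTypes.Symbolic(#Normal({mean: 5.5, stdev: 1.0}))

// Replaces the removed Constructors.klDivergence(~env, dist1, dist2).
let distScore: result<float, DistributionTypes.error> =
  DistributionOperation.Constructors.LogScore.distEstimateDistAnswer(~env, estimate, answer)

// Scoring a distribution estimate against a scalar resolution.
let scalarScore: result<float, DistributionTypes.error> =
  DistributionOperation.Constructors.LogScore.distEstimateScalarAnswer(~env, estimate, 5.3)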
					
				| 
						 | 
				
			
@@ -1,7 +1,7 @@
 open Jest
 open Expect
 
-let env: DistributionOperation.env = {
+let env: GenericDist.env = {
   sampleCount: 100,
   xyPointLength: 100,
 }

@@ -34,7 +34,7 @@ describe("sparkline", () => {
     expected: DistributionOperation.outputType,
   ) => {
     test(name, () => {
-      let result = DistributionOperation.run(~env, FromDist(ToString(ToSparkline(20)), dist))
+      let result = DistributionOperation.run(~env, FromDist(#ToString(ToSparkline(20)), dist))
       expect(result)->toEqual(expected)
     })
   }

@@ -81,8 +81,8 @@ describe("sparkline", () => {
 describe("toPointSet", () => {
   test("on symbolic normal distribution", () => {
     let result =
-      run(FromDist(ToDist(ToPointSet), normalDist5))
-      ->outputMap(FromDist(ToFloat(#Mean)))
+      run(FromDist(#ToDist(ToPointSet), normalDist5))
+      ->outputMap(FromDist(#ToFloat(#Mean)))
       ->toFloat
       ->toExt
     expect(result)->toBeSoCloseTo(5.0, ~digits=0)

@@ -90,10 +90,10 @@ describe("toPointSet", () => {
 
   test("on sample set", () => {
     let result =
-      run(FromDist(ToDist(ToPointSet), normalDist5))
-      ->outputMap(FromDist(ToDist(ToSampleSet(1000))))
-      ->outputMap(FromDist(ToDist(ToPointSet)))
-      ->outputMap(FromDist(ToFloat(#Mean)))
+      run(FromDist(#ToDist(ToPointSet), normalDist5))
+      ->outputMap(FromDist(#ToDist(ToSampleSet(1000))))
+      ->outputMap(FromDist(#ToDist(ToPointSet)))
+      ->outputMap(FromDist(#ToFloat(#Mean)))
       ->toFloat
       ->toExt
     expect(result)->toBeSoCloseTo(5.0, ~digits=-1)
@@ -19,7 +19,6 @@ exception MixtureFailed
 let float1 = 1.0
 let float2 = 2.0
 let float3 = 3.0
-let {mkDelta} = module(TestHelpers)
-let point1 = mkDelta(float1)
-let point2 = mkDelta(float2)
-let point3 = mkDelta(float3)
+let point1 = TestHelpers.mkDelta(float1)
+let point2 = TestHelpers.mkDelta(float2)
+let point3 = TestHelpers.mkDelta(float3)
			@ -11,7 +11,7 @@ describe("mixture", () => {
 | 
			
		|||
      let (mean1, mean2) = tup
 | 
			
		||||
      let meanValue = {
 | 
			
		||||
        run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))->outputMap(
 | 
			
		||||
          FromDist(ToFloat(#Mean)),
 | 
			
		||||
          FromDist(#ToFloat(#Mean)),
 | 
			
		||||
        )
 | 
			
		||||
      }
 | 
			
		||||
      meanValue->unpackFloat->expect->toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
 | 
			
		||||
| 
						 | 
				
			
			@ -28,7 +28,7 @@ describe("mixture", () => {
 | 
			
		|||
      let meanValue = {
 | 
			
		||||
        run(
 | 
			
		||||
          Mixture([(mkBeta(alpha, beta), betaWeight), (mkExponential(rate), exponentialWeight)]),
 | 
			
		||||
        )->outputMap(FromDist(ToFloat(#Mean)))
 | 
			
		||||
        )->outputMap(FromDist(#ToFloat(#Mean)))
 | 
			
		||||
      }
 | 
			
		||||
      let betaMean = 1.0 /. (1.0 +. beta /. alpha)
 | 
			
		||||
      let exponentialMean = 1.0 /. rate
 | 
			
		||||
| 
						 | 
				
			
			@ -52,7 +52,7 @@ describe("mixture", () => {
 | 
			
		|||
            (mkUniform(low, high), uniformWeight),
 | 
			
		||||
            (mkLognormal(mu, sigma), lognormalWeight),
 | 
			
		||||
          ]),
 | 
			
		||||
        )->outputMap(FromDist(ToFloat(#Mean)))
 | 
			
		||||
        )->outputMap(FromDist(#ToFloat(#Mean)))
 | 
			
		||||
      }
 | 
			
		||||
      let uniformMean = (low +. high) /. 2.0
 | 
			
		||||
      let lognormalMean = mu +. sigma ** 2.0 /. 2.0
 | 
			
		||||
| 
						 | 
				
			
			
 | 
			
		|||
| 
						 | 
				
			
@@ -3,6 +3,7 @@ open Expect
 open TestHelpers
 open GenericDist_Fixtures
 
+let klDivergence = DistributionOperation.Constructors.LogScore.distEstimateDistAnswer(~env)
 // integral from low to high of 1 / (high - low) log(normal(mean, stdev)(x) / (1 / (high - low))) dx
 let klNormalUniform = (mean, stdev, low, high): float =>
   -.Js.Math.log((high -. low) /. Js.Math.sqrt(2.0 *. MagicNumbers.Math.pi *. stdev ** 2.0)) +.

@@ -11,8 +12,6 @@ let klNormalUniform = (mean, stdev, low, high): float =>
   (mean ** 2.0 -. (high +. low) *. mean +. (low ** 2.0 +. high *. low +. high ** 2.0) /. 3.0)
 
 describe("klDivergence: continuous -> continuous -> float", () => {
-  let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
-
   let testUniform = (lowAnswer, highAnswer, lowPrediction, highPrediction) => {
     test("of two uniforms is equal to the analytic expression", () => {
       let answer =

@@ -58,7 +57,7 @@ describe("klDivergence: continuous -> continuous -> float", () => {
     let kl = E.R.liftJoin2(klDivergence, prediction, answer)
 
     switch kl {
-    | Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=3)
+    | Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=2)
     | Error(err) => {
         Js.Console.log(DistributionTypes.Error.toString(err))
         raise(KlFailed)

@@ -82,7 +81,6 @@ describe("klDivergence: continuous -> continuous -> float", () => {
 })
 
 describe("klDivergence: discrete -> discrete -> float", () => {
-  let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
   let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
   let a' = [(point1, 1e0), (point2, 1e0)]->mixture->run
   let b' = [(point1, 1e0), (point2, 1e0), (point3, 1e0)]->mixture->run

@@ -117,7 +115,6 @@ describe("klDivergence: discrete -> discrete -> float", () => {
 })
 
 describe("klDivergence: mixed -> mixed -> float", () => {
-  let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
   let mixture' = a => DistributionTypes.DistributionOperation.Mixture(a)
   let mixture = a => {
     let dist' = a->mixture'->run

@@ -189,15 +186,15 @@ describe("combineAlongSupportOfSecondArgument0", () => {
       uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
         s,
       ))
-    let answerWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), answer)
-    let predictionWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), prediction)
+    let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
+    let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)
 
     let interpolator = XYShape.XtoY.continuousInterpolator(#Stepwise, #UseZero)
-    let integrand = PointSetDist_Scoring.KLDivergence.integrand
+    let integrand = PointSetDist_Scoring.WithDistAnswer.integrand
 
     let result = switch (answerWrapped, predictionWrapped) {
     | (Ok(Dist(PointSet(Continuous(a)))), Ok(Dist(PointSet(Continuous(b))))) =>
-      Some(combineAlongSupportOfSecondArgument(integrand, interpolator, a.xyShape, b.xyShape))
+      Some(combineAlongSupportOfSecondArgument(interpolator, integrand, a.xyShape, b.xyShape))
     | _ => None
     }
     result
@@ -0,0 +1,68 @@
+open Jest
+open Expect
+open TestHelpers
+open GenericDist_Fixtures
+exception ScoreFailed
+
+describe("WithScalarAnswer: discrete -> scalar -> score", () => {
+  let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
+  let pointA = mkDelta(3.0)
+  let pointB = mkDelta(2.0)
+  let pointC = mkDelta(1.0)
+  let pointD = mkDelta(0.0)
+
+  test("score: agrees with analytical answer when finite", () => {
+    let prediction' = [(pointA, 0.25), (pointB, 0.25), (pointC, 0.25), (pointD, 0.25)]->mixture->run
+    let prediction = switch prediction' {
+    | Dist(PointSet(p)) => p
+    | _ => raise(MixtureFailed)
+    }
+
+    let answer = 2.0 // So this is: assigning 100% probability to 2.0
+    let result = PointSetDist_Scoring.WithScalarAnswer.score(~estimate=prediction, ~answer)
+    switch result {
+    | Ok(x) => x->expect->toEqual(-.Js.Math.log(0.25 /. 1.0))
+    | _ => raise(ScoreFailed)
+    }
+  })
+
+  test("score: agrees with analytical answer when finite", () => {
+    let prediction' = [(pointA, 0.75), (pointB, 0.25)]->mixture->run
+    let prediction = switch prediction' {
+    | Dist(PointSet(p)) => p
+    | _ => raise(MixtureFailed)
+    }
+    let answer = 3.0 // So this is: assigning 100% probability to 2.0
+    let result = PointSetDist_Scoring.WithScalarAnswer.score(~estimate=prediction, ~answer)
+    switch result {
+    | Ok(x) => x->expect->toEqual(-.Js.Math.log(0.75 /. 1.0))
+    | _ => raise(ScoreFailed)
+    }
+  })
+
+  test("scoreWithPrior: agrees with analytical answer when finite", () => {
+    let prior' = [(pointA, 0.5), (pointB, 0.5)]->mixture->run
+    let prediction' = [(pointA, 0.75), (pointB, 0.25)]->mixture->run
+
+    let prediction = switch prediction' {
+    | Dist(PointSet(p)) => p
+    | _ => raise(MixtureFailed)
+    }
+
+    let prior = switch prior' {
+    | Dist(PointSet(p)) => p
+    | _ => raise(MixtureFailed)
+    }
+
+    let answer = 3.0 // So this is: assigning 100% probability to 2.0
+    let result = PointSetDist_Scoring.WithScalarAnswer.scoreWithPrior(
+      ~estimate=prediction,
+      ~answer,
+      ~prior,
+    )
+    switch result {
+    | Ok(x) => x->expect->toEqual(-.Js.Math.log(0.75 /. 1.0) -. -.Js.Math.log(0.5 /. 1.0))
+    | _ => raise(ScoreFailed)
+    }
+  })
+})
@@ -8,34 +8,34 @@ let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean,
 describe("(Symbolic) normalize", () => {
   testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
     let normalValue = mkNormal(mean, 2.0)
-    let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
+    let normalizedValue = run(FromDist(#ToDist(Normalize), normalValue))
     normalizedValue->unpackDist->expect->toEqual(normalValue)
   })
 })
 
 describe("(Symbolic) mean", () => {
   testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
-    run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
+    run(FromDist(#ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
   })
 
   Skip.test("of normal(0, -1) (it NaNs out)", () => {
-    run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
+    run(FromDist(#ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
   })
 
   test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
-    run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
+    run(FromDist(#ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
   })
 
   testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
+      FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
     )
     meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
   })
 
   test("of a cauchy distribution", () => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
+      FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
     )
     meanValue->unpackFloat->expect->toBeSoCloseTo(1.0098094001641797, ~digits=5)
     //-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))

@@ -48,7 +48,7 @@ describe("(Symbolic) mean", () => {
       let (low, medium, high) = tup
       let meanValue = run(
         FromDist(
-          ToFloat(#Mean),
+          #ToFloat(#Mean),
           DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
         ),
       )

@@ -63,7 +63,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (alpha, beta) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
     },

@@ -72,7 +72,7 @@ describe("(Symbolic) mean", () => {
   // TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
   test("of beta(0, 0)", () => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
+      FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
     )
     meanValue->unpackFloat->expect->ExpectJs.toBeFalsy
   })

@@ -85,7 +85,7 @@ describe("(Symbolic) mean", () => {
       let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
       let meanValue =
         betaDistribution->E.R2.fmap(d =>
-          run(FromDist(ToFloat(#Mean), d->DistributionTypes.Symbolic))
+          run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic))
         )
       switch meanValue {
       | Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)

@@ -100,7 +100,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (mu, sigma) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
     },

@@ -112,14 +112,14 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (low, high) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
     },
   )
 
   test("of a float", () => {
-    let meanValue = run(FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
+    let meanValue = run(FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
     meanValue->unpackFloat->expect->toBeCloseTo(7.7)
   })
 })
@@ -29,7 +29,7 @@ let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Ou
 
 let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)
 
-let env: DistributionOperation.env = {
+let env: GenericDist.env = {
   sampleCount: MagicNumbers.Environment.defaultSampleCount,
   xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
 }
@@ -4,12 +4,9 @@ type error = DistributionTypes.error
 
 // TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.
 
-type env = {
-  sampleCount: int,
-  xyPointLength: int,
-}
+type env = GenericDist.env
 
-let defaultEnv = {
+let defaultEnv: env = {
   sampleCount: MagicNumbers.Environment.defaultSampleCount,
   xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
 }

@@ -93,7 +90,7 @@ module OutputLocal = {
     }
 }
 
-let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
+let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
   let {sampleCount, xyPointLength} = env
 
   let reCall = (~env=env, ~functionCallInfo=functionCallInfo, ()) => {

@@ -101,14 +98,14 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
   }
 
   let toPointSetFn = r => {
-    switch reCall(~functionCallInfo=FromDist(ToDist(ToPointSet), r), ()) {
+    switch reCall(~functionCallInfo=FromDist(#ToDist(ToPointSet), r), ()) {
     | Dist(PointSet(p)) => Ok(p)
     | e => Error(OutputLocal.toErrorOrUnreachable(e))
     }
   }
 
   let toSampleSetFn = r => {
-    switch reCall(~functionCallInfo=FromDist(ToDist(ToSampleSet(sampleCount)), r), ()) {
+    switch reCall(~functionCallInfo=FromDist(#ToDist(ToSampleSet(sampleCount)), r), ()) {
     | Dist(SampleSet(p)) => Ok(p)
     | e => Error(OutputLocal.toErrorOrUnreachable(e))
     }

@@ -116,13 +113,13 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
 
   let scaleMultiply = (r, weight) =>
     reCall(
-      ~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
+      ~functionCallInfo=FromDist(#ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
       (),
     )->OutputLocal.toDistR
 
   let pointwiseAdd = (r1, r2) =>
     reCall(
-      ~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
+      ~functionCallInfo=FromDist(#ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
       (),
     )->OutputLocal.toDistR
 

@@ -131,49 +128,40 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
     dist: genericDist,
   ): outputType => {
     let response = switch subFnName {
-    | ToFloat(distToFloatOperation) =>
+    | #ToFloat(distToFloatOperation) =>
       GenericDist.toFloatOperation(dist, ~toPointSetFn, ~distToFloatOperation)
       ->E.R2.fmap(r => Float(r))
       ->OutputLocal.fromResult
-    | ToString(ToString) => dist->GenericDist.toString->String
-    | ToString(ToSparkline(bucketCount)) =>
+    | #ToString(ToString) => dist->GenericDist.toString->String
+    | #ToString(ToSparkline(bucketCount)) =>
       GenericDist.toSparkline(dist, ~sampleCount, ~bucketCount, ())
       ->E.R2.fmap(r => String(r))
       ->OutputLocal.fromResult
-    | ToDist(Inspect) => {
+    | #ToDist(Inspect) => {
         Js.log2("Console log requested: ", dist)
         Dist(dist)
       }
-    | ToDist(Normalize) => dist->GenericDist.normalize->Dist
-    | ToScore(KLDivergence(t2)) =>
-      GenericDist.Score.klDivergence(dist, t2, ~toPointSetFn)
-      ->E.R2.fmap(r => Float(r))
+    | #ToDist(Normalize) => dist->GenericDist.normalize->Dist
+    | #ToScore(LogScore(answer, prior)) =>
+      GenericDist.Score.logScore(~estimate=dist, ~answer, ~prior, ~env)
+      ->E.R2.fmap(s => Float(s))
       ->OutputLocal.fromResult
-    | ToScore(LogScore(answer, prior)) =>
-      GenericDist.Score.logScoreWithPointResolution(
-        ~prediction=dist,
-        ~answer,
-        ~prior,
-        ~toPointSetFn,
-      )
-      ->E.R2.fmap(r => Float(r))
-      ->OutputLocal.fromResult
-    | ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
-    | ToDist(Truncate(leftCutoff, rightCutoff)) =>
+    | #ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
+    | #ToDist(Truncate(leftCutoff, rightCutoff)) =>
       GenericDist.truncate(~toPointSetFn, ~leftCutoff, ~rightCutoff, dist, ())
       ->E.R2.fmap(r => Dist(r))
      ->OutputLocal.fromResult
-    | ToDist(ToSampleSet(n)) =>
+    | #ToDist(ToSampleSet(n)) =>
       dist
       ->GenericDist.toSampleSetDist(n)
       ->E.R2.fmap(r => Dist(SampleSet(r)))
       ->OutputLocal.fromResult
-    | ToDist(ToPointSet) =>
+    | #ToDist(ToPointSet) =>
       dist
       ->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
       ->E.R2.fmap(r => Dist(PointSet(r)))
       ->OutputLocal.fromResult
-    | ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
+    | #ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
       dist
       ->GenericDist.pointwiseCombinationFloat(
         ~toPointSetFn,

@@ -182,23 +170,23 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
       )
       ->E.R2.fmap(r => Dist(r))
       ->OutputLocal.fromResult
-    | ToDist(Scale(#Multiply, f)) =>
+    | #ToDist(Scale(#Multiply, f)) =>
       dist
       ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Multiply, ~f)
       ->E.R2.fmap(r => Dist(r))
       ->OutputLocal.fromResult
-    | ToDist(Scale(#Logarithm, f)) =>
+    | #ToDist(Scale(#Logarithm, f)) =>
      dist
       ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Logarithm, ~f)
       ->E.R2.fmap(r => Dist(r))
       ->OutputLocal.fromResult
-    | ToDist(Scale(#Power, f)) =>
+    | #ToDist(Scale(#Power, f)) =>
       dist
       ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Power, ~f)
       ->E.R2.fmap(r => Dist(r))
       ->OutputLocal.fromResult
-    | ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
-    | ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
+    | #ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
+    | #ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
       dist
       ->GenericDist.algebraicCombination(
         ~strategy,

@@ -209,12 +197,12 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
       )
       ->E.R2.fmap(r => Dist(r))
       ->OutputLocal.fromResult
-    | ToDistCombination(Pointwise, algebraicCombination, #Dist(t2)) =>
+    | #ToDistCombination(Pointwise, algebraicCombination, #Dist(t2)) =>
       dist
       ->GenericDist.pointwiseCombination(~toPointSetFn, ~algebraicCombination, ~t2)
       ->E.R2.fmap(r => Dist(r))
       ->OutputLocal.fromResult
-    | ToDistCombination(Pointwise, algebraicCombination, #Float(f)) =>
+    | #ToDistCombination(Pointwise, algebraicCombination, #Float(f)) =>
       dist
       ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination, ~f)
       ->E.R2.fmap(r => Dist(r))

@@ -225,8 +213,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
 
   switch functionCallInfo {
   | FromDist(subFnName, dist) => fromDistFn(subFnName, dist)
-  | FromFloat(subFnName, float) =>
-    reCall(~functionCallInfo=FromDist(subFnName, GenericDist.fromFloat(float)), ())
+  | FromFloat(subFnName, x) => reCall(~functionCallInfo=FromFloat(subFnName, x), ())
   | Mixture(dists) =>
     dists
     ->GenericDist.mixture(~scaleMultiplyFn=scaleMultiply, ~pointwiseAddFn=pointwiseAdd)

@@ -278,13 +265,16 @@ module Constructors = {
   let pdf = (~env, dist, f) => C.pdf(dist, f)->run(~env)->toFloatR
   let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
   let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
-  let klDivergence = (~env, dist1, dist2) => C.klDivergence(dist1, dist2)->run(~env)->toFloatR
-  let logScoreWithPointResolution = (
-    ~env,
-    ~prediction: DistributionTypes.genericDist,
-    ~answer: float,
-    ~prior: option<DistributionTypes.genericDist>,
-  ) => C.logScoreWithPointResolution(~prediction, ~answer, ~prior)->run(~env)->toFloatR
+  module LogScore = {
+    let distEstimateDistAnswer = (~env, estimate, answer) =>
+      C.LogScore.distEstimateDistAnswer(estimate, answer)->run(~env)->toFloatR
+    let distEstimateDistAnswerWithPrior = (~env, estimate, answer, prior) =>
+      C.LogScore.distEstimateDistAnswerWithPrior(estimate, answer, prior)->run(~env)->toFloatR
+    let distEstimateScalarAnswer = (~env, estimate, answer) =>
+      C.LogScore.distEstimateScalarAnswer(estimate, answer)->run(~env)->toFloatR
+    let distEstimateScalarAnswerWithPrior = (~env, estimate, answer, prior) =>
+      C.LogScore.distEstimateScalarAnswerWithPrior(estimate, answer, prior)->run(~env)->toFloatR
+  }
   let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
   let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
   let fromSamples = (~env, xs) => C.fromSamples(xs)->run(~env)->toDistR
@@ -1,11 +1,5 @@
 @genType
-type env = {
-  sampleCount: int,
-  xyPointLength: int,
-}
-
-@genType
-let defaultEnv: env
+let defaultEnv: GenericDist.env
 
 open DistributionTypes
 

@@ -19,15 +13,18 @@ type outputType =
   | GenDistError(error)
 
 @genType
-let run: (~env: env, DistributionTypes.DistributionOperation.genericFunctionCallInfo) => outputType
+let run: (
+  ~env: GenericDist.env,
+  DistributionTypes.DistributionOperation.genericFunctionCallInfo,
+) => outputType
 let runFromDist: (
-  ~env: env,
+  ~env: GenericDist.env,
   ~functionCallInfo: DistributionTypes.DistributionOperation.fromDist,
   genericDist,
 ) => outputType
 let runFromFloat: (
-  ~env: env,
-  ~functionCallInfo: DistributionTypes.DistributionOperation.fromDist,
+  ~env: GenericDist.env,
+  ~functionCallInfo: DistributionTypes.DistributionOperation.fromFloat,
   float,
 ) => outputType
 

@@ -42,79 +39,147 @@ module Output: {
   let toBool: t => option<bool>
   let toBoolR: t => result<bool, error>
   let toError: t => option<error>
-  let fmap: (~env: env, t, DistributionTypes.DistributionOperation.singleParamaterFunction) => t
+  let fmap: (
+    ~env: GenericDist.env,
+    t,
+    DistributionTypes.DistributionOperation.singleParamaterFunction,
+  ) => t
 }
 
 module Constructors: {
   @genType
-  let mean: (~env: env, genericDist) => result<float, error>
+  let mean: (~env: GenericDist.env, genericDist) => result<float, error>
   @genType
-  let stdev: (~env: env, genericDist) => result<float, error>
+  let stdev: (~env: GenericDist.env, genericDist) => result<float, error>
   @genType
-  let variance: (~env: env, genericDist) => result<float, error>
+  let variance: (~env: GenericDist.env, genericDist) => result<float, error>
   @genType
-  let sample: (~env: env, genericDist) => result<float, error>
+  let sample: (~env: GenericDist.env, genericDist) => result<float, error>
   @genType
-  let cdf: (~env: env, genericDist, float) => result<float, error>
+  let cdf: (~env: GenericDist.env, genericDist, float) => result<float, error>
   @genType
-  let inv: (~env: env, genericDist, float) => result<float, error>
+  let inv: (~env: GenericDist.env, genericDist, float) => result<float, error>
   @genType
-  let pdf: (~env: env, genericDist, float) => result<float, error>
+  let pdf: (~env: GenericDist.env, genericDist, float) => result<float, error>
   @genType
-  let normalize: (~env: env, genericDist) => result<genericDist, error>
+  let normalize: (~env: GenericDist.env, genericDist) => result<genericDist, error>
   @genType
-  let isNormalized: (~env: env, genericDist) => result<bool, error>
+  let isNormalized: (~env: GenericDist.env, genericDist) => result<bool, error>
+  module LogScore: {
+    @genType
+    let distEstimateDistAnswer: (
+      ~env: GenericDist.env,
+      genericDist,
+      genericDist,
+    ) => result<float, error>
+    @genType
+    let distEstimateDistAnswerWithPrior: (
+      ~env: GenericDist.env,
+      genericDist,
+      genericDist,
+      genericDist,
+    ) => result<float, error>
+    @genType
+    let distEstimateScalarAnswer: (
+      ~env: GenericDist.env,
+      genericDist,
+      float,
+    ) => result<float, error>
+    @genType
+    let distEstimateScalarAnswerWithPrior: (
+      ~env: GenericDist.env,
+      genericDist,
+      float,
+      genericDist,
+    ) => result<float, error>
+  }
   @genType
-  let klDivergence: (~env: env, genericDist, genericDist) => result<float, error>
+  let toPointSet: (~env: GenericDist.env, genericDist) => result<genericDist, error>
   @genType
-  let logScoreWithPointResolution: (
-    ~env: env,
-    ~prediction: genericDist,
-    ~answer: float,
-    ~prior: option<genericDist>,
-  ) => result<float, error>
+  let toSampleSet: (~env: GenericDist.env, genericDist, int) => result<genericDist, error>
   @genType
-  let toPointSet: (~env: env, genericDist) => result<genericDist, error>
+  let fromSamples: (~env: GenericDist.env, SampleSetDist.t) => result<genericDist, error>
   @genType
-  let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
+  let truncate: (
+    ~env: GenericDist.env,
+    genericDist,
+    option<float>,
+    option<float>,
+  ) => result<genericDist, error>
   @genType
-  let fromSamples: (~env: env, SampleSetDist.t) => result<genericDist, error>
+  let inspect: (~env: GenericDist.env, genericDist) => result<genericDist, error>
   @genType
-  let truncate: (~env: env, genericDist, option<float>, option<float>) => result<genericDist, error>
+  let toString: (~env: GenericDist.env, genericDist) => result<string, error>
   @genType
-  let inspect: (~env: env, genericDist) => result<genericDist, error>
+  let toSparkline: (~env: GenericDist.env, genericDist, int) => result<string, error>
   @genType
-  let toString: (~env: env, genericDist) => result<string, error>
+  let algebraicAdd: (~env: GenericDist.env, genericDist, genericDist) => result<genericDist, error>
   @genType
-  let toSparkline: (~env: env, genericDist, int) => result<string, error>
+  let algebraicMultiply: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let algebraicAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let algebraicDivide: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let algebraicMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let algebraicSubtract: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let algebraicDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let algebraicLogarithm: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let algebraicSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let algebraicPower: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let algebraicLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let scaleLogarithm: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
   @genType
-  let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let scaleMultiply: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
   @genType
-  let scaleLogarithm: (~env: env, genericDist, float) => result<genericDist, error>
+  let scalePower: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
   @genType
-  let scaleMultiply: (~env: env, genericDist, float) => result<genericDist, error>
+  let pointwiseAdd: (~env: GenericDist.env, genericDist, genericDist) => result<genericDist, error>
   @genType
-  let scalePower: (~env: env, genericDist, float) => result<genericDist, error>
+  let pointwiseMultiply: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let pointwiseDivide: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let pointwiseSubtract: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let pointwiseDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let pointwiseLogarithm: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
   @genType
-  let pointwiseSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
-  @genType
-  let pointwiseLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
-  @genType
-  let pointwisePower: (~env: env, genericDist, genericDist) => result<genericDist, error>
+  let pointwisePower: (
+    ~env: GenericDist.env,
+    genericDist,
+    genericDist,
+  ) => result<genericDist, error>
 }
@@ -98,61 +98,86 @@ module DistributionOperation = {
     | ToString
     | ToSparkline(int)
 
-  type toScore = KLDivergence(genericDist) | LogScore(float, option<genericDist>)
+  type genericDistOrScalar = Score_Dist(genericDist) | Score_Scalar(float)
 
-  type fromDist =
-    | ToFloat(toFloat)
-    | ToDist(toDist)
-    | ToScore(toScore)
-    | ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
-    | ToString(toString)
-    | ToBool(toBool)
+  type toScore = LogScore(genericDistOrScalar, option<genericDist>)
+
+  type fromFloat = [
+    | #ToFloat(toFloat)
+    | #ToDist(toDist)
+    | #ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
+    | #ToString(toString)
+    | #ToBool(toBool)
+  ]
+
+  type fromDist = [
+    | fromFloat
+    | #ToScore(toScore)
+  ]
 
   type singleParamaterFunction =
     | FromDist(fromDist)
-    | FromFloat(fromDist)
+    | FromFloat(fromFloat)
 
   type genericFunctionCallInfo =
     | FromDist(fromDist, genericDist)
-    | FromFloat(fromDist, float)
+    | FromFloat(fromFloat, float)
     | FromSamples(array<float>)
     | Mixture(array<(genericDist, float)>)
 
-  let distCallToString = (distFunction: fromDist): string =>
-    switch distFunction {
-    | ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
-    | ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
-    | ToFloat(#Mean) => `mean`
-    | ToFloat(#Min) => `min`
-    | ToFloat(#Max) => `max`
-    | ToFloat(#Stdev) => `stdev`
-    | ToFloat(#Variance) => `variance`
-    | ToFloat(#Mode) => `mode`
-    | ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
-    | ToFloat(#Sample) => `sample`
-    | ToFloat(#IntegralSum) => `integralSum`
-    | ToScore(KLDivergence(_)) => `klDivergence`
-    | ToScore(LogScore(x, _)) => `logScore against ${E.Float.toFixed(x)}`
-    | ToDist(Normalize) => `normalize`
-    | ToDist(ToPointSet) => `toPointSet`
-    | ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
-    | ToDist(Truncate(_, _)) => `truncate`
-    | ToDist(Inspect) => `inspect`
-    | ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
-    | ToDist(Scale(#Multiply, r)) => `scaleMultiply(${E.Float.toFixed(r)})`
-    | ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
-    | ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
+  let floatCallToString = (floatFunction: fromFloat): string =>
+    switch floatFunction {
+    | #ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
+    | #ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
+    | #ToFloat(#Mean) => `mean`
+    | #ToFloat(#Min) => `min`
+    | #ToFloat(#Max) => `max`
+    | #ToFloat(#Stdev) => `stdev`
+    | #ToFloat(#Variance) => `variance`
+    | #ToFloat(#Mode) => `mode`
+    | #ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
+    | #ToFloat(#Sample) => `sample`
+    | #ToFloat(#IntegralSum) => `integralSum`
+    | #ToDist(Normalize) => `normalize`
+    | #ToDist(ToPointSet) => `toPointSet`
+    | #ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
+    | #ToDist(Truncate(_, _)) => `truncate`
+    | #ToDist(Inspect) => `inspect`
+    | #ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
+    | #ToDist(Scale(#Multiply, r)) => `scaleMultiply(${E.Float.toFixed(r)})`
+    | #ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
+    | #ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
       `scaleLogWithThreshold(${E.Float.toFixed(r)}, epsilon=${E.Float.toFixed(eps)})`
-    | ToString(ToString) => `toString`
-    | ToString(ToSparkline(n)) => `sparkline(${E.I.toString(n)})`
-    | ToBool(IsNormalized) => `isNormalized`
-    | ToDistCombination(Algebraic(_), _, _) => `algebraic`
-    | ToDistCombination(Pointwise, _, _) => `pointwise`
+    | #ToString(ToString) => `toString`
+    | #ToString(ToSparkline(n)) => `sparkline(${E.I.toString(n)})`
+    | #ToBool(IsNormalized) => `isNormalized`
+    | #ToDistCombination(Algebraic(_), _, _) => `algebraic`
+    | #ToDistCombination(Pointwise, _, _) => `pointwise`
     }
 
+  let distCallToString = (
+    distFunction: [
+      | #ToFloat(toFloat)
+      | #ToDist(toDist)
+      | #ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
+      | #ToString(toString)
+      | #ToBool(toBool)
+      | #ToScore(toScore)
+    ],
+  ): string =>
+    switch distFunction {
+    | #ToScore(_) => `logScore`
+    | #ToFloat(x) => floatCallToString(#ToFloat(x))
+    | #ToDist(x) => floatCallToString(#ToDist(x))
+    | #ToString(x) => floatCallToString(#ToString(x))
+    | #ToBool(x) => floatCallToString(#ToBool(x))
+    | #ToDistCombination(x, y, z) => floatCallToString(#ToDistCombination(x, y, z))
+    }
+
   let toString = (d: genericFunctionCallInfo): string =>
     switch d {
-    | FromDist(f, _) | FromFloat(f, _) => distCallToString(f)
+    | FromDist(f, _) => distCallToString(f)
+    | FromFloat(f, _) => floatCallToString(f)
     | Mixture(_) => `mixture`
     | FromSamples(_) => `fromSamples`
     }

@@ -162,80 +187,93 @@ module Constructors = {
 
   module UsingDists = {
     @genType
-    let mean = (dist): t => FromDist(ToFloat(#Mean), dist)
-    let stdev = (dist): t => FromDist(ToFloat(#Stdev), dist)
-    let variance = (dist): t => FromDist(ToFloat(#Variance), dist)
-    let sample = (dist): t => FromDist(ToFloat(#Sample), dist)
-    let cdf = (dist, x): t => FromDist(ToFloat(#Cdf(x)), dist)
-    let inv = (dist, x): t => FromDist(ToFloat(#Inv(x)), dist)
-    let pdf = (dist, x): t => FromDist(ToFloat(#Pdf(x)), dist)
-    let normalize = (dist): t => FromDist(ToDist(Normalize), dist)
-    let isNormalized = (dist): t => FromDist(ToBool(IsNormalized), dist)
-    let toPointSet = (dist): t => FromDist(ToDist(ToPointSet), dist)
-    let toSampleSet = (dist, r): t => FromDist(ToDist(ToSampleSet(r)), dist)
+    let mean = (dist): t => FromDist(#ToFloat(#Mean), dist)
+    let stdev = (dist): t => FromDist(#ToFloat(#Stdev), dist)
+    let variance = (dist): t => FromDist(#ToFloat(#Variance), dist)
+    let sample = (dist): t => FromDist(#ToFloat(#Sample), dist)
+    let cdf = (dist, x): t => FromDist(#ToFloat(#Cdf(x)), dist)
+    let inv = (dist, x): t => FromDist(#ToFloat(#Inv(x)), dist)
+    let pdf = (dist, x): t => FromDist(#ToFloat(#Pdf(x)), dist)
+    let normalize = (dist): t => FromDist(#ToDist(Normalize), dist)
+    let isNormalized = (dist): t => FromDist(#ToBool(IsNormalized), dist)
+    let toPointSet = (dist): t => FromDist(#ToDist(ToPointSet), dist)
+    let toSampleSet = (dist, r): t => FromDist(#ToDist(ToSampleSet(r)), dist)
     let fromSamples = (xs): t => FromSamples(xs)
-    let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
-    let inspect = (dist): t => FromDist(ToDist(Inspect), dist)
-    let klDivergence = (dist1, dist2): t => FromDist(ToScore(KLDivergence(dist2)), dist1)
-    let logScoreWithPointResolution = (~prediction, ~answer, ~prior): t => FromDist(
-      ToScore(LogScore(answer, prior)),
-      prediction,
-    )
-    let scaleMultiply = (dist, n): t => FromDist(ToDist(Scale(#Multiply, n)), dist)
-    let scalePower = (dist, n): t => FromDist(ToDist(Scale(#Power, n)), dist)
-    let scaleLogarithm = (dist, n): t => FromDist(ToDist(Scale(#Logarithm, n)), dist)
+    let truncate = (dist, left, right): t => FromDist(#ToDist(Truncate(left, right)), dist)
+    let inspect = (dist): t => FromDist(#ToDist(Inspect), dist)
+    module LogScore = {
+      let distEstimateDistAnswer = (estimate, answer): t => FromDist(
+        #ToScore(LogScore(Score_Dist(answer), None)),
+        estimate,
+      )
+      let distEstimateDistAnswerWithPrior = (estimate, answer, prior): t => FromDist(
+        #ToScore(LogScore(Score_Dist(answer), Some(prior))),
+        estimate,
+      )
+      let distEstimateScalarAnswer = (estimate, answer): t => FromDist(
+        #ToScore(LogScore(Score_Scalar(answer), None)),
+        estimate,
+      )
+      let distEstimateScalarAnswerWithPrior = (estimate, answer, prior): t => FromDist(
+        #ToScore(LogScore(Score_Scalar(answer), Some(prior))),
+        estimate,
+      )
+    }
+    let scaleMultiply = (dist, n): t => FromDist(#ToDist(Scale(#Multiply, n)), dist)
+    let scalePower = (dist, n): t => FromDist(#ToDist(Scale(#Power, n)), dist)
+    let scaleLogarithm = (dist, n): t => FromDist(#ToDist(Scale(#Logarithm, n)), dist)
     let scaleLogarithmWithThreshold = (dist, n, eps): t => FromDist(
-      ToDist(Scale(#LogarithmWithThreshold(eps), n)),
+      #ToDist(Scale(#LogarithmWithThreshold(eps), n)),
       dist,
     )
-    let toString = (dist): t => FromDist(ToString(ToString), dist)
-    let toSparkline = (dist, n): t => FromDist(ToString(ToSparkline(n)), dist)
+    let toString = (dist): t => FromDist(#ToString(ToString), dist)
+    let toSparkline = (dist, n): t => FromDist(#ToString(ToSparkline(n)), dist)
    let algebraicAdd = (dist1, dist2: genericDist): t => FromDist(
-      ToDistCombination(Algebraic(AsDefault), #Add, #Dist(dist2)),
+      #ToDistCombination(Algebraic(AsDefault), #Add, #Dist(dist2)),
       dist1,
     )
     let algebraicMultiply = (dist1, dist2): t => FromDist(
-      ToDistCombination(Algebraic(AsDefault), #Multiply, #Dist(dist2)),
+      #ToDistCombination(Algebraic(AsDefault), #Multiply, #Dist(dist2)),
       dist1,
     )
     let algebraicDivide = (dist1, dist2): t => FromDist(
-      ToDistCombination(Algebraic(AsDefault), #Divide, #Dist(dist2)),
+      #ToDistCombination(Algebraic(AsDefault), #Divide, #Dist(dist2)),
       dist1,
     )
     let algebraicSubtract = (dist1, dist2): t => FromDist(
-      ToDistCombination(Algebraic(AsDefault), #Subtract, #Dist(dist2)),
+      #ToDistCombination(Algebraic(AsDefault), #Subtract, #Dist(dist2)),
       dist1,
     )
     let algebraicLogarithm = (dist1, dist2): t => FromDist(
-      ToDistCombination(Algebraic(AsDefault), #Logarithm, #Dist(dist2)),
+      #ToDistCombination(Algebraic(AsDefault), #Logarithm, #Dist(dist2)),
       dist1,
     )
     let algebraicPower = (dist1, dist2): t => FromDist(
-      ToDistCombination(Algebraic(AsDefault), #Power, #Dist(dist2)),
+      #ToDistCombination(Algebraic(AsDefault), #Power, #Dist(dist2)),
       dist1,
     )
     let pointwiseAdd = (dist1, dist2): t => FromDist(
-      ToDistCombination(Pointwise, #Add, #Dist(dist2)),
+      #ToDistCombination(Pointwise, #Add, #Dist(dist2)),
      dist1,
     )
     let pointwiseMultiply = (dist1, dist2): t => FromDist(
-      ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
+      #ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
       dist1,
     )
     let pointwiseDivide = (dist1, dist2): t => FromDist(
-      ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
+      #ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
       dist1,
     )
     let pointwiseSubtract = (dist1, dist2): t => FromDist(
-      ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
+      #ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
       dist1,
     )
     let pointwiseLogarithm = (dist1, dist2): t => FromDist(
-      ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
+      #ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
       dist1,
     )
     let pointwisePower = (dist1, dist2): t => FromDist(
-      ToDistCombination(Pointwise, #Power, #Dist(dist2)),
+      #ToDistCombination(Pointwise, #Power, #Dist(dist2)),
       dist1,
     )
   }
@@ -6,6 +6,11 @@ type toSampleSetFn = t => result<SampleSetDist.t, error>
type scaleMultiplyFn = (t, float) => result<t, error>
type pointwiseAddFn = (t, t) => result<t, error>

type env = {
  sampleCount: int,
  xyPointLength: int,
}

let isPointSet = (t: t) =>
  switch t {
  | PointSet(_) => true
@@ -61,46 +66,6 @@ let integralEndY = (t: t): float =>

let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7

module Score = {
  let klDivergence = (prediction, answer, ~toPointSetFn: toPointSetFn): result<float, error> => {
    let pointSets = E.R.merge(toPointSetFn(prediction), toPointSetFn(answer))
    pointSets |> E.R2.bind(((predi, ans)) =>
      PointSetDist.T.klDivergence(predi, ans)->E.R2.errMap(x => DistributionTypes.OperationError(x))
    )
  }

  let logScoreWithPointResolution = (
    ~prediction: DistributionTypes.genericDist,
    ~answer: float,
    ~prior: option<DistributionTypes.genericDist>,
    ~toPointSetFn: toPointSetFn,
  ): result<float, error> => {
    switch prior {
    | Some(prior') =>
      E.R.merge(toPointSetFn(prior'), toPointSetFn(prediction))->E.R.bind(((
        prior'',
        prediction'',
      )) =>
        PointSetDist.T.logScoreWithPointResolution(
          ~prediction=prediction'',
          ~answer,
          ~prior=prior''->Some,
        )->E.R2.errMap(x => DistributionTypes.OperationError(x))
      )
    | None =>
      prediction
      ->toPointSetFn
      ->E.R.bind(x =>
        PointSetDist.T.logScoreWithPointResolution(
          ~prediction=x,
          ~answer,
          ~prior=None,
        )->E.R2.errMap(x => DistributionTypes.OperationError(x))
      )
    }
  }
}

let toFloatOperation = (
  t,
  ~toPointSetFn: toPointSetFn,
@@ -171,6 +136,70 @@ let toPointSet = (
  }
}

module Score = {
  type genericDistOrScalar = DistributionTypes.DistributionOperation.genericDistOrScalar

  let argsMake = (~esti: t, ~answ: genericDistOrScalar, ~prior: option<t>, ~env: env): result<
    PointSetDist_Scoring.scoreArgs,
    error,
  > => {
    let toPointSetFn = t =>
      toPointSet(
        t,
        ~xyPointLength=env.xyPointLength,
        ~sampleCount=env.sampleCount,
        ~xSelection=#ByWeight,
        (),
      )
    let prior': option<result<PointSetTypes.pointSetDist, error>> = switch prior {
    | None => None
    | Some(d) => toPointSetFn(d)->Some
    }
    let twoDists = (~toPointSetFn, esti': t, answ': t): result<
      (PointSetTypes.pointSetDist, PointSetTypes.pointSetDist),
      error,
    > => E.R.merge(toPointSetFn(esti'), toPointSetFn(answ'))
    switch (esti, answ, prior') {
    | (esti', Score_Dist(answ'), None) =>
      twoDists(~toPointSetFn, esti', answ')->E.R2.fmap(((esti'', answ'')) =>
        {estimate: esti'', answer: answ'', prior: None}->PointSetDist_Scoring.DistAnswer
      )
    | (esti', Score_Dist(answ'), Some(Ok(prior''))) =>
      twoDists(~toPointSetFn, esti', answ')->E.R2.fmap(((esti'', answ'')) =>
        {
          estimate: esti'',
          answer: answ'',
          prior: Some(prior''),
        }->PointSetDist_Scoring.DistAnswer
      )
    | (esti', Score_Scalar(answ'), None) =>
      toPointSetFn(esti')->E.R2.fmap(esti'' =>
        {
          estimate: esti'',
          answer: answ',
          prior: None,
        }->PointSetDist_Scoring.ScalarAnswer
      )
    | (esti', Score_Scalar(answ'), Some(Ok(prior''))) =>
      toPointSetFn(esti')->E.R2.fmap(esti'' =>
        {
          estimate: esti'',
          answer: answ',
          prior: Some(prior''),
        }->PointSetDist_Scoring.ScalarAnswer
      )
    | (_, _, Some(Error(err))) => err->Error
    }
  }

  let logScore = (~estimate: t, ~answer: genericDistOrScalar, ~prior: option<t>, ~env: env): result<
    float,
    error,
  > =>
    argsMake(~esti=estimate, ~answ=answer, ~prior, ~env)->E.R.bind(x =>
      x->PointSetDist.logScore->E.R2.errMap(y => DistributionTypes.OperationError(y))
    )
}
/*
  PointSetDist.toSparkline calls "downsampleEquallyOverX", which downsamples it to n=bucketCount.
  It first needs a pointSetDist, so we convert to a pointSetDist. In this process we want the 
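For orientation, a minimal sketch of how the new Score.logScore entry point might be called from ReScript; the bindings estimateDist and answerDist and the env sizes are assumptions for illustration, not taken from this diff:

// Hedged sketch: assumes `estimateDist` and `answerDist` are DistributionTypes.genericDist
// values already in scope; Score_Dist is the genericDistOrScalar constructor used above
// (qualify or open DistributionTypes.DistributionOperation as needed).
let env: GenericDist.env = {sampleCount: 1000, xyPointLength: 1000}
let score = GenericDist.Score.logScore(
  ~estimate=estimateDist,
  ~answer=Score_Dist(answerDist),
  ~prior=None,
  ~env,
)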
@@ -5,6 +5,9 @@ type toSampleSetFn = t => result<SampleSetDist.t, error>
type scaleMultiplyFn = (t, float) => result<t, error>
type pointwiseAddFn = (t, t) => result<t, error>

@genType
type env = {sampleCount: int, xyPointLength: int}

let sampleN: (t, int) => array<float>
let sample: t => float

@@ -25,12 +28,11 @@ let toFloatOperation: (
) => result<float, error>

module Score: {
  let klDivergence: (t, t, ~toPointSetFn: toPointSetFn) => result<float, error>
  let logScoreWithPointResolution: (
    ~prediction: t,
    ~answer: float,
  let logScore: (
    ~estimate: t,
    ~answer: DistributionTypes.DistributionOperation.genericDistOrScalar,
    ~prior: option<t>,
    ~toPointSetFn: toPointSetFn,
    ~env: env,
  ) => result<float, error>
}

@@ -120,7 +120,7 @@ let combinePointwise = (

  let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation)

  combiner(fn, interpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
  combiner(interpolator, fn, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
    make(~integralSumCache=combinedIntegralSum, x)
  )
}
@@ -270,20 +270,6 @@ module T = Dist({
  }
  let variance = (t: t): float =>
    XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)

  let klDivergence = (prediction: t, answer: t) => {
    let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
      PointSetDist_Scoring.KLDivergence.integrand,
      prediction.xyShape,
      answer.xyShape,
    )
    newShape->E.R2.fmap(x => x->make->integralEndY)
  }
  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
    let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
    let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
    PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)
  }
})

let isNormalized = (t: t): bool => {
@@ -49,7 +49,7 @@ let combinePointwise = (
  // TODO: does it ever make sense to pointwise combine the integrals here?
  // It could be done for pointwise additions, but is that ever needed?

  combiner(fn, XYShape.XtoY.discreteInterpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
  combiner(XYShape.XtoY.discreteInterpolator, fn, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
}

let reduce = (
@@ -222,15 +222,4 @@ module T = Dist({
    let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
    XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
  }

  let klDivergence = (prediction: t, answer: t) => {
    combinePointwise(
      ~fn=PointSetDist_Scoring.KLDivergence.integrand,
      prediction,
      answer,
    )->E.R2.fmap(integralEndY)
  }
  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
    Error(Operation.NotYetImplemented)
  }
})
@@ -33,12 +33,6 @@ module type dist = {

  let mean: t => float
  let variance: t => float
  let klDivergence: (t, t) => result<float, Operation.Error.t>
  let logScoreWithPointResolution: (
    ~prediction: t,
    ~answer: float,
    ~prior: option<t>,
  ) => result<float, Operation.Error.t>
}

module Dist = (T: dist) => {
@@ -61,9 +55,6 @@ module Dist = (T: dist) => {
  let mean = T.mean
  let variance = T.variance
  let integralEndY = T.integralEndY
  let klDivergence = T.klDivergence
  let logScoreWithPointResolution = T.logScoreWithPointResolution

  let updateIntegralCache = T.updateIntegralCache

  module Integral = {
@@ -302,15 +302,6 @@ module T = Dist({
    | _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
    }
  }

  let klDivergence = (prediction: t, answer: t) => {
    let klDiscretePart = Discrete.T.klDivergence(prediction.discrete, answer.discrete)
    let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
    E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
  }
  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
    Error(Operation.NotYetImplemented)
  }
})

let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
@@ -66,6 +66,7 @@ let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t
  }

let combinePointwise = (
  ~combiner=XYShape.PointwiseCombination.combine,
  ~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
  ~integralCachesFn: (
    PointSetTypes.continuousShape,
@@ -78,6 +79,7 @@ let combinePointwise = (
  switch (t1, t2) {
  | (Continuous(m1), Continuous(m2)) =>
    Continuous.combinePointwise(
      ~combiner,
      ~integralSumCachesFn,
      fn,
      m1,
@@ -85,6 +87,7 @@ let combinePointwise = (
    )->E.R2.fmap(x => PointSetTypes.Continuous(x))
  | (Discrete(m1), Discrete(m2)) =>
    Discrete.combinePointwise(
      ~combiner,
      ~integralSumCachesFn,
      ~fn,
      m1,
@@ -195,25 +198,16 @@ module T = Dist({
    | Discrete(m) => Discrete.T.variance(m)
    | Continuous(m) => Continuous.T.variance(m)
    }

  let klDivergence = (prediction: t, answer: t) =>
    switch (prediction, answer) {
    | (Continuous(t1), Continuous(t2)) => Continuous.T.klDivergence(t1, t2)
    | (Discrete(t1), Discrete(t2)) => Discrete.T.klDivergence(t1, t2)
    | (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
    }

  let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
    switch (prior, prediction) {
    | (Some(Continuous(t1)), Continuous(t2)) =>
      Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=t1->Some)
    | (None, Continuous(t2)) =>
      Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=None)
    | _ => Error(Operation.NotYetImplemented)
    }
  }
})

let logScore = (args: PointSetDist_Scoring.scoreArgs): result<float, Operation.Error.t> =>
  PointSetDist_Scoring.logScore(
    args,
    ~combineFn=combinePointwise,
    ~integrateFn=T.Integral.sum,
    ~toMixedFn=toMixed,
  )

let pdf = (f: float, t: t) => {
  let mixedPoint: PointSetTypes.mixedPoint = T.xToY(f, t)
  mixedPoint.continuous +. mixedPoint.discrete
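For orientation, a hedged sketch of calling the new top-level logScore on point-set inputs; estimatePointSet and answerPointSet are assumed PointSetTypes.pointSetDist values already in scope, not part of this diff:

// DistAnswer/ScalarAnswer are the scoreArgs constructors defined in PointSetDist_Scoring
// (see the next file in this diff).
let score = PointSetDist.logScore(
  PointSetDist_Scoring.DistAnswer({
    estimate: estimatePointSet,
    answer: answerPointSet,
    prior: None,
  }),
)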
@@ -1,46 +1,149 @@
module KLDivergence = {
  let logFn = Js.Math.log // base e
  let integrand = (predictionElement: float, answerElement: float): result<
type pointSetDist = PointSetTypes.pointSetDist

type scalar = float
type score = float
type abstractScoreArgs<'a, 'b> = {estimate: 'a, answer: 'b, prior: option<'a>}
type scoreArgs =
  | DistAnswer(abstractScoreArgs<pointSetDist, pointSetDist>)
  | ScalarAnswer(abstractScoreArgs<pointSetDist, scalar>)

let logFn = Js.Math.log // base e
let minusScaledLogOfQuotient = (~esti, ~answ): result<float, Operation.Error.t> => {
  let quot = esti /. answ
  quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answ *. logFn(quot))
}

module WithDistAnswer = {
  // The Kullback-Leibler divergence
  let integrand = (estimateElement: float, answerElement: float): result<
    float,
    Operation.Error.t,
  > =>
    // We decided that negative infinity, not an error at answerElement = 0.0, is a desirable value.
    // We decided that 0.0, not an error at answerElement = 0.0, is a desirable value.
    if answerElement == 0.0 {
      Ok(0.0)
    } else if predictionElement == 0.0 {
    } else if estimateElement == 0.0 {
      Ok(infinity)
    } else {
      let quot = predictionElement /. answerElement
      quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answerElement *. logFn(quot))
      minusScaledLogOfQuotient(~esti=estimateElement, ~answ=answerElement)
    }
}

module LogScoreWithPointResolution = {
  let logFn = Js.Math.log
  let score = (
    ~priorPdf: option<float => float>,
    ~predictionPdf: float => float,
    ~answer: float,
  ): result<float, Operation.Error.t> => {
    let numerator = answer->predictionPdf
    if numerator < 0.0 {
      Operation.PdfInvalidError->Error
    } else if numerator == 0.0 {
      infinity->Ok
    } else {
      -.(
        switch priorPdf {
        | None => numerator->logFn
        | Some(f) => {
            let priorDensityOfAnswer = f(answer)
            if priorDensityOfAnswer == 0.0 {
              neg_infinity
            } else {
              (numerator /. priorDensityOfAnswer)->logFn
            }
          }
        }
      )->Ok
  let sum = (
    ~estimate: pointSetDist,
    ~answer: pointSetDist,
    ~combineFn,
    ~integrateFn,
    ~toMixedFn,
  ): result<score, Operation.Error.t> => {
    let combineAndIntegrate = (estimate, answer) =>
      combineFn(integrand, estimate, answer)->E.R2.fmap(integrateFn)

    let getMixedSums = (estimate: pointSetDist, answer: pointSetDist) => {
      let esti = estimate->toMixedFn
      let answ = answer->toMixedFn
      switch (
        Mixed.T.toContinuous(esti),
        Mixed.T.toDiscrete(esti),
        Mixed.T.toContinuous(answ),
        Mixed.T.toDiscrete(answ),
      ) {
      | (
          Some(estiContinuousPart),
          Some(estiDiscretePart),
          Some(answContinuousPart),
          Some(answDiscretePart),
        ) =>
        E.R.merge(
          combineAndIntegrate(
            PointSetTypes.Discrete(estiDiscretePart),
            PointSetTypes.Discrete(answDiscretePart),
          ),
          combineAndIntegrate(Continuous(estiContinuousPart), Continuous(answContinuousPart)),
        )
      | (_, _, _, _) => `unreachable state`->Operation.Other->Error
      }
    }

    switch (estimate, answer) {
    | (Continuous(_), Continuous(_))
    | (Discrete(_), Discrete(_)) =>
      combineAndIntegrate(estimate, answer)
    | (_, _) =>
      getMixedSums(estimate, answer)->E.R2.fmap(((discretePart, continuousPart)) =>
        discretePart +. continuousPart
      )
    }
  }

  let sumWithPrior = (
    ~estimate: pointSetDist,
    ~answer: pointSetDist,
    ~prior: pointSetDist,
    ~combineFn,
    ~integrateFn,
    ~toMixedFn,
  ): result<score, Operation.Error.t> => {
    let kl1 = sum(~estimate, ~answer, ~combineFn, ~integrateFn, ~toMixedFn)
    let kl2 = sum(~estimate=prior, ~answer, ~combineFn, ~integrateFn, ~toMixedFn)
    E.R.merge(kl1, kl2)->E.R2.fmap(((kl1', kl2')) => kl1' -. kl2')
  }
}

module WithScalarAnswer = {
  let sum = (mp: PointSetTypes.MixedPoint.t): float => mp.continuous +. mp.discrete
  let score = (~estimate: pointSetDist, ~answer: scalar): result<score, Operation.Error.t> => {
    let _score = (~estimatePdf: float => option<float>, ~answer: float): result<
      score,
      Operation.Error.t,
    > => {
      let density = answer->estimatePdf
      switch density {
      | None => Operation.PdfInvalidError->Error
      | Some(density') =>
        if density' < 0.0 {
          Operation.PdfInvalidError->Error
        } else if density' == 0.0 {
          infinity->Ok
        } else {
          density'->logFn->(x => -.x)->Ok
        }
      }
    }

    let estimatePdf = x =>
      switch estimate {
      | Continuous(esti) => Continuous.T.xToY(x, esti)->sum->Some
      | Discrete(esti) => Discrete.T.xToY(x, esti)->sum->Some
      | Mixed(_) => None
      }
    _score(~estimatePdf, ~answer)
  }

  let scoreWithPrior = (~estimate: pointSetDist, ~answer: scalar, ~prior: pointSetDist): result<
    score,
    Operation.Error.t,
  > => {
    E.R.merge(score(~estimate, ~answer), score(~estimate=prior, ~answer))->E.R2.fmap(((s1, s2)) =>
      s1 -. s2
    )
  }
}

let twoGenericDistsToTwoPointSetDists = (~toPointSetFn, estimate, answer): result<
  (pointSetDist, pointSetDist),
  'e,
> => E.R.merge(toPointSetFn(estimate, ()), toPointSetFn(answer, ()))

let logScore = (args: scoreArgs, ~combineFn, ~integrateFn, ~toMixedFn): result<
  score,
  Operation.Error.t,
> =>
  switch args {
  | DistAnswer({estimate, answer, prior: None}) =>
    WithDistAnswer.sum(~estimate, ~answer, ~integrateFn, ~combineFn, ~toMixedFn)
  | DistAnswer({estimate, answer, prior: Some(prior)}) =>
    WithDistAnswer.sumWithPrior(~estimate, ~answer, ~prior, ~integrateFn, ~combineFn, ~toMixedFn)
  | ScalarAnswer({estimate, answer, prior: None}) => WithScalarAnswer.score(~estimate, ~answer)
  | ScalarAnswer({estimate, answer, prior: Some(prior)}) =>
    WithScalarAnswer.scoreWithPrior(~estimate, ~answer, ~prior)
  }
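For intuition, in the notation of the new module (a hedged reading of the code above, not text from this diff): with estimate density p and answer density q, WithDistAnswer.integrand(p(x), q(x)) evaluates to -q(x) * log(p(x) / q(x)), so combining two shapes with that integrand and integrating yields the Kullback-Leibler divergence KL(answer || estimate) = ∫ q(x) * log(q(x) / p(x)) dx. WithDistAnswer.sumWithPrior then reports KL(answer || estimate) - KL(answer || prior), and WithScalarAnswer.score returns the negative log density -log(p(answer)) of a scalar answer under the estimate.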
@@ -8,6 +8,7 @@ type rec frType =
  | FRTypeNumber
  | FRTypeNumeric
  | FRTypeDistOrNumber
  | FRTypeDist
  | FRTypeLambda
  | FRTypeRecord(frTypeRecord)
  | FRTypeDict(frType)
@@ -41,7 +42,7 @@ and frValueDistOrNumber = FRValueNumber(float) | FRValueDist(DistributionTypes.g
type fnDefinition = {
  name: string,
  inputs: array<frType>,
  run: (array<frValue>, DistributionOperation.env) => result<internalExpressionValue, string>,
  run: (array<frValue>, GenericDist.env) => result<internalExpressionValue, string>,
}

type function = {
@@ -60,6 +61,7 @@ module FRType = {
    switch t {
    | FRTypeNumber => "number"
    | FRTypeNumeric => "numeric"
    | FRTypeDist => "distribution"
    | FRTypeDistOrNumber => "distribution|number"
    | FRTypeRecord(r) => {
        let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
@@ -98,6 +100,7 @@ module FRType = {
    | (FRTypeDistOrNumber, IEvDistribution(Symbolic(#Float(f)))) =>
      Some(FRValueDistOrNumber(FRValueNumber(f)))
    | (FRTypeDistOrNumber, IEvDistribution(f)) => Some(FRValueDistOrNumber(FRValueDist(f)))
    | (FRTypeDist, IEvDistribution(f)) => Some(FRValueDist(f))
    | (FRTypeNumeric, IEvNumber(f)) => Some(FRValueNumber(f))
    | (FRTypeNumeric, IEvDistribution(Symbolic(#Float(f)))) => Some(FRValueNumber(f))
    | (FRTypeLambda, IEvLambda(f)) => Some(FRValueLambda(f))
@@ -319,7 +322,7 @@ module FnDefinition = {
    t.name ++ `(${inputs})`
  }

  let run = (t: t, args: array<internalExpressionValue>, env: DistributionOperation.env) => {
  let run = (t: t, args: array<internalExpressionValue>, env: GenericDist.env) => {
    let argValues = FRType.matchWithExpressionValueArray(t.inputs, args)
    switch argValues {
    | Some(values) => t.run(values, env)
@@ -374,7 +377,7 @@ module Registry = {
    ~registry: registry,
    ~fnName: string,
    ~args: array<internalExpressionValue>,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ) => {
    let matchToDef = m => Matcher.Registry.matchToDef(registry, m)
    //Js.log(toSimple(registry))
@@ -27,6 +27,12 @@ module Prepare = {
        | _ => Error(impossibleError)
        }

      let threeArgs = (inputs: ts): result<ts, err> =>
        switch inputs {
        | [FRValueRecord([(_, n1), (_, n2), (_, n3)])] => Ok([n1, n2, n3])
        | _ => Error(impossibleError)
        }

      let toArgs = (inputs: ts): result<ts, err> =>
        switch inputs {
        | [FRValueRecord(args)] => args->E.A2.fmap(((_, b)) => b)->Ok
@@ -57,6 +63,16 @@ module Prepare = {
      }
    }

    let twoDist = (values: ts): result<
      (DistributionTypes.genericDist, DistributionTypes.genericDist),
      err,
    > => {
      switch values {
      | [FRValueDist(a1), FRValueDist(a2)] => Ok(a1, a2)
      | _ => Error(impossibleError)
      }
    }

    let twoNumbers = (values: ts): result<(float, float), err> => {
      switch values {
      | [FRValueNumber(a1), FRValueNumber(a2)] => Ok(a1, a2)
@@ -81,6 +97,11 @@ module Prepare = {
    module Record = {
      let twoDistOrNumber = (values: ts): result<(frValueDistOrNumber, frValueDistOrNumber), err> =>
        values->ToValueArray.Record.twoArgs->E.R.bind(twoDistOrNumber)

      let twoDist = (values: ts): result<
        (DistributionTypes.genericDist, DistributionTypes.genericDist),
        err,
      > => values->ToValueArray.Record.twoArgs->E.R.bind(twoDist)
    }
  }

@@ -128,8 +149,7 @@ module Prepare = {
module Process = {
  module DistOrNumberToDist = {
    module Helpers = {
      let toSampleSet = (r, env: DistributionOperation.env) =>
        GenericDist.toSampleSetDist(r, env.sampleCount)
      let toSampleSet = (r, env: GenericDist.env) => GenericDist.toSampleSetDist(r, env.sampleCount)

      let mapFnResult = r =>
        switch r {
@@ -166,7 +186,7 @@ module Process = {
    let oneValue = (
      ~fn: float => result<DistributionTypes.genericDist, string>,
      ~value: frValueDistOrNumber,
      ~env: DistributionOperation.env,
      ~env: GenericDist.env,
    ): result<DistributionTypes.genericDist, string> => {
      switch value {
      | FRValueNumber(a1) => fn(a1)
@@ -179,7 +199,7 @@ module Process = {
    let twoValues = (
      ~fn: ((float, float)) => result<DistributionTypes.genericDist, string>,
      ~values: (frValueDistOrNumber, frValueDistOrNumber),
      ~env: DistributionOperation.env,
      ~env: GenericDist.env,
    ): result<DistributionTypes.genericDist, string> => {
      switch values {
      | (FRValueNumber(a1), FRValueNumber(a2)) => fn((a1, a2))
@@ -49,7 +49,7 @@ let inputsTodist = (inputs: array<FunctionRegistry_Core.frValue>, makeDist) => {
  expressionValue
}

let registry = [
let registryStart = [
  Function.make(
    ~name="toContinuousPointSet",
    ~definitions=[
@@ -510,3 +510,67 @@ to(5,10)
    (),
  ),
]

let runScoring = (estimate, answer, prior, env) => {
  GenericDist.Score.logScore(~estimate, ~answer, ~prior, ~env)
  ->E.R2.fmap(FunctionRegistry_Helpers.Wrappers.evNumber)
  ->E.R2.errMap(DistributionTypes.Error.toString)
}

let scoreFunctions = [
  Function.make(
    ~name="Score",
    ~definitions=[
      FnDefinition.make(
        ~name="logScore",
        ~inputs=[
          FRTypeRecord([
            ("estimate", FRTypeDist),
            ("answer", FRTypeDistOrNumber),
            ("prior", FRTypeDist),
          ]),
        ],
        ~run=(inputs, env) => {
          switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.threeArgs(inputs) {
          | Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueDist(d)), FRValueDist(prior)]) =>
            runScoring(estimate, Score_Dist(d), Some(prior), env)
          | Ok([
              FRValueDist(estimate),
              FRValueDistOrNumber(FRValueNumber(d)),
              FRValueDist(prior),
            ]) =>
            runScoring(estimate, Score_Scalar(d), Some(prior), env)
          | Error(e) => Error(e)
          | _ => Error(FunctionRegistry_Helpers.impossibleError)
          }
        },
      ),
      FnDefinition.make(
        ~name="logScore",
        ~inputs=[FRTypeRecord([("estimate", FRTypeDist), ("answer", FRTypeDistOrNumber)])],
        ~run=(inputs, env) => {
          switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.twoArgs(inputs) {
          | Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueDist(d))]) =>
            runScoring(estimate, Score_Dist(d), None, env)
          | Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueNumber(d))]) =>
            runScoring(estimate, Score_Scalar(d), None, env)
          | Error(e) => Error(e)
          | _ => Error(FunctionRegistry_Helpers.impossibleError)
          }
        },
      ),
      FnDefinition.make(~name="klDivergence", ~inputs=[FRTypeDist, FRTypeDist], ~run=(
        inputs,
        env,
      ) => {
        switch inputs {
        | [FRValueDist(estimate), FRValueDist(d)] => runScoring(estimate, Score_Dist(d), None, env)
        | _ => Error(FunctionRegistry_Helpers.impossibleError)
        }
      }),
    ],
    (),
  ),
]

let registry = E.A.append(registryStart, scoreFunctions)
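A hedged sketch of exercising runScoring directly, mirroring the registry definitions above; estimateDist, answerDist, priorDist, and env are assumed to be in scope and are not part of this diff:

// Scalar answer with a prior, as in the three-argument "logScore" definition above.
let withPrior = runScoring(estimateDist, Score_Scalar(4.5), Some(priorDist), env)
// Distribution answer with no prior, as in the two-argument definition and "klDivergence".
let noPrior = runScoring(estimateDist, Score_Dist(answerDist), None, env)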
@@ -1,7 +1,7 @@
module IEV = ReducerInterface_InternalExpressionValue
type internalExpressionValue = IEV.t

let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
  result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
> => {
  switch call {
@@ -1,7 +1,7 @@
module IEV = ReducerInterface_InternalExpressionValue
type internalExpressionValue = IEV.t

let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
  result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
> => {
  switch call {
@@ -86,7 +86,7 @@ let toStringResult = x =>
  }

@genType
type environment = DistributionOperation.env
type environment = GenericDist.env

@genType
let defaultEnvironment: environment = DistributionOperation.defaultEnv
@@ -32,50 +32,38 @@ module Helpers = {
  let toFloatFn = (
    fnCall: DistributionTypes.DistributionOperation.toFloat,
    dist: DistributionTypes.genericDist,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ) => {
    FromDist(DistributionTypes.DistributionOperation.ToFloat(fnCall), dist)
    ->DistributionOperation.run(~env)
    ->Some
    FromDist(#ToFloat(fnCall), dist)->DistributionOperation.run(~env)->Some
  }

  let toStringFn = (
    fnCall: DistributionTypes.DistributionOperation.toString,
    dist: DistributionTypes.genericDist,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ) => {
    FromDist(DistributionTypes.DistributionOperation.ToString(fnCall), dist)
    ->DistributionOperation.run(~env)
    ->Some
    FromDist(#ToString(fnCall), dist)->DistributionOperation.run(~env)->Some
  }

  let toBoolFn = (
    fnCall: DistributionTypes.DistributionOperation.toBool,
    dist: DistributionTypes.genericDist,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ) => {
    FromDist(DistributionTypes.DistributionOperation.ToBool(fnCall), dist)
    ->DistributionOperation.run(~env)
    ->Some
    FromDist(#ToBool(fnCall), dist)->DistributionOperation.run(~env)->Some
  }

  let toDistFn = (
    fnCall: DistributionTypes.DistributionOperation.toDist,
    dist,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ) => {
    FromDist(DistributionTypes.DistributionOperation.ToDist(fnCall), dist)
    ->DistributionOperation.run(~env)
    ->Some
    FromDist(#ToDist(fnCall), dist)->DistributionOperation.run(~env)->Some
  }

  let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: DistributionOperation.env) => {
  let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: GenericDist.env) => {
    FromDist(
      DistributionTypes.DistributionOperation.ToDistCombination(
        direction,
        arithmeticMap(arithmetic),
        #Dist(dist2),
      ),
      #ToDistCombination(direction, arithmeticMap(arithmetic), #Dist(dist2)),
      dist1,
    )->DistributionOperation.run(~env)
  }
@@ -109,7 +97,7 @@ module Helpers = {
  let mixtureWithGivenWeights = (
    distributions: array<DistributionTypes.genericDist>,
    weights: array<float>,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ): DistributionOperation.outputType =>
    E.A.length(distributions) == E.A.length(weights)
      ? Mixture(Belt.Array.zip(distributions, weights))->DistributionOperation.run(~env)
@@ -119,7 +107,7 @@ module Helpers = {

  let mixtureWithDefaultWeights = (
    distributions: array<DistributionTypes.genericDist>,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ): DistributionOperation.outputType => {
    let length = E.A.length(distributions)
    let weights = Belt.Array.make(length, 1.0 /. Belt.Int.toFloat(length))
@@ -128,7 +116,7 @@ module Helpers = {

  let mixture = (
    args: array<internalExpressionValue>,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ): DistributionOperation.outputType => {
    let error = (err: string): DistributionOperation.outputType =>
      err->DistributionTypes.ArgumentError->GenDistError
@@ -167,20 +155,6 @@ module Helpers = {
      }
    }
  }

  let klDivergenceWithPrior = (
    prediction: DistributionTypes.genericDist,
    answer: DistributionTypes.genericDist,
    prior: DistributionTypes.genericDist,
    env: DistributionOperation.env,
  ) => {
    let term1 = DistributionOperation.Constructors.klDivergence(~env, prediction, answer)
    let term2 = DistributionOperation.Constructors.klDivergence(~env, prior, answer)
    switch E.R.merge(term1, term2)->E.R2.fmap(((a, b)) => a -. b) {
    | Ok(x) => x->DistributionOperation.Float->Some
    | Error(_) => None
    }
  }
}

module SymbolicConstructors = {
@@ -199,7 +173,7 @@ module SymbolicConstructors = {
    }
}

let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperation.env): option<
let dispatchToGenericOutput = (call: IEV.functionCall, env: GenericDist.env): option<
  DistributionOperation.outputType,
> => {
  let (fnName, args) = call
@@ -239,35 +213,6 @@ let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperatio
      ~env,
    )->Some
  | ("normalize", [IEvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist, ~env)
  | ("klDivergence", [IEvDistribution(prediction), IEvDistribution(answer)]) =>
    Some(DistributionOperation.run(FromDist(ToScore(KLDivergence(answer)), prediction), ~env))
  | (
      "klDivergence",
      [IEvDistribution(prediction), IEvDistribution(answer), IEvDistribution(prior)],
    ) =>
    Helpers.klDivergenceWithPrior(prediction, answer, prior, env)
  | (
    "logScoreWithPointAnswer",
    [IEvDistribution(prediction), IEvNumber(answer), IEvDistribution(prior)],
  )
  | (
    "logScoreWithPointAnswer",
    [
      IEvDistribution(prediction),
      IEvDistribution(Symbolic(#Float(answer))),
      IEvDistribution(prior),
    ],
  ) =>
    DistributionOperation.run(
      FromDist(ToScore(LogScore(answer, prior->Some)), prediction),
      ~env,
    )->Some
  | ("logScoreWithPointAnswer", [IEvDistribution(prediction), IEvNumber(answer)])
  | (
    "logScoreWithPointAnswer",
    [IEvDistribution(prediction), IEvDistribution(Symbolic(#Float(answer)))],
  ) =>
    DistributionOperation.run(FromDist(ToScore(LogScore(answer, None)), prediction), ~env)->Some
  | ("isNormalized", [IEvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist, ~env)
  | ("toPointSet", [IEvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist, ~env)
  | ("scaleLog", [IEvDistribution(dist)]) =>
@@ -24,7 +24,7 @@ module ScientificUnit = {
  }
}

let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
  result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
> => {
  switch call {
@@ -8,7 +8,7 @@ The below few seem to work fine. In the future there's definitely more work to d
*/

@genType
type samplingParams = DistributionOperation.env
type samplingParams = GenericDist.env

@genType
type genericDist = DistributionTypes.genericDist
@@ -547,6 +547,7 @@ module A = {
  let init = Array.init
  let reduce = Belt.Array.reduce
  let reducei = Belt.Array.reduceWithIndex
  let some = Belt.Array.some
  let isEmpty = r => length(r) < 1
  let stableSortBy = Belt.SortArray.stableSortBy
  let toNoneIfEmpty = r => isEmpty(r) ? None : Some(r)
@@ -327,8 +327,8 @@ module Zipped = {
module PointwiseCombination = {
  // t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
  let combine: (
    (float, float) => result<float, Operation.Error.t>,
    interpolator,
    (float, float) => result<float, Operation.Error.t>,
    T.t,
    T.t,
  ) => result<T.t, Operation.Error.t> = %raw(`
@@ -337,7 +337,7 @@ module PointwiseCombination = {
      // and interpolates the value on the other side, thus accumulating xs and ys.
      // This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.

      function(fn, interpolator, t1, t2) {
      function(interpolator, fn, t1, t2) {
        let t1n = t1.xs.length;
        let t2n = t2.xs.length;
        let outX = [];
@@ -399,11 +399,11 @@ module PointwiseCombination = {
    This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
 */
  let combineAlongSupportOfSecondArgument0: (
    (float, float) => result<float, Operation.Error.t>,
    interpolator,
    (float, float) => result<float, Operation.Error.t>,
    T.t,
    T.t,
  ) => result<T.t, Operation.Error.t> = (fn, interpolator, t1, t2) => {
  ) => result<T.t, Operation.Error.t> = (interpolator, fn, t1, t2) => {
    let newYs = []
    let newXs = []
    let (l1, l2) = (E.A.length(t1.xs), E.A.length(t2.xs))
@@ -496,29 +496,9 @@ module PointwiseCombination = {
    let newYs = E.A.fmap(x => XtoY.linear(x, t), newXs)
    {xs: newXs, ys: newYs}
  }
  // This function is used for klDivergence
  let combineAlongSupportOfSecondArgument: (
    (float, float) => result<float, Operation.Error.t>,
    T.t,
    T.t,
  ) => result<T.t, Operation.Error.t> = (fn, prediction, answer) => {
    let combineWithFn = (answerX: float, i: int) => {
      let answerY = answer.ys[i]
      let predictionY = XtoY.linear(answerX, prediction)
      fn(predictionY, answerY)
    }
    let newYsWithError = Js.Array.mapi((x, i) => combineWithFn(x, i), answer.xs)
    let newYsOrError = E.A.R.firstErrorOrOpen(newYsWithError)
    let result = switch newYsOrError {
    | Ok(a) => Ok({xs: answer.xs, ys: a})
    | Error(b) => Error(b)
    }

    result
  }

  let addCombine = (interpolator: interpolator, t1: T.t, t2: T.t): T.t =>
    combine((a, b) => Ok(a +. b), interpolator, t1, t2)->E.R.toExn(
    combine(interpolator, (a, b) => Ok(a +. b), t1, t2)->E.R.toExn(
      "Add operation should never fail",
      _,
    )
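A hedged sketch of the reordered combine signature (interpolator first, combining function second), written as a hypothetical sibling of addCombine above:

// Hypothetical helper, not in this diff: pointwise multiplication via the reordered combine.
let mulCombine = (interpolator: interpolator, t1: T.t, t2: T.t) =>
  combine(interpolator, (a, b) => Ok(a *. b), t1, t2)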