Final cleanup, but TypeScript is still broken

Value: [1e-8 to 1e-6]
This commit is contained in:
Quinn Dougherty 2022-05-06 15:09:00 -04:00
parent 33fd529696
commit 28cb6b9c88
5 changed files with 29 additions and 28 deletions

View File

@@ -47,9 +47,6 @@ describe("kl divergence", () => {
(stdev2 ** 2.0 +. (mean2 -. mean1) ** 2.0) /. (2.0 *. stdev1 ** 2.0) -. 0.5
let kl = E.R.liftJoin2(klDivergence, prediction, answer)
// Js.Console.log2("Analytical: ", analyticalKl)
// Js.Console.log2("Computed: ", kl)
switch kl {
| Ok(kl') => kl'->expect->toBeCloseTo(analyticalKl)
| Error(err) => {

View File

@@ -3,17 +3,25 @@ open Expect
open TestHelpers
describe("Scale logarithm", () => {
// test("mean of the base e scalar logarithm of an exponential(10)", () => {
// let rate = 10.0
// let scalelog = DistributionOperation.Constructors.scaleLogarithm(~env, mkExponential(rate), MagicNumbers.Math.e)
//
// let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), scalelog)
// let meanAnalytical = Js.Math.log(rate /. MagicNumbers.Math.e)
// switch meanResult {
// | Ok(meanValue) => meanValue -> expect -> toBeCloseTo(meanAnalytical)
// | Error(err) => err -> expect -> toBe(DistributionTypes.OperationError(DivisionByZeroError))
// }
// })
/* These tests may not be important, because scalelog isn't normalized
The first one may be failing for a number of reasons.
*/
// Skipped test (see the note above): scalelog isn't normalized, so the
// computed mean may legitimately disagree with the analytical expectation.
Skip.test("mean of the base e scalar logarithm of an exponential(10)", () => {
let rate = 10.0
// Apply a base-e logarithmic scale transform to an exponential(rate) dist.
let scalelog = DistributionOperation.Constructors.scaleLogarithm(
~env,
mkExponential(rate),
MagicNumbers.Math.e,
)
// Compute the mean of the scaled distribution (a result, since scalelog may fail).
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), scalelog)
// expected value of log of exponential distribution.
// NOTE(review): for X ~ Exp(rate), E[ln X] = -ln(rate) - γ (Euler–Mascheroni),
// so `Js.Math.log(rate) +. 1.0` looks suspect — confirm before re-enabling.
let meanAnalytical = Js.Math.log(rate) +. 1.0
switch meanResult {
// Success: computed mean should approximate the analytical value.
| Ok(meanValue) => meanValue->expect->toBeCloseTo(meanAnalytical)
// Failure: the only error this test anticipates is a division-by-zero
// operation error from the scaling step.
| Error(err) => err->expect->toBe(DistributionTypes.OperationError(DivisionByZeroError))
}
})
let low = 10.0
let high = 100.0
let scalelog = DistributionOperation.Constructors.scaleLogarithm(~env, mkUniform(low, high), 2.0)

View File

@@ -271,17 +271,6 @@ module T = Dist({
let variance = (t: t): float =>
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
// let klDivergence0 = (prediction: t, answer: t) => {
// combinePointwise(
// ~combiner=XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument,
// PointSetDist_Scoring.KLDivergence.integrand,
// prediction,
// answer,
// )
// |> E.R.fmap(shapeMap(XYShape.T.filterYValues(Js.Float.isFinite)))
// |> E.R.fmap(integralEndY)
// }
let klDivergence = (prediction: t, answer: t) => {
let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
PointSetDist_Scoring.KLDivergence.integrand,

View File

@@ -390,7 +390,9 @@ module PointwiseCombination = {
}
`)
// This function is used for kl divergence
/*
This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
*/
let combineAlongSupportOfSecondArgument0: (
(float, float) => result<float, Operation.Error.t>,
interpolator,
@@ -450,7 +452,6 @@ module PointwiseCombination = {
}
| None => ()
}
// Js.Console.log(newYs)
}
T.filterOkYs(newXs, newYs)->Ok
}
@@ -489,6 +490,7 @@ module PointwiseCombination = {
result
}
// This function is used for klDivergence
let combineAlongSupportOfSecondArgument: (
(float, float) => result<float, Operation.Error.t>,
T.t,

View File

@@ -15,5 +15,10 @@
},
"target": "ES6",
"include": ["src/**/*"],
"exclude": ["node_modules", "**/*.spec.ts", "webpack.config.js"]
"exclude": [
"../../node_modules",
"node_modules",
"**/*.spec.ts",
"webpack.config.js"
]
}