Final cleanup, but typescript is still borked
Value: [1e-8 to 1e-6]
parent 33fd529696
commit 28cb6b9c88
@@ -47,9 +47,6 @@ describe("kl divergence", () => {
       (stdev2 ** 2.0 +. (mean2 -. mean1) ** 2.0) /. (2.0 *. stdev1 ** 2.0) -. 0.5
     let kl = E.R.liftJoin2(klDivergence, prediction, answer)
 
-    // Js.Console.log2("Analytical: ", analyticalKl)
-    // Js.Console.log2("Computed: ", kl)
-
     switch kl {
     | Ok(kl') => kl'->expect->toBeCloseTo(analyticalKl)
     | Error(err) => {
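
For reference, the analytical value this test checks against has the shape of the closed-form KL divergence between two normal distributions: the fragment shown above matches the last two terms of that formula, and the preceding (unshown) line presumably supplies the logarithmic term. A minimal sketch, not taken from the repository, using a hypothetical helper name:

// Closed-form KL(N(mean2, stdev2) || N(mean1, stdev1)); hypothetical helper, not repo code.
let analyticalKlNormals = (mean1, stdev1, mean2, stdev2) =>
  Js.Math.log(stdev1 /. stdev2) +.
  (stdev2 ** 2.0 +. (mean2 -. mean1) ** 2.0) /. (2.0 *. stdev1 ** 2.0) -. 0.5
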
@@ -3,17 +3,25 @@ open Expect
 open TestHelpers
 
 describe("Scale logarithm", () => {
-  // test("mean of the base e scalar logarithm of an exponential(10)", () => {
-  //   let rate = 10.0
-  //   let scalelog = DistributionOperation.Constructors.scaleLogarithm(~env, mkExponential(rate), MagicNumbers.Math.e)
-  //
-  //   let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), scalelog)
-  //   let meanAnalytical = Js.Math.log(rate /. MagicNumbers.Math.e)
-  //   switch meanResult {
-  //   | Ok(meanValue) => meanValue -> expect -> toBeCloseTo(meanAnalytical)
-  //   | Error(err) => err -> expect -> toBe(DistributionTypes.OperationError(DivisionByZeroError))
-  //   }
-  // })
+  /* These tests may not be important, because scalelog isn't normalized
+  The first one may be failing for a number of reasons.
+  */
+  Skip.test("mean of the base e scalar logarithm of an exponential(10)", () => {
+    let rate = 10.0
+    let scalelog = DistributionOperation.Constructors.scaleLogarithm(
+      ~env,
+      mkExponential(rate),
+      MagicNumbers.Math.e,
+    )
+
+    let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), scalelog)
+    // expected value of log of exponential distribution.
+    let meanAnalytical = Js.Math.log(rate) +. 1.0
+    switch meanResult {
+    | Ok(meanValue) => meanValue->expect->toBeCloseTo(meanAnalytical)
+    | Error(err) => err->expect->toBe(DistributionTypes.OperationError(DivisionByZeroError))
+    }
+  })
   let low = 10.0
   let high = 100.0
   let scalelog = DistributionOperation.Constructors.scaleLogarithm(~env, mkUniform(low, high), 2.0)
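
A note on the analytical target in the skipped test above: if the intended quantity were simply the expected value of ln(X) for X ~ Exponential(rate), the standard closed form uses the Euler–Mascheroni constant rather than log(rate) + 1. The commit itself flags the test as possibly failing, notes that scaleLogarithm is not normalized, and skips it, so the two values need not agree. A hedged sketch with hypothetical names, not repo code:

// E[ln X] for X ~ Exponential(rate) (density rate * exp(-rate * x)) is -ln(rate) - gamma,
// where gamma is the Euler–Mascheroni constant. Hypothetical helper for illustration only.
let eulerGamma = 0.5772156649
let expectedLogOfExponential = (rate: float) => -.Js.Math.log(rate) -. eulerGamma
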
@@ -271,17 +271,6 @@ module T = Dist({
   let variance = (t: t): float =>
     XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
 
-  // let klDivergence0 = (prediction: t, answer: t) => {
-  //   combinePointwise(
-  //     ~combiner=XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument,
-  //     PointSetDist_Scoring.KLDivergence.integrand,
-  //     prediction,
-  //     answer,
-  //   )
-  //   |> E.R.fmap(shapeMap(XYShape.T.filterYValues(Js.Float.isFinite)))
-  //   |> E.R.fmap(integralEndY)
-  // }
-
   let klDivergence = (prediction: t, answer: t) => {
     let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
       PointSetDist_Scoring.KLDivergence.integrand,
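
The retained klDivergence builds a new shape by combining prediction and answer pointwise over the support of the second argument (the answer) and, judging by the removed klDivergence0, integrating the result via integralEndY. The actual pointwise term lives in PointSetDist_Scoring.KLDivergence.integrand and is not shown in this diff; the sketch below only illustrates the kind of term involved, with a simplified error type and hypothetical names:

// Sketch of a pointwise KL term: answer * ln(answer / prediction), with guards for zero
// densities. Simplified and hypothetical; the repository's real integrand may differ.
let integrandSketch = (predictionY: float, answerY: float): result<float, string> =>
  if answerY == 0.0 {
    Ok(0.0) // no contribution where the answer has no mass
  } else if predictionY == 0.0 {
    Error("prediction has zero density where the answer does not")
  } else {
    Ok(answerY *. Js.Math.log(answerY /. predictionY))
  }
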
@@ -390,7 +390,9 @@ module PointwiseCombination = {
     }
   `)
 
-  // This function is used for kl divergence
+  /*
+  This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
+  */
   let combineAlongSupportOfSecondArgument0: (
     (float, float) => result<float, Operation.Error.t>,
     interpolator,
@@ -450,7 +452,6 @@ module PointwiseCombination = {
         }
       | None => ()
       }
-      // Js.Console.log(newYs)
     }
     T.filterOkYs(newXs, newYs)->Ok
   }
@@ -489,6 +490,7 @@ module PointwiseCombination = {
     result
   }
 
+  // This function is used for klDivergence
   let combineAlongSupportOfSecondArgument: (
     (float, float) => result<float, Operation.Error.t>,
     T.t,
@@ -15,5 +15,10 @@
   },
   "target": "ES6",
   "include": ["src/**/*"],
-  "exclude": ["node_modules", "**/*.spec.ts", "webpack.config.js"]
+  "exclude": [
+    "../../node_modules",
+    "node_modules",
+    "**/*.spec.ts",
+    "webpack.config.js"
+  ]
 }