Minor refactor
This commit is contained in:
parent 64eef2b169
commit dceea9c6b5
@@ -1,11 +0,0 @@
open Jest;
open Expect;

describe("Shape", () => {
  describe("Continuous", () => {
    test("", () => {
      Js.log(Jstat.Jstat.normal);
      expect(Jstat.Jstat.normal##pdf(3.0, 3.0, 3.0)) |> toEqual(1.0);
    })
  })
});

@@ -1,13 +0,0 @@
open Jest;
open Expect;

describe("Shape", () => {
  describe("Parser", () => {
    test("", () => {
      let parsed1 = MathJsParser.fromString("mm(normal(0,1), normal(10,1))");
      Js.log(parsed1 |> E.R.fmap(Jstat.toString));
      Js.log(parsed1 |> E.R.fmap(Jstat.toShape(20)));
      expect(1.0) |> toEqual(1.0);
    })
  })
});

@@ -37,13 +37,13 @@ module DemoDist = {
  let parsed1 = MathJsParser.fromString(guesstimatorString);
  let shape =
    switch (parsed1) {
    | Ok(r) => Some(Jstat.toShape(10000, r))
    | Ok(r) => Some(SymbolicDist.toShape(10000, r))
    | _ => None
    };

  let str =
    switch (parsed1) {
    | Ok(r) => Jstat.toString(r)
    | Ok(r) => SymbolicDist.toString(r)
    | Error(e) => e
    };

@@ -1,189 +1,33 @@
// Todo: Another way of doing this is with [@bs.scope "normal"], which may be more elegant
module Jstat = {
  type normal = {
    .
    [@bs.meth] "pdf": (float, float, float) => float,
    [@bs.meth] "cdf": (float, float, float) => float,
    [@bs.meth] "inv": (float, float, float) => float,
    [@bs.meth] "sample": (float, float) => float,
  };
  type lognormal = {
    .
    [@bs.meth] "pdf": (float, float, float) => float,
    [@bs.meth] "cdf": (float, float, float) => float,
    [@bs.meth] "inv": (float, float, float) => float,
    [@bs.meth] "sample": (float, float) => float,
  };
  type uniform = {
    .
    [@bs.meth] "pdf": (float, float, float) => float,
    [@bs.meth] "cdf": (float, float, float) => float,
    [@bs.meth] "inv": (float, float, float) => float,
    [@bs.meth] "sample": (float, float) => float,
  };
  [@bs.module "jStat"] external normal: normal = "normal";
  [@bs.module "jStat"] external lognormal: lognormal = "lognormal";
  [@bs.module "jStat"] external uniform: uniform = "uniform";
};

type normal = {
  mean: float,
  stdev: float,
  .
  [@bs.meth] "pdf": (float, float, float) => float,
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
};

type lognormal = {
  mu: float,
  sigma: float,
  .
  [@bs.meth] "pdf": (float, float, float) => float,
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
};

type uniform = {
  low: float,
  high: float,
  .
  [@bs.meth] "pdf": (float, float, float) => float,
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
};

module Normal = {
  type t = normal;
  let pdf = (x, t: t) => Jstat.normal##pdf(x, t.mean, t.stdev);
  let inv = (p, t: t) => Jstat.normal##inv(p, t.mean, t.stdev);
  let sample = (t: t) => Jstat.normal##sample(t.mean, t.stdev);
  let toString = ({mean, stdev}: t) => {j|Normal($mean,$stdev)|j};
type beta = {
  .
  [@bs.meth] "pdf": (float, float, float) => float,
  [@bs.meth] "cdf": (float, float, float) => float,
  [@bs.meth] "inv": (float, float, float) => float,
  [@bs.meth] "sample": (float, float) => float,
};

module Lognormal = {
  type t = lognormal;
  let pdf = (x, t: t) => Jstat.lognormal##pdf(x, t.mu, t.sigma);
  let inv = (p, t: t) => Jstat.lognormal##inv(p, t.mu, t.sigma);
  let sample = (t: t) => Jstat.lognormal##sample(t.mu, t.sigma);
  let toString = ({mu, sigma}: t) => {j|Lognormal($mu,$sigma)|j};
};

module Uniform = {
  type t = uniform;
  let pdf = (x, t: t) => Jstat.uniform##pdf(x, t.low, t.high);
  let inv = (p, t: t) => Jstat.uniform##inv(p, t.low, t.high);
  let sample = (t: t) => Jstat.uniform##sample(t.low, t.high);
  let toString = ({low, high}: t) => {j|Uniform($low,$high)|j};
};

type dist = [
  | `Normal(normal)
  | `Lognormal(lognormal)
  | `Uniform(uniform)
];

module Mixed = {
  let pdf = (x, dist) =>
    switch (dist) {
    | `Normal(n) => Normal.pdf(x, n)
    | `Lognormal(n) => Lognormal.pdf(x, n)
    | `Uniform(n) => Uniform.pdf(x, n)
    };

  let inv = (x, dist) =>
    switch (dist) {
    | `Normal(n) => Normal.inv(x, n)
    | `Lognormal(n) => Lognormal.inv(x, n)
    | `Uniform(n) => Uniform.inv(x, n)
    };

  let sample = dist =>
    switch (dist) {
    | `Normal(n) => Normal.sample(n)
    | `Lognormal(n) => Lognormal.sample(n)
    | `Uniform(n) => Uniform.sample(n)
    };

  let toString = dist =>
    switch (dist) {
    | `Normal(n) => Normal.toString(n)
    | `Lognormal(n) => Lognormal.toString(n)
    | `Uniform(n) => Uniform.toString(n)
    };

  let min = dist =>
    switch (dist) {
    | `Normal(n) => Normal.inv(0.0001, n)
    | `Lognormal(n) => Lognormal.inv(0.0001, n)
    | `Uniform({low}) => low
    };

  let max = dist =>
    switch (dist) {
    | `Normal(n) => Normal.inv(0.9999, n)
    | `Lognormal(n) => Lognormal.inv(0.9999, n)
    | `Uniform({high}) => high
    };

  // will space linear
  let toShape =
      (~xSelection: [ | `Linear | `ByWeight]=`Linear, dist: dist, sampleCount) => {
    let xs =
      switch (xSelection) {
      | `Linear => Functions.range(min(dist), max(dist), sampleCount)
      | `ByWeight =>
        Functions.range(0.00001, 0.99999, sampleCount)
        |> E.A.fmap(x => inv(x, dist))
      };
    let ys = xs |> E.A.fmap(r => pdf(r, dist));
    XYShape.T.fromArrays(xs, ys);
  };
};

// module PointwiseCombination = {
//   type math = Multiply | Add | Exponent | Power;
//   let fn = fun
//     | Multiply => 3.0
//     | Add => 4.0
// }

module PointwiseAddDistributionsWeighted = {
  type t = array((dist, float));

  let normalizeWeights = (dists: t) => {
    let total = dists |> E.A.fmap(snd) |> Functions.sum;
    dists |> E.A.fmap(((a, b)) => (a, b /. total));
  };

  let pdf = (dists: t, x: float) =>
    dists |> E.A.fmap(((e, w)) => Mixed.pdf(x, e) *. w) |> Functions.sum;

  let min = (dists: t) =>
    dists |> E.A.fmap(d => d |> fst |> Mixed.min) |> Functions.min;

  let max = (dists: t) =>
    dists |> E.A.fmap(d => d |> fst |> Mixed.max) |> Functions.max;

  let toShape = (dists: t, sampleCount: int) => {
    let xs = Functions.range(min(dists), max(dists), sampleCount);
    let ys = xs |> E.A.fmap(pdf(dists));
    XYShape.T.fromArrays(xs, ys);
  };

  let toString = (dists: t) => {
    let distString =
      dists
      |> E.A.fmap(d => Mixed.toString(fst(d)))
      |> Js.Array.joinWith(",");
    {j|pointwideAdded($distString)|j};
  };
};

type bigDist = [
  | `Dist(dist)
  | `PointwiseCombination(PointwiseAddDistributionsWeighted.t)
];

let toString = (r: bigDist) =>
  r
  |> (
    fun
    | `Dist(d) => Mixed.toString(d)
    | `PointwiseCombination(d) =>
      PointwiseAddDistributionsWeighted.toString(d)
  );

let toShape = n =>
  fun
  | `Dist(d) => Mixed.toShape(~xSelection=`ByWeight, d, n)
  | `PointwiseCombination(d) =>
    PointwiseAddDistributionsWeighted.toShape(d, n);
[@bs.module "jStat"] external normal: normal = "normal";
[@bs.module "jStat"] external lognormal: lognormal = "lognormal";
[@bs.module "jStat"] external uniform: uniform = "uniform";
[@bs.module "jStat"] external beta: beta = "beta";
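
For reference, a minimal sketch (not part of this diff) of how these raw jStat object bindings are typically called, assuming jStat's (x, mean, stdev) argument order for the normal distribution:

    /* Illustrative only; `Jstat` is the binding module kept by this hunk. */
    let densityAtMean = Jstat.normal##pdf(0.0, 0.0, 1.0);
    /* Standard normal density at its mean, roughly 0.3989. */
    Js.log(densityAtMean);

The higher-level distribution logic that previously lived in this file moves to src/symbolic/SymbolicDist.re, added later in this commit.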
@@ -1,20 +1,20 @@
open Jstat;

type arg =
module MathJsonToMathJsAdt = {
  type arg =
    | Symbol(string)
    | Value(float)
    | Fn(fn)
    | Array(array(arg))
and fn = {
    | Object(Js.Dict.t(arg))
  and fn = {
    name: string,
    args: array(arg),
  };
};

let rec parseMathjs = (j: Js.Json.t) =>
  let rec run = (j: Js.Json.t) =>
    Json.Decode.(
      switch (field("mathjs", string, j)) {
      | "FunctionNode" =>
        let args = j |> field("args", array(parseMathjs));
        let args = j |> field("args", array(run));
        Some(
          Fn({
            name: j |> field("fn", field("name", string)),

@@ -22,89 +22,166 @@ let rec parseMathjs = (j: Js.Json.t) =>
          }),
        );
      | "OperatorNode" =>
        let args = j |> field("args", array(parseMathjs));
        let args = j |> field("args", array(run));
        Some(
          Fn({
            name: j |> field("fn", string),
            args: args |> E.A.O.concatSomes,
          }),
        );
      | "ConstantNode" => Some(Value(field("value", Json.Decode.float, j)))
      | "ConstantNode" =>
        optional(field("value", Json.Decode.float), j)
        |> E.O.fmap(r => Value(r))
      | "ObjectNode" =>
        let properties = j |> field("properties", dict(run));
        Js.Dict.entries(properties)
        |> E.A.fmap(((key, value)) => value |> E.O.fmap(v => (key, v)))
        |> E.A.O.concatSomes
        |> Js.Dict.fromArray
        |> (r => Some(Object(r)));
      | "ArrayNode" =>
        let items = field("items", array(parseMathjs), j);
        let items = field("items", array(run), j);
        Some(Array(items |> E.A.O.concatSomes));
      | "SymbolNode" => Some(Symbol(field("name", string, j)))
      | n =>
        Js.log2("Couldn't parse mathjs node", j);
        Js.log3("Couldn't parse mathjs node", j, n);
        None;
      }
    );
};

// let logHigh = math.log(high);
// let logLow = math.log(low);
module MathAdtToDistDst = {
  open MathJsonToMathJsAdt;

// let mean = (math.mean(logHigh, logLow)).toFixed(3);
// let stdev = ((logHigh-logLow) / (2*1.645)).toFixed(3);
  module MathAdtCleaner = {
    let transformWithSymbol = (f: float, s: string) =>
      switch (s) {
      | "K"
      | "k" => f *. 1000.
      | "M"
      | "m" => f *. 1000000.
      | "B"
      | "b" => f *. 1000000000.
      | "T"
      | "t" => f *. 1000000000000.
      | _ => f
      };

    let normal: array(arg) => result(bigDist, string) =
    let rec run =
      fun
      | [|Value(mean), Value(stdev)|] => Ok(`Dist(`Normal({mean, stdev})))
      | Fn({name: "multiply", args: [|Value(f), Symbol(s)|]}) =>
        Value(transformWithSymbol(f, s))
      | Fn({name, args}) => Fn({name, args: args |> E.A.fmap(run)})
      | Array(args) => Array(args |> E.A.fmap(run))
      | Symbol(s) => Symbol(s)
      | Value(v) => Value(v)
      | Object(v) =>
        Object(
          v
          |> Js.Dict.entries
          |> E.A.fmap(((key, value)) => (key, run(value)))
          |> Js.Dict.fromArray,
        );
  };
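
The new MathAdtCleaner.run rewrites the raw mathjs AST before distribution parsing; its one substantive rule collapses a multiply node over a Value and a Symbol into a single scaled Value. A minimal illustrative trace (not part of this diff), assuming mathjs produced a node of that shape for an input like "5 * M":

    /* Hypothetical input, using the arg constructors defined above. */
    MathAdtCleaner.run(Fn({name: "multiply", args: [|Value(5.0), Symbol("M")|]}));
    /* transformWithSymbol(5.0, "M") scales by 1e6, so this yields Value(5000000.). */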

  let normal: array(arg) => result(SymbolicDist.bigDist, string) =
    fun
    | [|Value(mean), Value(stdev)|] =>
      Ok(`Simple(`Normal({mean, stdev})))
    | _ => Error("Wrong number of variables in normal distribution");

  let lognormal: array(arg) => result(bigDist, string) =
  let lognormal: array(arg) => result(SymbolicDist.bigDist, string) =
    fun
    | [|Value(mu), Value(sigma)|] => Ok(`Dist(`Lognormal({mu, sigma})))
    | _ => Error("Wrong number of variables in lognormal distribution");

  let to_: array(arg) => result(bigDist, string) =
    fun
    | [|Value(low), Value(high)|] => {
        let logLow = Js.Math.log(low);
        let logHigh = Js.Math.log(high);
        let mu = Functions.mean([|logLow, logHigh|]);
        let sigma = (logHigh -. logLow) /. (2.0 *. 1.645);
        Ok(`Dist(`Lognormal({mu, sigma})));
    | [|Value(mu), Value(sigma)|] => Ok(`Simple(`Lognormal({mu, sigma})))
    | [|Object(o)|] => {
        let g = Js.Dict.get(o);
        switch (g("mean"), g("stdev"), g("mu"), g("sigma")) {
        | (Some(Value(mean)), Some(Value(stdev)), _, _) =>
          Ok(`Simple(SymbolicDist.Lognormal.fromMeanAndStdev(mean, stdev)))
        | (_, _, Some(Value(mu)), Some(Value(sigma))) =>
          Ok(`Simple(`Lognormal({mu, sigma})))
        | _ => Error("Lognormal distribution would need mean and stdev")
        };
      }
    | _ => Error("Wrong number of variables in lognormal distribution");

  let uniform: array(arg) => result(bigDist, string) =
  let to_: array(arg) => result(SymbolicDist.bigDist, string) =
    fun
    | [|Value(low), Value(high)|] => Ok(`Dist(`Uniform({low, high})))
    | [|Value(low), Value(high)|] => {
        Ok(`Simple(SymbolicDist.Lognormal.from90PercentCI(low, high)));
      }
    | _ => Error("Wrong number of variables in lognormal distribution");

  let rec toValue = (r): result(bigDist, string) =>
    r
    |> (
  let uniform: array(arg) => result(SymbolicDist.bigDist, string) =
    fun
    | Value(_) => Error("Top level can't be value")
    | Fn({name: "normal", args}) => normal(args)
    | Fn({name: "lognormal", args}) => lognormal(args)
    | Fn({name: "uniform", args}) => uniform(args)
    | Fn({name: "to", args}) => to_(args)
    | Fn({name: "mm", args}) => {
        let dists: array(dist) =
    | [|Value(low), Value(high)|] => Ok(`Simple(`Uniform({low, high})))
    | _ => Error("Wrong number of variables in lognormal distribution");

  let beta: array(arg) => result(SymbolicDist.bigDist, string) =
    fun
    | [|Value(alpha), Value(beta)|] => Ok(`Simple(`Beta({alpha, beta})))
    | _ => Error("Wrong number of variables in lognormal distribution");

  let multiModal = (args: array(result(SymbolicDist.bigDist, string))) => {
    let dists =
      args
      |> E.A.fmap(toValue)
      |> E.A.fmap(
           fun
           | Ok(`Dist(n)) => Some(n)
           | Ok(`Simple(n)) => Some(n)
           | _ => None,
         )
      |> E.A.O.concatSomes;
    switch (dists |> E.A.length) {
    | 0 => Error("Multimodals need at least one input")
    | _ =>
      dists
      |> E.A.fmap(r => (r, 1.0))
      |> (r => Ok(`PointwiseCombination(r)))
    };
  };

        let inputs = dists |> E.A.fmap(r => (r, 1.0));
        Ok(`PointwiseCombination(inputs));
  let rec functionParser = (r): result(SymbolicDist.bigDist, string) =>
    r
    |> (
      fun
      | Fn({name: "normal", args}) => normal(args)
      | Fn({name: "lognormal", args}) => lognormal(args)
      | Fn({name: "uniform", args}) => uniform(args)
      | Fn({name: "beta", args}) => beta(args)
      | Fn({name: "to", args}) => to_(args)
      | Fn({name: "mm", args}) => {
          let dists = args |> E.A.fmap(functionParser);
          multiModal(dists);
        }
      | Fn({name}) => Error(name ++ ": name not found")
      | Array(_) => Error("Array not valid as top level")
      | Symbol(_) => Error("Symbol not valid as top level")
      | _ => Error("This type not currently supported")
    );

let fromString = str =>
  Mathjs.parseMath(str)
  |> E.R.bind(_, r =>
       switch (parseMathjs(r)) {
       | Some(r) => toValue(r)
       | None => Error("Second parse failed")
  let topLevel = (r): result(SymbolicDist.bigDist, string) =>
    r
    |> (
      fun
      | Fn(_) => functionParser(r)
      | Value(_) => Error("Top level can't be value")
      | Array(_) => Error("Array not valid as top level")
      | Symbol(_) => Error("Symbol not valid as top level")
      | Object(_) => Error("Object not valid as top level")
    );

  let run = (r): result(SymbolicDist.bigDist, string) =>
    r |> MathAdtCleaner.run |> topLevel;
};

let fromString = str => {
  let mathJsToJson = Mathjs.parseMath(str);
  let mathJsParse =
    E.R.bind(mathJsToJson, r =>
      switch (MathJsonToMathJsAdt.run(r)) {
      | Some(r) => Ok(r)
      | None => Error("MathJsParse Error")
      }
    );
  let value = E.R.bind(mathJsParse, MathAdtToDistDst.run);
  Js.log4("fromString", mathJsToJson, mathJsParse, value);
  value;
};
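
The deleted Parser test earlier in this commit exercised this entry point. A sketch of the expected round trip (not part of this diff), assuming the mathjs parse succeeds:

    /* Illustrative only. "mm" builds a pointwise combination with weight 1.0
       per component, so the result should look roughly like: */
    let parsed = MathJsParser.fromString("mm(normal(0,1), normal(10,1))");
    /* Ok(`PointwiseCombination([|
         (`Normal({mean: 0., stdev: 1.}), 1.0),
         (`Normal({mean: 10., stdev: 1.}), 1.0),
       |])) */
    parsed |> E.R.fmap(SymbolicDist.toString) |> Js.log;
    /* For the Ok case, the string is something like "multimodal(Normal(0,1),Normal(10,1))". */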
src/symbolic/SymbolicDist.re (new file, 196 lines)
@@ -0,0 +1,196 @@
type normal = {
  mean: float,
  stdev: float,
};

type lognormal = {
  mu: float,
  sigma: float,
};

type uniform = {
  low: float,
  high: float,
};

type beta = {
  alpha: float,
  beta: float,
};

type dist = [
  | `Normal(normal)
  | `Beta(beta)
  | `Lognormal(lognormal)
  | `Uniform(uniform)
];

type pointwiseAdd = array((dist, float));

type bigDist = [ | `Simple(dist) | `PointwiseCombination(pointwiseAdd)];

module Normal = {
  type t = normal;
  let pdf = (x, t: t) => Jstat.normal##pdf(x, t.mean, t.stdev);
  let inv = (p, t: t) => Jstat.normal##inv(p, t.mean, t.stdev);
  let sample = (t: t) => Jstat.normal##sample(t.mean, t.stdev);
  let toString = ({mean, stdev}: t) => {j|Normal($mean,$stdev)|j};
};

module Beta = {
  type t = beta;
  let pdf = (x, t: t) => Jstat.beta##pdf(x, t.alpha, t.beta);
  let inv = (p, t: t) => Jstat.beta##inv(p, t.alpha, t.beta);
  let sample = (t: t) => Jstat.beta##sample(t.alpha, t.beta);
  let toString = ({alpha, beta}: t) => {j|Beta($alpha,$beta)|j};
};

module Lognormal = {
  type t = lognormal;
  let pdf = (x, t: t) => Jstat.lognormal##pdf(x, t.mu, t.sigma);
  let inv = (p, t: t) => Jstat.lognormal##inv(p, t.mu, t.sigma);
  let sample = (t: t) => Jstat.lognormal##sample(t.mu, t.sigma);
  let toString = ({mu, sigma}: t) => {j|Lognormal($mu,$sigma)|j};
  let from90PercentCI = (low, high) => {
    let logLow = Js.Math.log(low);
    let logHigh = Js.Math.log(high);
    let mu = Functions.mean([|logLow, logHigh|]);
    let sigma = (logHigh -. logLow) /. (2.0 *. 1.645);
    `Lognormal({mu, sigma});
  };
  let fromMeanAndStdev = (mean, stdev) => {
    let variance = Js.Math.pow_float(~base=stdev, ~exp=2.0);
    let meanSquared = Js.Math.pow_float(~base=mean, ~exp=2.0);
    let mu =
      Js.Math.log(mean) -. 0.5 *. Js.Math.log(variance /. meanSquared +. 1.0);
    let sigma =
      Js.Math.pow_float(
        ~base=Js.Math.log(variance /. meanSquared +. 1.0),
        ~exp=0.5,
      );
    `Lognormal({mu, sigma});
  };
};
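
The two constructors above encode standard lognormal parameter conversions; written out (not part of this diff), in the notation of the record fields:

    \mu = \tfrac{1}{2}\left(\ln(\mathrm{low}) + \ln(\mathrm{high})\right), \qquad
    \sigma = \frac{\ln(\mathrm{high}) - \ln(\mathrm{low})}{2 \times 1.645}

where 1.645 is approximately the 95th-percentile z-score, so (low, high) is read as a central 90% interval; and from a mean m and standard deviation s,

    \sigma^2 = \ln\!\left(1 + \frac{s^2}{m^2}\right), \qquad
    \mu = \ln(m) - \frac{\sigma^2}{2}.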

module Uniform = {
  type t = uniform;
  let pdf = (x, t: t) => Jstat.uniform##pdf(x, t.low, t.high);
  let inv = (p, t: t) => Jstat.uniform##inv(p, t.low, t.high);
  let sample = (t: t) => Jstat.uniform##sample(t.low, t.high);
  let toString = ({low, high}: t) => {j|Uniform($low,$high)|j};
};

module GenericSimple = {
  let minCdfValue = 0.0001;
  let maxCdfValue = 0.9999;

  let pdf = (x, dist) =>
    switch (dist) {
    | `Normal(n) => Normal.pdf(x, n)
    | `Lognormal(n) => Lognormal.pdf(x, n)
    | `Uniform(n) => Uniform.pdf(x, n)
    | `Beta(n) => Beta.pdf(x, n)
    };

  let inv = (x, dist) =>
    switch (dist) {
    | `Normal(n) => Normal.inv(x, n)
    | `Lognormal(n) => Lognormal.inv(x, n)
    | `Uniform(n) => Uniform.inv(x, n)
    | `Beta(n) => Beta.inv(x, n)
    };

  let sample = dist =>
    switch (dist) {
    | `Normal(n) => Normal.sample(n)
    | `Lognormal(n) => Lognormal.sample(n)
    | `Uniform(n) => Uniform.sample(n)
    | `Beta(n) => Beta.sample(n)
    };

  let toString = dist =>
    switch (dist) {
    | `Normal(n) => Normal.toString(n)
    | `Lognormal(n) => Lognormal.toString(n)
    | `Uniform(n) => Uniform.toString(n)
    | `Beta(n) => Beta.toString(n)
    };

  let min = dist =>
    switch (dist) {
    | `Normal(n) => Normal.inv(minCdfValue, n)
    | `Lognormal(n) => Lognormal.inv(minCdfValue, n)
    | `Uniform({low}) => low
    | `Beta(n) => Beta.inv(minCdfValue, n)
    };

  let max = dist =>
    switch (dist) {
    | `Normal(n) => Normal.inv(maxCdfValue, n)
    | `Lognormal(n) => Lognormal.inv(maxCdfValue, n)
    | `Beta(n) => Beta.inv(maxCdfValue, n)
    | `Uniform({high}) => high
    };

  let toShape =
      (~xSelection: [ | `Linear | `ByWeight]=`Linear, dist: dist, sampleCount) => {
    let xs =
      switch (xSelection) {
      | `Linear => Functions.range(min(dist), max(dist), sampleCount)
      | `ByWeight =>
        Functions.range(minCdfValue, maxCdfValue, sampleCount)
        |> E.A.fmap(x => inv(x, dist))
      };
    let ys = xs |> E.A.fmap(r => pdf(r, dist));
    XYShape.T.fromArrays(xs, ys);
  };
};
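
For reference (not part of this diff): `ByWeight` places sample points at evenly spaced quantiles rather than evenly spaced x values. For probabilities p_i spaced uniformly over [minCdfValue, maxCdfValue],

    x_i = F^{-1}(p_i), \qquad y_i = f(x_i),

which concentrates points where the distribution carries most of its mass, instead of spreading them linearly between min and max.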

module PointwiseAddDistributionsWeighted = {
  type t = pointwiseAdd;

  let normalizeWeights = (dists: t) => {
    let total = dists |> E.A.fmap(snd) |> Functions.sum;
    dists |> E.A.fmap(((a, b)) => (a, b /. total));
  };

  let pdf = (dists: t, x: float) =>
    dists
    |> E.A.fmap(((e, w)) => GenericSimple.pdf(x, e) *. w)
    |> Functions.sum;

  let min = (dists: t) =>
    dists |> E.A.fmap(d => d |> fst |> GenericSimple.min) |> Functions.min;

  let max = (dists: t) =>
    dists |> E.A.fmap(d => d |> fst |> GenericSimple.max) |> Functions.max;

  let toShape = (dists: t, sampleCount: int) => {
    let xs = Functions.range(min(dists), max(dists), sampleCount);
    let ys = xs |> E.A.fmap(pdf(dists));
    XYShape.T.fromArrays(xs, ys);
  };

  let toString = (dists: t) => {
    let distString =
      dists
      |> E.A.fmap(d => GenericSimple.toString(fst(d)))
      |> Js.Array.joinWith(",");
    {j|multimodal($distString)|j};
  };
};

let toString = (r: bigDist) =>
  r
  |> (
    fun
    | `Simple(d) => GenericSimple.toString(d)
    | `PointwiseCombination(d) =>
      PointwiseAddDistributionsWeighted.toString(d)
  );

let toShape = n =>
  fun
  | `Simple(d) => GenericSimple.toShape(~xSelection=`ByWeight, d, n)
  | `PointwiseCombination(d) =>
    PointwiseAddDistributionsWeighted.toShape(d, n);