Fix merge conflicts
commit 954fcf4344
@ -27,7 +27,6 @@
   "license": "MIT",
   "dependencies": {
     "@foretold/components": "0.0.6",
-    "@foretold/guesstimator": "1.0.11",
     "@glennsl/bs-json": "^5.0.2",
     "antd": "3.17.0",
     "autoprefixer": "9.7.4",
src/App.re (14 changed lines)
@ -1,8 +1,6 @@
 type route =
   | Model(string)
   | DistBuilder
-  | DistBuilder2
-  | DistBuilder3
   | Drawer
   | Home
   | NotFound;
@ -11,8 +9,6 @@ let routeToPath = route =>
   switch (route) {
   | Model(modelId) => "/m/" ++ modelId
   | DistBuilder => "/dist-builder"
-  | DistBuilder2 => "/dist-builder2"
-  | DistBuilder3 => "/dist-builder3"
   | Drawer => "/drawer"
   | Home => "/"
   | _ => "/"
@ -75,12 +71,6 @@ module Menu = {
       <Item href={routeToPath(DistBuilder)} key="dist-builder">
         {"Dist Builder" |> R.ste}
       </Item>
-      <Item href={routeToPath(DistBuilder2)} key="dist-builder-2">
-        {"Dist Builder 2" |> R.ste}
-      </Item>
-      <Item href={routeToPath(DistBuilder3)} key="dist-builder-3">
-        {"Dist Builder 3" |> R.ste}
-      </Item>
       <Item href={routeToPath(Drawer)} key="drawer">
         {"Drawer" |> R.ste}
       </Item>
@ -97,8 +87,6 @@ let make = () => {
     switch (url.path) {
     | ["m", modelId] => Model(modelId)
     | ["dist-builder"] => DistBuilder
-    | ["dist-builder2"] => DistBuilder2
-    | ["dist-builder3"] => DistBuilder3
     | ["drawer"] => Drawer
     | [] => Home
     | _ => NotFound
@ -113,8 +101,6 @@ let make = () => {
       | None => <div> {"Page is not found" |> R.ste} </div>
       }
     | DistBuilder => <DistBuilder />
-    | DistBuilder2 => <DistBuilder2 />
-    | DistBuilder3 => <DistBuilder3 />
     | Drawer => <Drawer />
     | Home => <Home />
     | _ => <div> {"Page is not found" |> R.ste} </div>
@ -1,105 +0,0 @@
-open BsReform;
-open Antd.Grid;
-
-module FormConfig = [%lenses type state = {guesstimatorString: string}];
-
-module Form = ReForm.Make(FormConfig);
-
-let schema = Form.Validation.Schema([||]);
-
-module FieldString = {
-  [@react.component]
-  let make = (~field, ~label) => {
-    <Form.Field
-      field
-      render={({handleChange, error, value, validate}) =>
-        <Antd.Form.Item label={label |> R.ste}>
-          <Antd.Input
-            value
-            onChange={BsReform.Helpers.handleChange(handleChange)}
-            onBlur={_ => validate()}
-          />
-        </Antd.Form.Item>
-      }
-    />;
-  };
-};
-
-module Styles = {
-  open Css;
-  let dist = style([padding(em(1.))]);
-  let spacer = style([marginTop(em(1.))]);
-};
-
-module DemoDist = {
-  [@react.component]
-  let make = (~guesstimatorString: string) => {
-    let (ys, xs, isEmpty) =
-      DistEditor.getPdfFromUserInput(guesstimatorString);
-    let inside =
-      isEmpty
-        ? "Nothing to show" |> R.ste
-        : {
-            let distPlus =
-              DistPlus.make(
-                ~shape=
-                  Continuous(
-                    Continuous.make(`Linear, {xs, ys}, None, None),
-                  ),
-                ~domain=Complete,
-                ~unit=UnspecifiedDistribution,
-                ~guesstimatorString=None,
-                (),
-              )
-              |> DistPlus.T.normalize;
-            <DistPlusPlot distPlus />;
-          };
-    <Antd.Card title={"Distribution" |> R.ste}>
-      <div className=Styles.spacer />
-      inside
-    </Antd.Card>;
-  };
-};
-
-[@react.component]
-let make = () => {
-  let reform =
-    Form.use(
-      ~validationStrategy=OnDemand,
-      ~schema,
-      ~onSubmit=({state}) => {None},
-      ~initialState={guesstimatorString: "lognormal(6.1, 1)"},
-      (),
-    );
-
-  let demoDist =
-    React.useMemo1(
-      () => {
-        <DemoDist
-          guesstimatorString={reform.state.values.guesstimatorString}
-        />
-      },
-      [|reform.state.values.guesstimatorString|],
-    );
-
-  <div>
-    <div className=Styles.spacer />
-    demoDist
-    <div className=Styles.spacer />
-    <Antd.Card title={"Distribution Form" |> R.ste}>
-      <Form.Provider value=reform>
-        <Antd.Form>
-          <Row _type=`flex>
-            <Col span=12>
-              <FieldString
-                field=FormConfig.GuesstimatorString
-                label="Guesstimator String"
-              />
-            </Col>
-          </Row>
-        </Antd.Form>
-      </Form.Provider>
-    </Antd.Card>
-    <div className=Styles.spacer />
-  </div>;
-};
@ -1,114 +0,0 @@
-open BsReform;
-open Antd.Grid;
-
-module FormConfig = [%lenses type state = {guesstimatorString: string}];
-
-module Form = ReForm.Make(FormConfig);
-
-let schema = Form.Validation.Schema([||]);
-
-module FieldString = {
-  [@react.component]
-  let make = (~field, ~label) => {
-    <Form.Field
-      field
-      render={({handleChange, error, value, validate}) =>
-        <Antd.Form.Item label={label |> R.ste}>
-          <Antd.Input
-            value
-            onChange={BsReform.Helpers.handleChange(handleChange)}
-            onBlur={_ => validate()}
-          />
-        </Antd.Form.Item>
-      }
-    />;
-  };
-};
-
-module Styles = {
-  open Css;
-  let dist = style([padding(em(1.))]);
-  let spacer = style([marginTop(em(1.))]);
-};
-
-module DemoDist = {
-  [@react.component]
-  let make = (~guesstimatorString: string) => {
-    let parsed1 = MathJsParser.fromString(guesstimatorString);
-    let shape =
-      switch (parsed1) {
-      | Ok(r) => Some(ExpressionTree.toShape(10000, r))
-      | _ => None
-      };
-
-    let str =
-      switch (parsed1) {
-      | Ok(r) => ExpressionTree.toString(r)
-      | Error(e) => e
-      };
-
-    let inside =
-      shape
-      |> E.O.fmap(shape => {
-           let distPlus =
-             DistPlus.make(
-               ~shape,
-               ~domain=Complete,
-               ~unit=UnspecifiedDistribution,
-               ~guesstimatorString=None,
-               (),
-             )
-             |> DistPlus.T.normalize;
-           <DistPlusPlot distPlus />;
-         })
-      |> E.O.default(ReasonReact.null);
-    <Antd.Card title={"Distribution" |> R.ste}>
-      <div className=Styles.spacer />
-      inside
-      {str |> ReasonReact.string}
-    </Antd.Card>;
-  };
-};
-
-[@react.component]
-let make = () => {
-  let reform =
-    Form.use(
-      ~validationStrategy=OnDemand,
-      ~schema,
-      ~onSubmit=({state}) => {None},
-      ~initialState={guesstimatorString: "mm(1 to 100, 50 to 200, [.5,.5])"},
-      (),
-    );
-
-  let demoDist =
-    React.useMemo1(
-      () => {
-        <DemoDist
-          guesstimatorString={reform.state.values.guesstimatorString}
-        />
-      },
-      [|reform.state.values.guesstimatorString|],
-    );
-
-  <div>
-    <div className=Styles.spacer />
-    demoDist
-    <div className=Styles.spacer />
-    <Antd.Card title={"Distribution Form" |> R.ste}>
-      <Form.Provider value=reform>
-        <Antd.Form>
-          <Row _type=`flex>
-            <Col span=12>
-              <FieldString
-                field=FormConfig.GuesstimatorString
-                label="Guesstimator String"
-              />
-            </Col>
-          </Row>
-        </Antd.Form>
-      </Form.Provider>
-    </Antd.Card>
-    <div className=Styles.spacer />
-  </div>;
-};
@ -161,7 +161,6 @@ module Convert = {
   let canvasShapeToContinuousShape =
       (~canvasShape: Types.canvasShape, ~canvasElement: Dom.element)
      : Types.continuousShape => {
-
     let xs = canvasShape.xValues;
     let hs = canvasShape.hs;
     let rectangle: Types.rectangle =
@ -170,8 +169,8 @@ module Convert = {
     let paddingFactorY = CanvasContext.paddingFactorX(rectangle.height);
     let windowScrollY: float = [%raw "window.scrollY"];

-    let y0Line = bottom+.windowScrollY-.paddingFactorY;
-    let ys = E.A.fmap( h => y0Line -. h, hs);
+    let y0Line = bottom +. windowScrollY -. paddingFactorY;
+    let ys = E.A.fmap(h => y0Line -. h, hs);

     let xyShape: Types.xyShape = {xs, ys};
     let continuousShape: Types.continuousShape = {
@ -391,7 +390,12 @@ module Draw = {
     let numSamples = 3000;

     let normal: SymbolicTypes.symbolicDist = `Normal({mean, stdev});
-    let normalShape = ExpressionTree.toShape(numSamples, `SymbolicDist(normal));
+    let normalShape =
+      ExpressionTree.toShape(
+        numSamples,
+        {sampleCount: 10000, outputXYPoints: 10000, kernelWidth: None},
+        `SymbolicDist(normal),
+      );
     let xyShape: Types.xyShape =
       switch (normalShape) {
       | Mixed(_) => {xs: [||], ys: [||]}
@ -670,10 +674,7 @@ module State = {
       Convert.canvasShapeToContinuousShape(~canvasShape, ~canvasElement);

     /* create a cdf from a pdf */
-    let _pdf =
-      Continuous.T.normalize(
-        pdf,
-      );
+    let _pdf = Continuous.T.normalize(pdf);

     let cdf = Continuous.T.integral(_pdf);
     let xs = [||];
@ -1,3 +0,0 @@
-[@bs.module "./main.js"]
-external getPdfFromUserInput: string => (array(float), array(float), bool) =
-  "get_pdf_from_user_input";
@ -1,247 +0,0 @@
-const _math = require("mathjs");
-const math = _math.create(_math.all);
-const jStat = require("jstat");
-
-/**
- * This module defines an abstract BinnedDistribution class, which
- * should be implemented for each distribution. You need to decide
- * how to bin the distribution (use _adabin unless there's a nicer
- * way for your distr) and how to choose the distribution's support.
- */
-
-math.import({
-  normal: jStat.normal,
-  beta: jStat.beta,
-  lognormal: jStat.lognormal,
-  uniform: jStat.uniform
-});
-
-class BaseDistributionBinned {
-  /**
-   * @param args
-   */
-  constructor(args) {
-    this._set_props();
-    this.max_bin_size = 0.005;
-    this.min_bin_size = 0;
-    this.increment = 0.0001;
-    this.desired_delta = 0.001;
-    this.start_bin_size = 0.0001;
-
-    [this.params, this.pdf_func, this.sample] = this.get_params_and_pdf_func(
-      args
-    );
-
-    [this.start_point, this.end_point] = this.get_bounds();
-    [this.pdf_vals, this.divider_pts] = this.bin();
-  }
-
-  /**
-   * this is hacky but class properties aren't always supported
-   * @private
-   */
-  _set_props() {
-    throw new Error("NotImplementedError");
-  }
-
-
-  //Adaptive binning. Specify a desired change in density to get adjusted bin sizes.
-  /**
-   * @returns {(number[]|[*])[]}
-   * @private
-   */
-  _adabin() {
-    let point = this.start_point;
-    let vals = [this.pdf_func(point)];
-    let divider_pts = [point];
-    let support = this.end_point - this.start_point;
-    let bin_size = this.start_bin_size * support;
-
-    while (point < this.end_point) {
-      let val = this.pdf_func(point + bin_size);
-      if (Math.abs(val - vals[vals.length - 1]) > this.desired_delta) {
-        while (
-          (Math.abs(val - vals[vals.length - 1]) > this.desired_delta) &
-          (bin_size - this.increment * support > this.min_bin_size)
-        ) {
-          bin_size -= this.increment;
-          val = this.pdf_func(point + bin_size);
-        }
-      } else if (Math.abs(val - vals[vals.length - 1]) < this.desired_delta) {
-        while (
-          (Math.abs(val - vals[vals.length - 1]) < this.desired_delta) &
-          (bin_size < this.max_bin_size)
-        ) {
-          bin_size += this.increment;
-          val = this.pdf_func(point + bin_size);
-        }
-      }
-      point += bin_size;
-      vals.push(val);
-      divider_pts.push(point);
-    }
-    vals = vals.map((_, idx) => vals[idx] / 2 + vals[idx + 1] / 2);
-    vals = vals.slice(0, -1);
-    return [vals, divider_pts];
-  }
-
-  bin() {
-    throw new Error("NotImplementedError");
-  }
-
-  get_bounds() {
-    throw new Error("NotImplementedError");
-  }
-
-  /**
-   * @param args
-   * @returns {(any|(function(*=): *))[]}
-   */
-  get_params_and_pdf_func(args) {
-    let args_str = args.toString() + ")";
-    let substr = this.name + ".pdf(x, " + args_str;
-    let compiled = math.compile(substr);
-
-    function pdf_func(x) {
-      return compiled.evaluate({ x: x });
-    }
-
-    let mc_compiled = math.compile(this.name + ".sample(" + args_str);
-    let kv_pairs = this.param_names.map((val, idx) => [val, args[idx]]);
-    let params = Object.fromEntries(new Map(kv_pairs));
-    return [params, pdf_func, mc_compiled.evaluate];
-  }
-}
-
-class NormalDistributionBinned extends BaseDistributionBinned {
-  /**
-   * @private
-   */
-  _set_props() {
-    this.name = "normal";
-    this.param_names = ["mean", "std"];
-  }
-
-  /**
-   * @returns {(number|*)[]}
-   */
-  get_bounds() {
-    return [
-      this.params.mean - 4 * this.params.std,
-      this.params.mean + 4 * this.params.std
-    ];
-  }
-
-  /**
-   * @returns {[[*], [*]]}
-   */
-  bin() {
-    return this._adabin(this.params.std);
-  }
-}
-
-class UniformDistributionBinned extends BaseDistributionBinned {
-  /**
-   * @private
-   */
-  _set_props() {
-    this.name = "uniform";
-    this.param_names = ["start_point", "end_point"];
-    this.num_bins = 200;
-  }
-
-  /**
-   * @returns {*[]}
-   */
-  get_bounds() {
-    return [this.params.start_point, this.params.end_point];
-  }
-
-  /**
-   * @returns {(*[])[]}
-   */
-  bin() {
-    let divider_pts = evenly_spaced_grid(
-      this.params.start_point,
-      this.params.end_point,
-      this.num_bins
-    );
-    let vals = divider_pts.map(x =>
-      this.pdf_func(this.params.start_point / 2 + this.params.end_point / 2)
-    );
-    vals = vals.slice(0, -1);
-    return [vals, divider_pts];
-  }
-}
-
-class LogNormalDistributionBinned extends BaseDistributionBinned {
-  /**
-   * @private
-   */
-  _set_props() {
-    this.name = "lognormal";
-    this.param_names = ["normal_mean", "normal_std"];
-    this.n_bounds_samples = 10000;
-    this.n_largest_bound_sample = 10;
-  }
-
-  /**
-   * @param samples
-   * @param n
-   * @returns {any}
-   * @private
-   */
-  _nth_largest(samples, n) {
-    var largest_buffer = Array(n).fill(-Infinity);
-    for (const sample of samples) {
-      if (sample > largest_buffer[n - 1]) {
-        var i = n;
-        while ((i > 0) & (sample > largest_buffer[i - 1])) {
-          i -= 1;
-        }
-        largest_buffer[i] = sample;
-      }
-    }
-    return largest_buffer[n - 1];
-  }
-
-  /**
-   * @returns {(*|any)[]}
-   */
-  get_bounds() {
-    let samples = Array(this.n_bounds_samples)
-      .fill(0)
-      .map(() => this.sample());
-    return [
-      math.min(samples),
-      this._nth_largest(samples, this.n_largest_bound_sample)
-    ];
-  }
-
-  /**
-   * @returns {[[*], [*]]}
-   */
-  bin() {
-    return this._adabin();
-  }
-}
-
-/**
- * @param start
- * @param stop
- * @param numel
- * @returns {*[]}
- */
-function evenly_spaced_grid(start, stop, numel) {
-  return Array(numel)
-    .fill(0)
-    .map((_, idx) => start + (idx / numel) * (stop - start));
-}
-
-const distrs = {
-  normal: NormalDistributionBinned,
-  lognormal: LogNormalDistributionBinned,
-  uniform: UniformDistributionBinned
-};
-
-exports.distrs = distrs;
@ -1,364 +0,0 @@
-const _math = require("mathjs");
-const bst = require("binary-search-tree");
-
-const distrs = require("./distribution.js").distrs;
-const parse = require("./parse.js");
-const math = _math.create(_math.all);
-
-const NUM_MC_SAMPLES = 3000;
-const OUTPUT_GRID_NUMEL = 3000;
-
-
-/**
- * The main algorithmic work is done by functions in this module.
- * It also contains the main function, taking the user's string
- * and returning pdf values and x's.
- */
-
-/**
- * @param start
- * @param stop
- * @param numel
- * @returns {*[]}
- */
-function evenly_spaced_grid(start, stop, numel) {
-  return Array(numel)
-    .fill(0)
-    .map((_, idx) => start + (idx / numel) * (stop - start));
-}
-
-/**
- * Takes an array of strings like "normal(0, 1)" and
- * returns the corresponding distribution objects
- * @param substrings
- * @returns {*}
- */
-function get_distributions(substrings) {
-  let names_and_args = substrings.map(parse.get_distr_name_and_args);
-  let pdfs = names_and_args.map(x => new distrs[x[0]](x[1]));
-  return pdfs;
-}
-
-/**
- * update the binary search tree with bin points of
- * deterministic_pdf transformed by tansform func
- * (transfrom func can be a stocahstic func with parameters
- * sampled from mc_distrs)
- *
- * @param transform_func
- * @param deterministic_pdf
- * @param mc_distrs
- * @param track_idx
- * @param num_mc_samples
- * @param bst_pts_and_idxs
- * @returns {(number)[]}
- */
-function update_transformed_divider_points_bst(
-  transform_func,
-  deterministic_pdf,
-  mc_distrs,
-  track_idx,
-  num_mc_samples,
-  bst_pts_and_idxs
-) {
-  var transformed_pts = [];
-  var pdf_inner_idxs = [];
-  var factors = [];
-  var start_pt = Infinity;
-  var end_pt = -Infinity;
-  let use_mc = mc_distrs.length > 0;
-  var num_outer_iters = use_mc ? num_mc_samples : 1;
-
-  for (let sample_idx = 0; sample_idx < num_outer_iters; ++sample_idx) {
-    var this_transformed_pts = deterministic_pdf.divider_pts;
-    if (use_mc) {
-      let samples = mc_distrs.map(x => x.sample());
-      this_transformed_pts = this_transformed_pts.map(x =>
-        transform_func([x].concat(samples))
-      );
-    } else {
-      this_transformed_pts = this_transformed_pts.map(x => transform_func([x]));
-    }
-    var this_transformed_pts_paired = [];
-    for (let tp_idx = 0; tp_idx < this_transformed_pts.length - 1; tp_idx++) {
-      let sorted = [
-        this_transformed_pts[tp_idx],
-        this_transformed_pts[tp_idx + 1]
-      ].sort((a, b) => a - b);
-      if (sorted[0] < start_pt) {
-        start_pt = sorted[0];
-      }
-      if (sorted[1] > end_pt) {
-        end_pt = sorted[1];
-      }
-      this_transformed_pts_paired.push(sorted);
-    }
-
-    transformed_pts = transformed_pts.concat(this_transformed_pts_paired);
-
-    pdf_inner_idxs = pdf_inner_idxs.concat([
-      ...Array(this_transformed_pts_paired.length).keys()
-    ]);
-    var this_factors = [];
-    for (let idx = 0; idx < this_transformed_pts_paired.length; idx++) {
-      this_factors.push(
-        (deterministic_pdf.divider_pts[idx + 1] -
-          deterministic_pdf.divider_pts[idx]) /
-          (this_transformed_pts_paired[idx][1] -
-            this_transformed_pts_paired[idx][0])
-      );
-    }
-    factors = factors.concat(this_factors);
-  }
-  for (let i = 0; i < transformed_pts.length; ++i) {
-    bst_pts_and_idxs.insert(transformed_pts[i][0], {
-      start: transformed_pts[i][0],
-      end: transformed_pts[i][1],
-      idx: [track_idx, pdf_inner_idxs[i]],
-      factor: factors[i] / num_outer_iters
-    });
-  }
-  return [start_pt, end_pt];
-}
-
-/**
- * Take the binary search tree with transformed bin points,
- * and an array of pdf values associated with the bins,
- * and return a pdf over an evenly spaced grid
- *
- * @param pdf_vals
- * @param bst_pts_and_idxs
- * @param output_grid
- * @returns {[]}
- */
-function get_final_pdf(pdf_vals, bst_pts_and_idxs, output_grid) {
-  var offset = output_grid[1] / 2 - output_grid[0] / 2;
-  var active_intervals = new Map();
-  var active_endpoints = new bst.AVLTree();
-  var final_pdf_vals = [];
-
-  for (
-    let out_grid_idx = 0;
-    out_grid_idx < output_grid.length;
-    ++out_grid_idx
-  ) {
-    let startpoints_within_bin = bst_pts_and_idxs.betweenBounds({
-      $gte: output_grid[out_grid_idx] - offset,
-      $lt: output_grid[out_grid_idx] + offset
-    });
-    for (let interval of startpoints_within_bin) {
-      active_intervals.set(interval.idx, [
-        interval.start,
-        interval.end,
-        interval.factor
-      ]);
-      active_endpoints.insert(interval.end, interval.idx);
-    }
-    var contrib = 0;
-    for (let [pdf_idx, bounds_and_ratio] of active_intervals.entries()) {
-      let overlap_start = Math.max(
-        output_grid[out_grid_idx] - offset,
-        bounds_and_ratio[0]
-      );
-      let overlap_end = Math.min(
-        output_grid[out_grid_idx] + offset,
-        bounds_and_ratio[1]
-      );
-      let interval_size = bounds_and_ratio[1] - bounds_and_ratio[0];
-      let contrib_frac =
-        interval_size === 0
-          ? 0
-          : (overlap_end - overlap_start) * bounds_and_ratio[2];
-      let t = contrib_frac * pdf_vals[pdf_idx[0]][pdf_idx[1]];
-      contrib += t;
-    }
-    final_pdf_vals.push(contrib);
-    let endpoints_within_bin = active_endpoints.betweenBounds({
-      $gte: output_grid[out_grid_idx] - offset,
-      $lt: output_grid[out_grid_idx] + offset
-    });
-    for (let interval_idx of endpoints_within_bin) {
-      active_intervals.delete(interval_idx);
-    }
-  }
-
-  return final_pdf_vals;
-}
-
-/**
- * @param {string} str
- * @param {string} char
- * @returns {number}
- */
-function get_count_of_chars(str, char) {
-  return str.split(char).length - 1;
-}
-
-/**
- * Entrypoint. Pass user input strings to this function,
- * get the corresponding pdf values and input points back.
- * If the pdf requires monte carlo (it contains a between-distr function)
- * we first determing which distr to have deterministic
- * and which to sample from. This is decided based on which
- * choice gives the least variance.
- *
- * @param user_input_string
- * @returns {([]|*[])[]}
- */
-function get_pdf_from_user_input(user_input_string) {
-  try {
-    const count_opened_bracket = get_count_of_chars(user_input_string, '(');
-    const count_closed_bracket = get_count_of_chars(user_input_string, ')');
-    if (count_opened_bracket !== count_closed_bracket) {
-      throw new Error('Count of brackets are not equal.');
-    }
-
-    let parsed = parse.parse_initial_string(user_input_string);
-    let mm_args = parse.separate_mm_args(parsed.mm_args_string);
-    const is_mm = mm_args.distrs.length > 0;
-    if (!parsed.outer_string) {
-      throw new Error('Parse string is empty.');
-    }
-
-    let tree = new bst.AVLTree();
-    let possible_start_pts = [];
-    let possible_end_pts = [];
-    let all_vals = [];
-    let weights = is_mm ? math.compile(mm_args.weights).evaluate()._data : [1];
-    let weights_sum = weights.reduce((a, b) => a + b);
-    weights = weights.map(x => x / weights_sum);
-    let n_iters = is_mm ? mm_args.distrs.length : 1;
-
-    for (let i = 0; i < n_iters; ++i) {
-      let distr_string = is_mm ? mm_args.distrs[i] : parsed.outer_string;
-      var [deterministic_pdf, mc_distrs] = choose_pdf_func(distr_string);
-      var grid_transform = get_grid_transform(distr_string);
-      var [start_pt, end_pt] = update_transformed_divider_points_bst(
-        grid_transform,
-        deterministic_pdf,
-        mc_distrs,
-        i,
-        NUM_MC_SAMPLES,
-        tree
-      );
-      possible_start_pts.push(start_pt);
-      possible_end_pts.push(end_pt);
-      all_vals.push(deterministic_pdf.pdf_vals.map(x => x * weights[i]));
-    }
-
-    start_pt = Math.min(...possible_start_pts);
-    end_pt = Math.max(...possible_end_pts);
-
-    let output_grid = evenly_spaced_grid(start_pt, end_pt, OUTPUT_GRID_NUMEL);
-    let final_pdf_vals = get_final_pdf(all_vals, tree, output_grid);
-
-    return [final_pdf_vals, output_grid, false];
-  } catch (e) {
-    return [[], [], true];
-  }
-}
-
-/**
- * @param vals
- * @returns {number}
- */
-function variance(vals) {
-  var vari = 0;
-  for (let i = 0; i < vals[0].length; ++i) {
-    let mean = 0;
-    let this_vari = 0;
-    for (let val of vals) {
-      mean += val[i] / vals.length;
-    }
-    for (let val of vals) {
-      this_vari += (val[i] - mean) ** 2;
-    }
-    vari += this_vari;
-  }
-  return vari;
-}
-
-/**
- * @param array
- * @param idx
- * @returns {*[]}
- */
-function pluck_from_array(array, idx) {
-  return [array[idx], array.slice(0, idx).concat(array.slice(idx + 1))];
-}
-
-/**
- * If distr_string requires MC, try all possible
- * choices for the deterministic distribution,
- * and pick the one with the least variance.
- * It's much better to sample from a normal than a lognormal.
- *
- * @param distr_string
- * @returns {(*|*[])[]|*[]}
- */
-function choose_pdf_func(distr_string) {
-  var variances = [];
-  let transform_func = get_grid_transform(distr_string);
-  let substrings = parse.get_distr_substrings(distr_string);
-  var pdfs = get_distributions(substrings);
-  if (pdfs.length === 1) {
-    return [pdfs[0], []];
-  }
-  var start_pt = 0;
-  var end_pt = 0;
-  for (let i = 0; i < pdfs.length; ++i) {
-    var outputs = [];
-    for (let j = 0; j < 20; ++j) {
-      let tree = new bst.AVLTree();
-      let [deterministic_pdf, mc_distrs] = pluck_from_array(pdfs, i);
-      let [this_start_pt, this_end_pt] = update_transformed_divider_points_bst(
-        transform_func,
-        deterministic_pdf,
-        mc_distrs,
-        0,
-        10,
-        tree
-      );
-      [start_pt, end_pt] =
-        j === 0 ? [this_start_pt, this_end_pt] : [start_pt, end_pt];
-      var output_grid = evenly_spaced_grid(start_pt, end_pt, 100);
-      let final_pdf_vals = get_final_pdf(
-        [deterministic_pdf.pdf_vals],
-        tree,
-        output_grid
-      );
-      outputs.push(final_pdf_vals);
-    }
-    variances.push(variance(outputs));
-  }
-  let best_variance = Math.min(...variances);
-  let best_idx = variances
-    .map((val, idx) => [val, idx])
-    .filter(x => x[0] === best_variance)[0][1];
-  let mc_distrs = pdfs.slice(0, best_idx).concat(pdfs.slice(best_idx + 1));
-  return [pdfs[best_idx], mc_distrs];
-}
-
-/**
- * @param distr_string
- * @returns {function(*): *}
- */
-function get_grid_transform(distr_string) {
-  let substrings = parse.get_distr_substrings(distr_string);
-  let arg_strings = [];
-  for (let i = 0; i < substrings.length; ++i) {
-    distr_string = distr_string.replace(substrings[i], "x_" + i.toString());
-    arg_strings.push("x_" + i.toString());
-  }
-  let compiled = math.compile(distr_string);
-
-  function grid_transform(x) {
-    let kv_pairs = arg_strings.map((val, idx) => [val, x[idx]]);
-    let args_obj = Object.fromEntries(new Map(kv_pairs));
-    return compiled.evaluate(args_obj);
-  }
-
-  return grid_transform;
-}
-
-exports.get_pdf_from_user_input = get_pdf_from_user_input;
@ -1,139 +0,0 @@
-const _math = require("mathjs");
-const math = _math.create(_math.all);
-
-// Functions for parsing/processing user input strings are here
-
-// @todo: Do not use objects.
-const DISTR_REGEXS = [
-  /beta\(/g,
-  /(log)?normal\(/g,
-  /multimodal\(/g,
-  /mm\(/g,
-  /uniform\(/g
-];
-
-/**
- * @param user_input_string
- * @returns {{mm_args_string: string, outer_string: string}}
- */
-function parse_initial_string(user_input_string) {
-  let outer_output_string = "";
-  let mm_args_string = "";
-  let idx = 0;
-
-  while (idx < user_input_string.length) {
-    if (
-      user_input_string.substring(idx - 11, idx) === "multimodal(" ||
-      user_input_string.substring(idx - 3, idx) === "mm("
-    ) {
-      let num_open_brackets = 1;
-      while (num_open_brackets > 0 && idx < user_input_string.length) {
-        mm_args_string += user_input_string[idx];
-        idx += 1;
-        if (user_input_string[idx] === ")") {
-          num_open_brackets -= 1;
-        } else if (user_input_string[idx] === "(") {
-          num_open_brackets += 1;
-        }
-      }
-      outer_output_string += ")";
-      idx += 1;
-    } else {
-      outer_output_string += user_input_string[idx];
-      idx += 1;
-    }
-  }
-
-  return {
-    outer_string: outer_output_string,
-    mm_args_string: mm_args_string
-  };
-}
-
-/**
- * @param mm_args_string
- * @returns {{distrs: [], weights: string}}
- */
-function separate_mm_args(mm_args_string) {
-  if (mm_args_string.endsWith(",")) {
-    mm_args_string = mm_args_string.slice(0, -1);
-  }
-  let args_array = [];
-  let num_open_brackets = 0;
-  let arg_substring = "";
-  for (let char of mm_args_string) {
-    if (num_open_brackets === 0 && char === ",") {
-      args_array.push(arg_substring.trim());
-      arg_substring = "";
-    } else {
-      if (char === ")" || char === "]") {
-        num_open_brackets -= 1;
-      } else if (char === "(" || char === "[") {
-        num_open_brackets += 1;
-      }
-      arg_substring += char;
-    }
-  }
-  return {
-    distrs: args_array,
-    weights: arg_substring.trim()
-  };
-}
-
-/**
- * @param distr_string
- * @returns {[]}
- */
-function get_distr_substrings(distr_string) {
-  let substrings = [];
-  for (let regex of DISTR_REGEXS) {
-    let matches = distr_string.matchAll(regex);
-    for (let match of matches) {
-      let idx = match.index + match[0].length;
-      let num_open_brackets = 1;
-      let distr_substring = "";
-      while (num_open_brackets !== 0 && idx < distr_string.length) {
-        distr_substring += distr_string[idx];
-        if (distr_string[idx] === "(") {
-          num_open_brackets += 1;
-        } else if (distr_string[idx] === ")") {
-          num_open_brackets -= 1;
-        }
-        idx += 1;
-      }
-      substrings.push((match[0] + distr_substring).trim());
-    }
-  }
-
-  return substrings;
-}
-
-/**
- * @param substr
- * @returns {(string|*)[]}
- */
-function get_distr_name_and_args(substr) {
-  let distr_name = "";
-  let args_str = "";
-  let args_flag = false;
-  for (let char of substr) {
-    if (!args_flag && char !== "(") {
-      distr_name += char;
-    }
-    if (args_flag && char !== ")") {
-      args_str += char;
-    }
-    if (char === "(") {
-      args_str += "[";
-      args_flag = true;
-    }
-  }
-  args_str += "]";
-  let args = math.compile(args_str).evaluate()._data;
-  return [distr_name, args];
-}
-
-exports.get_distr_name_and_args = get_distr_name_and_args;
-exports.get_distr_substrings = get_distr_substrings;
-exports.separate_mm_args = separate_mm_args;
-exports.parse_initial_string = parse_initial_string;
@ -143,6 +143,7 @@ module T =
     let minX = shapeFn(XYShape.T.minX);
     let maxX = shapeFn(XYShape.T.maxX);
    let mapY = mapY;
+    let updateIntegralCache = updateIntegralCache;
     let toDiscreteProbabilityMassFraction = _ => 0.0;
     let toShape = (t: t): DistTypes.shape => Continuous(t);
     let xToY = (f, {interpolation, xyShape}: t) => {
@ -283,7 +284,8 @@ let combineAlgebraicallyWithDiscrete =
   };
 };

-let combineAlgebraically = (op: ExpressionTypes.algebraicOperation, t1: t, t2: t) => {
+let combineAlgebraically =
+    (op: ExpressionTypes.algebraicOperation, t1: t, t2: t) => {
   let s1 = t1 |> getShape;
   let s2 = t2 |> getShape;
   let t1n = s1 |> XYShape.T.length;
@ -162,6 +162,7 @@ module T =
     let maxX = shapeFn(XYShape.T.maxX);
     let toDiscreteProbabilityMassFraction = _ => 1.0;
     let mapY = mapY;
+    let updateIntegralCache = updateIntegralCache;
     let toShape = (t: t): DistTypes.shape => Discrete(t);
     let toContinuous = _ => None;
     let toDiscrete = t => Some(t);
@ -33,7 +33,6 @@ let update =
 };

 let updateShape = (shape, t) => {
-  Js.log("Updating the shape, recalculating the integral");
   let integralCache = shapeIntegral(shape);
   update(~shape, ~integralCache, t);
 };
@ -109,6 +108,9 @@ module T =
     let integral = (t: t) =>
       updateShape(Continuous(t.integralCache), t);

+    let updateIntegralCache = (integralCache: option(DistTypes.continuousShape), t) =>
+      update(~integralCache=E.O.default(t.integralCache, integralCache), t);
+
     let downsample = (i, t): t =>
       updateShape(t |> toShape |> Shape.T.downsample(i), t);
     // todo: adjust for limit, maybe?
@ -16,6 +16,8 @@ module type dist = {
   let downsample: (int, t) => t;
   let truncate: (option(float), option(float), t) => t;

+  let updateIntegralCache: (option(DistTypes.continuousShape), t) => t;
+
   let integral: (t) => integral;
   let integralEndY: (t) => float;
   let integralXtoY: (float, t) => float;
@ -46,6 +48,8 @@ module Dist = (T: dist) => {
   let mean = T.mean;
   let variance = T.variance;

+  let updateIntegralCache = T.updateIntegralCache;
+
   module Integral = {
     type t = T.integral;
     let get = T.integral;
@ -22,6 +22,11 @@ let scaleBy = (~scale=1.0, t: t): t => {
 let toContinuous = ({continuous}: t) => Some(continuous);
 let toDiscrete = ({discrete}: t) => Some(discrete);

+let updateIntegralCache = (integralCache, t: t): t => {
+  ...t,
+  integralCache,
+};
+
 module T =
   Dist({
     type t = DistTypes.mixedShape;
@ -33,6 +38,8 @@ module T =
       max(Continuous.T.maxX(continuous), Discrete.T.maxX(discrete));
     let toShape = (t: t): DistTypes.shape => Mixed(t);

+    let updateIntegralCache = updateIntegralCache;
+
     let toContinuous = toContinuous;
     let toDiscrete = toDiscrete;

@ -257,15 +264,14 @@ let combineAlgebraically =
   // An alternative (to be explored in the future) may be to first perform the full convolution and then to downsample the result;
   // to use non-uniform fast Fourier transforms (for addition only), add web workers or gpu.js, etc. ...

-  // TODO: figure out when to downsample strategically. Could be an evaluationParam?
-  /*let downsampleIfTooLarge = (t: t) => {
-    let sqtl = sqrt(float_of_int(totalLength(t)));
-    sqtl > 10. && downsample ? T.downsample(int_of_float(sqtl), t) : t;
-  };
+  // we have to figure out where to downsample, and how to effectively
+  //let downsampleIfTooLarge = (t: t) => {
+  //  let sqtl = sqrt(float_of_int(totalLength(t)));
+  //  sqtl > 10 ? T.downsample(int_of_float(sqtl), t) : t;
+  //};

-  let t1d = downsampleIfTooLarge(t1);
-  let t2d = downsampleIfTooLarge(t2);
-  */
+  let t1d = t1;
+  let t2d = t2;

   // continuous (*) continuous => continuous, but also
   // discrete (*) continuous => continuous (and vice versa). We have to take care of all combos and then combine them:
@ -121,6 +121,13 @@ module T =
         Continuous.T.normalize
       ));

+    let updateIntegralCache = (integralCache, t: t): t =>
+      fmap((
+        Mixed.T.updateIntegralCache(integralCache),
+        Discrete.T.updateIntegralCache(integralCache),
+        Continuous.T.updateIntegralCache(integralCache),
+      ), t);
+
     let toContinuous =
       mapToAll((
         Mixed.T.toContinuous,
@ -211,9 +218,26 @@ let pdf = (f: float, t: t) => {
 let inv = T.Integral.yToX;
 let cdf = T.Integral.xToY;

+let doN = (n, fn) => {
+  let items = Belt.Array.make(n, 0.0);
+  for (x in 0 to n - 1) {
+    let _ = Belt.Array.set(items, x, fn());
+    ();
+  };
+  items;
+};
+
 let sample = (t: t): float => {
-  // this can go, already taken care of in Ozzie's sampling branch
-  0.0
+  let randomItem = Random.float(1.);
+  let bar = t |> T.Integral.yToX(randomItem);
+  bar;
+};
+
+let sampleNRendered = (n, dist) => {
+  let integralCache = T.Integral.get(dist);
+  let distWithUpdatedIntegralCache = T.updateIntegralCache(Some(integralCache), dist);
+
+  doN(n, () => sample(distWithUpdatedIntegralCache));
 };

 let operate = (distToFloatOp: ExpressionTypes.distToFloatOperation, s): float =>
@ -1,9 +1,13 @@
 open ExpressionTypes.ExpressionTree;

-let toShape = (sampleCount: int, node: node) => {
+let toShape = (intendedShapeLength: int, samplingInputs, node: node) => {
   let renderResult =
     `Render(`Normalize(node))
-    |> ExpressionTreeEvaluator.toLeaf({sampleCount: sampleCount, evaluateNode: ExpressionTreeEvaluator.toLeaf});
+    |> ExpressionTreeEvaluator.toLeaf({
+         samplingInputs,
+         intendedShapeLength,
+         evaluateNode: ExpressionTreeEvaluator.toLeaf,
+       });

   switch (renderResult) {
   | Ok(`RenderedDist(rs)) =>
@ -4,8 +4,6 @@ open ExpressionTypes.ExpressionTree;
 type t = node;
 type tResult = node => result(node, string);

-type renderParams = {sampleCount: int};
-
 /* Given two random variables A and B, this returns the distribution
    of a new variable that is the result of the operation on A and B.
    For instance, normal(0, 1) + normal(1, 1) -> normal(1, 2).
@ -22,20 +20,62 @@ module AlgebraicCombination = {
     | _ => Ok(`AlgebraicCombination((operation, t1, t2)))
     };

+  let tryCombination = (n, algebraicOp, t1: node, t2: node) => {
+    let sampleN =
+      mapRenderable(Shape.sampleNRendered(n), SymbolicDist.T.sampleN(n));
+    switch (sampleN(t1), sampleN(t2)) {
+    | (Some(a), Some(b)) =>
+      Some(
+        Belt.Array.zip(a, b)
+        |> E.A.fmap(((a, b)) => Operation.Algebraic.toFn(algebraicOp, a, b)),
+      )
+    | _ => None
+    };
+  };
+
+  let renderIfNotRendered = (params, t) =>
+    !renderable(t)
+      ? switch (render(params, t)) {
+        | Ok(r) => Ok(r)
+        | Error(e) => Error(e)
+        }
+      : Ok(t);
+
   let combineAsShapes =
-      (evaluationParams: evaluationParams, algebraicOp, t1, t2) => {
-    let renderShape = render(evaluationParams);
-    switch (renderShape(t1), renderShape(t2)) {
-    | (Ok(`RenderedDist(s1)), Ok(`RenderedDist(s2))) =>
-      Ok(
-        `RenderedDist(
-          Shape.combineAlgebraically(algebraicOp, s1, s2),
+      (evaluationParams: evaluationParams, algebraicOp, t1: node, t2: node) => {
+    let i1 = renderIfNotRendered(evaluationParams, t1);
+    let i2 = renderIfNotRendered(evaluationParams, t2);
+    E.R.merge(i1, i2)
+    |> E.R.bind(
+         _,
+         ((a, b)) => {
+           let samples =
+             tryCombination(
+               evaluationParams.samplingInputs.sampleCount,
+               algebraicOp,
+               a,
+               b,
+             );
+           let shape =
+             samples
+             |> E.O.fmap(
+                  Samples.T.fromSamples(
+                    ~samplingInputs={
+                      sampleCount:
+                        Some(evaluationParams.samplingInputs.sampleCount),
+                      outputXYPoints:
+                        Some(evaluationParams.samplingInputs.outputXYPoints),
+                      kernelWidth: evaluationParams.samplingInputs.kernelWidth,
+                    },
         ),
       )
-    | (Error(e1), _) => Error(e1)
-    | (_, Error(e2)) => Error(e2)
-    | _ => Error("Algebraic combination: rendering failed.")
-    };
+             |> E.O.bind(_, (r: RenderTypes.ShapeRenderer.Sampling.outputs) =>
+                  r.shape
+                )
+             |> E.O.toResult("No response");
+           shape |> E.R.fmap(r => `Normalize(`RenderedDist(r)));
+         },
+       );
   };

   let operationToLeaf =
@ -128,11 +168,9 @@ module Truncate = {
     | (Some(lc), Some(rc), t) when lc > rc =>
       `Error("Left truncation bound must be smaller than right bound.")
     | (lc, rc, `SymbolicDist(`Uniform(u))) =>
-      // just create a new Uniform distribution
-      let nu: SymbolicTypes.uniform = u;
-      let newLow = max(E.O.default(neg_infinity, lc), nu.low);
-      let newHigh = min(E.O.default(infinity, rc), nu.high);
-      `Solution(`SymbolicDist(`Uniform({low: newLow, high: newHigh})));
+      `Solution(
+        `SymbolicDist(`Uniform(SymbolicDist.Uniform.truncate(lc, rc, u))),
+      )
     | _ => `NoSolution
     };
   };
@ -143,9 +181,7 @@ module Truncate = {
     // of a distribution we otherwise wouldn't get at all
     switch (render(evaluationParams, t)) {
     | Ok(`RenderedDist(rs)) =>
-      let truncatedShape =
-        rs |> Shape.T.truncate(leftCutoff, rightCutoff);
-      Ok(`RenderedDist(truncatedShape));
+      Ok(`RenderedDist(Shape.T.truncate(leftCutoff, rightCutoff, rs)))
     | Error(e) => Error(e)
     | _ => Error("Could not truncate distribution.")
     };
@ -174,8 +210,7 @@ module Truncate = {
 module Normalize = {
   let rec operationToLeaf = (evaluationParams, t: node): result(node, string) => {
     switch (t) {
-    | `RenderedDist(s) =>
-      Ok(`RenderedDist(Shape.T.normalize(s)))
+    | `RenderedDist(s) => Ok(`RenderedDist(Shape.T.normalize(s)))
     | `SymbolicDist(_) => Ok(t)
     | _ => evaluateAndRetry(evaluationParams, operationToLeaf, t)
     };
@ -209,7 +244,7 @@ module Render = {
     | `SymbolicDist(d) =>
       Ok(
         `RenderedDist(
-          SymbolicDist.T.toShape(evaluationParams.sampleCount, d),
+          SymbolicDist.T.toShape(evaluationParams.intendedShapeLength, d),
         ),
       )
     | `RenderedDist(_) as t => Ok(t) // already a rendered shape, we're done here
@ -16,8 +16,15 @@ module ExpressionTree = {
     | `FloatFromDist(distToFloatOperation, node)
   ];

-  type evaluationParams = {
+  type samplingInputs = {
     sampleCount: int,
+    outputXYPoints: int,
+    kernelWidth: option(float),
+  };
+
+  type evaluationParams = {
+    samplingInputs,
+    intendedShapeLength: int,
     evaluateNode: (evaluationParams, node) => Belt.Result.t(node, string),
   };

@ -28,7 +35,22 @@ module ExpressionTree = {
     evaluateNode(evaluationParams, `Render(r));

   let evaluateAndRetry = (evaluationParams, fn, node) =>
-    node |> evaluationParams.evaluateNode(evaluationParams) |> E.R.bind(_, fn(evaluationParams));
+    node
+    |> evaluationParams.evaluateNode(evaluationParams)
+    |> E.R.bind(_, fn(evaluationParams));
+
+  let renderable =
+    fun
+    | `SymbolicDist(_) => true
+    | `RenderedDist(_) => true
+    | _ => false;
+
+  let mapRenderable = (renderedFn, symFn, item: node) =>
+    switch (item) {
+    | `SymbolicDist(s) => Some(symFn(s))
+    | `RenderedDist(r) => Some(renderedFn(r))
+    | _ => None
+    };
 };

 type simplificationResult = [
|
@ -14,14 +14,24 @@ let formatString = str => {
|
||||||
str |> formatMessyArray;
|
str |> formatMessyArray;
|
||||||
};
|
};
|
||||||
|
|
||||||
let runSymbolic = (guesstimatorString, length) => {
|
let runSymbolic = (inputs: RenderTypes.ShapeRenderer.Combined.inputs) => {
|
||||||
let str = formatString(guesstimatorString);
|
let str = formatString(inputs.guesstimatorString);
|
||||||
let graph = MathJsParser.fromString(str);
|
let graph = MathJsParser.fromString(str);
|
||||||
graph
|
graph
|
||||||
|> E.R.fmap(g =>
|
|> E.R.fmap(g =>
|
||||||
RenderTypes.ShapeRenderer.Symbolic.make(
|
RenderTypes.ShapeRenderer.Symbolic.make(
|
||||||
g,
|
g,
|
||||||
ExpressionTree.toShape(length, g),
|
ExpressionTree.toShape(
|
||||||
|
inputs.symbolicInputs.length,
|
||||||
|
{
|
||||||
|
sampleCount:
|
||||||
|
inputs.samplingInputs.sampleCount |> E.O.default(10000),
|
||||||
|
outputXYPoints:
|
||||||
|
inputs.samplingInputs.outputXYPoints |> E.O.default(10000),
|
||||||
|
kernelWidth: inputs.samplingInputs.kernelWidth,
|
||||||
|
},
|
||||||
|
g,
|
||||||
|
),
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
@ -29,17 +39,6 @@ let runSymbolic = (guesstimatorString, length) => {
 let run =
     (inputs: RenderTypes.ShapeRenderer.Combined.inputs)
     : RenderTypes.ShapeRenderer.Combined.outputs => {
-  let symbolic =
-    runSymbolic(inputs.guesstimatorString, inputs.symbolicInputs.length);
-  let sampling =
-    switch (symbolic) {
-    | Ok(_) => None
-    | Error(_) =>
-      Samples.T.fromGuesstimatorString(
-        ~guesstimatorString=inputs.guesstimatorString,
-        ~samplingInputs=inputs.samplingInputs,
-        (),
-      )
-    };
-  {symbolic: Some(symbolic), sampling};
+  let symbolic = runSymbolic(inputs);
+  {symbolic: Some(symbolic), sampling: None};
 };
@ -1,13 +0,0 @@
-[@bs.deriving abstract]
-type discrete = {
-  xs: array(float),
-  ys: array(float),
-};
-
-let jsToDistDiscrete = (d: discrete): DistTypes.discreteShape => {xyShape: {
-  xs: xsGet(d),
-  ys: ysGet(d),
-}, integralSumCache: None, integralCache: None};
-
-[@bs.module "./GuesstimatorLibrary.js"]
-external stringToSamples: (string, int) => array(float) = "stringToSamples";
@ -1,37 +0,0 @@
-const _ = require("lodash");
-const {
-  Guesstimator
-} = require('@foretold/guesstimator/src');
-
-const stringToSamples = (
-  text,
-  sampleCount,
-  inputs = [],
-) => {
-  const [_error, {
-    parsedInput,
-    parsedError
-  }] = Guesstimator.parse({
-    text: "=" + text
-  });
-
-  const guesstimator = new Guesstimator({
-    parsedInput
-  });
-  const {
-    values,
-    errors
-  } = guesstimator.sample(
-    sampleCount,
-    inputs,
-  );
-  if (errors.length > 0) {
-    return []
-  } else {
-    return _.filter(values, _.isFinite)
-  }
-};
-
-module.exports = {
-  stringToSamples,
-};
@ -175,26 +175,4 @@ module T = {
       RenderTypes.ShapeRenderer.Sampling.Inputs.toF(samplingInputs);
     toShape(~samples, ~samplingInputs, ());
   };
-
-  let fromGuesstimatorString =
-      (
-        ~guesstimatorString,
-        ~samplingInputs=RenderTypes.ShapeRenderer.Sampling.Inputs.empty,
-        (),
-      ) => {
-    let hasValidSamples =
-      Guesstimator.stringToSamples(guesstimatorString, 10) |> E.A.length > 0;
-    let _samplingInputs =
-      RenderTypes.ShapeRenderer.Sampling.Inputs.toF(samplingInputs);
-    switch (hasValidSamples) {
-    | false => None
-    | true =>
-      let samples =
-        Guesstimator.stringToSamples(
-          guesstimatorString,
-          _samplingInputs.sampleCount,
-        );
-      Some(fromSamples(~samplingInputs, samples));
-    };
-  };
 };
@ -137,6 +137,11 @@ module Uniform = {
   let sample = (t: t) => Jstat.uniform##sample(t.low, t.high);
   let mean = (t: t) => Ok(Jstat.uniform##mean(t.low, t.high));
   let toString = ({low, high}: t) => {j|Uniform($low,$high)|j};
+  let truncate = (low, high, t: t): t => {
+    let newLow = max(E.O.default(neg_infinity, low), t.low);
+    let newHigh = min(E.O.default(infinity, high), t.high);
+    {low: newLow, high: newHigh};
+  };
 };

 module Float = {
@ -198,7 +203,20 @@ module T = {
     | `Lognormal(n) => Lognormal.sample(n)
     | `Uniform(n) => Uniform.sample(n)
     | `Beta(n) => Beta.sample(n)
-    | `Float(n) => Float.sample(n)
+    | `Float(n) => Float.sample(n);
+
+  let doN = (n, fn) => {
+    let items = Belt.Array.make(n, 0.0);
+    for (x in 0 to n - 1) {
+      let _ = Belt.Array.set(items, x, fn());
+      ();
+    };
+    items;
+  };
+
+  let sampleN = (n, dist) => {
+    doN(n, () => sample(dist));
+  };

   let toString: symbolicDist => string =
     fun
@ -209,7 +227,7 @@ module T = {
     | `Lognormal(n) => Lognormal.toString(n)
     | `Uniform(n) => Uniform.toString(n)
     | `Beta(n) => Beta.toString(n)
-    | `Float(n) => Float.toString(n)
+    | `Float(n) => Float.toString(n);

   let min: symbolicDist => float =
     fun
@ -145,6 +145,12 @@ module R = {
   let id = e => e |> result(U.id, U.id);
   let fmap = Rationale.Result.fmap;
   let bind = Rationale.Result.bind;
+  let merge = (a, b) =>
+    switch (a, b) {
+    | (Error(e), _) => Error(e)
+    | (_, Error(e)) => Error(e)
+    | (Ok(a), Ok(b)) => Ok((a, b))
+    };
   let toOption = (e: Belt.Result.t('a, 'b)) =>
     switch (e) {
     | Ok(r) => Some(r)