Merge branch 'develop' into reducer-dev

Sam Nolan 2022-04-22 11:04:43 -04:00
commit ad8be5273f
26 changed files with 51 additions and 1607 deletions

View File

@@ -6,7 +6,7 @@ import {
  errorValueToString,
  squiggleExpression,
} from "@quri/squiggle-lang";
-import type { samplingParams, exportEnv } from "@quri/squiggle-lang";
+import type { samplingParams } from "@quri/squiggle-lang";
import { NumberShower } from "./NumberShower";
import { DistributionChart } from "./DistributionChart";
import { ErrorBox } from "./ErrorBox";
@@ -129,9 +129,9 @@ export interface SquiggleChartProps {
  /** If the result is a function, how many points along the function it samples */
  diagramCount?: number;
  /** variables declared before this expression */
-  environment?: exportEnv;
+  environment?: unknown;
  /** When the environment changes */
-  onEnvChange?(env: exportEnv): void;
+  onEnvChange?(env: unknown): void;
  /** CSS width of the element */
  width?: number;
  height?: number;
@@ -155,7 +155,7 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = ({
    sampleCount: sampleCount,
    xyPointLength: outputXYPoints,
  };
-  let expressionResult = run(squiggleString, samplingInputs, environment);
+  let expressionResult = run(squiggleString, samplingInputs);
  let internal: JSX.Element;
  if (expressionResult.tag === "Ok") {
    onEnvChange(environment);

View File

@@ -2,7 +2,6 @@ import * as React from "react";
import * as ReactDOM from "react-dom";
import { SquiggleChart } from "./SquiggleChart";
import { CodeEditor } from "./CodeEditor";
-import type { exportEnv } from "@quri/squiggle-lang";
import styled from "styled-components";

export interface SquiggleEditorProps {
@@ -21,9 +20,9 @@ export interface SquiggleEditorProps {
  /** If the result is a function, how many points along the function it samples */
  diagramCount?: number;
  /** The environment, other variables that were already declared */
-  environment?: exportEnv;
+  environment?: unknown;
  /** when the environment changes. Used again for notebook magic*/
-  onEnvChange?(env: exportEnv): void;
+  onEnvChange?(env: unknown): void;
  /** The width of the element */
  width: number;
}

View File

@@ -20,12 +20,7 @@
  ],
  "suffix": ".bs.js",
  "namespace": true,
-  "bs-dependencies": [
-    "@glennsl/rescript-jest",
-    "@glennsl/bs-json",
-    "rationale",
-    "bisect_ppx"
-  ],
+  "bs-dependencies": ["@glennsl/rescript-jest", "rationale", "bisect_ppx"],
  "gentypeconfig": {
    "language": "typescript",
    "module": "commonjs",
@@ -37,7 +32,7 @@
  },
  "refmt": 3,
  "warnings": {
-    "number": "+A-42-48-9-30-4-102-20-27-41"
+    "number": "+A-42-48-9-30-4"
  },
  "ppx-flags": [
    ["../../node_modules/bisect_ppx/ppx", "--exclude-files", ".*_test\\.res$$"]

View File

@@ -31,7 +31,6 @@
  "author": "Quantified Uncertainty Research Institute",
  "license": "MIT",
  "dependencies": {
-    "@glennsl/bs-json": "^5.0.2",
    "bisect_ppx": "^2.7.1",
    "jstat": "^1.9.5",
    "lodash": "4.17.21",

View File

@@ -1,9 +1,4 @@
import * as _ from "lodash";
-import type {
-  exportEnv,
-  exportDistribution,
-} from "../rescript/ProgramEvaluator.gen";
-export type { exportEnv, exportDistribution };
import {
  genericDist,
  samplingParams,
@@ -48,7 +43,6 @@ import {
  Constructors_pointwiseLogarithm,
  Constructors_pointwisePower,
} from "../rescript/Distributions/DistributionOperation/DistributionOperation.gen";
-import { pointSetDistFn } from "../rescript/OldInterpreter/DistPlus.bs";

export type { samplingParams, errorValue };

export let defaultSamplingInputs: samplingParams = {
@@ -98,8 +92,7 @@ export type squiggleExpression =
  | tagged<"record", { [key: string]: squiggleExpression }>;

export function run(
  squiggleString: string,
-  samplingInputs?: samplingParams,
-  _environment?: exportEnv
+  samplingInputs?: samplingParams
): result<squiggleExpression, errorValue> {
  let si: samplingParams = samplingInputs
    ? samplingInputs

View File

@@ -69,7 +69,7 @@ let toPointSet = (
  ~xyPointLength,
  ~sampleCount,
  ~xSelection: GenericDist_Types.Operation.pointsetXSelection=#ByWeight,
-  unit,
+  (),
): result<PointSetTypes.pointSetDist, error> => {
  switch (t: t) {
  | PointSet(pointSet) => Ok(pointSet)
@@ -93,7 +93,7 @@ let toPointSet = (
  xyPointLength to be a bit longer than the eventual toSparkline downsampling. I chose 3
  fairly arbitrarily.
*/
-let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, unit): result<string, error> =>
+let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, ()): result<string, error> =>
  t
  ->toPointSet(~xSelection=#Linear, ~xyPointLength=bucketCount * 3, ~sampleCount, ())
  ->E.R.bind(r =>
@@ -101,10 +101,16 @@ let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, unit): result<
  )

module Truncate = {
-  let trySymbolicSimplification = (leftCutoff, rightCutoff, t: t): option<t> =>
+  let trySymbolicSimplification = (
+    leftCutoff: option<float>,
+    rightCutoff: option<float>,
+    t: t,
+  ): option<t> =>
    switch (leftCutoff, rightCutoff, t) {
    | (None, None, _) => None
-    | (lc, rc, Symbolic(#Uniform(u))) if lc < rc =>
+    | (Some(lc), Some(rc), Symbolic(#Uniform(u))) if lc < rc =>
+      Some(Symbolic(#Uniform(SymbolicDist.Uniform.truncate(Some(lc), Some(rc), u))))
+    | (lc, rc, Symbolic(#Uniform(u))) =>
      Some(Symbolic(#Uniform(SymbolicDist.Uniform.truncate(lc, rc, u))))
    | _ => None
    }

View File

@@ -87,7 +87,6 @@ let stepwiseToLinear = (t: t): t =>
// Note: This results in a distribution with as many points as the sum of those in t1 and t2.
let combinePointwise = (
  ~integralSumCachesFn=(_, _) => None,
-  ~integralCachesFn: (t, t) => option<t>=(_, _) => None,
  ~distributionType: PointSetTypes.distributionType=#PDF,
  fn: (float, float) => float,
  t1: PointSetTypes.continuousShape,
@@ -143,14 +142,9 @@ let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache: inte
let reduce = (
  ~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
-  ~integralCachesFn: (t, t) => option<t>=(_, _) => None,
  fn,
  continuousShapes,
-) =>
-  continuousShapes |> E.A.fold_left(
-    combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn),
-    empty,
-  )
+) => continuousShapes |> E.A.fold_left(combinePointwise(~integralSumCachesFn, fn), empty)

let mapY = (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn, t: t) =>
  make(

View File

@@ -34,11 +34,6 @@ let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
let combinePointwise = (
  ~integralSumCachesFn=(_, _) => None,
-  ~integralCachesFn: (
-    PointSetTypes.continuousShape,
-    PointSetTypes.continuousShape,
-  ) => option<PointSetTypes.continuousShape>=(_, _) => None,
-  fn,
  t1: PointSetTypes.discreteShape,
  t2: PointSetTypes.discreteShape,
): PointSetTypes.discreteShape => {
@@ -62,16 +57,8 @@ let combinePointwise = (
  )
}

-let reduce = (
-  ~integralSumCachesFn=(_, _) => None,
-  ~integralCachesFn=(_, _) => None,
-  fn,
-  discreteShapes,
-): PointSetTypes.discreteShape =>
-  discreteShapes |> E.A.fold_left(
-    combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn),
-    empty,
-  )
+let reduce = (~integralSumCachesFn=(_, _) => None, discreteShapes): PointSetTypes.discreteShape =>
+  discreteShapes |> E.A.fold_left(combinePointwise(~integralSumCachesFn), empty)

let updateIntegralSumCache = (integralSumCache, t: t): t => {
  ...t,

View File

@@ -164,12 +164,7 @@ module T = Dist({
  // This pipes all ys (continuous and discrete) through fn.
  // If mapY is a linear operation, we might be able to update the integralSumCaches as well;
  // if not, they'll be set to None.
-  let mapY = (
-    ~integralSumCacheFn=previousIntegralSum => None,
-    ~integralCacheFn=previousIntegral => None,
-    ~fn,
-    t: t,
-  ): t => {
+  let mapY = (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn, t: t): t => {
    let yMappedDiscrete: PointSetTypes.discreteShape =
      t.discrete
      |> Discrete.T.mapY(~fn)
@@ -271,16 +266,13 @@ let combinePointwise = (
  t2: t,
): t => {
  let reducedDiscrete =
-    [t1, t2]
-    |> E.A.fmap(toDiscrete)
-    |> E.A.O.concatSomes
-    |> Discrete.reduce(~integralSumCachesFn, ~integralCachesFn, fn)
+    [t1, t2] |> E.A.fmap(toDiscrete) |> E.A.O.concatSomes |> Discrete.reduce(~integralSumCachesFn)
  let reducedContinuous =
    [t1, t2]
    |> E.A.fmap(toContinuous)
    |> E.A.O.concatSomes
-    |> Continuous.reduce(~integralSumCachesFn, ~integralCachesFn, fn)
+    |> Continuous.reduce(~integralSumCachesFn, fn)
  let combinedIntegralSum = Common.combineIntegralSums(
    integralSumCachesFn,

View File

@@ -59,13 +59,9 @@ let combinePointwise = (
) =>
  switch (t1, t2) {
  | (Continuous(m1), Continuous(m2)) =>
-    PointSetTypes.Continuous(
-      Continuous.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
-    )
+    PointSetTypes.Continuous(Continuous.combinePointwise(~integralSumCachesFn, fn, m1, m2))
  | (Discrete(m1), Discrete(m2)) =>
-    PointSetTypes.Discrete(
-      Discrete.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
-    )
+    PointSetTypes.Discrete(Discrete.combinePointwise(~integralSumCachesFn, m1, m2))
  | (m1, m2) =>
    PointSetTypes.Mixed(
      Mixed.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, toMixed(m1), toMixed(m2)),
@@ -134,11 +130,7 @@ module T = Dist({
  let integralYtoX = f =>
    mapToAll((Mixed.T.Integral.yToX(f), Discrete.T.Integral.yToX(f), Continuous.T.Integral.yToX(f)))
  let maxX = mapToAll((Mixed.T.maxX, Discrete.T.maxX, Continuous.T.maxX))
-  let mapY = (
-    ~integralSumCacheFn=previousIntegralSum => None,
-    ~integralCacheFn=previousIntegral => None,
-    ~fn,
-  ) =>
+  let mapY = (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn) =>
    fmap((
      Mixed.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
      Discrete.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),

View File

@@ -1,24 +0,0 @@
open ASTTypes
let toString = ASTTypes.Node.toString
let envs = (samplingInputs, environment) => {
samplingInputs: samplingInputs,
environment: environment,
evaluateNode: ASTEvaluator.toLeaf,
}
let toLeaf = (samplingInputs, environment, node: node) =>
ASTEvaluator.toLeaf(envs(samplingInputs, environment), node)
let toPointSetDist = (samplingInputs, environment, node: node) =>
switch toLeaf(samplingInputs, environment, node) {
| Ok(#RenderedDist(pointSetDist)) => Ok(pointSetDist)
| Ok(_) => Error("Rendering failed.")
| Error(e) => Error(e)
}
let runFunction = (samplingInputs, environment, inputs, fn: ASTTypes.Function.t) => {
let params = envs(samplingInputs, environment)
ASTTypes.Function.run(params, inputs, fn)
}

View File

@@ -1,257 +0,0 @@
open ASTTypes
type tResult = node => result<node, string>
/* Given two random variables A and B, this returns the distribution
of a new variable that is the result of the operation on A and B.
For instance, normal(0, 1) + normal(1, 1) -> normal(1, 2).
In general, this is implemented via convolution. */
module AlgebraicCombination = {
let tryAnalyticalSimplification = (operation, t1: node, t2: node) =>
switch (operation, t1, t2) {
| (operation, #SymbolicDist(d1), #SymbolicDist(d2)) =>
switch SymbolicDist.T.tryAnalyticalSimplification(d1, d2, operation) {
| #AnalyticalSolution(symbolicDist) => Ok(#SymbolicDist(symbolicDist))
| #Error(er) => Error(er)
| #NoSolution => Ok(#AlgebraicCombination(operation, t1, t2))
}
| _ => Ok(#AlgebraicCombination(operation, t1, t2))
}
let combinationByRendering = (evaluationParams, algebraicOp, t1: node, t2: node): result<
node,
string,
> =>
E.R.merge(
Node.ensureIsRenderedAndGetShape(evaluationParams, t1),
Node.ensureIsRenderedAndGetShape(evaluationParams, t2),
) |> E.R.fmap(((a, b)) => #RenderedDist(PointSetDist.combineAlgebraically(algebraicOp, a, b)))
let nodeScore: node => int = x =>
switch x {
| #SymbolicDist(#Float(_)) => 1
| #SymbolicDist(_) => 1000
| #RenderedDist(Discrete(m)) => m.xyShape |> XYShape.T.length
| #RenderedDist(Mixed(_)) => 1000
| #RenderedDist(Continuous(_)) => 1000
| _ => 1000
}
let choose = (t1: node, t2: node) =>
nodeScore(t1) * nodeScore(t2) > 10000 ? #Sampling : #Analytical
let combine = (evaluationParams, algebraicOp, t1: node, t2: node): result<node, string> =>
E.R.merge(
ASTTypes.SamplingDistribution.renderIfIsNotSamplingDistribution(evaluationParams, t1),
ASTTypes.SamplingDistribution.renderIfIsNotSamplingDistribution(evaluationParams, t2),
) |> E.R.bind(_, ((a, b)) =>
switch choose(a, b) {
| #Sampling =>
ASTTypes.SamplingDistribution.combineShapesUsingSampling(
evaluationParams,
algebraicOp,
a,
b,
)
| #Analytical => combinationByRendering(evaluationParams, algebraicOp, a, b)
}
)
let operationToLeaf = (
evaluationParams: evaluationParams,
algebraicOp: Operation.algebraicOperation,
t1: node,
t2: node,
): result<node, string> =>
algebraicOp
|> tryAnalyticalSimplification(_, t1, t2)
|> E.R.bind(_, x =>
switch x {
| #SymbolicDist(_) as t => Ok(t)
| _ => combine(evaluationParams, algebraicOp, t1, t2)
}
)
}
module PointwiseCombination = {
//TODO: This is crude and slow. It forces everything to be pointSetDist, even though much
//of the process could happen on symbolic distributions without a conversion to be a pointSetDist.
let pointwiseAdd = (evaluationParams: evaluationParams, t1: node, t2: node) =>
switch (Node.render(evaluationParams, t1), Node.render(evaluationParams, t2)) {
| (Ok(#RenderedDist(rs1)), Ok(#RenderedDist(rs2))) =>
Ok(
#RenderedDist(
PointSetDist.combinePointwise(
~integralSumCachesFn=(a, b) => Some(a +. b),
~integralCachesFn=(a, b) => Some(
Continuous.combinePointwise(~distributionType=#CDF, \"+.", a, b),
),
\"+.",
rs1,
rs2,
),
),
)
| (Error(e1), _) => Error(e1)
| (_, Error(e2)) => Error(e2)
| _ => Error("Pointwise combination: rendering failed.")
}
let pointwiseCombine = (fn, evaluationParams: evaluationParams, t1: node, t2: node) =>
switch // TODO: construct a function that we can easily sample from, to construct
// a RenderedDist. Use the xMin and xMax of the rendered pointSetDists to tell the sampling function where to look.
// TODO: This should work for symbolic distributions too!
(Node.render(evaluationParams, t1), Node.render(evaluationParams, t2)) {
| (Ok(#RenderedDist(rs1)), Ok(#RenderedDist(rs2))) =>
Ok(#RenderedDist(PointSetDist.combinePointwise(fn, rs1, rs2)))
| (Error(e1), _) => Error(e1)
| (_, Error(e2)) => Error(e2)
| _ => Error("Pointwise combination: rendering failed.")
}
let operationToLeaf = (
evaluationParams: evaluationParams,
pointwiseOp: Operation.pointwiseOperation,
t1: node,
t2: node,
) =>
switch pointwiseOp {
| #Add => pointwiseAdd(evaluationParams, t1, t2)
| #Multiply => pointwiseCombine(\"*.", evaluationParams, t1, t2)
| #Power => pointwiseCombine(\"**", evaluationParams, t1, t2)
}
}
module Truncate = {
type simplificationResult = [
| #Solution(ASTTypes.node)
| #Error(string)
| #NoSolution
]
let trySimplification = (leftCutoff, rightCutoff, t): simplificationResult =>
switch (leftCutoff, rightCutoff, t) {
| (None, None, t) => #Solution(t)
| (Some(lc), Some(rc), _) if lc > rc =>
#Error("Left truncation bound must be smaller than right truncation bound.")
| (lc, rc, #SymbolicDist(#Uniform(u))) =>
#Solution(#SymbolicDist(#Uniform(SymbolicDist.Uniform.truncate(lc, rc, u))))
| _ => #NoSolution
}
let truncateAsShape = (evaluationParams: evaluationParams, leftCutoff, rightCutoff, t) =>
switch // TODO: use named args for xMin/xMax in renderToShape; if we're lucky we can at least get the tail
// of a distribution we otherwise wouldn't get at all
Node.ensureIsRendered(evaluationParams, t) {
| Ok(#RenderedDist(rs)) =>
Ok(#RenderedDist(PointSetDist.T.truncate(leftCutoff, rightCutoff, rs)))
| Error(e) => Error(e)
| _ => Error("Could not truncate distribution.")
}
let operationToLeaf = (
evaluationParams,
leftCutoff: option<float>,
rightCutoff: option<float>,
t: node,
): result<node, string> =>
t
|> trySimplification(leftCutoff, rightCutoff)
|> (
x =>
switch x {
| #Solution(t) => Ok(t)
| #Error(e) => Error(e)
| #NoSolution => truncateAsShape(evaluationParams, leftCutoff, rightCutoff, t)
}
)
}
module Normalize = {
let rec operationToLeaf = (evaluationParams, t: node): result<node, string> =>
switch t {
| #RenderedDist(s) => Ok(#RenderedDist(PointSetDist.T.normalize(s)))
| #SymbolicDist(_) => Ok(t)
| _ => ASTTypes.Node.evaluateAndRetry(evaluationParams, operationToLeaf, t)
}
}
module FunctionCall = {
let _runHardcodedFunction = (name, evaluationParams, args) =>
TypeSystem.Function.Ts.findByNameAndRun(HardcodedFunctions.all, name, evaluationParams, args)
let _runLocalFunction = (name, evaluationParams: evaluationParams, args) =>
Environment.getFunction(evaluationParams.environment, name) |> E.R.bind(_, ((argNames, fn)) =>
ASTTypes.Function.run(evaluationParams, args, (argNames, fn))
)
let _runWithEvaluatedInputs = (
evaluationParams: ASTTypes.evaluationParams,
name,
args: array<ASTTypes.node>,
) =>
_runHardcodedFunction(name, evaluationParams, args) |> E.O.default(
_runLocalFunction(name, evaluationParams, args),
)
// TODO: This forces things to be floats
let run = (evaluationParams, name, args) =>
args
|> E.A.fmap(a => evaluationParams.evaluateNode(evaluationParams, a))
|> E.A.R.firstErrorOrOpen
|> E.R.bind(_, _runWithEvaluatedInputs(evaluationParams, name))
}
module Render = {
let rec operationToLeaf = (evaluationParams: evaluationParams, t: node): result<node, string> =>
switch t {
| #Function(_) => Error("Cannot render a function")
| #SymbolicDist(d) =>
Ok(
#RenderedDist(
SymbolicDist.T.toPointSetDist(evaluationParams.samplingInputs.pointSetDistLength, d),
),
)
| #RenderedDist(_) as t => Ok(t) // already a rendered pointSetDist, we're done here
| _ => ASTTypes.Node.evaluateAndRetry(evaluationParams, operationToLeaf, t)
}
}
/* This function recursively goes through the nodes of the parse tree,
replacing each Operation node and its subtree with a Data node.
Whenever possible, the replacement produces a new Symbolic Data node,
but most often it will produce a RenderedDist.
This function is used mainly to turn a parse tree into a single RenderedDist
that can then be displayed to the user. */
let rec toLeaf = (evaluationParams: ASTTypes.evaluationParams, node: node): result<node, string> =>
switch node {
// Leaf nodes just stay leaf nodes
| #SymbolicDist(_)
| #Function(_)
| #RenderedDist(_) =>
Ok(node)
| #Array(args) =>
args |> E.A.fmap(toLeaf(evaluationParams)) |> E.A.R.firstErrorOrOpen |> E.R.fmap(r => #Array(r))
// Operations need to be turned into leaves
| #AlgebraicCombination(algebraicOp, t1, t2) =>
AlgebraicCombination.operationToLeaf(evaluationParams, algebraicOp, t1, t2)
| #PointwiseCombination(pointwiseOp, t1, t2) =>
PointwiseCombination.operationToLeaf(evaluationParams, pointwiseOp, t1, t2)
| #Truncate(leftCutoff, rightCutoff, t) =>
Truncate.operationToLeaf(evaluationParams, leftCutoff, rightCutoff, t)
| #Normalize(t) => Normalize.operationToLeaf(evaluationParams, t)
| #Render(t) => Render.operationToLeaf(evaluationParams, t)
| #Hash(t) =>
t
|> E.A.fmap(((name: string, node: node)) =>
toLeaf(evaluationParams, node) |> E.R.fmap(r => (name, r))
)
|> E.A.R.firstErrorOrOpen
|> E.R.fmap(r => #Hash(r))
| #Symbol(r) =>
ASTTypes.Environment.get(evaluationParams.environment, r)
|> E.O.toResult("Undeclared variable " ++ r)
|> E.R.bind(_, toLeaf(evaluationParams))
| #FunctionCall(name, args) =>
FunctionCall.run(evaluationParams, name, args) |> E.R.bind(_, toLeaf(evaluationParams))
}

View File

@@ -1,233 +0,0 @@
@genType
type rec hash = array<(string, node)>
and node = [
| #SymbolicDist(SymbolicDistTypes.symbolicDist)
| #RenderedDist(PointSetTypes.pointSetDist)
| #Symbol(string)
| #Hash(hash)
| #Array(array<node>)
| #Function(array<string>, node)
| #AlgebraicCombination(Operation.algebraicOperation, node, node)
| #PointwiseCombination(Operation.pointwiseOperation, node, node)
| #Normalize(node)
| #Render(node)
| #Truncate(option<float>, option<float>, node)
| #FunctionCall(string, array<node>)
]
type statement = [
| #Assignment(string, node)
| #Expression(node)
]
type program = array<statement>
type environment = Belt.Map.String.t<node>
type rec evaluationParams = {
samplingInputs: SamplingInputs.samplingInputs,
environment: environment,
evaluateNode: (evaluationParams, node) => Belt.Result.t<node, string>,
}
module Environment = {
type t = environment
module MS = Belt.Map.String
let fromArray = MS.fromArray
let empty: t = []->fromArray
let mergeKeepSecond = (a: t, b: t) =>
MS.merge(a, b, (_, a, b) =>
switch (a, b) {
| (_, Some(b)) => Some(b)
| (Some(a), _) => Some(a)
| _ => None
}
)
let update = (t, str, fn) => MS.update(t, str, fn)
let get = (t: t, str) => MS.get(t, str)
let getFunction = (t: t, str) =>
switch get(t, str) {
| Some(#Function(argNames, fn)) => Ok((argNames, fn))
| _ => Error("Function " ++ (str ++ " not found"))
}
}
module Node = {
let getFloat = (node: node) =>
node |> (
x =>
switch x {
| #RenderedDist(Discrete({xyShape: {xs: [x], ys: [1.0]}})) => Some(x)
| #SymbolicDist(#Float(x)) => Some(x)
| _ => None
}
)
let evaluate = (evaluationParams: evaluationParams) =>
evaluationParams.evaluateNode(evaluationParams)
let evaluateAndRetry = (evaluationParams, fn, node) =>
node |> evaluationParams.evaluateNode(evaluationParams) |> E.R.bind(_, fn(evaluationParams))
let rec toString: node => string = x =>
switch x {
| #SymbolicDist(d) => SymbolicDist.T.toString(d)
| #RenderedDist(_) => "[renderedShape]"
| #AlgebraicCombination(op, t1, t2) =>
Operation.Algebraic.format(op, toString(t1), toString(t2))
| #PointwiseCombination(op, t1, t2) =>
Operation.Pointwise.format(op, toString(t1), toString(t2))
| #Normalize(t) => "normalize(k" ++ (toString(t) ++ ")")
| #Truncate(lc, rc, t) => Operation.Truncate.toString(lc, rc, toString(t))
| #Render(t) => toString(t)
| #Symbol(t) => "Symbol: " ++ t
| #FunctionCall(name, args) =>
"[Function call: (" ++
(name ++
((args |> E.A.fmap(toString) |> Js.String.concatMany(_, ",")) ++ ")]"))
| #Function(args, internal) =>
"[Function: (" ++ ((args |> Js.String.concatMany(_, ",")) ++ (toString(internal) ++ ")]"))
| #Array(a) => "[" ++ ((a |> E.A.fmap(toString) |> Js.String.concatMany(_, ",")) ++ "]")
| #Hash(h) =>
"{" ++
((h
|> E.A.fmap(((name, value)) => name ++ (":" ++ toString(value)))
|> Js.String.concatMany(_, ",")) ++
"}")
}
let render = (evaluationParams: evaluationParams, r) => #Render(r) |> evaluate(evaluationParams)
let ensureIsRendered = (params, t) =>
switch t {
| #RenderedDist(_) => Ok(t)
| _ =>
switch render(params, t) {
| Ok(#RenderedDist(r)) => Ok(#RenderedDist(r))
| Ok(_) => Error("Did not render as requested")
| Error(e) => Error(e)
}
}
let ensureIsRenderedAndGetShape = (params, t) =>
switch ensureIsRendered(params, t) {
| Ok(#RenderedDist(r)) => Ok(r)
| Ok(_) => Error("Did not render as requested")
| Error(e) => Error(e)
}
let toPointSetDist = (item: node) =>
switch item {
| #RenderedDist(r) => Some(r)
| _ => None
}
let _toFloat = (t: PointSetTypes.pointSetDist) =>
switch t {
| Discrete({xyShape: {xs: [x], ys: [1.0]}}) => Some(#SymbolicDist(#Float(x)))
| _ => None
}
let toFloat = (item: node): result<node, string> =>
item |> toPointSetDist |> E.O.bind(_, _toFloat) |> E.O.toResult("Not valid shape")
}
module Function = {
type t = (array<string>, node)
let fromNode: node => option<t> = node =>
switch node {
| #Function(r) => Some(r)
| _ => None
}
let argumentNames = ((a, _): t) => a
let internals = ((_, b): t) => b
let run = (evaluationParams: evaluationParams, args: array<node>, t: t) =>
if E.A.length(args) == E.A.length(argumentNames(t)) {
let newEnvironment = Belt.Array.zip(argumentNames(t), args) |> Environment.fromArray
let newEvaluationParams: evaluationParams = {
samplingInputs: evaluationParams.samplingInputs,
environment: Environment.mergeKeepSecond(evaluationParams.environment, newEnvironment),
evaluateNode: evaluationParams.evaluateNode,
}
evaluationParams.evaluateNode(newEvaluationParams, internals(t))
} else {
Error("Wrong number of variables")
}
}
module SamplingDistribution = {
type t = [
| #SymbolicDist(SymbolicDistTypes.symbolicDist)
| #RenderedDist(PointSetTypes.pointSetDist)
]
let isSamplingDistribution: node => bool = x =>
switch x {
| #SymbolicDist(_) => true
| #RenderedDist(_) => true
| _ => false
}
let fromNode: node => result<t, string> = x =>
switch x {
| #SymbolicDist(n) => Ok(#SymbolicDist(n))
| #RenderedDist(n) => Ok(#RenderedDist(n))
| _ => Error("Not valid type")
}
let renderIfIsNotSamplingDistribution = (params, t): result<node, string> =>
!isSamplingDistribution(t)
? switch Node.render(params, t) {
| Ok(r) => Ok(r)
| Error(e) => Error(e)
}
: Ok(t)
let map = (~renderedDistFn, ~symbolicDistFn, node: node) =>
node |> (
x =>
switch x {
| #RenderedDist(r) => Some(renderedDistFn(r))
| #SymbolicDist(s) => Some(symbolicDistFn(s))
| _ => None
}
)
let sampleN = n =>
map(~renderedDistFn=PointSetDist.sampleNRendered(n), ~symbolicDistFn=SymbolicDist.T.sampleN(n))
let getCombinationSamples = (n, algebraicOp, t1: node, t2: node) =>
switch (sampleN(n, t1), sampleN(n, t2)) {
| (Some(a), Some(b)) =>
Some(
Belt.Array.zip(a, b) |> E.A.fmap(((a, b)) => Operation.Algebraic.toFn(algebraicOp, a, b)),
)
| _ => None
}
let combineShapesUsingSampling = (
evaluationParams: evaluationParams,
algebraicOp,
t1: node,
t2: node,
) => {
let i1 = renderIfIsNotSamplingDistribution(evaluationParams, t1)
let i2 = renderIfIsNotSamplingDistribution(evaluationParams, t2)
E.R.merge(i1, i2) |> E.R.bind(_, ((a, b)) => {
let samples =
getCombinationSamples(
evaluationParams.samplingInputs.sampleCount,
algebraicOp,
a,
b,
) |> E.O.toResult("Could not get samples")
let sampleSetDist = samples->E.R.bind(SampleSetDist.make)
let pointSetDist =
sampleSetDist->E.R.bind(r =>
SampleSetDist.toPointSetDist(~samplingInputs=evaluationParams.samplingInputs, ~samples=r)
)
pointSetDist |> E.R.fmap(r => #Normalize(#RenderedDist(r)))
})
}
}

View File

@@ -1,87 +0,0 @@
open PointSetTypes
@genType
type t = PointSetTypes.distPlus
let pointSetDistIntegral = pointSetDist => PointSetDist.T.Integral.get(pointSetDist)
let make = (~pointSetDist, ~squiggleString, ()): t => {
let integral = pointSetDistIntegral(pointSetDist)
{pointSetDist: pointSetDist, integralCache: integral, squiggleString: squiggleString}
}
let update = (~pointSetDist=?, ~integralCache=?, ~squiggleString=?, t: t) => {
pointSetDist: E.O.default(t.pointSetDist, pointSetDist),
integralCache: E.O.default(t.integralCache, integralCache),
squiggleString: E.O.default(t.squiggleString, squiggleString),
}
let updateShape = (pointSetDist, t) => {
let integralCache = pointSetDistIntegral(pointSetDist)
update(~pointSetDist, ~integralCache, t)
}
let toPointSetDist = ({pointSetDist, _}: t) => pointSetDist
let pointSetDistFn = (fn, {pointSetDist}: t) => fn(pointSetDist)
module T = Distributions.Dist({
type t = PointSetTypes.distPlus
type integral = PointSetTypes.distPlus
let toPointSetDist = toPointSetDist
let toContinuous = pointSetDistFn(PointSetDist.T.toContinuous)
let toDiscrete = pointSetDistFn(PointSetDist.T.toDiscrete)
let normalize = (t: t): t => {
let normalizedShape = t |> toPointSetDist |> PointSetDist.T.normalize
t |> updateShape(normalizedShape)
}
let truncate = (leftCutoff, rightCutoff, t: t): t => {
let truncatedShape = t |> toPointSetDist |> PointSetDist.T.truncate(leftCutoff, rightCutoff)
t |> updateShape(truncatedShape)
}
let xToY = (f, t: t) => t |> toPointSetDist |> PointSetDist.T.xToY(f)
let minX = pointSetDistFn(PointSetDist.T.minX)
let maxX = pointSetDistFn(PointSetDist.T.maxX)
let toDiscreteProbabilityMassFraction = pointSetDistFn(
PointSetDist.T.toDiscreteProbabilityMassFraction,
)
// This bit is kind of awkward, could probably use rethinking.
let integral = (t: t) => updateShape(Continuous(t.integralCache), t)
let updateIntegralCache = (integralCache: option<PointSetTypes.continuousShape>, t) =>
update(~integralCache=E.O.default(t.integralCache, integralCache), t)
let downsample = (i, t): t => updateShape(t |> toPointSetDist |> PointSetDist.T.downsample(i), t)
// todo: adjust for limit, maybe?
let mapY = (
~integralSumCacheFn=previousIntegralSum => None,
~integralCacheFn=previousIntegralCache => None,
~fn,
{pointSetDist, _} as t: t,
): t => PointSetDist.T.mapY(~integralSumCacheFn, ~fn, pointSetDist) |> updateShape(_, t)
// get the total of everything
let integralEndY = (t: t) => {
PointSetDist.T.Integral.sum(toPointSetDist(t))
}
// TODO: Fix this below, obviously. Adjust for limits
let integralXtoY = (f, t: t) => {
PointSetDist.T.Integral.xToY(f, toPointSetDist(t))
}
// TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
let integralYtoX = (f, t: t) => {
PointSetDist.T.Integral.yToX(f, toPointSetDist(t))
}
let mean = (t: t) => {
PointSetDist.T.mean(t.pointSetDist)
}
let variance = (t: t) => PointSetDist.T.variance(t.pointSetDist)
})

View File

@@ -1,240 +0,0 @@
open TypeSystem
let wrongInputsError = (r: array<typedValue>) => {
let inputs = r |> E.A.fmap(TypedValue.toString) |> Js.String.concatMany(_, ",")
Js.log3("Inputs were", inputs, r)
Error("Wrong inputs. The inputs were:" ++ inputs)
}
let to_: (float, float) => result<node, string> = (low, high) =>
switch (low, high) {
| (low, high) if low <= 0.0 && low < high =>
Ok(#SymbolicDist(SymbolicDist.Normal.from90PercentCI(low, high)))
| (low, high) if low < high =>
Ok(#SymbolicDist(SymbolicDist.Lognormal.from90PercentCI(low, high)))
| (_, _) => Error("Low value must be less than high value.")
}
let makeSymbolicFromTwoFloats = (name, fn) =>
Function.T.make(
~name,
~outputType=#SamplingDistribution,
~inputTypes=[#Float, #Float],
~run=x =>
switch x {
| [#Float(a), #Float(b)] => fn(a, b) |> E.R.fmap(r => #SymbolicDist(r))
| e => wrongInputsError(e)
},
(),
)
let makeSymbolicFromOneFloat = (name, fn) =>
Function.T.make(
~name,
~outputType=#SamplingDistribution,
~inputTypes=[#Float],
~run=x =>
switch x {
| [#Float(a)] => fn(a) |> E.R.fmap(r => #SymbolicDist(r))
| e => wrongInputsError(e)
},
(),
)
let makeDistFloat = (name, fn) =>
Function.T.make(
~name,
~outputType=#SamplingDistribution,
~inputTypes=[#SamplingDistribution, #Float],
~run=x =>
switch x {
| [#SamplingDist(a), #Float(b)] => fn(a, b)
| [#RenderedDist(a), #Float(b)] => fn(#RenderedDist(a), b)
| e => wrongInputsError(e)
},
(),
)
let makeRenderedDistFloat = (name, fn) =>
Function.T.make(
~name,
~outputType=#RenderedDistribution,
~inputTypes=[#RenderedDistribution, #Float],
~shouldCoerceTypes=true,
~run=x =>
switch x {
| [#RenderedDist(a), #Float(b)] => fn(a, b)
| e => wrongInputsError(e)
},
(),
)
let makeDist = (name, fn) =>
Function.T.make(
~name,
~outputType=#SamplingDistribution,
~inputTypes=[#SamplingDistribution],
~run=x =>
switch x {
| [#SamplingDist(a)] => fn(a)
| [#RenderedDist(a)] => fn(#RenderedDist(a))
| e => wrongInputsError(e)
},
(),
)
let floatFromDist = (
distToFloatOp: Operation.distToFloatOperation,
t: TypeSystem.samplingDist,
): result<node, string> =>
switch t {
| #SymbolicDist(s) =>
SymbolicDist.T.operate(distToFloatOp, s) |> E.R.bind(_, v => Ok(#SymbolicDist(#Float(v))))
| #RenderedDist(rs) =>
PointSetDist.operate(distToFloatOp, rs) |> (v => Ok(#SymbolicDist(#Float(v))))
}
let verticalScaling = (scaleOp, rs, scaleBy) => {
// scaleBy has to be a single float, otherwise we'll return an error.
let fn = (secondary, main) => Operation.Scale.toFn(scaleOp, main, secondary)
let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(scaleOp)
let integralCacheFn = Operation.Scale.toIntegralCacheFn(scaleOp)
Ok(
#RenderedDist(
PointSetDist.T.mapY(
~integralSumCacheFn=integralSumCacheFn(scaleBy),
~integralCacheFn=integralCacheFn(scaleBy),
~fn=fn(scaleBy),
rs,
),
),
)
}
module Multimodal = {
let getByNameResult = Hash.getByNameResult
let _paramsToDistsAndWeights = (r: array<typedValue>) =>
switch r {
| [#Hash(r)] =>
let dists =
getByNameResult(r, "dists")
->E.R.bind(TypeSystem.TypedValue.toArray)
->E.R.bind(r => r |> E.A.fmap(TypeSystem.TypedValue.toDist) |> E.A.R.firstErrorOrOpen)
let weights =
getByNameResult(r, "weights")
->E.R.bind(TypeSystem.TypedValue.toArray)
->E.R.bind(r => r |> E.A.fmap(TypeSystem.TypedValue.toFloat) |> E.A.R.firstErrorOrOpen)
E.R.merge(dists, weights)->E.R.bind(((a, b)) =>
E.A.length(b) > E.A.length(a)
? Error("Too many weights provided")
: Ok(
E.A.zipMaxLength(a, b) |> E.A.fmap(((a, b)) => (
a |> E.O.toExn(""),
b |> E.O.default(1.0),
)),
)
)
| _ => Error("Needs items")
}
let _runner: array<typedValue> => result<node, string> = r => {
let paramsToDistsAndWeights =
_paramsToDistsAndWeights(r) |> E.R.fmap(
E.A.fmap(((dist, weight)) =>
#FunctionCall("scaleMultiply", [dist, #SymbolicDist(#Float(weight))])
),
)
let pointwiseSum: result<node, string> =
paramsToDistsAndWeights->E.R.bind(E.R.errorIfCondition(E.A.isEmpty, "Needs one input"))
|> E.R.fmap(r =>
r
|> Js.Array.sliceFrom(1)
|> E.A.fold_left((acc, x) => #PointwiseCombination(#Add, acc, x), E.A.unsafe_get(r, 0))
)
pointwiseSum
}
let _function = Function.T.make(
~name="multimodal",
~outputType=#SamplingDistribution,
~inputTypes=[#Hash([("dists", #Array(#SamplingDistribution)), ("weights", #Array(#Float))])],
~run=_runner,
(),
)
}
let all = [
makeSymbolicFromTwoFloats("normal", SymbolicDist.Normal.make),
makeSymbolicFromTwoFloats("uniform", SymbolicDist.Uniform.make),
makeSymbolicFromTwoFloats("beta", SymbolicDist.Beta.make),
makeSymbolicFromTwoFloats("lognormal", SymbolicDist.Lognormal.make),
makeSymbolicFromTwoFloats("lognormalFromMeanAndStdDev", SymbolicDist.Lognormal.fromMeanAndStdev),
makeSymbolicFromOneFloat("exponential", SymbolicDist.Exponential.make),
Function.T.make(
~name="to",
~outputType=#SamplingDistribution,
~inputTypes=[#Float, #Float],
~run=x =>
switch x {
| [#Float(a), #Float(b)] => to_(a, b)
| e => wrongInputsError(e)
},
(),
),
Function.T.make(
~name="triangular",
~outputType=#SamplingDistribution,
~inputTypes=[#Float, #Float, #Float],
~run=x =>
switch x {
| [#Float(a), #Float(b), #Float(c)] =>
SymbolicDist.Triangular.make(a, b, c) |> E.R.fmap(r => #SymbolicDist(r))
| e => wrongInputsError(e)
},
(),
),
Function.T.make(
~name="log",
~outputType=#Float,
~inputTypes=[#Float],
~run=x =>
switch x {
| [#Float(a)] => Ok(#SymbolicDist(#Float(Js.Math.log(a))))
| e => wrongInputsError(e)
},
(),
),
makeDistFloat("pdf", (dist, float) => floatFromDist(#Pdf(float), dist)),
makeDistFloat("inv", (dist, float) => floatFromDist(#Inv(float), dist)),
makeDistFloat("cdf", (dist, float) => floatFromDist(#Cdf(float), dist)),
makeDist("mean", dist => floatFromDist(#Mean, dist)),
makeDist("sample", dist => floatFromDist(#Sample, dist)),
Function.T.make(
~name="render",
~outputType=#RenderedDistribution,
~inputTypes=[#RenderedDistribution],
~run=x =>
switch x {
| [#RenderedDist(c)] => Ok(#RenderedDist(c))
| e => wrongInputsError(e)
},
(),
),
Function.T.make(
~name="normalize",
~outputType=#SamplingDistribution,
~inputTypes=[#SamplingDistribution],
~run=x =>
switch x {
| [#SamplingDist(#SymbolicDist(c))] => Ok(#SymbolicDist(c))
| [#SamplingDist(#RenderedDist(c))] => Ok(#RenderedDist(PointSetDist.T.normalize(c)))
| e => wrongInputsError(e)
},
(),
),
makeRenderedDistFloat("scaleExp", (dist, float) => verticalScaling(#Power, dist, float)),
makeRenderedDistFloat("scaleMultiply", (dist, float) => verticalScaling(#Multiply, dist, float)),
makeRenderedDistFloat("scaleLog", (dist, float) => verticalScaling(#Logarithm, dist, float)),
Multimodal._function,
]

View File

@@ -1,196 +0,0 @@
type node = ASTTypes.node
let getFloat = ASTTypes.Node.getFloat
type samplingDist = [
| #SymbolicDist(SymbolicDistTypes.symbolicDist)
| #RenderedDist(PointSetTypes.pointSetDist)
]
type rec hashType = array<(string, _type)>
and _type = [
| #Float
| #SamplingDistribution
| #RenderedDistribution
| #Array(_type)
| #Hash(hashType)
]
type rec hashTypedValue = array<(string, typedValue)>
and typedValue = [
| #Float(float)
| #RenderedDist(PointSetTypes.pointSetDist)
| #SamplingDist(samplingDist)
| #Array(array<typedValue>)
| #Hash(hashTypedValue)
]
type _function = {
name: string,
inputTypes: array<_type>,
outputType: _type,
run: array<typedValue> => result<node, string>,
shouldCoerceTypes: bool,
}
type functions = array<_function>
type inputNodes = array<node>
module TypedValue = {
let rec toString: typedValue => string = x =>
switch x {
| #SamplingDist(_) => "[sampling dist]"
| #RenderedDist(_) => "[rendered PointSetDist]"
| #Float(f) => "Float: " ++ Js.Float.toString(f)
| #Array(a) => "[" ++ ((a |> E.A.fmap(toString) |> Js.String.concatMany(_, ",")) ++ "]")
| #Hash(v) =>
"{" ++
((v
|> E.A.fmap(((name, value)) => name ++ (":" ++ toString(value)))
|> Js.String.concatMany(_, ",")) ++
"}")
}
let rec fromNode = (node: node): result<typedValue, string> =>
switch node {
| #SymbolicDist(#Float(r)) => Ok(#Float(r))
| #SymbolicDist(s) => Ok(#SamplingDist(#SymbolicDist(s)))
| #RenderedDist(s) => Ok(#RenderedDist(s))
| #Array(r) => r |> E.A.fmap(fromNode) |> E.A.R.firstErrorOrOpen |> E.R.fmap(r => #Array(r))
| #Hash(hash) =>
hash
|> E.A.fmap(((name, t)) => fromNode(t) |> E.R.fmap(r => (name, r)))
|> E.A.R.firstErrorOrOpen
|> E.R.fmap(r => #Hash(r))
| e => Error("Wrong type: " ++ ASTTypes.Node.toString(e))
}
// todo: Arrays and hashes
let rec fromNodeWithTypeCoercion = (evaluationParams, _type: _type, node) =>
switch (_type, node) {
| (#Float, _) =>
switch getFloat(node) {
| Some(a) => Ok(#Float(a))
| _ => Error("Type Error: Expected float.")
}
| (#SamplingDistribution, _) =>
ASTTypes.SamplingDistribution.renderIfIsNotSamplingDistribution(
evaluationParams,
node,
) |> E.R.bind(_, fromNode)
| (#RenderedDistribution, _) =>
ASTTypes.Node.render(evaluationParams, node) |> E.R.bind(_, fromNode)
| (#Array(_type), #Array(b)) =>
b
|> E.A.fmap(fromNodeWithTypeCoercion(evaluationParams, _type))
|> E.A.R.firstErrorOrOpen
|> E.R.fmap(r => #Array(r))
| (#Hash(named), #Hash(r)) =>
let keyValues =
named |> E.A.fmap(((name, intendedType)) => (name, intendedType, Hash.getByName(r, name)))
let typedHash =
keyValues
|> E.A.fmap(((name, intendedType, optionNode)) =>
switch optionNode {
| Some(node) =>
fromNodeWithTypeCoercion(evaluationParams, intendedType, node) |> E.R.fmap(node => (
name,
node,
))
| None => Error("Hash parameter not present in hash.")
}
)
|> E.A.R.firstErrorOrOpen
|> E.R.fmap(r => #Hash(r))
typedHash
| _ => Error("fromNodeWithTypeCoercion error, sorry.")
}
let toFloat: typedValue => result<float, string> = x =>
switch x {
| #Float(x) => Ok(x)
| _ => Error("Not a float")
}
let toArray: typedValue => result<array<'a>, string> = x =>
switch x {
| #Array(x) => Ok(x)
| _ => Error("Not an array")
}
let toNamed: typedValue => result<hashTypedValue, string> = x =>
switch x {
| #Hash(x) => Ok(x)
| _ => Error("Not a named item")
}
let toDist: typedValue => result<node, string> = x =>
switch x {
| #SamplingDist(#SymbolicDist(c)) => Ok(#SymbolicDist(c))
| #SamplingDist(#RenderedDist(c)) => Ok(#RenderedDist(c))
| #RenderedDist(c) => Ok(#RenderedDist(c))
| #Float(x) => Ok(#SymbolicDist(#Float(x)))
| x => Error("Cannot be converted into a distribution: " ++ toString(x))
}
}
module Function = {
type t = _function
type ts = functions
module T = {
let make = (~name, ~inputTypes, ~outputType, ~run, ~shouldCoerceTypes=true, _): t => {
name: name,
inputTypes: inputTypes,
outputType: outputType,
run: run,
shouldCoerceTypes: shouldCoerceTypes,
}
let _inputLengthCheck = (inputNodes: inputNodes, t: t) => {
let expectedLength = E.A.length(t.inputTypes)
let actualLength = E.A.length(inputNodes)
expectedLength == actualLength
? Ok(inputNodes)
: Error(
"Wrong number of inputs. Expected" ++
((expectedLength |> E.I.toString) ++
(". Got:" ++ (actualLength |> E.I.toString))),
)
}
let _coerceInputNodes = (evaluationParams, inputTypes, shouldCoerce, inputNodes) =>
Belt.Array.zip(inputTypes, inputNodes)
|> E.A.fmap(((def, input)) =>
shouldCoerce
? TypedValue.fromNodeWithTypeCoercion(evaluationParams, def, input)
: TypedValue.fromNode(input)
)
|> E.A.R.firstErrorOrOpen
let inputsToTypedValues = (
evaluationParams: ASTTypes.evaluationParams,
inputNodes: inputNodes,
t: t,
) =>
_inputLengthCheck(inputNodes, t)->E.R.bind(
_coerceInputNodes(evaluationParams, t.inputTypes, t.shouldCoerceTypes),
)
let run = (evaluationParams: ASTTypes.evaluationParams, inputNodes: inputNodes, t: t) =>
inputsToTypedValues(evaluationParams, inputNodes, t)->E.R.bind(t.run)
|> (
x =>
switch x {
| Ok(i) => Ok(i)
| Error(r) => Error("Function " ++ (t.name ++ (" error: " ++ r)))
}
)
}
module Ts = {
let findByName = (ts: ts, n: string) => ts |> Belt.Array.getBy(_, ({name}) => name == n)
let findByNameAndRun = (ts: ts, n: string, evaluationParams, inputTypes) =>
findByName(ts, n) |> E.O.fmap(T.run(evaluationParams, inputTypes))
}
}

View File

@@ -1,290 +0,0 @@
module MathJsonToMathJsAdt = {
type rec arg =
| Symbol(string)
| Value(float)
| Fn(fn)
| Array(array<arg>)
| Blocks(array<arg>)
| Object(Js.Dict.t<arg>)
| Assignment(arg, arg)
| FunctionAssignment(fnAssignment)
and fn = {
name: string,
args: array<arg>,
}
and fnAssignment = {
name: string,
args: array<string>,
expression: arg,
}
let rec run = (j: Js.Json.t) => {
open Json.Decode
switch field("mathjs", string, j) {
| "FunctionNode" =>
let args = j |> field("args", array(run))
let name = j |> optional(field("fn", field("name", string)))
name |> E.O.fmap(name => Fn({name: name, args: args |> E.A.O.concatSomes}))
| "OperatorNode" =>
let args = j |> field("args", array(run))
Some(
Fn({
name: j |> field("fn", string),
args: args |> E.A.O.concatSomes,
}),
)
| "ConstantNode" => optional(field("value", Json.Decode.float), j) |> E.O.fmap(r => Value(r))
| "ParenthesisNode" => j |> field("content", run)
| "ObjectNode" =>
let properties = j |> field("properties", dict(run))
Js.Dict.entries(properties)
|> E.A.fmap(((key, value)) => value |> E.O.fmap(v => (key, v)))
|> E.A.O.concatSomes
|> Js.Dict.fromArray
|> (r => Some(Object(r)))
| "ArrayNode" =>
let items = field("items", array(run), j)
Some(Array(items |> E.A.O.concatSomes))
| "SymbolNode" => Some(Symbol(field("name", string, j)))
| "AssignmentNode" =>
let object_ = j |> field("object", run)
let value_ = j |> field("value", run)
switch (object_, value_) {
| (Some(o), Some(v)) => Some(Assignment(o, v))
| _ => None
}
| "BlockNode" =>
let block = r => r |> field("node", run)
let args = j |> field("blocks", array(block)) |> E.A.O.concatSomes
Some(Blocks(args))
| "FunctionAssignmentNode" =>
let name = j |> field("name", string)
let args = j |> field("params", array(field("name", string)))
let expression = j |> field("expr", run)
expression |> E.O.fmap(expression => FunctionAssignment({
name: name,
args: args,
expression: expression,
}))
| n =>
Js.log3("Couldn't parse mathjs node", j, n)
None
}
}
}
module MathAdtToDistDst = {
open MathJsonToMathJsAdt
let handleSymbol = sym => Ok(#Symbol(sym))
// TODO: This only works on the top level, which needs to be refactored. Also, I think functions don't need to be done like this anymore.
module MathAdtCleaner = {
let transformWithSymbol = (f: float, s: string) =>
switch s {
| "K" => Some(f *. 1000.)
| "M" => Some(f *. 1000000.)
| "B" => Some(f *. 1000000000.)
| "T" => Some(f *. 1000000000000.)
| _ => None
}
let rec run = x =>
switch x {
| Fn({name: "multiply", args: [Value(f), Symbol(s)]}) as doNothing =>
transformWithSymbol(f, s) |> E.O.fmap(r => Value(r)) |> E.O.default(doNothing)
| Fn({name: "unaryMinus", args: [Value(f)]}) => Value(-1.0 *. f)
| Fn({name, args}) => Fn({name: name, args: args |> E.A.fmap(run)})
| Array(args) => Array(args |> E.A.fmap(run))
| Symbol(s) => Symbol(s)
| Value(v) => Value(v)
| Blocks(args) => Blocks(args |> E.A.fmap(run))
| Assignment(a, b) => Assignment(a, run(b))
| FunctionAssignment(a) => FunctionAssignment(a)
| Object(v) =>
Object(
v
|> Js.Dict.entries
|> E.A.fmap(((key, value)) => (key, run(value)))
|> Js.Dict.fromArray,
)
}
}
let lognormal = (args, parseArgs, nodeParser) =>
switch args {
| [Object(o)] =>
let g = s =>
Js.Dict.get(o, s) |> E.O.toResult("Variable was empty") |> E.R.bind(_, nodeParser)
switch (g("mean"), g("stdev"), g("mu"), g("sigma")) {
| (Ok(mean), Ok(stdev), _, _) =>
Ok(#FunctionCall("lognormalFromMeanAndStdDev", [mean, stdev]))
| (_, _, Ok(mu), Ok(sigma)) => Ok(#FunctionCall("lognormal", [mu, sigma]))
| _ => Error("Lognormal distribution needs either mean and stdev or mu and sigma")
}
| _ => parseArgs() |> E.R.fmap((args: array<ASTTypes.node>) => #FunctionCall("lognormal", args))
}
// Error("Dotwise exponentiation needs two operands")
let operationParser = (name: string, args: result<array<ASTTypes.node>, string>): result<
ASTTypes.node,
string,
> => {
let toOkAlgebraic = r => Ok(#AlgebraicCombination(r))
let toOkPointwise = r => Ok(#PointwiseCombination(r))
let toOkTruncate = r => Ok(#Truncate(r))
args |> E.R.bind(_, args =>
switch (name, args) {
| ("add", [l, r]) => toOkAlgebraic((#Add, l, r))
| ("add", _) => Error("Addition needs two operands")
| ("unaryMinus", [l]) => toOkAlgebraic((#Multiply, #SymbolicDist(#Float(-1.0)), l))
| ("subtract", [l, r]) => toOkAlgebraic((#Subtract, l, r))
| ("subtract", _) => Error("Subtraction needs two operands")
| ("multiply", [l, r]) => toOkAlgebraic((#Multiply, l, r))
| ("multiply", _) => Error("Multiplication needs two operands")
| ("pow", [l, r]) => toOkAlgebraic((#Power, l, r))
| ("pow", _) => Error("Exponentiation needs two operands")
| ("dotMultiply", [l, r]) => toOkPointwise((#Multiply, l, r))
| ("dotMultiply", _) => Error("Dotwise multiplication needs two operands")
| ("dotPow", [l, r]) => toOkPointwise((#Power, l, r))
| ("dotPow", _) => Error("Dotwise exponentiation needs two operands")
| ("rightLogShift", [l, r]) => toOkPointwise((#Add, l, r))
| ("rightLogShift", _) => Error("Dotwise addition needs two operands")
| ("divide", [l, r]) => toOkAlgebraic((#Divide, l, r))
| ("divide", _) => Error("Division needs two operands")
| ("leftTruncate", [d, #SymbolicDist(#Float(lc))]) => toOkTruncate((Some(lc), None, d))
| ("leftTruncate", _) =>
Error("leftTruncate needs two arguments: the expression and the cutoff")
| ("rightTruncate", [d, #SymbolicDist(#Float(rc))]) => toOkTruncate((None, Some(rc), d))
| ("rightTruncate", _) =>
Error("rightTruncate needs two arguments: the expression and the cutoff")
| ("truncate", [d, #SymbolicDist(#Float(lc)), #SymbolicDist(#Float(rc))]) =>
toOkTruncate((Some(lc), Some(rc), d))
| ("truncate", _) => Error("truncate needs three arguments: the expression and both cutoffs")
| _ => Error("This type not currently supported")
}
)
}
let functionParser = (
nodeParser: MathJsonToMathJsAdt.arg => Belt.Result.t<ASTTypes.node, string>,
name: string,
args: array<MathJsonToMathJsAdt.arg>,
): result<ASTTypes.node, string> => {
let parseArray = ags => ags |> E.A.fmap(nodeParser) |> E.A.R.firstErrorOrOpen
let parseArgs = () => parseArray(args)
switch name {
| "lognormal" => lognormal(args, parseArgs, nodeParser)
| "multimodal"
| "add"
| "subtract"
| "multiply"
| "unaryMinus"
| "dotMultiply"
| "dotPow"
| "rightLogShift"
| "divide"
| "pow"
| "leftTruncate"
| "rightTruncate"
| "truncate" =>
operationParser(name, parseArgs())
| "mm" =>
let weights =
args
|> E.A.last
|> E.O.bind(_, x =>
switch x {
| Array(values) => Some(parseArray(values))
| _ => None
}
)
let possibleDists = E.O.isSome(weights)
? Belt.Array.slice(args, ~offset=0, ~len=E.A.length(args) - 1)
: args
let dists = parseArray(possibleDists)
switch (weights, dists) {
| (Some(Error(r)), _) => Error(r)
| (_, Error(r)) => Error(r)
| (None, Ok(dists)) =>
let hash: ASTTypes.node = #FunctionCall(
"multimodal",
[#Hash([("dists", #Array(dists)), ("weights", #Array([]))])],
)
Ok(hash)
| (Some(Ok(weights)), Ok(dists)) =>
let hash: ASTTypes.node = #FunctionCall(
"multimodal",
[#Hash([("dists", #Array(dists)), ("weights", #Array(weights))])],
)
Ok(hash)
}
| name => parseArgs() |> E.R.fmap((args: array<ASTTypes.node>) => #FunctionCall(name, args))
}
}
let rec nodeParser: MathJsonToMathJsAdt.arg => result<ASTTypes.node, string> = x =>
switch x {
| Value(f) => Ok(#SymbolicDist(#Float(f)))
| Symbol(sym) => Ok(#Symbol(sym))
| Fn({name, args}) => functionParser(nodeParser, name, args)
| _ => Error("This type not currently supported")
}
// | FunctionAssignment({name, args, expression}) => {
// let evaluatedExpression = run(expression);
// `Function(_ => Ok(evaluatedExpression));
// }
let rec topLevel = (r): result<ASTTypes.program, string> =>
switch r {
| FunctionAssignment({name, args, expression}) =>
switch nodeParser(expression) {
| Ok(r) => Ok([#Assignment(name, #Function(args, r))])
| Error(r) => Error(r)
}
| Value(_) as r => nodeParser(r) |> E.R.fmap(r => [#Expression(r)])
| Fn(_) as r => nodeParser(r) |> E.R.fmap(r => [#Expression(r)])
| Array(_) => Error("Array not valid as top level")
| Symbol(s) => handleSymbol(s) |> E.R.fmap(r => [#Expression(r)])
| Object(_) => Error("Object not valid as top level")
| Assignment(name, value) =>
switch name {
| Symbol(symbol) => nodeParser(value) |> E.R.fmap(r => [#Assignment(symbol, r)])
| _ => Error("Symbol not a string")
}
| Blocks(blocks) =>
blocks |> E.A.fmap(b => topLevel(b)) |> E.A.R.firstErrorOrOpen |> E.R.fmap(E.A.concatMany)
}
let run = (r): result<ASTTypes.program, string> => r |> MathAdtCleaner.run |> topLevel
}
/* The MathJs parser doesn't support '.+' syntax, but we want it because it
would make sense with '.*'. Our workaround is to change this to >>>, which is
logShift in mathJS. We don't expect to use logShift anytime soon, so this tradeoff
seems fine.
*/
let pointwiseToRightLogShift = Js.String.replaceByRe(%re("/\.\+/g"), ">>>")
let fromString2 = str => {
/* We feed the user-typed string into Mathjs.parseMath,
which returns a JSON with (hopefully) a single-element array.
This array element is the top-level node of a nested-object tree
representing the functions/arguments/values/etc. in the string.
The function MathJsonToMathJsAdt then recursively unpacks this JSON into a typed data structure we can use.
Inside of this function, MathAdtToDistDst is called whenever a distribution function is encountered.
*/
let mathJsToJson = str |> pointwiseToRightLogShift |> Mathjs.parseMath
let mathJsParse = E.R.bind(mathJsToJson, r =>
switch MathJsonToMathJsAdt.run(r) {
| Some(r) => Ok(r)
| None => Error("MathJsParse Error")
}
)
let value = E.R.bind(mathJsParse, MathAdtToDistDst.run)
value
}
let fromString = str => fromString2(str)

View File

@@ -1,185 +0,0 @@
// TODO: This setup is more confusing than it should be, there's more work to do in cleanup here.
module Inputs = {
module SamplingInputs = {
type t = {
sampleCount: option<int>,
outputXYPoints: option<int>,
kernelWidth: option<float>,
pointDistLength: option<int>,
}
}
let defaultRecommendedLength = 100
let defaultShouldDownsample = true
type inputs = {
squiggleString: string,
samplingInputs: SamplingInputs.t,
environment: ASTTypes.environment,
}
let empty: SamplingInputs.t = {
sampleCount: None,
outputXYPoints: None,
kernelWidth: None,
pointDistLength: None,
}
let make = (
~samplingInputs=empty,
~squiggleString,
~environment=ASTTypes.Environment.empty,
(),
): inputs => {
samplingInputs: samplingInputs,
squiggleString: squiggleString,
environment: environment,
}
}
type exportDistribution = [
| #DistPlus(DistPlus.t)
| #Float(float)
| #Function(float => Belt.Result.t<DistPlus.t, string>)
]
type exportEnv = array<(string, ASTTypes.node)>
type exportType = {
environment: exportEnv,
exports: array<exportDistribution>,
}
module Internals = {
let addVariable = (
{samplingInputs, squiggleString, environment}: Inputs.inputs,
str,
node,
): Inputs.inputs => {
samplingInputs: samplingInputs,
squiggleString: squiggleString,
environment: ASTTypes.Environment.update(environment, str, _ => Some(node)),
}
type outputs = {
graph: ASTTypes.node,
pointSetDist: PointSetTypes.pointSetDist,
}
let makeOutputs = (graph, shape): outputs => {graph: graph, pointSetDist: shape}
let makeInputs = (inputs: Inputs.inputs): SamplingInputs.samplingInputs => {
sampleCount: inputs.samplingInputs.sampleCount |> E.O.default(10000),
outputXYPoints: inputs.samplingInputs.outputXYPoints |> E.O.default(10000),
kernelWidth: inputs.samplingInputs.kernelWidth,
pointSetDistLength: inputs.samplingInputs.pointDistLength |> E.O.default(10000),
}
let runNode = (inputs, node) => AST.toLeaf(makeInputs(inputs), inputs.environment, node)
let renderIfNeeded = (inputs: Inputs.inputs, node: ASTTypes.node): result<
ASTTypes.node,
string,
> =>
node |> (
x =>
switch x {
| #Normalize(_) as n
| #SymbolicDist(_) as n =>
#Render(n)
|> runNode(inputs)
|> (
x =>
switch x {
| Ok(#RenderedDist(_)) as r => r
| Error(r) => Error(r)
| _ => Error("Didn't render, but intended to")
}
)
| n => Ok(n)
}
)
let outputToDistPlus = (inputs: Inputs.inputs, pointSetDist: PointSetTypes.pointSetDist) =>
DistPlus.make(~pointSetDist, ~squiggleString=Some(inputs.squiggleString), ())
let rec returnDist = (
functionInfo: (array<string>, ASTTypes.node),
inputs: Inputs.inputs,
env: ASTTypes.environment,
) => {
(input: float) => {
let foo: Inputs.inputs = {...inputs, environment: env}
evaluateFunction(foo, functionInfo, [#SymbolicDist(#Float(input))]) |> E.R.bind(_, a =>
switch a {
| #DistPlus(d) => Ok(DistPlus.T.normalize(d))
| n =>
Js.log2("Error here", n)
Error("wrong type")
}
)
}
}
// TODO: Consider using ExpressionTypes.ExpressionTree.getFloat or similar in this function
and coersionToExportedTypes = (inputs, env: ASTTypes.environment, ex: ASTTypes.node): result<
exportDistribution,
string,
> =>
ex
|> renderIfNeeded(inputs)
|> E.R.bind(_, x =>
switch x {
| #RenderedDist(Discrete({xyShape: {xs: [x], ys: [1.0]}})) => Ok(#Float(x))
| #SymbolicDist(#Float(x)) => Ok(#Float(x))
| #RenderedDist(n) => Ok(#DistPlus(outputToDistPlus(inputs, n)))
| #Function(n) => Ok(#Function(returnDist(n, inputs, env)))
| n => Error("Didn't output a rendered distribution. Format:" ++ AST.toString(n))
}
)
and evaluateFunction = (inputs: Inputs.inputs, fn: (array<string>, ASTTypes.node), fnInputs) => {
let output = AST.runFunction(makeInputs(inputs), inputs.environment, fnInputs, fn)
output |> E.R.bind(_, coersionToExportedTypes(inputs, inputs.environment))
}
let runProgram = (inputs: Inputs.inputs, p: ASTTypes.program) => {
let ins = ref(inputs)
p
|> E.A.fmap(x =>
switch x {
| #Assignment(name, node) =>
ins := addVariable(ins.contents, name, node)
None
| #Expression(node) => Some(runNode(ins.contents, node))
}
)
|> E.A.O.concatSomes
|> E.A.R.firstErrorOrOpen
|> E.R.bind(_, d =>
d
|> E.A.fmap(x => coersionToExportedTypes(inputs, ins.contents.environment, x))
|> E.A.R.firstErrorOrOpen
)
|> E.R.fmap(ex => {
environment: Belt.Map.String.toArray(ins.contents.environment),
exports: ex,
})
}
let inputsToLeaf = (inputs: Inputs.inputs) =>
Parser.fromString(inputs.squiggleString) |> E.R.bind(_, g => runProgram(inputs, g))
}
@genType
let runAll: (string, Inputs.SamplingInputs.t, exportEnv) => result<exportType, string> = (
squiggleString,
samplingInputs,
environment,
) => {
let inputs = Inputs.make(
~samplingInputs,
~squiggleString,
~environment=Belt.Map.String.fromArray(environment),
(),
)
Internals.inputsToLeaf(inputs)
}
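The deleted runProgram above threads the growing environment through a mutable ref while mapping over the program's statements: assignments update the ref, expressions read whatever has accumulated so far. A stripped-down sketch of that pattern, with a hypothetical statement type standing in for ASTTypes (illustrative only):

// Assignments mutate the accumulated environment; expressions evaluate
// against the environment built up so far, just as runProgram does.
type statement = Assign(string, int) | Expr(string)

let run = (program: array<statement>) => {
  let env = ref(Belt.Map.String.empty)
  program->Belt.Array.keepMap(stmt =>
    switch stmt {
    | Assign(name, value) =>
      env := Belt.Map.String.set(env.contents, name, value)
      None
    | Expr(name) => Belt.Map.String.get(env.contents, name)
    }
  )
}

// run([Assign("x", 3), Expr("x")]) == [3]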


@@ -72,7 +72,7 @@ let dispatchMacroCall = (
   let doExportVariableExpression = (bindings: ExpressionT.bindings) => {
     let emptyDictionary: Js.Dict.t<ExpressionValue.expressionValue> = Js.Dict.empty()
-    let reducedBindings = bindings->Belt.Map.String.keep((key, value) =>
+    let reducedBindings = bindings->Belt.Map.String.keep((_key, value) =>
       switch value {
       | ExpressionT.EValue(_) => true
       | _ => false
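The key → _key rename marks the binding as intentionally unused, which silences the compiler's unused-variable warning. For reference, a tiny illustrative example of the Belt.Map.String.keep call being edited (the map contents are made up):

// keep retains only the entries whose value passes the predicate.
let bindings = Belt.Map.String.fromArray([("x", 1), ("y", -2), ("z", 3)])
let positive = bindings->Belt.Map.String.keep((_key, value) => value > 0)
// Belt.Map.String.toArray(positive) == [("x", 1), ("z", 3)]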


@@ -15,7 +15,7 @@ type t = expression
 */
 let rec toString = expression =>
   switch expression {
-  | T.EBindings(bindings) => "$$bound"
+  | T.EBindings(_) => "$$bound"
   | T.EList(aList) =>
     `(${Belt.List.map(aList, aValue => toString(aValue))
     ->Extra.List.interperse(" ")
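toString recurses over the expression tree and joins the rendered list elements with a separator via Extra.List.interperse. A standalone sketch of that recursive shape, with a made-up expr type and Js.Array2.joinWith in place of the project helper (names here are hypothetical, not from this file):

// Render a small expression tree as an s-expression-style string.
type rec expr = Atom(string) | Node(list<expr>)

let rec render = e =>
  switch e {
  | Atom(s) => s
  | Node(items) =>
    let parts = items->Belt.List.map(render)->Belt.List.toArray
    "(" ++ Js.Array2.joinWith(parts, " ") ++ ")"
  }

// render(Node(list{Atom("add"), Atom("1"), Atom("2")})) == "(add 1 2)"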


@@ -12,7 +12,7 @@ type answer = {"value": unit}
   Rescript cannot type cast on basic values passed on their own.
   This is why we call evaluate inside Javascript and wrap the result in an Object
 */
-let eval__ = %raw(`function (expr) { return {value: Mathjs.evaluate(expr)}; }`)
+let eval__: string => 'a = %raw(`function (expr) { return {value: Mathjs.evaluate(expr)}; }`)
 /*
   Call MathJs evaluate and return as a variant
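The change gives eval__ an explicit ReScript type instead of leaving the %raw binding to be inferred. A minimal, hypothetical example of the same pattern (the clamp binding is not from this codebase, just an illustration of annotating a %raw value):

// Explicitly typing a %raw JavaScript snippet so callers get a checked signature.
let clamp: (float, float, float) => float = %raw(`
  function (x, lo, hi) { return Math.min(Math.max(x, lo), hi); }
`)

let _ = clamp(1.5, 0.0, 1.0) // 1.0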


@@ -126,7 +126,7 @@ module Helpers = {
       | Error(err) => GenDistError(ArgumentError(err))
       }
     }
-  | Some(EvDistribution(b)) =>
+  | Some(EvDistribution(_)) =>
     switch parseDistributionArray(args) {
     | Ok(distributions) => mixtureWithDefaultWeights(distributions)
     | Error(err) => GenDistError(ArgumentError(err))
@@ -226,7 +226,7 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
     Helpers.twoDiststoDistFn(Algebraic, "log", a, GenericDist.fromFloat(10.0))->Some
   | ("unaryMinus", [EvDistribution(a)]) =>
     Helpers.twoDiststoDistFn(Algebraic, "multiply", a, GenericDist.fromFloat(-1.0))->Some
-  | (("add" | "multiply" | "subtract" | "divide" | "pow" | "log") as arithmetic, [a, b] as args) =>
+  | (("add" | "multiply" | "subtract" | "divide" | "pow" | "log") as arithmetic, [_, _] as args) =>
     Helpers.catchAndConvertTwoArgsToDists(args)->E.O2.fmap(((fst, snd)) =>
       Helpers.twoDiststoDistFn(Algebraic, arithmetic, fst, snd)
     )
@@ -237,7 +237,7 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
     | "dotDivide"
     | "dotPow"
     | "dotLog") as arithmetic,
-    [a, b] as args,
+    [_, _] as args,
   ) =>
     Helpers.catchAndConvertTwoArgsToDists(args)->E.O2.fmap(((fst, snd)) =>
       Helpers.twoDiststoDistFn(Pointwise, arithmetic, fst, snd)
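dispatchToGenericOutput pattern-matches on the (function name, arguments) pair and returns an option, so unrecognised calls fall through to None; the [a, b] → [_, _] edits keep only the whole args binding, presumably to silence unused-variable warnings since the bodies never use a and b. A reduced, hypothetical sketch of that dispatch shape (the call type and the two operations are made up for illustration):

// Dispatch on (name, arguments); return Some(result) for recognised calls.
type call = (string, array<float>)

let dispatch = ((name, args): call): option<float> =>
  switch (name, args) {
  | ("add", [a, b]) => Some(a +. b)
  | ("unaryMinus", [a]) => Some(-.a)
  | _ => None
  }

let _ = dispatch(("add", [1.0, 2.0])) // Some(3.)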


@@ -28,7 +28,7 @@ module Int = {
 }
 /* Utils */
 module U = {
-  let isEqual = (a, b) => a == b
+  let isEqual = \"=="
   let toA = a => [a]
   let id = e => e
 }
@@ -340,8 +340,6 @@ module A = {
   let reduce = Belt.Array.reduce
   let reducei = Belt.Array.reduceWithIndex
   let isEmpty = r => length(r) < 1
-  let min = a => get(a, 0) |> O.fmap(first => Belt.Array.reduce(a, first, (i, j) => i < j ? i : j))
-  let max = a => get(a, 0) |> O.fmap(first => Belt.Array.reduce(a, first, (i, j) => i > j ? i : j))
   let stableSortBy = Belt.SortArray.stableSortBy
   let toRanges = (a: array<'a>) =>
     switch a |> Belt.Array.length {
@@ -448,8 +446,11 @@ module A = {
       | (Some(min), Some(max)) => Some(max -. min)
       | _ => None
       }
+    let floatCompare: (float, float) => int = compare
    let binarySearchFirstElementGreaterIndex = (ar: array<'a>, el: 'a) => {
-      let el = Belt.SortArray.binarySearchBy(ar, el, compare)
+      let el = Belt.SortArray.binarySearchBy(ar, el, floatCompare)
       let el = el < 0 ? el * -1 - 1 : el
       switch el {
       | e if e >= length(ar) => #overMax
@@ -460,13 +461,13 @@ module A = {
     let concat = (t1: array<'a>, t2: array<'a>) => {
       let ts = Belt.Array.concat(t1, t2)
-      ts |> Array.fast_sort(compare)
+      ts |> Array.fast_sort(floatCompare)
       ts
     }
     let concatMany = (t1: array<array<'a>>) => {
       let ts = Belt.Array.concatMany(t1)
-      ts |> Array.fast_sort(compare)
+      ts |> Array.fast_sort(floatCompare)
       ts
     }
@@ -525,6 +526,9 @@ module A = {
       let diff = (max -. min) /. Belt.Float.fromInt(n - 1)
       Belt.Array.makeBy(n, i => min +. Belt.Float.fromInt(i) *. diff)
     }
+    let min = Js.Math.minMany_float
+    let max = Js.Math.maxMany_float
   }
 }
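The removed A.min/A.max returned an option (None for an empty array), while the new Floats.min/Floats.max delegate to Js.Math.minMany_float/maxMany_float, which return infinity/-infinity when given no elements. A small sketch of the behavioural difference; oldMin and newMin are illustrative names, not identifiers from this commit:

// The option-returning shape that was removed.
let oldMin = (a: array<float>): option<float> =>
  Belt.Array.get(a, 0)->Belt.Option.map(first =>
    Belt.Array.reduce(a, first, (i, j) => i < j ? i : j)
  )

// The replacement: a plain float, with Math.min's empty-array behaviour.
let newMin = Js.Math.minMany_float

let _ = oldMin([]) // None
let _ = newMin([]) // infinity (Math.min of no arguments)
let _ = (oldMin([3., 1., 2.]), newMin([3., 1., 2.])) // (Some(1.), 1.)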


@@ -16,7 +16,7 @@ let create = (relativeHeights: array<float>, ~maximum=?, ()) => {
   if E.A.length(relativeHeights) === 0 {
     ""
   } else {
-    let maximum = maximum->E.O2.default(E.A.max(relativeHeights)->E.O2.toExn(""))
+    let maximum = maximum->E.O2.default(E.A.Floats.max(relativeHeights))
     relativeHeights
     ->E.A2.fmap(_heightToTickIndex(maximum))


@@ -60,8 +60,8 @@ module T = {
 module Ts = {
   type t = T.ts
-  let minX = (t: t) => t |> E.A.fmap(T.minX) |> E.A.min |> extImp
-  let maxX = (t: t) => t |> E.A.fmap(T.maxX) |> E.A.max |> extImp
+  let minX = (t: t) => t |> E.A.fmap(T.minX) |> E.A.Floats.min
+  let maxX = (t: t) => t |> E.A.fmap(T.maxX) |> E.A.Floats.max
   let equallyDividedXs = (t: t, newLength) => E.A.Floats.range(minX(t), maxX(t), newLength)
   let allXs = (t: t) => t |> E.A.fmap(T.xs) |> E.A.Sorted.concatMany
 }
@@ -199,7 +199,7 @@ module XtoY = {
   /* Returns a between-points-interpolating function that can be used with PointwiseCombination.combine.
     For discrete distributions, the probability density between points is zero, so we just return zero here. */
-  let discreteInterpolator: interpolator = (t: T.t, leftIndex: int, x: float) => 0.0
+  let discreteInterpolator: interpolator = (_: T.t, _: int, _: float) => 0.0
 }
 module XsConversion = {
@@ -220,8 +220,8 @@ module XsConversion = {
 module Zipped = {
   type zipped = array<(float, float)>
-  let compareYs = ((_, y1), (_, y2)) => y1 > y2 ? 1 : 0
-  let compareXs = ((x1, _), (x2, _)) => x1 > x2 ? 1 : 0
+  let compareYs = ((_, y1): (float, float), (_, y2): (float, float)) => y1 > y2 ? 1 : 0
+  let compareXs = ((x1, _): (float, float), (x2, _): (float, float)) => x1 > x2 ? 1 : 0
   let sortByY = (t: zipped) => t |> E.A.stableSortBy(_, compareYs)
   let sortByX = (t: zipped) => t |> E.A.stableSortBy(_, compareXs)
   let filterByX = (testFn: float => bool, t: zipped) => t |> E.A.filter(((x, _)) => testFn(x))
@@ -229,7 +229,7 @@ module Zipped = {
 module PointwiseCombination = {
   // t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
-  let combine = %raw(` // : (float => float => float, T.t, T.t, bool) => T.t
+  let combine: ((float, float) => float, interpolator, T.t, T.t) => T.t = %raw(`
   // This function combines two xyShapes by looping through both of them simultaneously.
   // It always moves on to the next smallest x, whether that's in the first or second input's xs,
   // and interpolates the value on the other side, thus accumulating xs and ys.
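The Zipped change annotates the tuple parameters so the comparators are monomorphic on floats rather than relying on polymorphic comparison. A self-contained sketch of that comparator style, plugged straight into Belt.SortArray.stableSortBy the way Zipped.sortByY uses E.A.stableSortBy; the point data is made up for illustration:

// Annotated tuple parameters keep the comparison specialised to floats.
let compareYs = ((_, y1): (float, float), (_, y2): (float, float)) => y1 > y2 ? 1 : 0

let sortByY = (points: array<(float, float)>) =>
  Belt.SortArray.stableSortBy(points, compareYs)

let _ = sortByY([(0., 2.), (1., 1.)]) // [(1., 1.), (0., 2.)]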


@@ -1928,11 +1928,6 @@
   resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6"
   integrity sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==
 
-"@glennsl/bs-json@^5.0.2":
-  version "5.0.4"
-  resolved "https://registry.yarnpkg.com/@glennsl/bs-json/-/bs-json-5.0.4.tgz#8a80906f3b5e04d78dc06722e5987ff6499c89a8"
-  integrity sha512-Th9DetZjRlMZrb74kgGJ44oWcoFyOTE884WlSuXft0Cd+J09vHRxiB7eVyK7Gthb4cSevsBBJDHYAbGGL25wPw==
-
 "@glennsl/rescript-jest@^0.9.0":
   version "0.9.1"
   resolved "https://registry.yarnpkg.com/@glennsl/rescript-jest/-/rescript-jest-0.9.1.tgz#a85a6f0e4c3b79010b5a917c3652aa70d374e4d1"