Merge branch 'Q-develop' into develop

Umur Ozkul 2022-04-22 21:18:08 +02:00
commit 05886451b1
30 changed files with 130 additions and 1720 deletions

View File

@ -7,3 +7,5 @@ node_modules
packages/*/node_modules
packages/website/.docusaurus
packages/squiggle-lang/lib
packages/squiggle-lang/.nyc_output/
packages/squiggle-lang/coverage/

View File

@ -11,7 +11,7 @@
"@types/lodash": "^4.14.182",
"@types/node": "^17.0.25",
"@types/react": "^18.0.3",
"@types/react-dom": "^18.0.1",
"@types/react-dom": "^18.0.2",
"antd": "^4.19.3",
"cross-env": "^7.0.3",
"lodash": "^4.17.21",

View File

@ -6,7 +6,7 @@ import {
errorValueToString,
squiggleExpression,
} from "@quri/squiggle-lang";
import type { samplingParams, exportEnv } from "@quri/squiggle-lang";
import type { samplingParams } from "@quri/squiggle-lang";
import { NumberShower } from "./NumberShower";
import { DistributionChart } from "./DistributionChart";
import { ErrorBox } from "./ErrorBox";
@ -129,9 +129,9 @@ export interface SquiggleChartProps {
/** If the result is a function, how many points along the function it samples */
diagramCount?: number;
/** variables declared before this expression */
environment?: exportEnv;
environment?: unknown;
/** When the environment changes */
onEnvChange?(env: exportEnv): void;
onEnvChange?(env: unknown): void;
/** CSS width of the element */
width?: number;
height?: number;
@ -155,7 +155,7 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = ({
sampleCount: sampleCount,
xyPointLength: outputXYPoints,
};
let expressionResult = run(squiggleString, samplingInputs, environment);
let expressionResult = run(squiggleString, samplingInputs);
let internal: JSX.Element;
if (expressionResult.tag === "Ok") {
onEnvChange(environment);
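
Illustrative sketch (not part of the commit): how the updated run() signature is called now that the environment argument is gone, assuming the result is encoded as { tag, value } as the component above suggests.

import { run, errorValueToString } from "@quri/squiggle-lang";

// samplingParams now only carries sampleCount and xyPointLength (see the diff above).
const result = run("normal(5, 2) + 1", { sampleCount: 1000, xyPointLength: 1000 });
// result shape assumed: { tag: "Ok" | "Error", value: ... }
if (result.tag === "Ok") {
  console.log(result.value); // squiggleExpression
} else {
  console.log(errorValueToString(result.value)); // errorValue rendered as a string
}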

View File

@ -2,7 +2,6 @@ import * as React from "react";
import * as ReactDOM from "react-dom";
import { SquiggleChart } from "./SquiggleChart";
import { CodeEditor } from "./CodeEditor";
import type { exportEnv } from "@quri/squiggle-lang";
import styled from "styled-components";
export interface SquiggleEditorProps {
@ -21,9 +20,9 @@ export interface SquiggleEditorProps {
/** If the result is a function, how many points along the function it samples */
diagramCount?: number;
/** The environment, other variables that were already declared */
environment?: exportEnv;
environment?: unknown;
/** when the environment changes. Used again for notebook magic*/
onEnvChange?(env: exportEnv): void;
onEnvChange?(env: unknown): void;
/** The width of the element */
width: number;
}

View File

@ -67,7 +67,7 @@ describe("eval on distribution functions", () => {
testEval("lognormal(10,2) / lognormal(5,2)", "Ok(Lognormal(5,2.8284271247461903))")
testEval("lognormal(5, 2) / 2", "Ok(Lognormal(4.306852819440055,2))")
testEval("2 / lognormal(5, 2)", "Ok(Lognormal(-4.306852819440055,2))")
testEval("2 / normal(10, 2)", "Ok(Point Set Distribution)")
testEval("2 / normal(10, 2)", "Ok(Sample Set Distribution)")
testEval("normal(10, 2) / 2", "Ok(Normal(5,1))")
})
describe("truncate", () => {
@ -77,21 +77,21 @@ describe("eval on distribution functions", () => {
})
describe("exp", () => {
testEval("exp(normal(5,2))", "Ok(Point Set Distribution)")
testEval("exp(normal(5,2))", "Ok(Sample Set Distribution)")
})
describe("pow", () => {
testEval("pow(3, uniform(5,8))", "Ok(Point Set Distribution)")
testEval("pow(uniform(5,8), 3)", "Ok(Point Set Distribution)")
testEval("pow(3, uniform(5,8))", "Ok(Sample Set Distribution)")
testEval("pow(uniform(5,8), 3)", "Ok(Sample Set Distribution)")
testEval("pow(uniform(5,8), uniform(9, 10))", "Ok(Sample Set Distribution)")
})
describe("log", () => {
testEval("log(2, uniform(5,8))", "Ok(Point Set Distribution)")
testEval("log(normal(5,2), 3)", "Ok(Point Set Distribution)")
testEval("log(2, uniform(5,8))", "Ok(Sample Set Distribution)")
testEval("log(normal(5,2), 3)", "Ok(Sample Set Distribution)")
testEval("log(normal(5,2), normal(10,1))", "Ok(Sample Set Distribution)")
testEval("log(uniform(5,8))", "Ok(Point Set Distribution)")
testEval("log10(uniform(5,8))", "Ok(Point Set Distribution)")
testEval("log(uniform(5,8))", "Ok(Sample Set Distribution)")
testEval("log10(uniform(5,8))", "Ok(Sample Set Distribution)")
})
describe("dotLog", () => {

View File

@ -20,12 +20,7 @@
],
"suffix": ".bs.js",
"namespace": true,
"bs-dependencies": [
"@glennsl/rescript-jest",
"@glennsl/bs-json",
"rationale",
"bisect_ppx"
],
"bs-dependencies": ["@glennsl/rescript-jest", "rationale", "bisect_ppx"],
"gentypeconfig": {
"language": "typescript",
"module": "commonjs",
@ -37,7 +32,7 @@
},
"refmt": 3,
"warnings": {
"number": "+A-42-48-9-30-4-102-20-27-41"
"number": "+A-42-48-9-30-4"
},
"ppx-flags": [
["../../node_modules/bisect_ppx/ppx", "--exclude-files", ".*_test\\.res$$"]

View File

@ -7,11 +7,10 @@
"bundle": "webpack",
"start": "rescript build -w -with-deps",
"clean": "rescript clean",
"test:reducer": "jest --testPathPattern '.*__tests__/Reducer.*'",
"test:reducer": "jest __tests__/Reducer*/",
"test": "jest",
"test:ts": "jest __tests__/TS/",
"test:rescript": "jest --modulePathIgnorePatterns=__tests__/TS/*",
"test:reducer": "jest __tests__/Reducer*/",
"test:watch": "jest --watchAll",
"coverage:rescript": "rm -f *.coverage; yarn clean; BISECT_ENABLE=yes yarn build; yarn test:rescript; bisect-ppx-report html",
"coverage:ts": "nyc --reporter=lcov yarn test:ts",
@ -31,32 +30,30 @@
"author": "Quantified Uncertainty Research Institute",
"license": "MIT",
"dependencies": {
"@glennsl/bs-json": "^5.0.2",
"bisect_ppx": "^2.7.1",
"jstat": "^1.9.5",
"lodash": "4.17.21",
"mathjs": "10.5.0",
"pdfast": "^0.2.0",
"rationale": "0.2.0",
"rescript": "^9.1.4",
"bisect_ppx": "^2.7.1"
"rescript": "^9.1.4"
},
"devDependencies": {
"@glennsl/rescript-jest": "^0.9.0",
"@istanbuljs/nyc-config-typescript": "^1.0.2",
"@types/jest": "^27.4.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"docsify": "^4.12.2",
"codecov": "3.8.3",
"fast-check": "2.24.0",
"gentype": "^4.3.0",
"jest": "^27.5.1",
"moduleserve": "0.9.1",
"nyc": "^15.1.0",
"ts-jest": "^27.1.4",
"ts-loader": "^9.2.8",
"fast-check": "2.24.0",
"typescript": "^4.6.3",
"webpack": "^5.72.0",
"webpack-cli": "^4.9.2",
"nyc": "^15.1.0",
"@istanbuljs/nyc-config-typescript": "^1.0.2",
"codecov": "3.8.3"
"webpack-cli": "^4.9.2"
},
"source": "./src/js/index.ts",
"main": "./dist/bundle.js",

View File

@ -1,9 +1,4 @@
import * as _ from "lodash";
import type {
exportEnv,
exportDistribution,
} from "../rescript/ProgramEvaluator.gen";
export type { exportEnv, exportDistribution };
import {
genericDist,
samplingParams,
@ -48,7 +43,6 @@ import {
Constructors_pointwiseLogarithm,
Constructors_pointwisePower,
} from "../rescript/Distributions/DistributionOperation/DistributionOperation.gen";
import { pointSetDistFn } from "../rescript/OldInterpreter/DistPlus.bs";
export type { samplingParams, errorValue };
export let defaultSamplingInputs: samplingParams = {
@ -98,8 +92,7 @@ export type squiggleExpression =
| tagged<"record", { [key: string]: squiggleExpression }>;
export function run(
squiggleString: string,
samplingInputs?: samplingParams,
_environment?: exportEnv
samplingInputs?: samplingParams
): result<squiggleExpression, errorValue> {
let si: samplingParams = samplingInputs
? samplingInputs

View File

@ -69,7 +69,7 @@ let toPointSet = (
~xyPointLength,
~sampleCount,
~xSelection: GenericDist_Types.Operation.pointsetXSelection=#ByWeight,
unit,
(),
): result<PointSetTypes.pointSetDist, error> => {
switch (t: t) {
| PointSet(pointSet) => Ok(pointSet)
@ -93,7 +93,7 @@ let toPointSet = (
xyPointLength to be a bit longer than the eventual toSparkline downsampling. I chose 3
fairly arbitrarily.
*/
let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, unit): result<string, error> =>
let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, ()): result<string, error> =>
t
->toPointSet(~xSelection=#Linear, ~xyPointLength=bucketCount * 3, ~sampleCount, ())
->E.R.bind(r =>
@ -101,10 +101,16 @@ let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, unit): result<
)
module Truncate = {
let trySymbolicSimplification = (leftCutoff, rightCutoff, t: t): option<t> =>
let trySymbolicSimplification = (
leftCutoff: option<float>,
rightCutoff: option<float>,
t: t,
): option<t> =>
switch (leftCutoff, rightCutoff, t) {
| (None, None, _) => None
| (lc, rc, Symbolic(#Uniform(u))) if lc < rc =>
| (Some(lc), Some(rc), Symbolic(#Uniform(u))) if lc < rc =>
Some(Symbolic(#Uniform(SymbolicDist.Uniform.truncate(Some(lc), Some(rc), u))))
| (lc, rc, Symbolic(#Uniform(u))) =>
Some(Symbolic(#Uniform(SymbolicDist.Uniform.truncate(lc, rc, u))))
| _ => None
}
@ -158,7 +164,7 @@ module AlgebraicCombination = {
let runConvolution = (
toPointSet: toPointSetFn,
arithmeticOperation: GenericDist_Types.Operation.arithmeticOperation,
arithmeticOperation: Operation.convolutionOperation,
t1: t,
t2: t,
) =>
@ -191,10 +197,23 @@ module AlgebraicCombination = {
| _ => 1000
}
let chooseConvolutionOrMonteCarlo = (t2: t, t1: t) =>
expectedConvolutionCost(t1) * expectedConvolutionCost(t2) > 10000
? #CalculateWithMonteCarlo
: #CalculateWithConvolution
type calculationMethod = MonteCarlo | Convolution(Operation.convolutionOperation)
let chooseConvolutionOrMonteCarlo = (
op: Operation.algebraicOperation,
t2: t,
t1: t,
): calculationMethod =>
switch op {
| #Divide
| #Power
| #Logarithm =>
MonteCarlo
| (#Add | #Subtract | #Multiply) as convOp =>
expectedConvolutionCost(t1) * expectedConvolutionCost(t2) > 10000
? MonteCarlo
: Convolution(convOp)
}
let run = (
t1: t,
@ -207,15 +226,10 @@ module AlgebraicCombination = {
| Some(Ok(symbolicDist)) => Ok(Symbolic(symbolicDist))
| Some(Error(e)) => Error(Other(e))
| None =>
switch chooseConvolutionOrMonteCarlo(t1, t2) {
| #CalculateWithMonteCarlo => runMonteCarlo(toSampleSetFn, arithmeticOperation, t1, t2)
| #CalculateWithConvolution =>
runConvolution(
toPointSetFn,
arithmeticOperation,
t1,
t2,
)->E.R2.fmap(r => DistributionTypes.PointSet(r))
switch chooseConvolutionOrMonteCarlo(arithmeticOperation, t1, t2) {
| MonteCarlo => runMonteCarlo(toSampleSetFn, arithmeticOperation, t1, t2)
| Convolution(convOp) =>
runConvolution(toPointSetFn, convOp, t1, t2)->E.R2.fmap(r => DistributionTypes.PointSet(r))
}
}
}
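
Illustrative sketch (not part of the commit): a rough TypeScript rendering of the dispatch rule introduced above, with made-up names rather than the library's API. Division, powers and logarithms always go to Monte Carlo; addition, subtraction and multiplication use convolution only while the estimated cost stays small.

// Illustrative only; mirrors chooseConvolutionOrMonteCarlo above.
type ConvOp = "add" | "subtract" | "multiply";
type AlgebraicOp = ConvOp | "divide" | "power" | "logarithm";
type Method = { kind: "monteCarlo" } | { kind: "convolution"; op: ConvOp };

const chooseMethod = (op: AlgebraicOp, cost1: number, cost2: number): Method => {
  if (op === "divide" || op === "power" || op === "logarithm") {
    return { kind: "monteCarlo" };
  }
  // Convolution cost grows with the product of the operands' expected point counts.
  return cost1 * cost2 > 10000 ? { kind: "monteCarlo" } : { kind: "convolution", op };
};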

View File

@ -96,36 +96,25 @@ let toDiscretePointMassesFromTriangulars = (
}
let combineShapesContinuousContinuous = (
op: Operation.algebraicOperation,
op: Operation.convolutionOperation,
s1: PointSetTypes.xyShape,
s2: PointSetTypes.xyShape,
): PointSetTypes.xyShape => {
// if we add the two distributions, we should probably use normal filters.
// if we multiply the two distributions, we should probably use lognormal filters.
let t1m = toDiscretePointMassesFromTriangulars(s1)
let t2m = switch op {
| #Divide => toDiscretePointMassesFromTriangulars(~inverse=true, s2)
| _ => toDiscretePointMassesFromTriangulars(~inverse=false, s2)
}
let t2m = toDiscretePointMassesFromTriangulars(~inverse=false, s2)
let combineMeansFn = switch op {
| #Add => (m1, m2) => m1 +. m2
| #Subtract => (m1, m2) => m1 -. m2
| #Multiply => (m1, m2) => m1 *. m2
| #Divide => (m1, mInv2) => m1 *. mInv2
| #Power => (m1, mInv2) => m1 ** mInv2
| #Logarithm => (m1, m2) => log(m1) /. log(m2)
} // note: here, mInv2 = mean(1 / t2) ~= 1 / mean(t2)
// TODO: Variances for exponentiation or logarithms are almost totally made up and very likely very wrong.
// converts the variances and means of the two inputs into the variance of the output
let combineVariancesFn = switch op {
| #Add => (v1, v2, _, _) => v1 +. v2
| #Subtract => (v1, v2, _, _) => v1 +. v2
| #Multiply => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
| #Power => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
| #Logarithm => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
| #Divide => (v1, vInv2, m1, mInv2) => v1 *. vInv2 +. v1 *. mInv2 ** 2. +. vInv2 *. m1 ** 2.
}
// TODO: If operating on two positive-domain distributions, we should take that into account
@ -199,7 +188,7 @@ let toDiscretePointMassesFromDiscrete = (s: PointSetTypes.xyShape): pointMassesW
}
let combineShapesContinuousDiscrete = (
op: Operation.algebraicOperation,
op: Operation.convolutionOperation,
continuousShape: PointSetTypes.xyShape,
discreteShape: PointSetTypes.xyShape,
): PointSetTypes.xyShape => {
@ -207,7 +196,7 @@ let combineShapesContinuousDiscrete = (
let t2n = discreteShape |> XYShape.T.length
// each x pair is added/subtracted
let fn = Operation.Algebraic.toFn(op)
let fn = Operation.Convolution.toFn(op)
let outXYShapes: array<array<(float, float)>> = Belt.Array.makeUninitializedUnsafe(t2n)
@ -231,10 +220,7 @@ let combineShapesContinuousDiscrete = (
Belt.Array.set(outXYShapes, j, dxyShape) |> ignore
()
}
| #Multiply
| #Power
| #Logarithm
| #Divide =>
| #Multiply =>
for j in 0 to t2n - 1 {
// creates a new continuous shape for each one of the discrete points, and collects them in outXYShapes.
let dxyShape: array<(float, float)> = Belt.Array.makeUninitializedUnsafe(t1n)
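
Note (not part of the commit): for independent inputs, the Add/Subtract/Multiply branches of combineMeansFn and combineVariancesFn above correspond to the standard identities

  E[X + Y] = E[X] + E[Y],   Var(X + Y) = Var(X) + Var(Y)
  E[X - Y] = E[X] - E[Y],   Var(X - Y) = Var(X) + Var(Y)
  E[XY]    = E[X] E[Y],     Var(XY)    = Var(X) Var(Y) + Var(X) E[Y]^2 + Var(Y) E[X]^2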

View File

@ -87,7 +87,6 @@ let stepwiseToLinear = (t: t): t =>
// Note: This results in a distribution with as many points as the sum of those in t1 and t2.
let combinePointwise = (
~integralSumCachesFn=(_, _) => None,
~integralCachesFn: (t, t) => option<t>=(_, _) => None,
~distributionType: PointSetTypes.distributionType=#PDF,
fn: (float, float) => float,
t1: PointSetTypes.continuousShape,
@ -143,14 +142,9 @@ let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache: inte
let reduce = (
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
~integralCachesFn: (t, t) => option<t>=(_, _) => None,
fn,
continuousShapes,
) =>
continuousShapes |> E.A.fold_left(
combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn),
empty,
)
) => continuousShapes |> E.A.fold_left(combinePointwise(~integralSumCachesFn, fn), empty)
let mapY = (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn, t: t) =>
make(
@ -247,7 +241,7 @@ let downsampleEquallyOverX = (length, t): t =>
/* This simply creates multiple copies of the continuous distribution, scaled and shifted according to
each discrete data point, and then adds them all together. */
let combineAlgebraicallyWithDiscrete = (
op: Operation.algebraicOperation,
op: Operation.convolutionOperation,
t1: t,
t2: PointSetTypes.discreteShape,
) => {
@ -269,8 +263,7 @@ let combineAlgebraicallyWithDiscrete = (
)
let combinedIntegralSum = switch op {
| #Multiply
| #Divide =>
| #Multiply =>
Common.combineIntegralSums((a, b) => Some(a *. b), t1.integralSumCache, t2.integralSumCache)
| _ => None
}
@ -280,7 +273,7 @@ let combineAlgebraicallyWithDiscrete = (
}
}
let combineAlgebraically = (op: Operation.algebraicOperation, t1: t, t2: t) => {
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t) => {
let s1 = t1 |> getShape
let s2 = t2 |> getShape
let t1n = s1 |> XYShape.T.length

View File

@ -34,11 +34,6 @@ let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
let combinePointwise = (
~integralSumCachesFn=(_, _) => None,
~integralCachesFn: (
PointSetTypes.continuousShape,
PointSetTypes.continuousShape,
) => option<PointSetTypes.continuousShape>=(_, _) => None,
fn,
t1: PointSetTypes.discreteShape,
t2: PointSetTypes.discreteShape,
): PointSetTypes.discreteShape => {
@ -62,16 +57,8 @@ let combinePointwise = (
)
}
let reduce = (
~integralSumCachesFn=(_, _) => None,
~integralCachesFn=(_, _) => None,
fn,
discreteShapes,
): PointSetTypes.discreteShape =>
discreteShapes |> E.A.fold_left(
combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn),
empty,
)
let reduce = (~integralSumCachesFn=(_, _) => None, discreteShapes): PointSetTypes.discreteShape =>
discreteShapes |> E.A.fold_left(combinePointwise(~integralSumCachesFn), empty)
let updateIntegralSumCache = (integralSumCache, t: t): t => {
...t,
@ -85,7 +72,7 @@ let updateIntegralCache = (integralCache, t: t): t => {
/* This multiplies all of the data points together and creates a new discrete distribution from the results.
Data points at the same xs get added together. It may be a good idea to downsample t1 and t2 before and/or the result after. */
let combineAlgebraically = (op: Operation.algebraicOperation, t1: t, t2: t): t => {
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
let t1s = t1 |> getShape
let t2s = t2 |> getShape
let t1n = t1s |> XYShape.T.length
@ -97,7 +84,7 @@ let combineAlgebraically = (op: Operation.algebraicOperation, t1: t, t2: t): t =
t2.integralSumCache,
)
let fn = Operation.Algebraic.toFn(op)
let fn = Operation.Convolution.toFn(op)
let xToYMap = E.FloatFloatMap.empty()
for i in 0 to t1n - 1 {

View File

@ -164,12 +164,7 @@ module T = Dist({
// This pipes all ys (continuous and discrete) through fn.
// If mapY is a linear operation, we might be able to update the integralSumCaches as well;
// if not, they'll be set to None.
let mapY = (
~integralSumCacheFn=previousIntegralSum => None,
~integralCacheFn=previousIntegral => None,
~fn,
t: t,
): t => {
let mapY = (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn, t: t): t => {
let yMappedDiscrete: PointSetTypes.discreteShape =
t.discrete
|> Discrete.T.mapY(~fn)
@ -226,7 +221,7 @@ module T = Dist({
}
})
let combineAlgebraically = (op: Operation.algebraicOperation, t1: t, t2: t): t => {
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
// Discrete convolution can cause a huge increase in the number of samples,
// so we'll first downsample.
@ -271,16 +266,13 @@ let combinePointwise = (
t2: t,
): t => {
let reducedDiscrete =
[t1, t2]
|> E.A.fmap(toDiscrete)
|> E.A.O.concatSomes
|> Discrete.reduce(~integralSumCachesFn, ~integralCachesFn, fn)
[t1, t2] |> E.A.fmap(toDiscrete) |> E.A.O.concatSomes |> Discrete.reduce(~integralSumCachesFn)
let reducedContinuous =
[t1, t2]
|> E.A.fmap(toContinuous)
|> E.A.O.concatSomes
|> Continuous.reduce(~integralSumCachesFn, ~integralCachesFn, fn)
|> Continuous.reduce(~integralSumCachesFn, fn)
let combinedIntegralSum = Common.combineIntegralSums(
integralSumCachesFn,

View File

@ -35,7 +35,7 @@ let toMixed = mapToAll((
))
//TODO WARNING: The combineAlgebraicallyWithDiscrete will break for subtraction and division, e.g. discrete - continuous
let combineAlgebraically = (op: Operation.algebraicOperation, t1: t, t2: t): t =>
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t =>
switch (t1, t2) {
| (Continuous(m1), Continuous(m2)) =>
Continuous.combineAlgebraically(op, m1, m2) |> Continuous.T.toPointSetDist
@ -59,13 +59,9 @@ let combinePointwise = (
) =>
switch (t1, t2) {
| (Continuous(m1), Continuous(m2)) =>
PointSetTypes.Continuous(
Continuous.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
)
PointSetTypes.Continuous(Continuous.combinePointwise(~integralSumCachesFn, fn, m1, m2))
| (Discrete(m1), Discrete(m2)) =>
PointSetTypes.Discrete(
Discrete.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
)
PointSetTypes.Discrete(Discrete.combinePointwise(~integralSumCachesFn, m1, m2))
| (m1, m2) =>
PointSetTypes.Mixed(
Mixed.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, toMixed(m1), toMixed(m2)),
@ -134,11 +130,7 @@ module T = Dist({
let integralYtoX = f =>
mapToAll((Mixed.T.Integral.yToX(f), Discrete.T.Integral.yToX(f), Continuous.T.Integral.yToX(f)))
let maxX = mapToAll((Mixed.T.maxX, Discrete.T.maxX, Continuous.T.maxX))
let mapY = (
~integralSumCacheFn=previousIntegralSum => None,
~integralCacheFn=previousIntegral => None,
~fn,
) =>
let mapY = (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn) =>
fmap((
Mixed.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
Discrete.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),

View File

@ -1,24 +0,0 @@
open ASTTypes
let toString = ASTTypes.Node.toString
let envs = (samplingInputs, environment) => {
samplingInputs: samplingInputs,
environment: environment,
evaluateNode: ASTEvaluator.toLeaf,
}
let toLeaf = (samplingInputs, environment, node: node) =>
ASTEvaluator.toLeaf(envs(samplingInputs, environment), node)
let toPointSetDist = (samplingInputs, environment, node: node) =>
switch toLeaf(samplingInputs, environment, node) {
| Ok(#RenderedDist(pointSetDist)) => Ok(pointSetDist)
| Ok(_) => Error("Rendering failed.")
| Error(e) => Error(e)
}
let runFunction = (samplingInputs, environment, inputs, fn: ASTTypes.Function.t) => {
let params = envs(samplingInputs, environment)
ASTTypes.Function.run(params, inputs, fn)
}

View File

@ -1,257 +0,0 @@
open ASTTypes
type tResult = node => result<node, string>
/* Given two random variables A and B, this returns the distribution
of a new variable that is the result of the operation on A and B.
For instance, normal(0, 1) + normal(1, 1) -> normal(1, sqrt(2)).
In general, this is implemented via convolution. */
module AlgebraicCombination = {
let tryAnalyticalSimplification = (operation, t1: node, t2: node) =>
switch (operation, t1, t2) {
| (operation, #SymbolicDist(d1), #SymbolicDist(d2)) =>
switch SymbolicDist.T.tryAnalyticalSimplification(d1, d2, operation) {
| #AnalyticalSolution(symbolicDist) => Ok(#SymbolicDist(symbolicDist))
| #Error(er) => Error(er)
| #NoSolution => Ok(#AlgebraicCombination(operation, t1, t2))
}
| _ => Ok(#AlgebraicCombination(operation, t1, t2))
}
let combinationByRendering = (evaluationParams, algebraicOp, t1: node, t2: node): result<
node,
string,
> =>
E.R.merge(
Node.ensureIsRenderedAndGetShape(evaluationParams, t1),
Node.ensureIsRenderedAndGetShape(evaluationParams, t2),
) |> E.R.fmap(((a, b)) => #RenderedDist(PointSetDist.combineAlgebraically(algebraicOp, a, b)))
let nodeScore: node => int = x =>
switch x {
| #SymbolicDist(#Float(_)) => 1
| #SymbolicDist(_) => 1000
| #RenderedDist(Discrete(m)) => m.xyShape |> XYShape.T.length
| #RenderedDist(Mixed(_)) => 1000
| #RenderedDist(Continuous(_)) => 1000
| _ => 1000
}
let choose = (t1: node, t2: node) =>
nodeScore(t1) * nodeScore(t2) > 10000 ? #Sampling : #Analytical
let combine = (evaluationParams, algebraicOp, t1: node, t2: node): result<node, string> =>
E.R.merge(
ASTTypes.SamplingDistribution.renderIfIsNotSamplingDistribution(evaluationParams, t1),
ASTTypes.SamplingDistribution.renderIfIsNotSamplingDistribution(evaluationParams, t2),
) |> E.R.bind(_, ((a, b)) =>
switch choose(a, b) {
| #Sampling =>
ASTTypes.SamplingDistribution.combineShapesUsingSampling(
evaluationParams,
algebraicOp,
a,
b,
)
| #Analytical => combinationByRendering(evaluationParams, algebraicOp, a, b)
}
)
let operationToLeaf = (
evaluationParams: evaluationParams,
algebraicOp: Operation.algebraicOperation,
t1: node,
t2: node,
): result<node, string> =>
algebraicOp
|> tryAnalyticalSimplification(_, t1, t2)
|> E.R.bind(_, x =>
switch x {
| #SymbolicDist(_) as t => Ok(t)
| _ => combine(evaluationParams, algebraicOp, t1, t2)
}
)
}
module PointwiseCombination = {
//TODO: This is crude and slow. It forces everything to be pointSetDist, even though much
//of the process could happen on symbolic distributions without a conversion to a pointSetDist.
let pointwiseAdd = (evaluationParams: evaluationParams, t1: node, t2: node) =>
switch (Node.render(evaluationParams, t1), Node.render(evaluationParams, t2)) {
| (Ok(#RenderedDist(rs1)), Ok(#RenderedDist(rs2))) =>
Ok(
#RenderedDist(
PointSetDist.combinePointwise(
~integralSumCachesFn=(a, b) => Some(a +. b),
~integralCachesFn=(a, b) => Some(
Continuous.combinePointwise(~distributionType=#CDF, \"+.", a, b),
),
\"+.",
rs1,
rs2,
),
),
)
| (Error(e1), _) => Error(e1)
| (_, Error(e2)) => Error(e2)
| _ => Error("Pointwise combination: rendering failed.")
}
let pointwiseCombine = (fn, evaluationParams: evaluationParams, t1: node, t2: node) =>
switch // TODO: construct a function that we can easily sample from, to construct
// a RenderedDist. Use the xMin and xMax of the rendered pointSetDists to tell the sampling function where to look.
// TODO: This should work for symbolic distributions too!
(Node.render(evaluationParams, t1), Node.render(evaluationParams, t2)) {
| (Ok(#RenderedDist(rs1)), Ok(#RenderedDist(rs2))) =>
Ok(#RenderedDist(PointSetDist.combinePointwise(fn, rs1, rs2)))
| (Error(e1), _) => Error(e1)
| (_, Error(e2)) => Error(e2)
| _ => Error("Pointwise combination: rendering failed.")
}
let operationToLeaf = (
evaluationParams: evaluationParams,
pointwiseOp: Operation.pointwiseOperation,
t1: node,
t2: node,
) =>
switch pointwiseOp {
| #Add => pointwiseAdd(evaluationParams, t1, t2)
| #Multiply => pointwiseCombine(\"*.", evaluationParams, t1, t2)
| #Power => pointwiseCombine(\"**", evaluationParams, t1, t2)
}
}
module Truncate = {
type simplificationResult = [
| #Solution(ASTTypes.node)
| #Error(string)
| #NoSolution
]
let trySimplification = (leftCutoff, rightCutoff, t): simplificationResult =>
switch (leftCutoff, rightCutoff, t) {
| (None, None, t) => #Solution(t)
| (Some(lc), Some(rc), _) if lc > rc =>
#Error("Left truncation bound must be smaller than right truncation bound.")
| (lc, rc, #SymbolicDist(#Uniform(u))) =>
#Solution(#SymbolicDist(#Uniform(SymbolicDist.Uniform.truncate(lc, rc, u))))
| _ => #NoSolution
}
let truncateAsShape = (evaluationParams: evaluationParams, leftCutoff, rightCutoff, t) =>
switch // TODO: use named args for xMin/xMax in renderToShape; if we're lucky we can at least get the tail
// of a distribution we otherwise wouldn't get at all
Node.ensureIsRendered(evaluationParams, t) {
| Ok(#RenderedDist(rs)) =>
Ok(#RenderedDist(PointSetDist.T.truncate(leftCutoff, rightCutoff, rs)))
| Error(e) => Error(e)
| _ => Error("Could not truncate distribution.")
}
let operationToLeaf = (
evaluationParams,
leftCutoff: option<float>,
rightCutoff: option<float>,
t: node,
): result<node, string> =>
t
|> trySimplification(leftCutoff, rightCutoff)
|> (
x =>
switch x {
| #Solution(t) => Ok(t)
| #Error(e) => Error(e)
| #NoSolution => truncateAsShape(evaluationParams, leftCutoff, rightCutoff, t)
}
)
}
module Normalize = {
let rec operationToLeaf = (evaluationParams, t: node): result<node, string> =>
switch t {
| #RenderedDist(s) => Ok(#RenderedDist(PointSetDist.T.normalize(s)))
| #SymbolicDist(_) => Ok(t)
| _ => ASTTypes.Node.evaluateAndRetry(evaluationParams, operationToLeaf, t)
}
}
module FunctionCall = {
let _runHardcodedFunction = (name, evaluationParams, args) =>
TypeSystem.Function.Ts.findByNameAndRun(HardcodedFunctions.all, name, evaluationParams, args)
let _runLocalFunction = (name, evaluationParams: evaluationParams, args) =>
Environment.getFunction(evaluationParams.environment, name) |> E.R.bind(_, ((argNames, fn)) =>
ASTTypes.Function.run(evaluationParams, args, (argNames, fn))
)
let _runWithEvaluatedInputs = (
evaluationParams: ASTTypes.evaluationParams,
name,
args: array<ASTTypes.node>,
) =>
_runHardcodedFunction(name, evaluationParams, args) |> E.O.default(
_runLocalFunction(name, evaluationParams, args),
)
// TODO: This forces things to be floats
let run = (evaluationParams, name, args) =>
args
|> E.A.fmap(a => evaluationParams.evaluateNode(evaluationParams, a))
|> E.A.R.firstErrorOrOpen
|> E.R.bind(_, _runWithEvaluatedInputs(evaluationParams, name))
}
module Render = {
let rec operationToLeaf = (evaluationParams: evaluationParams, t: node): result<node, string> =>
switch t {
| #Function(_) => Error("Cannot render a function")
| #SymbolicDist(d) =>
Ok(
#RenderedDist(
SymbolicDist.T.toPointSetDist(evaluationParams.samplingInputs.pointSetDistLength, d),
),
)
| #RenderedDist(_) as t => Ok(t) // already a rendered pointSetDist, we're done here
| _ => ASTTypes.Node.evaluateAndRetry(evaluationParams, operationToLeaf, t)
}
}
/* This function recursively goes through the nodes of the parse tree,
replacing each Operation node and its subtree with a Data node.
Whenever possible, the replacement produces a new Symbolic Data node,
but most often it will produce a RenderedDist.
This function is used mainly to turn a parse tree into a single RenderedDist
that can then be displayed to the user. */
let rec toLeaf = (evaluationParams: ASTTypes.evaluationParams, node: node): result<node, string> =>
switch node {
// Leaf nodes just stay leaf nodes
| #SymbolicDist(_)
| #Function(_)
| #RenderedDist(_) =>
Ok(node)
| #Array(args) =>
args |> E.A.fmap(toLeaf(evaluationParams)) |> E.A.R.firstErrorOrOpen |> E.R.fmap(r => #Array(r))
// Operations need to be turned into leaves
| #AlgebraicCombination(algebraicOp, t1, t2) =>
AlgebraicCombination.operationToLeaf(evaluationParams, algebraicOp, t1, t2)
| #PointwiseCombination(pointwiseOp, t1, t2) =>
PointwiseCombination.operationToLeaf(evaluationParams, pointwiseOp, t1, t2)
| #Truncate(leftCutoff, rightCutoff, t) =>
Truncate.operationToLeaf(evaluationParams, leftCutoff, rightCutoff, t)
| #Normalize(t) => Normalize.operationToLeaf(evaluationParams, t)
| #Render(t) => Render.operationToLeaf(evaluationParams, t)
| #Hash(t) =>
t
|> E.A.fmap(((name: string, node: node)) =>
toLeaf(evaluationParams, node) |> E.R.fmap(r => (name, r))
)
|> E.A.R.firstErrorOrOpen
|> E.R.fmap(r => #Hash(r))
| #Symbol(r) =>
ASTTypes.Environment.get(evaluationParams.environment, r)
|> E.O.toResult("Undeclared variable " ++ r)
|> E.R.bind(_, toLeaf(evaluationParams))
| #FunctionCall(name, args) =>
FunctionCall.run(evaluationParams, name, args) |> E.R.bind(_, toLeaf(evaluationParams))
}

View File

@ -1,233 +0,0 @@
@genType
type rec hash = array<(string, node)>
and node = [
| #SymbolicDist(SymbolicDistTypes.symbolicDist)
| #RenderedDist(PointSetTypes.pointSetDist)
| #Symbol(string)
| #Hash(hash)
| #Array(array<node>)
| #Function(array<string>, node)
| #AlgebraicCombination(Operation.algebraicOperation, node, node)
| #PointwiseCombination(Operation.pointwiseOperation, node, node)
| #Normalize(node)
| #Render(node)
| #Truncate(option<float>, option<float>, node)
| #FunctionCall(string, array<node>)
]
type statement = [
| #Assignment(string, node)
| #Expression(node)
]
type program = array<statement>
type environment = Belt.Map.String.t<node>
type rec evaluationParams = {
samplingInputs: SamplingInputs.samplingInputs,
environment: environment,
evaluateNode: (evaluationParams, node) => Belt.Result.t<node, string>,
}
module Environment = {
type t = environment
module MS = Belt.Map.String
let fromArray = MS.fromArray
let empty: t = []->fromArray
let mergeKeepSecond = (a: t, b: t) =>
MS.merge(a, b, (_, a, b) =>
switch (a, b) {
| (_, Some(b)) => Some(b)
| (Some(a), _) => Some(a)
| _ => None
}
)
let update = (t, str, fn) => MS.update(t, str, fn)
let get = (t: t, str) => MS.get(t, str)
let getFunction = (t: t, str) =>
switch get(t, str) {
| Some(#Function(argNames, fn)) => Ok((argNames, fn))
| _ => Error("Function " ++ (str ++ " not found"))
}
}
module Node = {
let getFloat = (node: node) =>
node |> (
x =>
switch x {
| #RenderedDist(Discrete({xyShape: {xs: [x], ys: [1.0]}})) => Some(x)
| #SymbolicDist(#Float(x)) => Some(x)
| _ => None
}
)
let evaluate = (evaluationParams: evaluationParams) =>
evaluationParams.evaluateNode(evaluationParams)
let evaluateAndRetry = (evaluationParams, fn, node) =>
node |> evaluationParams.evaluateNode(evaluationParams) |> E.R.bind(_, fn(evaluationParams))
let rec toString: node => string = x =>
switch x {
| #SymbolicDist(d) => SymbolicDist.T.toString(d)
| #RenderedDist(_) => "[renderedShape]"
| #AlgebraicCombination(op, t1, t2) =>
Operation.Algebraic.format(op, toString(t1), toString(t2))
| #PointwiseCombination(op, t1, t2) =>
Operation.Pointwise.format(op, toString(t1), toString(t2))
| #Normalize(t) => "normalize(" ++ (toString(t) ++ ")")
| #Truncate(lc, rc, t) => Operation.Truncate.toString(lc, rc, toString(t))
| #Render(t) => toString(t)
| #Symbol(t) => "Symbol: " ++ t
| #FunctionCall(name, args) =>
"[Function call: (" ++
(name ++
((args |> E.A.fmap(toString) |> Js.String.concatMany(_, ",")) ++ ")]"))
| #Function(args, internal) =>
"[Function: (" ++ ((args |> Js.String.concatMany(_, ",")) ++ (toString(internal) ++ ")]"))
| #Array(a) => "[" ++ ((a |> E.A.fmap(toString) |> Js.String.concatMany(_, ",")) ++ "]")
| #Hash(h) =>
"{" ++
((h
|> E.A.fmap(((name, value)) => name ++ (":" ++ toString(value)))
|> Js.String.concatMany(_, ",")) ++
"}")
}
let render = (evaluationParams: evaluationParams, r) => #Render(r) |> evaluate(evaluationParams)
let ensureIsRendered = (params, t) =>
switch t {
| #RenderedDist(_) => Ok(t)
| _ =>
switch render(params, t) {
| Ok(#RenderedDist(r)) => Ok(#RenderedDist(r))
| Ok(_) => Error("Did not render as requested")
| Error(e) => Error(e)
}
}
let ensureIsRenderedAndGetShape = (params, t) =>
switch ensureIsRendered(params, t) {
| Ok(#RenderedDist(r)) => Ok(r)
| Ok(_) => Error("Did not render as requested")
| Error(e) => Error(e)
}
let toPointSetDist = (item: node) =>
switch item {
| #RenderedDist(r) => Some(r)
| _ => None
}
let _toFloat = (t: PointSetTypes.pointSetDist) =>
switch t {
| Discrete({xyShape: {xs: [x], ys: [1.0]}}) => Some(#SymbolicDist(#Float(x)))
| _ => None
}
let toFloat = (item: node): result<node, string> =>
item |> toPointSetDist |> E.O.bind(_, _toFloat) |> E.O.toResult("Not valid shape")
}
module Function = {
type t = (array<string>, node)
let fromNode: node => option<t> = node =>
switch node {
| #Function(r) => Some(r)
| _ => None
}
let argumentNames = ((a, _): t) => a
let internals = ((_, b): t) => b
let run = (evaluationParams: evaluationParams, args: array<node>, t: t) =>
if E.A.length(args) == E.A.length(argumentNames(t)) {
let newEnvironment = Belt.Array.zip(argumentNames(t), args) |> Environment.fromArray
let newEvaluationParams: evaluationParams = {
samplingInputs: evaluationParams.samplingInputs,
environment: Environment.mergeKeepSecond(evaluationParams.environment, newEnvironment),
evaluateNode: evaluationParams.evaluateNode,
}
evaluationParams.evaluateNode(newEvaluationParams, internals(t))
} else {
Error("Wrong number of variables")
}
}
module SamplingDistribution = {
type t = [
| #SymbolicDist(SymbolicDistTypes.symbolicDist)
| #RenderedDist(PointSetTypes.pointSetDist)
]
let isSamplingDistribution: node => bool = x =>
switch x {
| #SymbolicDist(_) => true
| #RenderedDist(_) => true
| _ => false
}
let fromNode: node => result<t, string> = x =>
switch x {
| #SymbolicDist(n) => Ok(#SymbolicDist(n))
| #RenderedDist(n) => Ok(#RenderedDist(n))
| _ => Error("Not valid type")
}
let renderIfIsNotSamplingDistribution = (params, t): result<node, string> =>
!isSamplingDistribution(t)
? switch Node.render(params, t) {
| Ok(r) => Ok(r)
| Error(e) => Error(e)
}
: Ok(t)
let map = (~renderedDistFn, ~symbolicDistFn, node: node) =>
node |> (
x =>
switch x {
| #RenderedDist(r) => Some(renderedDistFn(r))
| #SymbolicDist(s) => Some(symbolicDistFn(s))
| _ => None
}
)
let sampleN = n =>
map(~renderedDistFn=PointSetDist.sampleNRendered(n), ~symbolicDistFn=SymbolicDist.T.sampleN(n))
let getCombinationSamples = (n, algebraicOp, t1: node, t2: node) =>
switch (sampleN(n, t1), sampleN(n, t2)) {
| (Some(a), Some(b)) =>
Some(
Belt.Array.zip(a, b) |> E.A.fmap(((a, b)) => Operation.Algebraic.toFn(algebraicOp, a, b)),
)
| _ => None
}
let combineShapesUsingSampling = (
evaluationParams: evaluationParams,
algebraicOp,
t1: node,
t2: node,
) => {
let i1 = renderIfIsNotSamplingDistribution(evaluationParams, t1)
let i2 = renderIfIsNotSamplingDistribution(evaluationParams, t2)
E.R.merge(i1, i2) |> E.R.bind(_, ((a, b)) => {
let samples =
getCombinationSamples(
evaluationParams.samplingInputs.sampleCount,
algebraicOp,
a,
b,
) |> E.O.toResult("Could not get samples")
let sampleSetDist = samples->E.R.bind(SampleSetDist.make)
let pointSetDist =
sampleSetDist->E.R.bind(r =>
SampleSetDist.toPointSetDist(~samplingInputs=evaluationParams.samplingInputs, ~samples=r)
)
pointSetDist |> E.R.fmap(r => #Normalize(#RenderedDist(r)))
})
}
}

View File

@ -1,87 +0,0 @@
open PointSetTypes
@genType
type t = PointSetTypes.distPlus
let pointSetDistIntegral = pointSetDist => PointSetDist.T.Integral.get(pointSetDist)
let make = (~pointSetDist, ~squiggleString, ()): t => {
let integral = pointSetDistIntegral(pointSetDist)
{pointSetDist: pointSetDist, integralCache: integral, squiggleString: squiggleString}
}
let update = (~pointSetDist=?, ~integralCache=?, ~squiggleString=?, t: t) => {
pointSetDist: E.O.default(t.pointSetDist, pointSetDist),
integralCache: E.O.default(t.integralCache, integralCache),
squiggleString: E.O.default(t.squiggleString, squiggleString),
}
let updateShape = (pointSetDist, t) => {
let integralCache = pointSetDistIntegral(pointSetDist)
update(~pointSetDist, ~integralCache, t)
}
let toPointSetDist = ({pointSetDist, _}: t) => pointSetDist
let pointSetDistFn = (fn, {pointSetDist}: t) => fn(pointSetDist)
module T = Distributions.Dist({
type t = PointSetTypes.distPlus
type integral = PointSetTypes.distPlus
let toPointSetDist = toPointSetDist
let toContinuous = pointSetDistFn(PointSetDist.T.toContinuous)
let toDiscrete = pointSetDistFn(PointSetDist.T.toDiscrete)
let normalize = (t: t): t => {
let normalizedShape = t |> toPointSetDist |> PointSetDist.T.normalize
t |> updateShape(normalizedShape)
}
let truncate = (leftCutoff, rightCutoff, t: t): t => {
let truncatedShape = t |> toPointSetDist |> PointSetDist.T.truncate(leftCutoff, rightCutoff)
t |> updateShape(truncatedShape)
}
let xToY = (f, t: t) => t |> toPointSetDist |> PointSetDist.T.xToY(f)
let minX = pointSetDistFn(PointSetDist.T.minX)
let maxX = pointSetDistFn(PointSetDist.T.maxX)
let toDiscreteProbabilityMassFraction = pointSetDistFn(
PointSetDist.T.toDiscreteProbabilityMassFraction,
)
// This bit is kind of awkward, could probably use rethinking.
let integral = (t: t) => updateShape(Continuous(t.integralCache), t)
let updateIntegralCache = (integralCache: option<PointSetTypes.continuousShape>, t) =>
update(~integralCache=E.O.default(t.integralCache, integralCache), t)
let downsample = (i, t): t => updateShape(t |> toPointSetDist |> PointSetDist.T.downsample(i), t)
// todo: adjust for limit, maybe?
let mapY = (
~integralSumCacheFn=previousIntegralSum => None,
~integralCacheFn=previousIntegralCache => None,
~fn,
{pointSetDist, _} as t: t,
): t => PointSetDist.T.mapY(~integralSumCacheFn, ~fn, pointSetDist) |> updateShape(_, t)
// get the total of everything
let integralEndY = (t: t) => {
PointSetDist.T.Integral.sum(toPointSetDist(t))
}
// TODO: Fix this below, obviously. Adjust for limits
let integralXtoY = (f, t: t) => {
PointSetDist.T.Integral.xToY(f, toPointSetDist(t))
}
// TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
let integralYtoX = (f, t: t) => {
PointSetDist.T.Integral.yToX(f, toPointSetDist(t))
}
let mean = (t: t) => {
PointSetDist.T.mean(t.pointSetDist)
}
let variance = (t: t) => PointSetDist.T.variance(t.pointSetDist)
})

View File

@ -1,240 +0,0 @@
open TypeSystem
let wrongInputsError = (r: array<typedValue>) => {
let inputs = r |> E.A.fmap(TypedValue.toString) |> Js.String.concatMany(_, ",")
Js.log3("Inputs were", inputs, r)
Error("Wrong inputs. The inputs were:" ++ inputs)
}
let to_: (float, float) => result<node, string> = (low, high) =>
switch (low, high) {
| (low, high) if low <= 0.0 && low < high =>
Ok(#SymbolicDist(SymbolicDist.Normal.from90PercentCI(low, high)))
| (low, high) if low < high =>
Ok(#SymbolicDist(SymbolicDist.Lognormal.from90PercentCI(low, high)))
| (_, _) => Error("Low value must be less than high value.")
}
let makeSymbolicFromTwoFloats = (name, fn) =>
Function.T.make(
~name,
~outputType=#SamplingDistribution,
~inputTypes=[#Float, #Float],
~run=x =>
switch x {
| [#Float(a), #Float(b)] => fn(a, b) |> E.R.fmap(r => #SymbolicDist(r))
| e => wrongInputsError(e)
},
(),
)
let makeSymbolicFromOneFloat = (name, fn) =>
Function.T.make(
~name,
~outputType=#SamplingDistribution,
~inputTypes=[#Float],
~run=x =>
switch x {
| [#Float(a)] => fn(a) |> E.R.fmap(r => #SymbolicDist(r))
| e => wrongInputsError(e)
},
(),
)
let makeDistFloat = (name, fn) =>
Function.T.make(
~name,
~outputType=#SamplingDistribution,
~inputTypes=[#SamplingDistribution, #Float],
~run=x =>
switch x {
| [#SamplingDist(a), #Float(b)] => fn(a, b)
| [#RenderedDist(a), #Float(b)] => fn(#RenderedDist(a), b)
| e => wrongInputsError(e)
},
(),
)
let makeRenderedDistFloat = (name, fn) =>
Function.T.make(
~name,
~outputType=#RenderedDistribution,
~inputTypes=[#RenderedDistribution, #Float],
~shouldCoerceTypes=true,
~run=x =>
switch x {
| [#RenderedDist(a), #Float(b)] => fn(a, b)
| e => wrongInputsError(e)
},
(),
)
let makeDist = (name, fn) =>
Function.T.make(
~name,
~outputType=#SamplingDistribution,
~inputTypes=[#SamplingDistribution],
~run=x =>
switch x {
| [#SamplingDist(a)] => fn(a)
| [#RenderedDist(a)] => fn(#RenderedDist(a))
| e => wrongInputsError(e)
},
(),
)
let floatFromDist = (
distToFloatOp: Operation.distToFloatOperation,
t: TypeSystem.samplingDist,
): result<node, string> =>
switch t {
| #SymbolicDist(s) =>
SymbolicDist.T.operate(distToFloatOp, s) |> E.R.bind(_, v => Ok(#SymbolicDist(#Float(v))))
| #RenderedDist(rs) =>
PointSetDist.operate(distToFloatOp, rs) |> (v => Ok(#SymbolicDist(#Float(v))))
}
let verticalScaling = (scaleOp, rs, scaleBy) => {
// scaleBy has to be a single float, otherwise we'll return an error.
let fn = (secondary, main) => Operation.Scale.toFn(scaleOp, main, secondary)
let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(scaleOp)
let integralCacheFn = Operation.Scale.toIntegralCacheFn(scaleOp)
Ok(
#RenderedDist(
PointSetDist.T.mapY(
~integralSumCacheFn=integralSumCacheFn(scaleBy),
~integralCacheFn=integralCacheFn(scaleBy),
~fn=fn(scaleBy),
rs,
),
),
)
}
module Multimodal = {
let getByNameResult = Hash.getByNameResult
let _paramsToDistsAndWeights = (r: array<typedValue>) =>
switch r {
| [#Hash(r)] =>
let dists =
getByNameResult(r, "dists")
->E.R.bind(TypeSystem.TypedValue.toArray)
->E.R.bind(r => r |> E.A.fmap(TypeSystem.TypedValue.toDist) |> E.A.R.firstErrorOrOpen)
let weights =
getByNameResult(r, "weights")
->E.R.bind(TypeSystem.TypedValue.toArray)
->E.R.bind(r => r |> E.A.fmap(TypeSystem.TypedValue.toFloat) |> E.A.R.firstErrorOrOpen)
E.R.merge(dists, weights)->E.R.bind(((a, b)) =>
E.A.length(b) > E.A.length(a)
? Error("Too many weights provided")
: Ok(
E.A.zipMaxLength(a, b) |> E.A.fmap(((a, b)) => (
a |> E.O.toExn(""),
b |> E.O.default(1.0),
)),
)
)
| _ => Error("Needs items")
}
let _runner: array<typedValue> => result<node, string> = r => {
let paramsToDistsAndWeights =
_paramsToDistsAndWeights(r) |> E.R.fmap(
E.A.fmap(((dist, weight)) =>
#FunctionCall("scaleMultiply", [dist, #SymbolicDist(#Float(weight))])
),
)
let pointwiseSum: result<node, string> =
paramsToDistsAndWeights->E.R.bind(E.R.errorIfCondition(E.A.isEmpty, "Needs one input"))
|> E.R.fmap(r =>
r
|> Js.Array.sliceFrom(1)
|> E.A.fold_left((acc, x) => #PointwiseCombination(#Add, acc, x), E.A.unsafe_get(r, 0))
)
pointwiseSum
}
let _function = Function.T.make(
~name="multimodal",
~outputType=#SamplingDistribution,
~inputTypes=[#Hash([("dists", #Array(#SamplingDistribution)), ("weights", #Array(#Float))])],
~run=_runner,
(),
)
}
let all = [
makeSymbolicFromTwoFloats("normal", SymbolicDist.Normal.make),
makeSymbolicFromTwoFloats("uniform", SymbolicDist.Uniform.make),
makeSymbolicFromTwoFloats("beta", SymbolicDist.Beta.make),
makeSymbolicFromTwoFloats("lognormal", SymbolicDist.Lognormal.make),
makeSymbolicFromTwoFloats("lognormalFromMeanAndStdDev", SymbolicDist.Lognormal.fromMeanAndStdev),
makeSymbolicFromOneFloat("exponential", SymbolicDist.Exponential.make),
Function.T.make(
~name="to",
~outputType=#SamplingDistribution,
~inputTypes=[#Float, #Float],
~run=x =>
switch x {
| [#Float(a), #Float(b)] => to_(a, b)
| e => wrongInputsError(e)
},
(),
),
Function.T.make(
~name="triangular",
~outputType=#SamplingDistribution,
~inputTypes=[#Float, #Float, #Float],
~run=x =>
switch x {
| [#Float(a), #Float(b), #Float(c)] =>
SymbolicDist.Triangular.make(a, b, c) |> E.R.fmap(r => #SymbolicDist(r))
| e => wrongInputsError(e)
},
(),
),
Function.T.make(
~name="log",
~outputType=#Float,
~inputTypes=[#Float],
~run=x =>
switch x {
| [#Float(a)] => Ok(#SymbolicDist(#Float(Js.Math.log(a))))
| e => wrongInputsError(e)
},
(),
),
makeDistFloat("pdf", (dist, float) => floatFromDist(#Pdf(float), dist)),
makeDistFloat("inv", (dist, float) => floatFromDist(#Inv(float), dist)),
makeDistFloat("cdf", (dist, float) => floatFromDist(#Cdf(float), dist)),
makeDist("mean", dist => floatFromDist(#Mean, dist)),
makeDist("sample", dist => floatFromDist(#Sample, dist)),
Function.T.make(
~name="render",
~outputType=#RenderedDistribution,
~inputTypes=[#RenderedDistribution],
~run=x =>
switch x {
| [#RenderedDist(c)] => Ok(#RenderedDist(c))
| e => wrongInputsError(e)
},
(),
),
Function.T.make(
~name="normalize",
~outputType=#SamplingDistribution,
~inputTypes=[#SamplingDistribution],
~run=x =>
switch x {
| [#SamplingDist(#SymbolicDist(c))] => Ok(#SymbolicDist(c))
| [#SamplingDist(#RenderedDist(c))] => Ok(#RenderedDist(PointSetDist.T.normalize(c)))
| e => wrongInputsError(e)
},
(),
),
makeRenderedDistFloat("scaleExp", (dist, float) => verticalScaling(#Power, dist, float)),
makeRenderedDistFloat("scaleMultiply", (dist, float) => verticalScaling(#Multiply, dist, float)),
makeRenderedDistFloat("scaleLog", (dist, float) => verticalScaling(#Logarithm, dist, float)),
Multimodal._function,
]
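
Illustrative sketch (not part of the commit; this file is being deleted): the rule used by to_ above, in TypeScript with made-up names. A credible-interval expression "low to high" becomes a normal fitted to the 90% CI when the low bound is non-positive, and a lognormal fitted to the 90% CI otherwise.

// Illustrative only; mirrors the dispatch in to_ above, not an exported API.
type CIDist = "normalFrom90PercentCI" | "lognormalFrom90PercentCI";

const chooseCIDist = (low: number, high: number): CIDist | Error => {
  if (!(low < high)) return new Error("Low value must be less than high value.");
  return low <= 0 ? "normalFrom90PercentCI" : "lognormalFrom90PercentCI";
};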

View File

@ -1,196 +0,0 @@
type node = ASTTypes.node
let getFloat = ASTTypes.Node.getFloat
type samplingDist = [
| #SymbolicDist(SymbolicDistTypes.symbolicDist)
| #RenderedDist(PointSetTypes.pointSetDist)
]
type rec hashType = array<(string, _type)>
and _type = [
| #Float
| #SamplingDistribution
| #RenderedDistribution
| #Array(_type)
| #Hash(hashType)
]
type rec hashTypedValue = array<(string, typedValue)>
and typedValue = [
| #Float(float)
| #RenderedDist(PointSetTypes.pointSetDist)
| #SamplingDist(samplingDist)
| #Array(array<typedValue>)
| #Hash(hashTypedValue)
]
type _function = {
name: string,
inputTypes: array<_type>,
outputType: _type,
run: array<typedValue> => result<node, string>,
shouldCoerceTypes: bool,
}
type functions = array<_function>
type inputNodes = array<node>
module TypedValue = {
let rec toString: typedValue => string = x =>
switch x {
| #SamplingDist(_) => "[sampling dist]"
| #RenderedDist(_) => "[rendered PointSetDist]"
| #Float(f) => "Float: " ++ Js.Float.toString(f)
| #Array(a) => "[" ++ ((a |> E.A.fmap(toString) |> Js.String.concatMany(_, ",")) ++ "]")
| #Hash(v) =>
"{" ++
((v
|> E.A.fmap(((name, value)) => name ++ (":" ++ toString(value)))
|> Js.String.concatMany(_, ",")) ++
"}")
}
let rec fromNode = (node: node): result<typedValue, string> =>
switch node {
| #SymbolicDist(#Float(r)) => Ok(#Float(r))
| #SymbolicDist(s) => Ok(#SamplingDist(#SymbolicDist(s)))
| #RenderedDist(s) => Ok(#RenderedDist(s))
| #Array(r) => r |> E.A.fmap(fromNode) |> E.A.R.firstErrorOrOpen |> E.R.fmap(r => #Array(r))
| #Hash(hash) =>
hash
|> E.A.fmap(((name, t)) => fromNode(t) |> E.R.fmap(r => (name, r)))
|> E.A.R.firstErrorOrOpen
|> E.R.fmap(r => #Hash(r))
| e => Error("Wrong type: " ++ ASTTypes.Node.toString(e))
}
// todo: Arrays and hashes
let rec fromNodeWithTypeCoercion = (evaluationParams, _type: _type, node) =>
switch (_type, node) {
| (#Float, _) =>
switch getFloat(node) {
| Some(a) => Ok(#Float(a))
| _ => Error("Type Error: Expected float.")
}
| (#SamplingDistribution, _) =>
ASTTypes.SamplingDistribution.renderIfIsNotSamplingDistribution(
evaluationParams,
node,
) |> E.R.bind(_, fromNode)
| (#RenderedDistribution, _) =>
ASTTypes.Node.render(evaluationParams, node) |> E.R.bind(_, fromNode)
| (#Array(_type), #Array(b)) =>
b
|> E.A.fmap(fromNodeWithTypeCoercion(evaluationParams, _type))
|> E.A.R.firstErrorOrOpen
|> E.R.fmap(r => #Array(r))
| (#Hash(named), #Hash(r)) =>
let keyValues =
named |> E.A.fmap(((name, intendedType)) => (name, intendedType, Hash.getByName(r, name)))
let typedHash =
keyValues
|> E.A.fmap(((name, intendedType, optionNode)) =>
switch optionNode {
| Some(node) =>
fromNodeWithTypeCoercion(evaluationParams, intendedType, node) |> E.R.fmap(node => (
name,
node,
))
| None => Error("Hash parameter not present in hash.")
}
)
|> E.A.R.firstErrorOrOpen
|> E.R.fmap(r => #Hash(r))
typedHash
| _ => Error("fromNodeWithTypeCoercion error, sorry.")
}
let toFloat: typedValue => result<float, string> = x =>
switch x {
| #Float(x) => Ok(x)
| _ => Error("Not a float")
}
let toArray: typedValue => result<array<'a>, string> = x =>
switch x {
| #Array(x) => Ok(x)
| _ => Error("Not an array")
}
let toNamed: typedValue => result<hashTypedValue, string> = x =>
switch x {
| #Hash(x) => Ok(x)
| _ => Error("Not a named item")
}
let toDist: typedValue => result<node, string> = x =>
switch x {
| #SamplingDist(#SymbolicDist(c)) => Ok(#SymbolicDist(c))
| #SamplingDist(#RenderedDist(c)) => Ok(#RenderedDist(c))
| #RenderedDist(c) => Ok(#RenderedDist(c))
| #Float(x) => Ok(#SymbolicDist(#Float(x)))
| x => Error("Cannot be converted into a distribution: " ++ toString(x))
}
}
module Function = {
type t = _function
type ts = functions
module T = {
let make = (~name, ~inputTypes, ~outputType, ~run, ~shouldCoerceTypes=true, _): t => {
name: name,
inputTypes: inputTypes,
outputType: outputType,
run: run,
shouldCoerceTypes: shouldCoerceTypes,
}
let _inputLengthCheck = (inputNodes: inputNodes, t: t) => {
let expectedLength = E.A.length(t.inputTypes)
let actualLength = E.A.length(inputNodes)
expectedLength == actualLength
? Ok(inputNodes)
: Error(
"Wrong number of inputs. Expected" ++
((expectedLength |> E.I.toString) ++
(". Got:" ++ (actualLength |> E.I.toString))),
)
}
let _coerceInputNodes = (evaluationParams, inputTypes, shouldCoerce, inputNodes) =>
Belt.Array.zip(inputTypes, inputNodes)
|> E.A.fmap(((def, input)) =>
shouldCoerce
? TypedValue.fromNodeWithTypeCoercion(evaluationParams, def, input)
: TypedValue.fromNode(input)
)
|> E.A.R.firstErrorOrOpen
let inputsToTypedValues = (
evaluationParams: ASTTypes.evaluationParams,
inputNodes: inputNodes,
t: t,
) =>
_inputLengthCheck(inputNodes, t)->E.R.bind(
_coerceInputNodes(evaluationParams, t.inputTypes, t.shouldCoerceTypes),
)
let run = (evaluationParams: ASTTypes.evaluationParams, inputNodes: inputNodes, t: t) =>
inputsToTypedValues(evaluationParams, inputNodes, t)->E.R.bind(t.run)
|> (
x =>
switch x {
| Ok(i) => Ok(i)
| Error(r) => Error("Function " ++ (t.name ++ (" error: " ++ r)))
}
)
}
module Ts = {
let findByName = (ts: ts, n: string) => ts |> Belt.Array.getBy(_, ({name}) => name == n)
let findByNameAndRun = (ts: ts, n: string, evaluationParams, inputTypes) =>
findByName(ts, n) |> E.O.fmap(T.run(evaluationParams, inputTypes))
}
}

View File

@ -1,290 +0,0 @@
module MathJsonToMathJsAdt = {
type rec arg =
| Symbol(string)
| Value(float)
| Fn(fn)
| Array(array<arg>)
| Blocks(array<arg>)
| Object(Js.Dict.t<arg>)
| Assignment(arg, arg)
| FunctionAssignment(fnAssignment)
and fn = {
name: string,
args: array<arg>,
}
and fnAssignment = {
name: string,
args: array<string>,
expression: arg,
}
let rec run = (j: Js.Json.t) => {
open Json.Decode
switch field("mathjs", string, j) {
| "FunctionNode" =>
let args = j |> field("args", array(run))
let name = j |> optional(field("fn", field("name", string)))
name |> E.O.fmap(name => Fn({name: name, args: args |> E.A.O.concatSomes}))
| "OperatorNode" =>
let args = j |> field("args", array(run))
Some(
Fn({
name: j |> field("fn", string),
args: args |> E.A.O.concatSomes,
}),
)
| "ConstantNode" => optional(field("value", Json.Decode.float), j) |> E.O.fmap(r => Value(r))
| "ParenthesisNode" => j |> field("content", run)
| "ObjectNode" =>
let properties = j |> field("properties", dict(run))
Js.Dict.entries(properties)
|> E.A.fmap(((key, value)) => value |> E.O.fmap(v => (key, v)))
|> E.A.O.concatSomes
|> Js.Dict.fromArray
|> (r => Some(Object(r)))
| "ArrayNode" =>
let items = field("items", array(run), j)
Some(Array(items |> E.A.O.concatSomes))
| "SymbolNode" => Some(Symbol(field("name", string, j)))
| "AssignmentNode" =>
let object_ = j |> field("object", run)
let value_ = j |> field("value", run)
switch (object_, value_) {
| (Some(o), Some(v)) => Some(Assignment(o, v))
| _ => None
}
| "BlockNode" =>
let block = r => r |> field("node", run)
let args = j |> field("blocks", array(block)) |> E.A.O.concatSomes
Some(Blocks(args))
| "FunctionAssignmentNode" =>
let name = j |> field("name", string)
let args = j |> field("params", array(field("name", string)))
let expression = j |> field("expr", run)
expression |> E.O.fmap(expression => FunctionAssignment({
name: name,
args: args,
expression: expression,
}))
| n =>
Js.log3("Couldn't parse mathjs node", j, n)
None
}
}
}
module MathAdtToDistDst = {
open MathJsonToMathJsAdt
let handleSymbol = sym => Ok(#Symbol(sym))
// TODO: This only works on the top level, which needs to be refactored. Also, I think functions don't need to be done like this anymore.
module MathAdtCleaner = {
let transformWithSymbol = (f: float, s: string) =>
switch s {
| "K" => Some(f *. 1000.)
| "M" => Some(f *. 1000000.)
| "B" => Some(f *. 1000000000.)
| "T" => Some(f *. 1000000000000.)
| _ => None
}
let rec run = x =>
switch x {
| Fn({name: "multiply", args: [Value(f), Symbol(s)]}) as doNothing =>
transformWithSymbol(f, s) |> E.O.fmap(r => Value(r)) |> E.O.default(doNothing)
| Fn({name: "unaryMinus", args: [Value(f)]}) => Value(-1.0 *. f)
| Fn({name, args}) => Fn({name: name, args: args |> E.A.fmap(run)})
| Array(args) => Array(args |> E.A.fmap(run))
| Symbol(s) => Symbol(s)
| Value(v) => Value(v)
| Blocks(args) => Blocks(args |> E.A.fmap(run))
| Assignment(a, b) => Assignment(a, run(b))
| FunctionAssignment(a) => FunctionAssignment(a)
| Object(v) =>
Object(
v
|> Js.Dict.entries
|> E.A.fmap(((key, value)) => (key, run(value)))
|> Js.Dict.fromArray,
)
}
}
let lognormal = (args, parseArgs, nodeParser) =>
switch args {
| [Object(o)] =>
let g = s =>
Js.Dict.get(o, s) |> E.O.toResult("Variable was empty") |> E.R.bind(_, nodeParser)
switch (g("mean"), g("stdev"), g("mu"), g("sigma")) {
| (Ok(mean), Ok(stdev), _, _) =>
Ok(#FunctionCall("lognormalFromMeanAndStdDev", [mean, stdev]))
| (_, _, Ok(mu), Ok(sigma)) => Ok(#FunctionCall("lognormal", [mu, sigma]))
| _ => Error("Lognormal distribution needs either mean and stdev or mu and sigma")
}
| _ => parseArgs() |> E.R.fmap((args: array<ASTTypes.node>) => #FunctionCall("lognormal", args))
}
// Error("Dotwise exponentiation needs two operands")
let operationParser = (name: string, args: result<array<ASTTypes.node>, string>): result<
ASTTypes.node,
string,
> => {
let toOkAlgebraic = r => Ok(#AlgebraicCombination(r))
let toOkPointwise = r => Ok(#PointwiseCombination(r))
let toOkTruncate = r => Ok(#Truncate(r))
args |> E.R.bind(_, args =>
switch (name, args) {
| ("add", [l, r]) => toOkAlgebraic((#Add, l, r))
| ("add", _) => Error("Addition needs two operands")
| ("unaryMinus", [l]) => toOkAlgebraic((#Multiply, #SymbolicDist(#Float(-1.0)), l))
| ("subtract", [l, r]) => toOkAlgebraic((#Subtract, l, r))
| ("subtract", _) => Error("Subtraction needs two operands")
| ("multiply", [l, r]) => toOkAlgebraic((#Multiply, l, r))
| ("multiply", _) => Error("Multiplication needs two operands")
| ("pow", [l, r]) => toOkAlgebraic((#Power, l, r))
| ("pow", _) => Error("Exponentiation needs two operands")
| ("dotMultiply", [l, r]) => toOkPointwise((#Multiply, l, r))
| ("dotMultiply", _) => Error("Dotwise multiplication needs two operands")
| ("dotPow", [l, r]) => toOkPointwise((#Power, l, r))
| ("dotPow", _) => Error("Dotwise exponentiation needs two operands")
| ("rightLogShift", [l, r]) => toOkPointwise((#Add, l, r))
| ("rightLogShift", _) => Error("Dotwise addition needs two operands")
| ("divide", [l, r]) => toOkAlgebraic((#Divide, l, r))
| ("divide", _) => Error("Division needs two operands")
| ("leftTruncate", [d, #SymbolicDist(#Float(lc))]) => toOkTruncate((Some(lc), None, d))
| ("leftTruncate", _) =>
Error("leftTruncate needs two arguments: the expression and the cutoff")
| ("rightTruncate", [d, #SymbolicDist(#Float(rc))]) => toOkTruncate((None, Some(rc), d))
| ("rightTruncate", _) =>
Error("rightTruncate needs two arguments: the expression and the cutoff")
| ("truncate", [d, #SymbolicDist(#Float(lc)), #SymbolicDist(#Float(rc))]) =>
toOkTruncate((Some(lc), Some(rc), d))
| ("truncate", _) => Error("truncate needs three arguments: the expression and both cutoffs")
| _ => Error("This type not currently supported")
}
)
}
let functionParser = (
nodeParser: MathJsonToMathJsAdt.arg => Belt.Result.t<ASTTypes.node, string>,
name: string,
args: array<MathJsonToMathJsAdt.arg>,
): result<ASTTypes.node, string> => {
let parseArray = ags => ags |> E.A.fmap(nodeParser) |> E.A.R.firstErrorOrOpen
let parseArgs = () => parseArray(args)
switch name {
| "lognormal" => lognormal(args, parseArgs, nodeParser)
| "multimodal"
| "add"
| "subtract"
| "multiply"
| "unaryMinus"
| "dotMultiply"
| "dotPow"
| "rightLogShift"
| "divide"
| "pow"
| "leftTruncate"
| "rightTruncate"
| "truncate" =>
operationParser(name, parseArgs())
| "mm" =>
let weights =
args
|> E.A.last
|> E.O.bind(_, x =>
switch x {
| Array(values) => Some(parseArray(values))
| _ => None
}
)
let possibleDists = E.O.isSome(weights)
? Belt.Array.slice(args, ~offset=0, ~len=E.A.length(args) - 1)
: args
let dists = parseArray(possibleDists)
switch (weights, dists) {
| (Some(Error(r)), _) => Error(r)
| (_, Error(r)) => Error(r)
| (None, Ok(dists)) =>
let hash: ASTTypes.node = #FunctionCall(
"multimodal",
[#Hash([("dists", #Array(dists)), ("weights", #Array([]))])],
)
Ok(hash)
| (Some(Ok(weights)), Ok(dists)) =>
let hash: ASTTypes.node = #FunctionCall(
"multimodal",
[#Hash([("dists", #Array(dists)), ("weights", #Array(weights))])],
)
Ok(hash)
}
| name => parseArgs() |> E.R.fmap((args: array<ASTTypes.node>) => #FunctionCall(name, args))
}
}
let rec nodeParser: MathJsonToMathJsAdt.arg => result<ASTTypes.node, string> = x =>
switch x {
| Value(f) => Ok(#SymbolicDist(#Float(f)))
| Symbol(sym) => Ok(#Symbol(sym))
| Fn({name, args}) => functionParser(nodeParser, name, args)
| _ => Error("This type not currently supported")
}
// | FunctionAssignment({name, args, expression}) => {
// let evaluatedExpression = run(expression);
// `Function(_ => Ok(evaluatedExpression));
// }
let rec topLevel = (r): result<ASTTypes.program, string> =>
switch r {
| FunctionAssignment({name, args, expression}) =>
switch nodeParser(expression) {
| Ok(r) => Ok([#Assignment(name, #Function(args, r))])
| Error(r) => Error(r)
}
| Value(_) as r => nodeParser(r) |> E.R.fmap(r => [#Expression(r)])
| Fn(_) as r => nodeParser(r) |> E.R.fmap(r => [#Expression(r)])
| Array(_) => Error("Array not valid as top level")
| Symbol(s) => handleSymbol(s) |> E.R.fmap(r => [#Expression(r)])
| Object(_) => Error("Object not valid as top level")
| Assignment(name, value) =>
switch name {
| Symbol(symbol) => nodeParser(value) |> E.R.fmap(r => [#Assignment(symbol, r)])
| _ => Error("Symbol not a string")
}
| Blocks(blocks) =>
blocks |> E.A.fmap(b => topLevel(b)) |> E.A.R.firstErrorOrOpen |> E.R.fmap(E.A.concatMany)
}
let run = (r): result<ASTTypes.program, string> => r |> MathAdtCleaner.run |> topLevel
}
/* The MathJs parser doesn't support '.+' syntax, but we want it because it
parallels '.*'. Our workaround is to rewrite '.+' to '>>>', which MathJs parses
as rightLogShift. We don't expect to need rightLogShift itself anytime soon, so
this tradeoff seems fine.
*/
let pointwiseToRightLogShift = Js.String.replaceByRe(%re("/\.\+/g"), ">>>")
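// Illustrative usage sketch (the example string is hypothetical): the rewrite is
// purely textual, and operationParser above maps the resulting rightLogShift call
// back to pointwise addition.
let _pointwiseRewriteExample = pointwiseToRightLogShift("normal(5, 2) .+ uniform(0, 1)")
// _pointwiseRewriteExample == "normal(5, 2) >>> uniform(0, 1)"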
let fromString2 = str => {
/* We feed the user-typed string into Mathjs.parseMath,
which returns a JSON object containing (hopefully) a single-element array.
That array element is the top-level node of a nested-object tree
representing the functions/arguments/values/etc. in the string.
MathJsonToMathJsAdt then recursively unpacks this JSON into a typed data structure,
and MathAdtToDistDst converts that structure into an AST program, translating
distribution functions as it encounters them. (A usage sketch follows fromString below.)
*/
let mathJsToJson = str |> pointwiseToRightLogShift |> Mathjs.parseMath
let mathJsParse = E.R.bind(mathJsToJson, r =>
switch MathJsonToMathJsAdt.run(r) {
| Some(r) => Ok(r)
| None => Error("MathJsParse Error")
}
)
let value = E.R.bind(mathJsParse, MathAdtToDistDst.run)
value
}
let fromString = str => fromString2(str)
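// Usage sketch (the expression below is a hypothetical example; any valid string
// goes through the same Mathjs -> ADT -> AST pipeline described above):
let parsedExample: result<ASTTypes.program, string> = fromString(
"mm(normal(5, 2), uniform(0, 1), [0.7, 0.3]) .+ 1",
)
switch parsedExample {
| Ok(_program) => Js.log("parsed into an ASTTypes.program")
| Error(message) => Js.log2("parse error:", message)
}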

View File

@ -1,185 +0,0 @@
// TODO: This setup is more confusing than it should be, there's more work to do in cleanup here.
module Inputs = {
module SamplingInputs = {
type t = {
sampleCount: option<int>,
outputXYPoints: option<int>,
kernelWidth: option<float>,
pointDistLength: option<int>,
}
}
let defaultRecommendedLength = 100
let defaultShouldDownsample = true
type inputs = {
squiggleString: string,
samplingInputs: SamplingInputs.t,
environment: ASTTypes.environment,
}
let empty: SamplingInputs.t = {
sampleCount: None,
outputXYPoints: None,
kernelWidth: None,
pointDistLength: None,
}
let make = (
~samplingInputs=empty,
~squiggleString,
~environment=ASTTypes.Environment.empty,
(),
): inputs => {
samplingInputs: samplingInputs,
squiggleString: squiggleString,
environment: environment,
}
}
type exportDistribution = [
| #DistPlus(DistPlus.t)
| #Float(float)
| #Function(float => Belt.Result.t<DistPlus.t, string>)
]
type exportEnv = array<(string, ASTTypes.node)>
type exportType = {
environment: exportEnv,
exports: array<exportDistribution>,
}
module Internals = {
let addVariable = (
{samplingInputs, squiggleString, environment}: Inputs.inputs,
str,
node,
): Inputs.inputs => {
samplingInputs: samplingInputs,
squiggleString: squiggleString,
environment: ASTTypes.Environment.update(environment, str, _ => Some(node)),
}
type outputs = {
graph: ASTTypes.node,
pointSetDist: PointSetTypes.pointSetDist,
}
let makeOutputs = (graph, shape): outputs => {graph: graph, pointSetDist: shape}
let makeInputs = (inputs: Inputs.inputs): SamplingInputs.samplingInputs => {
sampleCount: inputs.samplingInputs.sampleCount |> E.O.default(10000),
outputXYPoints: inputs.samplingInputs.outputXYPoints |> E.O.default(10000),
kernelWidth: inputs.samplingInputs.kernelWidth,
pointSetDistLength: inputs.samplingInputs.pointDistLength |> E.O.default(10000),
}
let runNode = (inputs, node) => AST.toLeaf(makeInputs(inputs), inputs.environment, node)
let renderIfNeeded = (inputs: Inputs.inputs, node: ASTTypes.node): result<
ASTTypes.node,
string,
> =>
node |> (
x =>
switch x {
| #Normalize(_) as n
| #SymbolicDist(_) as n =>
#Render(n)
|> runNode(inputs)
|> (
x =>
switch x {
| Ok(#RenderedDist(_)) as r => r
| Error(r) => Error(r)
| _ => Error("Didn't render, but intended to")
}
)
| n => Ok(n)
}
)
let outputToDistPlus = (inputs: Inputs.inputs, pointSetDist: PointSetTypes.pointSetDist) =>
DistPlus.make(~pointSetDist, ~squiggleString=Some(inputs.squiggleString), ())
let rec returnDist = (
functionInfo: (array<string>, ASTTypes.node),
inputs: Inputs.inputs,
env: ASTTypes.environment,
) => {
(input: float) => {
let foo: Inputs.inputs = {...inputs, environment: env}
evaluateFunction(foo, functionInfo, [#SymbolicDist(#Float(input))]) |> E.R.bind(_, a =>
switch a {
| #DistPlus(d) => Ok(DistPlus.T.normalize(d))
| n =>
Js.log2("Error here", n)
Error("wrong type")
}
)
}
}
// TODO: Consider using ExpressionTypes.ExpressionTree.getFloat or similar in this function
and coersionToExportedTypes = (inputs, env: ASTTypes.environment, ex: ASTTypes.node): result<
exportDistribution,
string,
> =>
ex
|> renderIfNeeded(inputs)
|> E.R.bind(_, x =>
switch x {
| #RenderedDist(Discrete({xyShape: {xs: [x], ys: [1.0]}})) => Ok(#Float(x))
| #SymbolicDist(#Float(x)) => Ok(#Float(x))
| #RenderedDist(n) => Ok(#DistPlus(outputToDistPlus(inputs, n)))
| #Function(n) => Ok(#Function(returnDist(n, inputs, env)))
| n => Error("Didn't output a rendered distribution. Format:" ++ AST.toString(n))
}
)
and evaluateFunction = (inputs: Inputs.inputs, fn: (array<string>, ASTTypes.node), fnInputs) => {
let output = AST.runFunction(makeInputs(inputs), inputs.environment, fnInputs, fn)
output |> E.R.bind(_, coersionToExportedTypes(inputs, inputs.environment))
}
let runProgram = (inputs: Inputs.inputs, p: ASTTypes.program) => {
let ins = ref(inputs)
p
|> E.A.fmap(x =>
switch x {
| #Assignment(name, node) =>
ins := addVariable(ins.contents, name, node)
None
| #Expression(node) => Some(runNode(ins.contents, node))
}
)
|> E.A.O.concatSomes
|> E.A.R.firstErrorOrOpen
|> E.R.bind(_, d =>
d
|> E.A.fmap(x => coersionToExportedTypes(inputs, ins.contents.environment, x))
|> E.A.R.firstErrorOrOpen
)
|> E.R.fmap(ex => {
environment: Belt.Map.String.toArray(ins.contents.environment),
exports: ex,
})
}
let inputsToLeaf = (inputs: Inputs.inputs) =>
Parser.fromString(inputs.squiggleString) |> E.R.bind(_, g => runProgram(inputs, g))
}
@genType
let runAll: (string, Inputs.SamplingInputs.t, exportEnv) => result<exportType, string> = (
squiggleString,
samplingInputs,
environment,
) => {
let inputs = Inputs.make(
~samplingInputs,
~squiggleString,
~environment=Belt.Map.String.fromArray(environment),
(),
)
Internals.inputsToLeaf(inputs)
}

View File

@ -15,7 +15,7 @@ type t = expression
*/
let rec toString = expression =>
switch expression {
| T.EBindings(bindings) => "$$bound"
| T.EBindings(_) => "$$bound"
| T.EList(aList) =>
`(${Belt.List.map(aList, aValue => toString(aValue))
->Extra.List.interperse(" ")
@ -119,7 +119,7 @@ let reduceExpression = (expression: t, bindings: T.bindings): result<expressionV
let rec seekMacros = (expression: t, bindings: T.bindings): result<t, 'e> =>
switch expression {
| T.EValue(value) => expression->Ok
| T.EValue(_) => expression->Ok
| T.EList(list) => {
let racc: result<list<t>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
racc,
@ -156,7 +156,7 @@ let reduceExpression = (expression: t, bindings: T.bindings): result<expressionV
)
racc->Result.flatMap(acc => acc->reduceValueList)
}
| T.EBindings(bindings) => RETodo("Cannot return bindings")->Error
| T.EBindings(_) => RETodo("Cannot return bindings")->Error
}
let rExpandedExpression: result<t, 'e> = expression->seekMacros(bindings)

View File

@ -12,7 +12,7 @@ type answer = {"value": unit}
ReScript cannot type-cast bare primitive values passed on their own.
This is why we call evaluate inside JavaScript and wrap the result in an object.
*/
let eval__ = %raw(`function (expr) { return {value: Mathjs.evaluate(expr)}; }`)
let eval__: string => 'a = %raw(`function (expr) { return {value: Mathjs.evaluate(expr)}; }`)
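// Minimal sketch (illustrative; assumes Mathjs is in scope exactly as the %raw above expects):
// even a primitive result comes back boxed, so ReScript can treat it as an `answer`.
let _boxedExample: answer = eval__("1 + 1") // at runtime: {"value": 2}
Js.log(_boxedExample["value"])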
/*
Call MathJs evaluate and return as a variant

View File

@ -126,7 +126,7 @@ module Helpers = {
| Error(err) => GenDistError(ArgumentError(err))
}
}
| Some(EvDistribution(b)) =>
| Some(EvDistribution(_)) =>
switch parseDistributionArray(args) {
| Ok(distributions) => mixtureWithDefaultWeights(distributions)
| Error(err) => GenDistError(ArgumentError(err))
@ -226,7 +226,7 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
Helpers.twoDiststoDistFn(Algebraic, "log", a, GenericDist.fromFloat(10.0))->Some
| ("unaryMinus", [EvDistribution(a)]) =>
Helpers.twoDiststoDistFn(Algebraic, "multiply", a, GenericDist.fromFloat(-1.0))->Some
| (("add" | "multiply" | "subtract" | "divide" | "pow" | "log") as arithmetic, [a, b] as args) =>
| (("add" | "multiply" | "subtract" | "divide" | "pow" | "log") as arithmetic, [_, _] as args) =>
Helpers.catchAndConvertTwoArgsToDists(args)->E.O2.fmap(((fst, snd)) =>
Helpers.twoDiststoDistFn(Algebraic, arithmetic, fst, snd)
)
@ -237,7 +237,7 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
| "dotDivide"
| "dotPow"
| "dotLog") as arithmetic,
[a, b] as args,
[_, _] as args,
) =>
Helpers.catchAndConvertTwoArgsToDists(args)->E.O2.fmap(((fst, snd)) =>
Helpers.twoDiststoDistFn(Pointwise, arithmetic, fst, snd)

View File

@ -28,7 +28,7 @@ module Int = {
}
/* Utils */
module U = {
let isEqual = (a, b) => a == b
let isEqual = \"=="
let toA = a => [a]
let id = e => e
}
@ -340,8 +340,6 @@ module A = {
let reduce = Belt.Array.reduce
let reducei = Belt.Array.reduceWithIndex
let isEmpty = r => length(r) < 1
let min = a => get(a, 0) |> O.fmap(first => Belt.Array.reduce(a, first, (i, j) => i < j ? i : j))
let max = a => get(a, 0) |> O.fmap(first => Belt.Array.reduce(a, first, (i, j) => i > j ? i : j))
let stableSortBy = Belt.SortArray.stableSortBy
let toRanges = (a: array<'a>) =>
switch a |> Belt.Array.length {
@ -448,8 +446,11 @@ module A = {
| (Some(min), Some(max)) => Some(max -. min)
| _ => None
}
let floatCompare: (float, float) => int = compare
let binarySearchFirstElementGreaterIndex = (ar: array<'a>, el: 'a) => {
let el = Belt.SortArray.binarySearchBy(ar, el, compare)
let el = Belt.SortArray.binarySearchBy(ar, el, floatCompare)
let el = el < 0 ? el * -1 - 1 : el
switch el {
| e if e >= length(ar) => #overMax
@ -460,13 +461,13 @@ module A = {
let concat = (t1: array<'a>, t2: array<'a>) => {
let ts = Belt.Array.concat(t1, t2)
ts |> Array.fast_sort(compare)
ts |> Array.fast_sort(floatCompare)
ts
}
let concatMany = (t1: array<array<'a>>) => {
let ts = Belt.Array.concatMany(t1)
ts |> Array.fast_sort(compare)
ts |> Array.fast_sort(floatCompare)
ts
}
@ -525,6 +526,9 @@ module A = {
let diff = (max -. min) /. Belt.Float.fromInt(n - 1)
Belt.Array.makeBy(n, i => min +. Belt.Float.fromInt(i) *. diff)
}
let min = Js.Math.minMany_float
let max = Js.Math.maxMany_float
}
}

View File

@ -9,6 +9,13 @@ type algebraicOperation = [
| #Power
| #Logarithm
]
type convolutionOperation = [
| #Add
| #Multiply
| #Subtract
]
@genType
type pointwiseOperation = [#Add | #Multiply | #Power]
type scaleOperation = [#Multiply | #Power | #Logarithm | #Divide]
@ -20,6 +27,16 @@ type distToFloatOperation = [
| #Sample
]
module Convolution = {
type t = convolutionOperation
let toFn: (t, float, float) => float = x =>
switch x {
| #Add => \"+."
| #Subtract => \"-."
| #Multiply => \"*."
}
}
module Algebraic = {
type t = algebraicOperation
let toFn: (t, float, float) => float = x =>

View File

@ -16,7 +16,7 @@ let create = (relativeHeights: array<float>, ~maximum=?, ()) => {
if E.A.length(relativeHeights) === 0 {
""
} else {
let maximum = maximum->E.O2.default(E.A.max(relativeHeights)->E.O2.toExn(""))
let maximum = maximum->E.O2.default(E.A.Floats.max(relativeHeights))
relativeHeights
->E.A2.fmap(_heightToTickIndex(maximum))

View File

@ -60,8 +60,8 @@ module T = {
module Ts = {
type t = T.ts
let minX = (t: t) => t |> E.A.fmap(T.minX) |> E.A.min |> extImp
let maxX = (t: t) => t |> E.A.fmap(T.maxX) |> E.A.max |> extImp
let minX = (t: t) => t |> E.A.fmap(T.minX) |> E.A.Floats.min
let maxX = (t: t) => t |> E.A.fmap(T.maxX) |> E.A.Floats.max
let equallyDividedXs = (t: t, newLength) => E.A.Floats.range(minX(t), maxX(t), newLength)
let allXs = (t: t) => t |> E.A.fmap(T.xs) |> E.A.Sorted.concatMany
}
@ -199,7 +199,7 @@ module XtoY = {
/* Returns a between-points-interpolating function that can be used with PointwiseCombination.combine.
For discrete distributions, the probability density between points is zero, so we just return zero here. */
let discreteInterpolator: interpolator = (t: T.t, leftIndex: int, x: float) => 0.0
let discreteInterpolator: interpolator = (_: T.t, _: int, _: float) => 0.0
}
module XsConversion = {
@ -220,8 +220,8 @@ module XsConversion = {
module Zipped = {
type zipped = array<(float, float)>
let compareYs = ((_, y1), (_, y2)) => y1 > y2 ? 1 : 0
let compareXs = ((x1, _), (x2, _)) => x1 > x2 ? 1 : 0
let compareYs = ((_, y1): (float, float), (_, y2): (float, float)) => y1 > y2 ? 1 : 0
let compareXs = ((x1, _): (float, float), (x2, _): (float, float)) => x1 > x2 ? 1 : 0
let sortByY = (t: zipped) => t |> E.A.stableSortBy(_, compareYs)
let sortByX = (t: zipped) => t |> E.A.stableSortBy(_, compareXs)
let filterByX = (testFn: float => bool, t: zipped) => t |> E.A.filter(((x, _)) => testFn(x))
@ -229,7 +229,7 @@ module Zipped = {
module PointwiseCombination = {
// t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
let combine = %raw(` // : (float => float => float, T.t, T.t, bool) => T.t
let combine: ((float, float) => float, interpolator, T.t, T.t) => T.t = %raw(`
// This function combines two xyShapes by looping through both of them simultaneously.
// It always moves on to the next smallest x, whether that's in the first or second input's xs,
// and interpolates the value on the other side, thus accumulating xs and ys.

View File

@ -1928,11 +1928,6 @@
resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6"
integrity sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==
"@glennsl/bs-json@^5.0.2":
version "5.0.4"
resolved "https://registry.yarnpkg.com/@glennsl/bs-json/-/bs-json-5.0.4.tgz#8a80906f3b5e04d78dc06722e5987ff6499c89a8"
integrity sha512-Th9DetZjRlMZrb74kgGJ44oWcoFyOTE884WlSuXft0Cd+J09vHRxiB7eVyK7Gthb4cSevsBBJDHYAbGGL25wPw==
"@glennsl/rescript-jest@^0.9.0":
version "0.9.1"
resolved "https://registry.yarnpkg.com/@glennsl/rescript-jest/-/rescript-jest-0.9.1.tgz#a85a6f0e4c3b79010b5a917c3652aa70d374e4d1"
@ -4041,10 +4036,10 @@
resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==
"@types/react-dom@^18.0.0", "@types/react-dom@^18.0.1":
version "18.0.1"
resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.0.1.tgz#cb3cc10ea91141b12c71001fede1017acfbce4db"
integrity sha512-jCwTXvHtRLiyVvKm9aEdHXs8rflVOGd5Sl913JZrPshfXjn8NYsTNZOz70bCsA31IR0TOqwi3ad+X4tSCBoMTw==
"@types/react-dom@^18.0.0", "@types/react-dom@^18.0.2":
version "18.0.2"
resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.0.2.tgz#2d6b46557aa30257e87e67a6d952146d15979d79"
integrity sha512-UxeS+Wtj5bvLRREz9tIgsK4ntCuLDo0EcAcACgw3E+9wE8ePDr9uQpq53MfcyxyIS55xJ+0B6mDS8c4qkkHLBg==
dependencies:
"@types/react" "*"
@ -4082,9 +4077,9 @@
"@types/react" "*"
"@types/react@*", "@types/react@^16.9.19", "@types/react@^18.0.1", "@types/react@^18.0.3":
version "18.0.5"
resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.5.tgz#1a4d4b705ae6af5aed369dec22800b20f89f5301"
integrity sha512-UPxNGInDCIKlfqBrm8LDXYWNfLHwIdisWcsH5GpMyGjhEDLFgTtlRBaoWuCua9HcyuE0rMkmAeZ3FXV1pYLIYQ==
version "18.0.6"
resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.6.tgz#30206c3830af6ce8639b91ace5868bc2d3d1d96c"
integrity sha512-bPqwzJRzKtfI0mVYr5R+1o9BOE8UEXefwc1LwcBtfnaAn6OoqMhLa/91VA8aeWfDPJt1kHvYKI8RHcQybZLHHA==
dependencies:
"@types/prop-types" "*"
"@types/scheduler" "*"
@ -7566,20 +7561,6 @@ dns-packet@^5.2.2:
dependencies:
"@leichtgewicht/ip-codec" "^2.0.1"
docsify@^4.12.2:
version "4.12.2"
resolved "https://registry.yarnpkg.com/docsify/-/docsify-4.12.2.tgz#749115d2ff7d358780ea865e01f4a0162423d67f"
integrity sha512-hpRez5upcvkYigT2zD8P5kH5t9HpSWL8yn/ZU/g04/WfAfxVNW6CPUVOOF1EsQUDxTRuyNTFOb6uUv+tPij3tg==
dependencies:
dompurify "^2.3.1"
marked "^1.2.9"
medium-zoom "^1.0.6"
opencollective-postinstall "^2.0.2"
prismjs "^1.23.0"
strip-indent "^3.0.0"
tinydate "^1.3.0"
tweezer.js "^1.4.0"
doctrine@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d"
@ -7677,11 +7658,6 @@ domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1:
dependencies:
domelementtype "^2.2.0"
dompurify@^2.3.1:
version "2.3.6"
resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.6.tgz#2e019d7d7617aacac07cbbe3d88ae3ad354cf875"
integrity sha512-OFP2u/3T1R5CEgWCEONuJ1a5+MFKnOYpkywpUSxv/dj1LeBT1erK+JwM7zK0ROy2BRhqVCf0LRw/kHqKuMkVGg==
domutils@1.5.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf"
@ -11579,11 +11555,6 @@ markdown-to-jsx@^7.1.3:
resolved "https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-7.1.7.tgz#a5f22102fb12241c8cea1ca6a4050bb76b23a25d"
integrity sha512-VI3TyyHlGkO8uFle0IOibzpO1c1iJDcXcS/zBrQrXQQvJ2tpdwVzVZ7XdKsyRz1NdRmre4dqQkMZzUHaKIG/1w==
marked@^1.2.9:
version "1.2.9"
resolved "https://registry.yarnpkg.com/marked/-/marked-1.2.9.tgz#53786f8b05d4c01a2a5a76b7d1ec9943d29d72dc"
integrity sha512-H8lIX2SvyitGX+TRdtS06m1jHMijKN/XjfH6Ooii9fvxMlh8QdqBfBDkGUpMWH2kQNrtixjzYUa3SH8ROTgRRw==
mathjs@10.5.0:
version "10.5.0"
resolved "https://registry.yarnpkg.com/mathjs/-/mathjs-10.5.0.tgz#f81d0518fe7b4b2a0b85e1125b8ecfc364fb0292"
@ -11666,11 +11637,6 @@ media-typer@0.3.0:
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=
medium-zoom@^1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/medium-zoom/-/medium-zoom-1.0.6.tgz#9247f21ca9313d8bbe9420aca153a410df08d027"
integrity sha512-UdiUWfvz9fZMg1pzf4dcuqA0W079o0mpqbTnOz5ip4VGYX96QjmbM+OgOU/0uOzAytxC0Ny4z+VcYQnhdifimg==
mem@^8.1.1:
version "8.1.1"
resolved "https://registry.yarnpkg.com/mem/-/mem-8.1.1.tgz#cf118b357c65ab7b7e0817bdf00c8062297c0122"
@ -12421,11 +12387,6 @@ open@^8.0.9, open@^8.4.0:
is-docker "^2.1.1"
is-wsl "^2.2.0"
opencollective-postinstall@^2.0.2:
version "2.0.3"
resolved "https://registry.yarnpkg.com/opencollective-postinstall/-/opencollective-postinstall-2.0.3.tgz#7a0fff978f6dbfa4d006238fbac98ed4198c3259"
integrity sha512-8AV/sCtuzUeTo8gQK5qDZzARrulB3egtLzFgteqB2tcT4Mw7B8Kt7JcDHmltjz6FOAHsvTevk70gZEbhM4ZS9Q==
opener@^1.5.2:
version "1.5.2"
resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598"
@ -13595,7 +13556,7 @@ prism-react-renderer@^1.2.1, prism-react-renderer@^1.3.1:
resolved "https://registry.yarnpkg.com/prism-react-renderer/-/prism-react-renderer-1.3.1.tgz#88fc9d0df6bed06ca2b9097421349f8c2f24e30d"
integrity sha512-xUeDMEz074d0zc5y6rxiMp/dlC7C+5IDDlaEUlcBOFE2wddz7hz5PNupb087mPwTt7T9BrFmewObfCBuf/LKwQ==
prismjs@^1.21.0, prismjs@^1.23.0, prismjs@^1.27.0, prismjs@~1.27.0:
prismjs@^1.21.0, prismjs@^1.27.0, prismjs@~1.27.0:
version "1.27.0"
resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.27.0.tgz#bb6ee3138a0b438a3653dd4d6ce0cc6510a45057"
integrity sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==
@ -16479,11 +16440,6 @@ tiny-warning@^1.0.0, tiny-warning@^1.0.3:
resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754"
integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==
tinydate@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/tinydate/-/tinydate-1.3.0.tgz#e6ca8e5a22b51bb4ea1c3a2a4fd1352dbd4c57fb"
integrity sha512-7cR8rLy2QhYHpsBDBVYnnWXm8uRTr38RoZakFSW7Bs7PzfMPNZthuMLkwqZv7MTu8lhQ91cOFYS5a7iFj2oR3w==
tmpl@1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
@ -16686,11 +16642,6 @@ tty-browserify@0.0.0:
resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6"
integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=
tweezer.js@^1.4.0:
version "1.5.0"
resolved "https://registry.yarnpkg.com/tweezer.js/-/tweezer.js-1.5.0.tgz#ca50ac5215022203fd3be4d28617e8e2305f5c0c"
integrity sha512-aSiJz7rGWNAQq7hjMK9ZYDuEawXupcCWgl3woQQSoDP2Oh8O4srWb/uO1PzzHIsrPEOqrjJ2sUb9FERfzuBabQ==
type-check@^0.4.0, type-check@~0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1"