Compare commits


13 Commits

Author | SHA1 | Message | Date
Vyacheslav Matyukhin | 9e2eace05e | Merge pull request #1231 from quantified-uncertainty/project-in-editors ("Project in editors and remove warnings") | 2022-10-14 18:06:49 +03:00
Vyacheslav Matyukhin | a0000cd179 | Merge branch 'develop' into project-in-editors | 2022-10-13 03:36:08 +04:00
Vyacheslav Matyukhin | 56771820aa | Merge pull request #965 from quantified-uncertainty/experiment-10.0rc1 ("bump `rescript` and `@rescript/std` to `10.0.1`") | 2022-10-13 02:25:23 +03:00
Vyacheslav Matyukhin | 33f0647be8 | Merge pull request #1260 from quantified-uncertainty/drop-bisect-ppx ("remove bisect_ppx") | 2022-10-13 02:24:53 +03:00
Vyacheslav Matyukhin | 4cd045b9c8 | format rescript | 2022-10-12 20:11:28 +04:00
Vyacheslav Matyukhin | a617ec0436 | update coverage name in prettierignore | 2022-10-12 20:00:27 +04:00
Vyacheslav Matyukhin | 80cc20ac72 | fix tests | 2022-10-12 19:49:14 +04:00
Vyacheslav Matyukhin | 666524a36a | Merge branch 'drop-bisect-ppx' into experiment-10.0rc1 | 2022-10-12 19:36:23 +04:00
Sam Nolan | 98454a87b5 | Get projects working in Playgrounds | 2022-10-10 14:23:04 +11:00
Sam Nolan | 0f8e7ce6b6 | Project in editors and remove warnings | 2022-10-08 16:23:58 +11:00
Quinn Dougherty | 36c3a93d08 | 10.0.0 | 2022-08-27 10:37:01 +08:00
Quinn Dougherty | f67abe55a8 | Merge remote-tracking branch 'origin/develop' into experiment-10.0rc1 | 2022-08-27 10:33:48 +08:00
Quinn | 07af79adc8 | bump rescript and @rescript/std to 10.0.0-rc.1 | 2022-08-08 14:54:57 -04:00
55 changed files with 957 additions and 746 deletions

View File

@@ -24,7 +24,7 @@ export const Alert: React.FC<{
   children,
 }) => {
   return (
-    <div className={clsx("rounded-md p-4", backgroundColor)}>
+    <div className={clsx("rounded-md p-4", backgroundColor)} role="status">
       <div className="flex">
         <Icon
           className={clsx("h-5 w-5 flex-shrink-0", iconColor)}

View File

@@ -55,10 +55,7 @@ export const CodeEditor: FC<CodeEditorProps> = ({
       editorProps={{
         $blockScrolling: true,
       }}
-      setOptions={{
-        enableBasicAutocompletion: false,
-        enableLiveAutocompletion: false,
-      }}
+      setOptions={{}}
       commands={[
         {
           name: "submit",

View File

@@ -1,5 +1,10 @@
 import * as React from "react";
-import { SqValue, environment, SqProject } from "@quri/squiggle-lang";
+import {
+  SqValue,
+  environment,
+  SqProject,
+  defaultEnvironment,
+} from "@quri/squiggle-lang";
 import { useSquiggle } from "../lib/hooks";
 import { SquiggleViewer } from "./SquiggleViewer";
 import { JsImports } from "../lib/jsImports";
@@ -66,7 +71,6 @@ type ProjectExecutionProps = {
 };
 const defaultOnChange = () => {};
 const defaultImports: JsImports = {};
-const defaultContinues: string[] = [];
 
 export const splitSquiggleChartSettings = (props: SquiggleChartProps) => {
   const {
@@ -120,24 +124,15 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
       width,
       height = 200,
       enableLocalSettings = false,
-      continues = defaultContinues,
+      continues,
+      project,
+      environment,
     } = props;
 
-    const p = React.useMemo(() => {
-      if (props.project) {
-        return props.project;
-      } else {
-        const p = SqProject.create();
-        if (props.environment) {
-          p.setEnvironment(props.environment);
-        }
-        return p;
-      }
-    }, [props.project, props.environment]);
-
     const resultAndBindings = useSquiggle({
+      environment,
       continues,
-      project: p,
+      project,
       code,
       jsImports,
       onChange,
@@ -153,7 +148,9 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
         height={height}
         distributionPlotSettings={distributionPlotSettings}
         chartSettings={chartSettings}
-        environment={p.getEnvironment()}
+        environment={
+          project ? project.getEnvironment() : environment ?? defaultEnvironment
+        }
         enableLocalSettings={enableLocalSettings}
       />
     );
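The hunk above changes how SquiggleChart resolves its environment: instead of building a throwaway project just to read an environment back out, it now derives the value directly from its props. A minimal TypeScript sketch of the precedence this encodes, using `defaultEnvironment` from the diff's new import (the `declare`d values stand in for the component's props and are illustrative only):

```ts
import {
  SqProject,
  defaultEnvironment,
  environment,
} from "@quri/squiggle-lang";

// Stand-ins for the component's props (illustration only).
declare const project: SqProject | undefined;
declare const explicitEnvironment: environment | undefined;

// Precedence used by the viewer after this change:
// 1. the environment of an explicitly passed project,
// 2. the `environment` prop,
// 3. the library default.
export const resolvedEnvironment = project
  ? project.getEnvironment()
  : explicitEnvironment ?? defaultEnvironment;
```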

View File

@@ -54,17 +54,13 @@ export const SquiggleEditor: React.FC<SquiggleEditorProps> = (props) => {
     width,
     height = 200,
     enableLocalSettings = false,
+    continues,
+    project,
   } = props;
 
-  const project = React.useMemo(() => {
-    const p = SqProject.create();
-    if (environment) {
-      p.setEnvironment(environment);
-    }
-    return p;
-  }, [environment]);
-
   const resultAndBindings = useSquiggle({
+    environment,
+    continues,
     code,
     project,
     jsImports,

View File

@@ -251,6 +251,8 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
   onSettingsChange,
   showEditor = true,
   showShareButton = false,
+  continues,
+  project,
 }) => {
   const [code, setCode] = useMaybeControlledValue({
     value: controlledCode,
@@ -305,15 +307,9 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
     executionId,
   } = useRunnerState(code);
 
-  const project = React.useMemo(() => {
-    const p = SqProject.create();
-    if (environment) {
-      p.setEnvironment(environment);
-    }
-    return p;
-  }, [environment]);
-
   const resultAndBindings = useSquiggle({
+    environment,
+    continues,
     code: renderedCode,
     project,
     jsImports: imports,

View File

@@ -45,7 +45,7 @@ export const VariableBox: React.FC<VariableBoxProps> = ({
       : location.path.items[location.path.items.length - 1];
 
   return (
-    <div>
+    <div role={isTopLevel ? "status" : undefined}>
       <header className="inline-flex space-x-1">
         <Tooltip text={heading}>
           <span
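Both Alert and the top-level VariableBox now carry role="status", which is what lets the new tests further down await a rendered result by ARIA role instead of reaching into the DOM. A hedged Testing Library sketch of that pattern (the package name in the import is an assumption; the test file in this diff imports from "../src/index"):

```tsx
import React from "react";
import { render, screen } from "@testing-library/react";
import "@testing-library/jest-dom";
import { SquigglePlayground } from "@quri/squiggle-components"; // assumed package name

test("result can be awaited via its ARIA role", async () => {
  render(<SquigglePlayground code={"1 + 1"} />);
  // The viewer's top-level node now has role="status", so the test can wait
  // for the asynchronously rendered result by role.
  expect(await screen.findByRole("status")).toHaveTextContent("2");
});
```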

View File

@@ -4,16 +4,18 @@ import {
   SqProject,
   SqRecord,
   SqValue,
+  environment,
 } from "@quri/squiggle-lang";
 import { useEffect, useMemo } from "react";
 import { JsImports, jsImportsToSquiggleCode } from "../jsImports";
 import * as uuid from "uuid";
 
 type SquiggleArgs = {
+  environment?: environment;
   code: string;
   executionId?: number;
   jsImports?: JsImports;
-  project: SqProject;
+  project?: SqProject;
   continues?: string[];
   onChange?: (expr: SqValue | undefined, sourceName: string) => void;
 };
@@ -27,15 +29,25 @@ const importSourceName = (sourceName: string) => "imports-" + sourceName;
 const defaultContinues = [];
 
 export const useSquiggle = (args: SquiggleArgs): ResultAndBindings => {
+  const project = useMemo(() => {
+    if (args.project) {
+      return args.project;
+    } else {
+      const p = SqProject.create();
+      if (args.environment) {
+        p.setEnvironment(args.environment);
+      }
+      return p;
+    }
+  }, [args.project, args.environment]);
+
   const sourceName = useMemo(() => uuid.v4(), []);
-  const env = args.project.getEnvironment();
+  const env = project.getEnvironment();
   const continues = args.continues || defaultContinues;
 
   const result = useMemo(
     () => {
-      const project = args.project;
       project.setSource(sourceName, args.code);
       let fullContinues = continues;
       if (args.jsImports && Object.keys(args.jsImports).length) {
@@ -59,7 +71,7 @@ export const useSquiggle = (args: SquiggleArgs): ResultAndBindings => {
       args.executionId,
       sourceName,
       continues,
-      args.project,
+      project,
       env,
     ]
   );
@@ -75,11 +87,11 @@ export const useSquiggle = (args: SquiggleArgs): ResultAndBindings => {
 
   useEffect(() => {
     return () => {
-      args.project.removeSource(sourceName);
-      if (args.project.getSource(importSourceName(sourceName)))
-        args.project.removeSource(importSourceName(sourceName));
+      project.removeSource(sourceName);
+      if (project.getSource(importSourceName(sourceName)))
+        project.removeSource(importSourceName(sourceName));
     };
-  }, [args.project, sourceName]);
+  }, [project, sourceName]);
 
   return result;
 };
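With `project` now optional, useSquiggle falls back to creating a private SqProject per hook instance (seeded with `args.environment` when given), so components that do not pass a project stay isolated from one another; sharing state is opt-in. A sketch of the opt-in path, assuming the package name and the "constants" source id for illustration:

```tsx
import React from "react";
import { SqProject } from "@quri/squiggle-lang";
import { SquiggleEditor } from "@quri/squiggle-components"; // assumed package name

// One shared project with a named source that other editors can continue from.
const project = SqProject.create();
project.setSource("constants", "x = 1");

export const SharedEditor: React.FC = () => (
  // `continues` evaluates this editor's source on top of "constants",
  // so `x` is in scope here.
  <SquiggleEditor code={"x + 1"} project={project} continues={["constants"]} />
);
```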

View File

@@ -1,9 +1,14 @@
-import { render } from "@testing-library/react";
+import { render, screen } from "@testing-library/react";
 import React from "react";
 import "@testing-library/jest-dom";
-import { SquiggleChart } from "../src/index";
+import {
+  SquiggleChart,
+  SquiggleEditor,
+  SquigglePlayground,
+} from "../src/index";
+import { SqProject } from "@quri/squiggle-lang";
 
-test("Logs nothing on render", async () => {
+test("Chart logs nothing on render", async () => {
   const { unmount } = render(<SquiggleChart code={"normal(0, 1)"} />);
   unmount();
@@ -11,3 +16,38 @@ test("Logs nothing on render", async () => {
   expect(console.warn).not.toBeCalled();
   expect(console.error).not.toBeCalled();
 });
+
+test("Editor logs nothing on render", async () => {
+  const { unmount } = render(<SquiggleEditor code={"normal(0, 1)"} />);
+  unmount();
+  expect(console.log).not.toBeCalled();
+  expect(console.warn).not.toBeCalled();
+  expect(console.error).not.toBeCalled();
+});
+
+test("Project dependencies work in editors", async () => {
+  const project = SqProject.create();
+  render(<SquiggleEditor code={"x = 1"} project={project} />);
+  const source = project.getSourceIds()[0];
+  const { container } = render(
+    <SquiggleEditor code={"x + 1"} project={project} continues={[source]} />
+  );
+  expect(container).toHaveTextContent("2");
+});
+
+test("Project dependencies work in playgrounds", async () => {
+  const project = SqProject.create();
+  project.setSource("depend", "x = 1");
+  render(
+    <SquigglePlayground
+      code={"x + 1"}
+      project={project}
+      continues={["depend"]}
+    />
+  );
+  // We must await here because SquigglePlayground loads results asynchronously
+  expect(await screen.findByRole("status")).toHaveTextContent("2");
+});
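The same wiring works outside of tests. A hypothetical application-side version of the playground test above (the package name and the "prelude" source id are made up for illustration):

```tsx
import React from "react";
import { SqProject } from "@quri/squiggle-lang";
import { SquigglePlayground } from "@quri/squiggle-components"; // assumed package name

// Preload shared definitions, then let the playground's code extend them.
const project = SqProject.create();
project.setSource("prelude", "discountRate = 0.03");

export const PlaygroundWithPrelude: React.FC = () => (
  <SquigglePlayground
    code={"1 / (1 + discountRate)"}
    project={project}
    continues={["prelude"]}
  />
);
```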

View File

@@ -3,7 +3,7 @@ lib
 *.bs.js
 *.gen.tsx
 .nyc_output/
-_coverage/
+coverage/
 .cache/
 Reducer_Peggy_GeneratedParser.js
 ReducerProject_IncludeParser.js

View File

@@ -32,7 +32,10 @@ describe("dotSubtract", () => {
   */
   Skip.test("mean of normal minus exponential (property)", () => {
     assert_(
-      property2(float_(), floatRange(1e-5, 1e5), (mean, rate) => {
+      property2(
+        float_(),
+        floatRange(1e-5, 1e5),
+        (mean, rate) => {
         // We limit ourselves to stdev=1 so that the integral is trivial
         let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
           ~env,
@@ -50,7 +53,8 @@ describe("dotSubtract", () => {
       | Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
       | Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
       }
-      }),
+        },
+      ),
     )
     pass
   })

View File

@ -40,7 +40,9 @@ let algebraicPower = algebraicPower(~env)
describe("(Algebraic) addition of distributions", () => { describe("(Algebraic) addition of distributions", () => {
describe("mean", () => { describe("mean", () => {
test("normal(mean=5) + normal(mean=20)", () => { test(
"normal(mean=5) + normal(mean=20)",
() => {
normalDist5 normalDist5
->algebraicAdd(normalDist20) ->algebraicAdd(normalDist20)
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean) ->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
@ -49,9 +51,12 @@ describe("(Algebraic) addition of distributions", () => {
->E.R.toExn("Expected float", _) ->E.R.toExn("Expected float", _)
->expect ->expect
->toBe(Some(2.5e1)) ->toBe(Some(2.5e1))
}) },
)
test("uniform(low=9, high=10) + beta(alpha=2, beta=5)", () => { test(
"uniform(low=9, high=10) + beta(alpha=2, beta=5)",
() => {
// let uniformMean = (9.0 +. 10.0) /. 2.0 // let uniformMean = (9.0 +. 10.0) /. 2.0
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0) // let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
let received = let received =
@ -67,8 +72,11 @@ describe("(Algebraic) addition of distributions", () => {
// sometimes it works with ~digits=2. // sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean) | Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean)
} }
}) },
test("beta(alpha=2, beta=5) + uniform(low=9, high=10)", () => { )
test(
"beta(alpha=2, beta=5) + uniform(low=9, high=10)",
() => {
// let uniformMean = (9.0 +. 10.0) /. 2.0 // let uniformMean = (9.0 +. 10.0) /. 2.0
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0) // let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
let received = let received =
@ -84,7 +92,8 @@ describe("(Algebraic) addition of distributions", () => {
// sometimes it works with ~digits=2. // sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean) | Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean)
} }
}) },
)
}) })
describe("pdf", () => { describe("pdf", () => {
// TEST IS WRONG. SEE STDEV ADDITION EXPRESSION. // TEST IS WRONG. SEE STDEV ADDITION EXPRESSION.
@ -122,7 +131,9 @@ describe("(Algebraic) addition of distributions", () => {
} }
}, },
) )
test("(normal(mean=10) + normal(mean=10)).pdf(1.9e1)", () => { test(
"(normal(mean=10) + normal(mean=10)).pdf(1.9e1)",
() => {
let received = let received =
normalDist20 normalDist20
->Ok ->Ok
@ -150,8 +161,11 @@ describe("(Algebraic) addition of distributions", () => {
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1) | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
} }
} }
}) },
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)", () => { )
test(
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)",
() => {
let received = let received =
uniformDist uniformDist
->algebraicAdd(betaDist) ->algebraicAdd(betaDist)
@ -166,8 +180,11 @@ describe("(Algebraic) addition of distributions", () => {
// This value was calculated by a python script // This value was calculated by a python script
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0) | Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
} }
}) },
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)", () => { )
test(
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)",
() => {
let received = let received =
betaDist betaDist
->algebraicAdd(uniformDist) ->algebraicAdd(uniformDist)
@ -180,10 +197,14 @@ describe("(Algebraic) addition of distributions", () => {
// This is nondeterministic. // This is nondeterministic.
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0) | Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
} }
}) },
)
}) })
describe("cdf", () => { describe("cdf", () => {
testAll("(normal(mean=5) + normal(mean=5)).cdf (imprecise)", list{6e0, 8e0, 1e1, 1.2e1}, x => { testAll(
"(normal(mean=5) + normal(mean=5)).cdf (imprecise)",
list{6e0, 8e0, 1e1, 1.2e1},
x => {
let received = let received =
normalDist10 normalDist10
->Ok ->Ok
@ -212,8 +233,11 @@ describe("(Algebraic) addition of distributions", () => {
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0) | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
} }
} }
}) },
test("(normal(mean=10) + normal(mean=10)).cdf(1.25e1)", () => { )
test(
"(normal(mean=10) + normal(mean=10)).cdf(1.25e1)",
() => {
let received = let received =
normalDist20 normalDist20
->Ok ->Ok
@ -241,8 +265,11 @@ describe("(Algebraic) addition of distributions", () => {
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2) | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
} }
} }
}) },
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)", () => { )
test(
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)",
() => {
let received = let received =
uniformDist uniformDist
->algebraicAdd(betaDist) ->algebraicAdd(betaDist)
@ -256,8 +283,11 @@ describe("(Algebraic) addition of distributions", () => {
// The value was calculated externally using a python script // The value was calculated externally using a python script
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1) | Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
} }
}) },
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)", () => { )
test(
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)",
() => {
let received = let received =
betaDist betaDist
->algebraicAdd(uniformDist) ->algebraicAdd(uniformDist)
@ -271,11 +301,15 @@ describe("(Algebraic) addition of distributions", () => {
// The value was calculated externally using a python script // The value was calculated externally using a python script
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1) | Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
} }
}) },
)
}) })
describe("inv", () => { describe("inv", () => {
testAll("(normal(mean=5) + normal(mean=5)).inv (imprecise)", list{5e-2, 4.2e-3, 9e-3}, x => { testAll(
"(normal(mean=5) + normal(mean=5)).inv (imprecise)",
list{5e-2, 4.2e-3, 9e-3},
x => {
let received = let received =
normalDist10 normalDist10
->Ok ->Ok
@ -304,8 +338,11 @@ describe("(Algebraic) addition of distributions", () => {
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1) | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
} }
} }
}) },
test("(normal(mean=10) + normal(mean=10)).inv(1e-1)", () => { )
test(
"(normal(mean=10) + normal(mean=10)).inv(1e-1)",
() => {
let received = let received =
normalDist20 normalDist20
->Ok ->Ok
@ -333,8 +370,11 @@ describe("(Algebraic) addition of distributions", () => {
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1) | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
} }
} }
}) },
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)", () => { )
test(
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)",
() => {
let received = let received =
uniformDist uniformDist
->algebraicAdd(betaDist) ->algebraicAdd(betaDist)
@ -348,8 +388,11 @@ describe("(Algebraic) addition of distributions", () => {
// sometimes it works with ~digits=2. // sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0) | Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0)
} }
}) },
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)", () => { )
test(
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)",
() => {
let received = let received =
betaDist betaDist
->algebraicAdd(uniformDist) ->algebraicAdd(uniformDist)
@ -363,6 +406,7 @@ describe("(Algebraic) addition of distributions", () => {
// sometimes it works with ~digits=2. // sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0) | Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0)
} }
}) },
)
}) })
}) })

View File

@ -87,14 +87,22 @@ describe("Means are invariant", () => {
let testAddInvariant = (t1, t2) => let testAddInvariant = (t1, t2) =>
E.R.liftM2(testAdditionMean, t1, t2)->E.R.toExn("Means were not invariant", _) E.R.liftM2(testAdditionMean, t1, t2)->E.R.toExn("Means were not invariant", _)
testAll("with two of the same distribution", distributions, dist => { testAll(
"with two of the same distribution",
distributions,
dist => {
testAddInvariant(dist, dist) testAddInvariant(dist, dist)
}) },
)
testAll("with two different distributions", pairsOfDifferentDistributions, dists => { testAll(
"with two different distributions",
pairsOfDifferentDistributions,
dists => {
let (dist1, dist2) = dists let (dist1, dist2) = dists
testAddInvariant(dist1, dist2) testAddInvariant(dist1, dist2)
}) },
)
testAll( testAll(
"with two different distributions in swapped order", "with two different distributions in swapped order",
@ -116,14 +124,22 @@ describe("Means are invariant", () => {
let testSubtractInvariant = (t1, t2) => let testSubtractInvariant = (t1, t2) =>
E.R.liftM2(testSubtractionMean, t1, t2)->E.R.toExn("Means were not invariant", _) E.R.liftM2(testSubtractionMean, t1, t2)->E.R.toExn("Means were not invariant", _)
testAll("with two of the same distribution", distributions, dist => { testAll(
"with two of the same distribution",
distributions,
dist => {
testSubtractInvariant(dist, dist) testSubtractInvariant(dist, dist)
}) },
)
testAll("with two different distributions", pairsOfDifferentDistributions, dists => { testAll(
"with two different distributions",
pairsOfDifferentDistributions,
dists => {
let (dist1, dist2) = dists let (dist1, dist2) = dists
testSubtractInvariant(dist1, dist2) testSubtractInvariant(dist1, dist2)
}) },
)
testAll( testAll(
"with two different distributions in swapped order", "with two different distributions in swapped order",
@ -145,14 +161,22 @@ describe("Means are invariant", () => {
let testMultiplicationInvariant = (t1, t2) => let testMultiplicationInvariant = (t1, t2) =>
E.R.liftM2(testMultiplicationMean, t1, t2)->E.R.toExn("Means were not invariant", _) E.R.liftM2(testMultiplicationMean, t1, t2)->E.R.toExn("Means were not invariant", _)
testAll("with two of the same distribution", distributions, dist => { testAll(
"with two of the same distribution",
distributions,
dist => {
testMultiplicationInvariant(dist, dist) testMultiplicationInvariant(dist, dist)
}) },
)
testAll("with two different distributions", pairsOfDifferentDistributions, dists => { testAll(
"with two different distributions",
pairsOfDifferentDistributions,
dists => {
let (dist1, dist2) = dists let (dist1, dist2) = dists
testMultiplicationInvariant(dist1, dist2) testMultiplicationInvariant(dist1, dist2)
}) },
)
testAll( testAll(
"with two different distributions in swapped order", "with two different distributions in swapped order",

View File

@ -17,10 +17,9 @@ describe("klDivergence: continuous -> continuous -> float", () => {
let answer = let answer =
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s)) uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
let prediction = let prediction =
uniformMakeR( uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(
lowPrediction, s => DistributionTypes.ArgumentError(s),
highPrediction, )
)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
// integral along the support of the answer of answer.pdf(x) times log of prediction.pdf(x) divided by answer.pdf(x) dx // integral along the support of the answer of answer.pdf(x) times log of prediction.pdf(x) divided by answer.pdf(x) dx
let analyticalKl = Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer)) let analyticalKl = Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))
let kl = E.R.liftJoin2(klDivergence, prediction, answer) let kl = E.R.liftJoin2(klDivergence, prediction, answer)
@ -183,9 +182,9 @@ describe("combineAlongSupportOfSecondArgument0", () => {
let answer = let answer =
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s)) uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
let prediction = let prediction =
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError( uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(
s, s => DistributionTypes.ArgumentError(s),
)) )
let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer) let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction) let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)

View File

@ -3,7 +3,7 @@ open Expect
open TestHelpers open TestHelpers
// TODO: use Normal.make (but preferably after teh new validation dispatch is in) // TODO: use Normal.make (but preferably after teh new validation dispatch is in)
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev})) let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean, stdev}))
describe("(Symbolic) normalize", () => { describe("(Symbolic) normalize", () => {
testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => { testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
@ -47,10 +47,7 @@ describe("(Symbolic) mean", () => {
tup => { tup => {
let (low, medium, high) = tup let (low, medium, high) = tup
let meanValue = run( let meanValue = run(
FromDist( FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Triangular({low, medium, high}))),
#ToFloat(#Mean),
DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
),
) )
meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/ meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
}, },
@ -63,7 +60,7 @@ describe("(Symbolic) mean", () => {
tup => { tup => {
let (alpha, beta) = tup let (alpha, beta) = tup
let meanValue = run( let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))), FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha, beta}))),
) )
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
}, },
@ -84,8 +81,8 @@ describe("(Symbolic) mean", () => {
let (mean, stdev) = tup let (mean, stdev) = tup
let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev) let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
let meanValue = let meanValue =
betaDistribution->E.R2.fmap(d => betaDistribution->E.R2.fmap(
run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic)) d => run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic)),
) )
switch meanValue { switch meanValue {
| Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean) | Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)
@ -100,7 +97,7 @@ describe("(Symbolic) mean", () => {
tup => { tup => {
let (mu, sigma) = tup let (mu, sigma) = tup
let meanValue = run( let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))), FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu, sigma}))),
) )
meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/ meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
}, },
@ -112,7 +109,7 @@ describe("(Symbolic) mean", () => {
tup => { tup => {
let (low, high) = tup let (low, high) = tup
let meanValue = run( let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))), FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low, high}))),
) )
meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
}, },

View File

@ -33,12 +33,18 @@ describe("Bindings", () => {
let value2 = Reducer_T.IEvNumber(5.) let value2 = Reducer_T.IEvNumber(5.)
let extendedBindings = bindings->Bindings.extend->Bindings.set("value", value2) let extendedBindings = bindings->Bindings.extend->Bindings.set("value", value2)
test("get on extended", () => { test(
"get on extended",
() => {
expect(extendedBindings->Bindings.get("value")) == Some(value2) expect(extendedBindings->Bindings.get("value")) == Some(value2)
}) },
)
test("get on original", () => { test(
"get on original",
() => {
expect(bindings->Bindings.get("value")) == Some(value) expect(bindings->Bindings.get("value")) == Some(value)
}) },
)
}) })
}) })

View File

@ -40,14 +40,23 @@ describe("Namespace", () => {
let nsMerged = Namespace.mergeMany([ns, ns1, ns2]) let nsMerged = Namespace.mergeMany([ns, ns1, ns2])
test("merge many 1", () => { test(
"merge many 1",
() => {
expect(nsMerged->Namespace.get("x1")) == Some(x1) expect(nsMerged->Namespace.get("x1")) == Some(x1)
}) },
test("merge many 2", () => { )
test(
"merge many 2",
() => {
expect(nsMerged->Namespace.get("x4")) == Some(x4) expect(nsMerged->Namespace.get("x4")) == Some(x4)
}) },
test("merge many 3", () => { )
test(
"merge many 3",
() => {
expect(nsMerged->Namespace.get("value")) == Some(value) expect(nsMerged->Namespace.get("value")) == Some(value)
}) },
)
}) })
}) })

View File

@ -75,7 +75,9 @@ describe("Peggy to Expression", () => {
testToExpression("false ? 1 : 0", "false ? (1) : (0)", ~v="0", ()) testToExpression("false ? 1 : 0", "false ? (1) : (0)", ~v="0", ())
testToExpression("true ? 1 : false ? 2 : 0", "true ? (1) : (false ? (2) : (0))", ~v="1", ()) // nested ternary testToExpression("true ? 1 : false ? 2 : 0", "true ? (1) : (false ? (2) : (0))", ~v="1", ()) // nested ternary
testToExpression("false ? 1 : false ? 2 : 0", "false ? (1) : (false ? (2) : (0))", ~v="0", ()) // nested ternary testToExpression("false ? 1 : false ? 2 : 0", "false ? (1) : (false ? (2) : (0))", ~v="0", ()) // nested ternary
describe("ternary bindings", () => { describe(
"ternary bindings",
() => {
testToExpression( testToExpression(
// expression binding // expression binding
"f(a) = a > 5 ? 1 : 0; f(6)", "f(a) = a > 5 ? 1 : 0; f(6)",
@ -97,7 +99,8 @@ describe("Peggy to Expression", () => {
~v="6", ~v="6",
(), (),
) )
}) },
)
}) })
describe("if then else", () => { describe("if then else", () => {

View File

@@ -22,7 +22,7 @@ let expectEvalError = (code: string) =>
   Expression.BackCompatible.evaluateString(code)
   ->Reducer_Value.toStringResult
   ->expect
-  ->toMatch("Error\(")
+  ->toMatch("Error\\(")
 
 let testParseToBe = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))
 let testDescriptionParseToBe = (desc, expr, answer) =>

View File

@ -37,14 +37,16 @@ describe("eval", () => {
test("index", () => expectEvalToBe("r = {a: 1}; r.a", "Ok(1)")) test("index", () => expectEvalToBe("r = {a: 1}; r.a", "Ok(1)"))
test("index", () => expectEvalToBe("r = {a: 1}; r.b", "Error(Record property not found: b)")) test("index", () => expectEvalToBe("r = {a: 1}; r.b", "Error(Record property not found: b)"))
testEvalError("{a: 1}.b") // invalid syntax testEvalError("{a: 1}.b") // invalid syntax
test("always the same property ending", () => test(
"always the same property ending",
() =>
expectEvalToBe( expectEvalToBe(
`{ `{
a: 1, a: 1,
b: 2, b: 2,
}`, }`,
"Ok({a: 1,b: 2})", "Ok({a: 1,b: 2})",
) ),
) )
}) })

View File

@ -11,7 +11,9 @@ describe("ReducerProject Tutorial", () => {
/* /*
Case "Running a single source". Case "Running a single source".
*/ */
test("run", () => { test(
"run",
() => {
/* Let's start with running a single source and getting Result as well as the Bindings /* Let's start with running a single source and getting Result as well as the Bindings
First you need to create a project. A project is a collection of sources. First you need to create a project. A project is a collection of sources.
Project takes care of the dependencies between the sources, correct compilation and run order. Project takes care of the dependencies between the sources, correct compilation and run order.
@ -51,9 +53,12 @@ Case "Running a single source".
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect == (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
("Ok(3)", "{}") ("Ok(3)", "{}")
/* You've got 3 with empty bindings. */ /* You've got 3 with empty bindings. */
}) },
)
test("run summary", () => { test(
"run summary",
() => {
let project = Project.createProject() let project = Project.createProject()
project->Project.setSource("main", "1 + 2") project->Project.setSource("main", "1 + 2")
project->Project.runAll project->Project.runAll
@ -64,9 +69,12 @@ Case "Running a single source".
result->Reducer_Value.toStringResult, result->Reducer_Value.toStringResult,
bindings->Reducer_T.IEvRecord->Reducer_Value.toString, bindings->Reducer_T.IEvRecord->Reducer_Value.toString,
)->expect == ("Ok(3)", "{}") )->expect == ("Ok(3)", "{}")
}) },
)
test("run with an environment", () => { test(
"run with an environment",
() => {
/* Running the source code like above allows you to set a custom environment */ /* Running the source code like above allows you to set a custom environment */
let project = Project.createProject() let project = Project.createProject()
@ -78,15 +86,19 @@ Case "Running a single source".
let result = project->Project.getResult("main") let result = project->Project.getResult("main")
let _bindings = project->Project.getBindings("main") let _bindings = project->Project.getBindings("main")
result->Reducer_Value.toStringResult->expect == "Ok(3)" result->Reducer_Value.toStringResult->expect == "Ok(3)"
}) },
)
test("shortcut", () => { test(
"shortcut",
() => {
/* If you are running single source without includes and you don't need a custom environment, you can use the shortcut. */ /* If you are running single source without includes and you don't need a custom environment, you can use the shortcut. */
/* Examples above was to prepare you for the multi source tutorial. */ /* Examples above was to prepare you for the multi source tutorial. */
let (result, bindings) = Project.evaluate("1+2") let (result, bindings) = Project.evaluate("1+2")
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect == (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
("Ok(3)", "{}") ("Ok(3)", "{}")
}) },
)
}) })
}) })

View File

@ -10,7 +10,9 @@ describe("ReducerProject Tutorial", () => {
describe("Multi source", () => { describe("Multi source", () => {
/* /*
Case "Running multiple sources" */ Case "Running multiple sources" */
test("Chaining", () => { test(
"Chaining",
() => {
let project = Project.createProject() let project = Project.createProject()
/* This time let's add 3 sources and chain them together */ /* This time let's add 3 sources and chain them together */
project->Project.setSource("source1", "x=1") project->Project.setSource("source1", "x=1")
@ -32,9 +34,12 @@ describe("ReducerProject Tutorial", () => {
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect == (result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
("Ok(())", "{z: 3}") ("Ok(())", "{z: 3}")
}) },
)
test("Depending", () => { test(
"Depending",
() => {
/* Instead of chaining the sources, we could have a dependency tree */ /* Instead of chaining the sources, we could have a dependency tree */
/* The point here is that any source can depend on multiple sources */ /* The point here is that any source can depend on multiple sources */
let project = Project.createProject() let project = Project.createProject()
@ -56,9 +61,12 @@ describe("ReducerProject Tutorial", () => {
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect == (result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
("Ok(())", "{z: 3}") ("Ok(())", "{z: 3}")
}) },
)
test("Intro to including", () => { test(
"Intro to including",
() => {
/* Though it would not be practical for a storybook, /* Though it would not be practical for a storybook,
let's write the same project above with includes. let's write the same project above with includes.
You will see that parsing includes is setting the dependencies the same way as before. */ You will see that parsing includes is setting the dependencies the same way as before. */
@ -99,6 +107,7 @@ describe("ReducerProject Tutorial", () => {
- And the depended source1 and source2 is not already there in the project - And the depended source1 and source2 is not already there in the project
- If you knew the includes before hand there would not be point of the include directive. - If you knew the includes before hand there would not be point of the include directive.
More on those on the next section. */ More on those on the next section. */
}) },
)
}) })
}) })

View File

@ -24,15 +24,20 @@ Here we will finally proceed to a real life scenario. */
) )
/* We need to parse includes after changing the source */ /* We need to parse includes after changing the source */
project->Project.parseIncludes("main") project->Project.parseIncludes("main")
test("getDependencies", () => { test(
"getDependencies",
() => {
/* Parse includes has set the dependencies */ /* Parse includes has set the dependencies */
project->Project.getDependencies("main")->expect == ["common"] project->Project.getDependencies("main")->expect == ["common"]
/* If there were no includes than there would be no dependencies */ /* If there were no includes than there would be no dependencies */
/* However if there was a syntax error at includes then would be no dependencies also */ /* However if there was a syntax error at includes then would be no dependencies also */
/* Therefore looking at dependencies is not the right way to load includes */ /* Therefore looking at dependencies is not the right way to load includes */
/* getDependencies does not distinguish between setContinues or parseIncludes */ /* getDependencies does not distinguish between setContinues or parseIncludes */
}) },
test("getIncludes", () => { )
test(
"getIncludes",
() => {
/* Parse includes has set the includes */ /* Parse includes has set the includes */
switch project->Project.getIncludes("main") { switch project->Project.getIncludes("main") {
| Ok(includes) => includes->expect == ["common"] | Ok(includes) => includes->expect == ["common"]
@ -42,17 +47,23 @@ Here we will finally proceed to a real life scenario. */
Otherwise you get the includes. Otherwise you get the includes.
If there is no syntax error then you can load that file and use setSource to add it to the project. If there is no syntax error then you can load that file and use setSource to add it to the project.
And so on recursively... */ And so on recursively... */
}) },
test("getDependents", () => { )
test(
"getDependents",
() => {
/* For any reason, you are able to query what other sources /* For any reason, you are able to query what other sources
include or depend on the current source. include or depend on the current source.
But you don't need to use this to execute the projects. But you don't need to use this to execute the projects.
It is provided for completeness of information. */ It is provided for completeness of information. */
project->Project.getDependents("main")->expect == [] project->Project.getDependents("main")->expect == []
/* Nothing is depending on or including main */ /* Nothing is depending on or including main */
}) },
)
describe("Real Like", () => { describe(
"Real Like",
() => {
/* Now let's look at recursive and possibly cyclic includes */ /* Now let's look at recursive and possibly cyclic includes */
/* There is no function provided to load the include files. /* There is no function provided to load the include files.
Because we have no idea if will it be an ordinary function or will it use promises. Because we have no idea if will it be an ordinary function or will it use promises.
@ -88,7 +99,8 @@ Here we will finally proceed to a real life scenario. */
| Error(err) => err->SqError.toString->Js.Exn.raiseError | Error(err) => err->SqError.toString->Js.Exn.raiseError
| Ok(includes) => | Ok(includes) =>
includes->Belt.Array.forEach(newIncludeName => { includes->Belt.Array.forEach(
newIncludeName => {
/* We have got one of the new includes. /* We have got one of the new includes.
Let's load it and add it to the project */ Let's load it and add it to the project */
let newSource = loadSource(newIncludeName) let newSource = loadSource(newIncludeName)
@ -97,7 +109,8 @@ Here we will finally proceed to a real life scenario. */
/* Of course the new source might have includes too. */ /* Of course the new source might have includes too. */
/* Let's recursively load them */ /* Let's recursively load them */
project->loadIncludesRecursively(newIncludeName, newVisited) project->loadIncludesRecursively(newIncludeName, newVisited)
}) },
)
} }
} }
} }
@ -143,12 +156,18 @@ Here we will finally proceed to a real life scenario. */
let result = project->Project.getResult("main") let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main") let bindings = project->Project.getBindings("main")
/* And see the result and bindings.. */ /* And see the result and bindings.. */
test("recursive includes", () => { test(
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect == "recursive includes",
("Ok(6)", "{a: 6,b: 2}") () => {
(
result->Reducer_Value.toStringResult,
bindings->Reducer_Value.toStringRecord,
)->expect == ("Ok(6)", "{a: 6,b: 2}")
/* Everything as expected */ /* Everything as expected */
}) },
}) )
},
)
}) })
describe("Includes myFile as myVariable", () => { describe("Includes myFile as myVariable", () => {
@ -163,14 +182,20 @@ Here we will finally proceed to a real life scenario. */
`, `,
) )
Project.parseIncludes(project, "main") Project.parseIncludes(project, "main")
test("getDependencies", () => { test(
"getDependencies",
() => {
Project.getDependencies(project, "main")->expect == ["common"] Project.getDependencies(project, "main")->expect == ["common"]
}) },
test("getIncludes", () => { )
test(
"getIncludes",
() => {
switch Project.getIncludes(project, "main") { switch Project.getIncludes(project, "main") {
| Ok(includes) => includes->expect == ["common"] | Ok(includes) => includes->expect == ["common"]
| Error(err) => err->SqError.toString->fail | Error(err) => err->SqError.toString->fail
} }
}) },
)
}) })
}) })

View File

@ -30,8 +30,9 @@ describe("ReducerProject Tutorial", () => {
}) })
test("userResults", () => { test("userResults", () => {
let userResultsAsString = Belt.Array.map(userResults, aResult => let userResultsAsString = Belt.Array.map(
aResult->Reducer_Value.toStringResult userResults,
aResult => aResult->Reducer_Value.toStringResult,
) )
userResultsAsString->expect == ["Ok(2)", "Ok(4)", "Ok(6)", "Ok(8)", "Ok(10)"] userResultsAsString->expect == ["Ok(2)", "Ok(4)", "Ok(6)", "Ok(8)", "Ok(10)"]
}) })

View File

@ -99,15 +99,19 @@ describe("FunctionRegistry Library", () => {
}) })
describe("Fn auto-testing", () => { describe("Fn auto-testing", () => {
testAll("tests of validity", examples, r => { testAll(
"tests of validity",
examples,
r => {
expectEvalToBeOk(r) expectEvalToBeOk(r)
}) },
)
testAll( testAll(
"tests of type", "tests of type",
E.A.to_list( E.A.to_list(
FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(((fn, _)) => FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(
E.O.isSome(fn.output) ((fn, _)) => E.O.isSome(fn.output),
), ),
), ),
((fn, example)) => { ((fn, example)) => {

View File

@@ -45,12 +45,12 @@ let toExtDist: option<DistributionTypes.genericDist> => DistributionTypes.generi
 let unpackFloat = x => x->toFloat->toExtFloat
 let unpackDist = y => y->toDist->toExtDist
 
-let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
-let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))
+let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean, stdev}))
+let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha, beta}))
 let mkExponential = rate => DistributionTypes.Symbolic(#Exponential({rate: rate}))
-let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low: low, high: high}))
-let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local: local, scale: scale}))
-let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
+let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low, high}))
+let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local, scale}))
+let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu, sigma}))
 let mkDelta = x => DistributionTypes.Symbolic(#Float(x))
 
 let normalMake = SymbolicDist.Normal.make

View File

@@ -39,7 +39,7 @@
   ],
   "author": "Quantified Uncertainty Research Institute",
   "dependencies": {
-    "@rescript/std": "^9.1.4",
+    "@rescript/std": "^10.0.0",
     "@stdlib/stats": "^0.0.13",
     "jstat": "^1.9.5",
     "lodash": "^4.17.21",
@@ -58,7 +58,7 @@
     "peggy": "^2.0.1",
     "prettier": "^2.7.1",
     "reanalyze": "^2.23.0",
-    "rescript": "^9.1.4",
+    "rescript": "^10.0.0",
     "rescript-fast-check": "^1.1.1",
     "rescript-js-map": "^1.1.0",
     "ts-jest": "^29.0.3",

View File

@@ -141,6 +141,7 @@ let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
       Js.log2("Console log requested: ", dist)
       Dist(dist)
     }
+
   | #ToDist(Normalize) => dist->GenericDist.normalize->Dist
   | #ToScore(LogScore(answer, prior)) =>
     GenericDist.Score.logScore(~estimate=dist, ~answer, ~prior, ~env)

View File

@@ -99,6 +99,7 @@ let toFloatOperation = (
       }
     }
   }
+
 | (#Stdev | #Variance | #Mode) as op =>
   switch t {
   | SampleSet(s) =>
@@ -129,7 +130,7 @@ let toPointSet = (
     SampleSetDist.toPointSetDist(
       ~samples=r,
       ~samplingInputs={
-        sampleCount: sampleCount,
+        sampleCount,
         outputXYPoints: xyPointLength,
         pointSetDistLength: xyPointLength,
         kernelWidth: None,
@@ -427,6 +428,7 @@ module AlgebraicCombination = {
         ~toSampleSetFn,
       )
     }
+
   | (None, AsMonteCarlo) =>
     StrategyCallOnValidatedInputs.monteCarlo(toSampleSetFn, arithmeticOperation, t1, t2)
   | (None, AsSymbolic) =>
@@ -443,6 +445,7 @@ module AlgebraicCombination = {
       )}`
       Error(RequestedStrategyInvalidError(errString))
     }
+
   | Some(convOp) => StrategyCallOnValidatedInputs.convolution(toPointSetFn, convOp, t1, t2)
   }
 }

View File

@@ -69,7 +69,7 @@ let toDiscretePointMassesFromTriangulars = (
       ()
     }
 
-    {n: n - 2, masses: masses, means: means, variances: variances}
+    {n: n - 2, masses, means, variances}
   } else {
     for i in 1 to n - 2 {
       // area of triangle = width * height / 2
@@ -91,7 +91,7 @@ let toDiscretePointMassesFromTriangulars = (
       ) |> ignore
       ()
     }
 
-    {n: n - 2, masses: masses, means: means, variances: variances}
+    {n: n - 2, masses, means, variances}
   }
 }
@@ -184,7 +184,7 @@ let toDiscretePointMassesFromDiscrete = (s: PointSetTypes.xyShape): pointMassesW
   let means: array<float> = Belt.Array.makeBy(n, i => xs[i])
   let variances: array<float> = Belt.Array.makeBy(n, _ => 0.0)
 
-  {n: n, masses: masses, means: means, variances: variances}
+  {n, masses, means, variances}
 }
 
 type argumentPosition = First | Second

View File

@@ -45,16 +45,16 @@ module Analysis = {
   let getShape = (t: t) => t.xyShape
   let interpolation = (t: t) => t.interpolation
   let make = (~interpolation=#Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
-    xyShape: xyShape,
-    interpolation: interpolation,
-    integralSumCache: integralSumCache,
-    integralCache: integralCache,
+    xyShape,
+    interpolation,
+    integralSumCache,
+    integralCache,
   }
   let shapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): t => {
     xyShape: fn(xyShape),
-    interpolation: interpolation,
-    integralSumCache: integralSumCache,
-    integralCache: integralCache,
+    interpolation,
+    integralSumCache,
+    integralCache,
   }
   let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
   let oShapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): option<
@@ -135,10 +135,10 @@ let shapeFn = (fn, t: t) => t |> getShape |> fn
 
 let updateIntegralSumCache = (integralSumCache, t: t): t => {
   ...t,
-  integralSumCache: integralSumCache,
+  integralSumCache,
 }
 
-let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache: integralCache}
+let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache}
 
 let sum = (
   ~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,

View File

@@ -4,14 +4,14 @@ open Distributions
 type t = PointSetTypes.discreteShape
 
 let make = (~integralSumCache=None, ~integralCache=None, xyShape): t => {
-  xyShape: xyShape,
-  integralSumCache: integralSumCache,
-  integralCache: integralCache,
+  xyShape,
+  integralSumCache,
+  integralCache,
 }
 let shapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): t => {
   xyShape: fn(xyShape),
-  integralSumCache: integralSumCache,
-  integralCache: integralCache,
+  integralSumCache,
+  integralCache,
 }
 let getShape = (t: t) => t.xyShape
 let oShapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): option<t> =>
@@ -63,12 +63,12 @@ let reduce = (
 
 let updateIntegralSumCache = (integralSumCache, t: t): t => {
   ...t,
-  integralSumCache: integralSumCache,
+  integralSumCache,
 }
 
 let updateIntegralCache = (integralCache, t: t): t => {
   ...t,
-  integralCache: integralCache,
+  integralCache,
 }
 
 /* This multiples all of the data points together and creates a new discrete distribution from the results.

View File

@@ -4,10 +4,10 @@ open Distributions
 type t = PointSetTypes.mixedShape
 
 let make = (~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete): t => {
-  continuous: continuous,
-  discrete: discrete,
-  integralSumCache: integralSumCache,
-  integralCache: integralCache,
+  continuous,
+  discrete,
+  integralSumCache,
+  integralCache,
 }
 
 let totalLength = (t: t): int => {
@@ -35,7 +35,7 @@ let toDiscrete = ({discrete}: t) => Some(discrete)
 
 let updateIntegralCache = (integralCache, t: t): t => {
   ...t,
-  integralCache: integralCache,
+  integralCache,
 }
 
 let combinePointwise = (

View File

@@ -79,8 +79,8 @@ module MixedPoint = {
   type t = mixedPoint
   let toContinuousValue = (t: t) => t.continuous
   let toDiscreteValue = (t: t) => t.discrete
-  let makeContinuous = (continuous: float): t => {continuous: continuous, discrete: 0.0}
-  let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete: discrete}
+  let makeContinuous = (continuous: float): t => {continuous, discrete: 0.0}
+  let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete}
 
   let fmap = (fn: float => float, t: t) => {
     continuous: fn(t.continuous),

View File

@@ -7,7 +7,7 @@ module Normal = {
type t = normal
let make = (mean: float, stdev: float): result<symbolicDist, string> =>
stdev > 0.0
-? Ok(#Normal({mean: mean, stdev: stdev}))
+? Ok(#Normal({mean, stdev}))
: Error("Standard deviation of normal distribution must be larger than 0")
let pdf = (x, t: t) => Jstat.Normal.pdf(x, t.mean, t.stdev)
let cdf = (x, t: t) => Jstat.Normal.cdf(x, t.mean, t.stdev)
@@ -15,7 +15,7 @@ module Normal = {
let from90PercentCI = (low, high) => {
let mean = E.A.Floats.mean([low, high])
let stdev = (high -. low) /. (2. *. normal95confidencePoint)
-#Normal({mean: mean, stdev: stdev})
+#Normal({mean, stdev})
}
let inv = (p, t: t) => Jstat.Normal.inv(p, t.mean, t.stdev)
let sample = (t: t) => Jstat.Normal.sample(t.mean, t.stdev)
@@ -25,12 +25,12 @@ module Normal = {
let add = (n1: t, n2: t) => {
let mean = n1.mean +. n2.mean
let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
-#Normal({mean: mean, stdev: stdev})
+#Normal({mean, stdev})
}
let subtract = (n1: t, n2: t) => {
let mean = n1.mean -. n2.mean
let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
-#Normal({mean: mean, stdev: stdev})
+#Normal({mean, stdev})
}
// TODO: is this useful here at all? would need the integral as well ...
@@ -38,7 +38,7 @@ module Normal = {
let mean =
(n1.mean *. n2.stdev ** 2. +. n2.mean *. n1.stdev ** 2.) /. (n1.stdev ** 2. +. n2.stdev ** 2.)
let stdev = 1. /. (1. /. n1.stdev ** 2. +. 1. /. n2.stdev ** 2.)
-#Normal({mean: mean, stdev: stdev})
+#Normal({mean, stdev})
}
let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
@@ -88,7 +88,7 @@ module Cauchy = {
type t = cauchy
let make = (local, scale): result<symbolicDist, string> =>
scale > 0.0
-? Ok(#Cauchy({local: local, scale: scale}))
+? Ok(#Cauchy({local, scale}))
: Error("Cauchy distribution scale parameter must larger than 0.")
let pdf = (x, t: t) => Jstat.Cauchy.pdf(x, t.local, t.scale)
let cdf = (x, t: t) => Jstat.Cauchy.cdf(x, t.local, t.scale)
@@ -102,7 +102,7 @@ module Triangular = {
type t = triangular
let make = (low, medium, high): result<symbolicDist, string> =>
low < medium && medium < high
-? Ok(#Triangular({low: low, medium: medium, high: high}))
+? Ok(#Triangular({low, medium, high}))
: Error("Triangular values must be increasing order.")
let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
let cdf = (x, t: t) => Jstat.Triangular.cdf(x, t.low, t.high, t.medium)
@@ -116,7 +116,7 @@ module Beta = {
type t = beta
let make = (alpha, beta) =>
alpha > 0.0 && beta > 0.0
-? Ok(#Beta({alpha: alpha, beta: beta}))
+? Ok(#Beta({alpha, beta}))
: Error("Beta distribution parameters must be positive")
let pdf = (x, t: t) => Jstat.Beta.pdf(x, t.alpha, t.beta)
let cdf = (x, t: t) => Jstat.Beta.cdf(x, t.alpha, t.beta)
@@ -150,7 +150,7 @@ module Lognormal = {
type t = lognormal
let make = (mu, sigma) =>
sigma > 0.0
-? Ok(#Lognormal({mu: mu, sigma: sigma}))
+? Ok(#Lognormal({mu, sigma}))
: Error("Lognormal standard deviation must be larger than 0")
let pdf = (x, t: t) => Jstat.Lognormal.pdf(x, t.mu, t.sigma)
let cdf = (x, t: t) => Jstat.Lognormal.cdf(x, t.mu, t.sigma)
@@ -164,7 +164,7 @@ module Lognormal = {
let logHigh = Js.Math.log(high)
let mu = E.A.Floats.mean([logLow, logHigh])
let sigma = (logHigh -. logLow) /. (2.0 *. normal95confidencePoint)
-#Lognormal({mu: mu, sigma: sigma})
+#Lognormal({mu, sigma})
}
let fromMeanAndStdev = (mean, stdev) => {
// https://math.stackexchange.com/questions/2501783/parameters-of-a-lognormal-distribution
@@ -174,7 +174,7 @@ module Lognormal = {
let meanSquared = mean ** 2.
let mu = 2. *. Js.Math.log(mean) -. 0.5 *. Js.Math.log(variance +. meanSquared)
let sigma = Js.Math.sqrt(Js.Math.log(variance /. meanSquared +. 1.))
-Ok(#Lognormal({mu: mu, sigma: sigma}))
+Ok(#Lognormal({mu, sigma}))
} else {
Error("Lognormal standard deviation must be larger than 0")
}
@@ -184,14 +184,14 @@ module Lognormal = {
// https://wikiless.org/wiki/Log-normal_distribution?lang=en#Multiplication_and_division_of_independent,_log-normal_random_variables
let mu = l1.mu +. l2.mu
let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
-#Lognormal({mu: mu, sigma: sigma})
+#Lognormal({mu, sigma})
}
let divide = (l1, l2) => {
let mu = l1.mu -. l2.mu
// We believe the ratiands will have covariance zero.
// See here https://stats.stackexchange.com/questions/21735/what-are-the-mean-and-variance-of-the-ratio-of-two-lognormal-variables for details
let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
-#Lognormal({mu: mu, sigma: sigma})
+#Lognormal({mu, sigma})
}
let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
switch operation {
@@ -220,7 +220,7 @@ module Lognormal = {
module Uniform = {
type t = uniform
let make = (low, high) =>
-high > low ? Ok(#Uniform({low: low, high: high})) : Error("High must be larger than low")
+high > low ? Ok(#Uniform({low, high})) : Error("High must be larger than low")
let pdf = (x, t: t) => Jstat.Uniform.pdf(x, t.low, t.high)
let cdf = (x, t: t) => Jstat.Uniform.cdf(x, t.low, t.high)
@@ -239,9 +239,7 @@ module Uniform = {
module Logistic = {
type t = logistic
let make = (location, scale) =>
-scale > 0.0
-? Ok(#Logistic({location: location, scale: scale}))
-: Error("Scale must be positive")
+scale > 0.0 ? Ok(#Logistic({location, scale})) : Error("Scale must be positive")
let pdf = (x, t: t) => Stdlib.Logistic.pdf(x, t.location, t.scale)
let cdf = (x, t: t) => Stdlib.Logistic.cdf(x, t.location, t.scale)
@@ -285,7 +283,7 @@ module Gamma = {
let make = (shape: float, scale: float) => {
if shape > 0. {
if scale > 0. {
-Ok(#Gamma({shape: shape, scale: scale}))
+Ok(#Gamma({shape, scale}))
} else {
Error("scale must be larger than 0")
}
@@ -543,6 +541,6 @@ module T = {
| _ =>
let xs = interpolateXs(~xSelection, d, sampleCount)
let ys = xs |> E.A.fmap(x => pdf(x, d))
-Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs: xs, ys: ys}))
+Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs, ys}))
}
}
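
Note (illustrative, not part of the diff): the from90PercentCI hunks above derive a normal distribution from a 90% confidence interval. A minimal standalone sketch of that arithmetic, assuming normal95confidencePoint is the z-score of the 95th percentile (about 1.645):

// Sketch only; the real implementation lives in SymbolicDist's Normal module.
// [low, high] is treated as the central 90% interval, so high - low spans
// 2 * 1.645 standard deviations.
let normal95confidencePoint = 1.6448536269514722
let from90PercentCI = (low, high) => {
  let mean = (low +. high) /. 2.
  let stdev = (high -. low) /. (2. *. normal95confidencePoint)
  (mean, stdev)
}
// from90PercentCI(5., 15.) returns (10., ~3.04)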

View File

@@ -23,7 +23,7 @@ let makeFn = (
name: string,
inputs: array<frType>,
fn: array<Reducer_T.value> => result<Reducer_T.value, errorMessage>,
-) => makeFnMany(name, [{inputs: inputs, fn: fn}])
+) => makeFnMany(name, [{inputs, fn}])
let library = [
Make.ff2f(~name="add", ~fn=(x, y) => x +. y, ()), // infix + (see Reducer/Reducer_Peggy/helpers.ts)
@@ -62,6 +62,7 @@ let library = [
let answer = Js.String2.concat(a, b)
answer->Reducer_T.IEvString->Ok
}
| _ => Error(impossibleError)
}
}),
@@ -72,6 +73,7 @@ let library = [
let _ = Js.Array2.pushMany(a, b)
a->Reducer_T.IEvArray->Ok
}
| _ => Error(impossibleError)
}
}),
@@ -81,6 +83,7 @@ let library = [
Js.log(value->Reducer_Value.toString)
value->Ok
}
| _ => Error(impossibleError)
}
}),
@@ -90,6 +93,7 @@ let library = [
Js.log(`${label}: ${value->Reducer_Value.toString}`)
value->Ok
}
| _ => Error(impossibleError)
}
}),
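
Note (illustrative, not part of the diff): the `| _ => Error(impossibleError)` branches visible in these hunks follow the pattern this codebase uses to keep matches over runtime values exhaustive under stricter warnings. A simplified sketch with a made-up, trimmed-down value type (the real `Reducer_T.value` has many more cases):

// Sketch only: `value` here is a trimmed stand-in for Reducer_T.value.
// The catch-all covers shapes the registry should never pass in.
type value = IEvString(string) | IEvArray(array<value>)
let impossibleError = "internal logic error"
let concatStrings = (inputs: array<value>): result<value, string> =>
  switch inputs {
  | [IEvString(a), IEvString(b)] => Ok(IEvString(Js.String2.concat(a, b)))
  | _ => Error(impossibleError) // unreachable when inputs were already type-checked
  }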

View File

@@ -135,11 +135,13 @@ module Integration = {
let wrappedResult = result->Reducer_T.IEvNumber->Ok
wrappedResult
}
| (Error(b), _) => Error(b)
| (_, Error(b)) => Error(b)
}
resultWithOuterPoints
}
| Error(b) =>
("Integration error 2 in Danger.integrate. It's possible that your function doesn't return a number, try definining auxiliaryFunction(x) = mean(yourFunction(x)) and integrate auxiliaryFunction instead." ++
"Original error: " ++
@@ -362,6 +364,7 @@ module DiminishingReturns = {
result[indexOfBiggestDMR] = value
Ok(result)
}
| Error(b) => Error(b)
}
@@ -371,10 +374,12 @@ module DiminishingReturns = {
}
Ok(newAcc)
}
| Error(b) => Error(b)
}
newAccWrapped
}
| Error(b) => Error(b)
}
})
@@ -427,10 +432,12 @@ module DiminishingReturns = {
)
result
}
| Error(b) => Error(b)
}
result
}
| _ =>
"Error in Danger.diminishingMarginalReturnsForTwoFunctions"
->SqError.Message.REOther
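
Note (illustrative, not part of the diff): the Integration and DiminishingReturns hunks above all thread `result` values through nested switches, passing any `Error(b)` outward unchanged. A minimal sketch of that propagation style:

// Sketch only; mirrors the switch structure above, not the repo's actual functions.
// Combine two results; the first Error encountered is returned as-is.
let addResults = (a: result<float, string>, b: result<float, string>) =>
  switch (a, b) {
  | (Ok(x), Ok(y)) => Ok(x +. y)
  | (Error(e), _) => Error(e)
  | (_, Error(e)) => Error(e)
  }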

View File

@@ -20,6 +20,7 @@ module Declaration = {
->E.A.R.firstErrorOrOpen
->E.R2.fmap(args => Reducer_T.IEvDeclaration(Declaration.make(lambda, args)))
}
| Error(r) => Error(r)
| Ok(_) => Error(impossibleErrorString)
}

View File

@@ -140,6 +140,7 @@ module Old = {
| Error(err) => error(err)
}
}
| Some(IEvNumber(_))
| Some(IEvDistribution(_)) =>
switch parseDistributionArray(args) {
@@ -192,6 +193,7 @@ module Old = {
}
Helpers.toFloatFn(fn, dist, ~env)
}
| ("integralSum", [IEvDistribution(dist)]) => Helpers.toFloatFn(#IntegralSum, dist, ~env)
| ("toString", [IEvDistribution(dist)]) => Helpers.toStringFn(ToString, dist, ~env)
| ("sparkline", [IEvDistribution(dist)]) =>

View File

@@ -19,6 +19,7 @@ let inputsToDist = (inputs: array<Reducer_T.value>, xyShapeToPointSetDist) => {
| _ => impossibleError->SqError.Message.throw
}
}
| _ => impossibleError->SqError.Message.throw
}
)

View File

@@ -61,6 +61,7 @@ module FRType = {
let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
`{${r->E.A2.fmap(input)->E.A2.joinWith(", ")}}`
}
| FRTypeArray(r) => `list(${toString(r)})`
| FRTypeLambda => `lambda`
| FRTypeString => `string`
@@ -132,9 +133,9 @@ module FnDefinition = {
}
let make = (~name, ~inputs, ~run, ()): t => {
-name: name,
-inputs: inputs,
-run: run,
+name,
+inputs,
+run,
}
}
@@ -160,14 +161,14 @@ module Function = {
~isExperimental=false,
(),
): t => {
-name: name,
-nameSpace: nameSpace,
-definitions: definitions,
-output: output,
+name,
+nameSpace,
+definitions,
+output,
examples: examples->E.O2.default([]),
-isExperimental: isExperimental,
-requiresNamespace: requiresNamespace,
-description: description,
+isExperimental,
+requiresNamespace,
+description,
}
let toJson = (t: t): functionJson => {
@@ -203,15 +204,19 @@ module Registry = {
fn.requiresNamespace ? [] : [def.name],
]->E.A.concatMany
-names->Belt.Array.reduce(acc, (acc, name) => {
+names->Belt.Array.reduce(
+acc,
+(acc, name) => {
switch acc->Belt.Map.String.get(name) {
| Some(fns) => {
let _ = fns->Js.Array2.push(def) // mutates the array, no need to update acc
acc
}
| None => acc->Belt.Map.String.set(name, [def])
}
-})
+},
+)
})
)
}
@@ -245,6 +250,7 @@ module Registry = {
| None => REOther(showNameMatchDefinitions())->Error
}
}
| None => RESymbolNotFound(fnName)->Error
}
}
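
Note (illustrative, not part of the diff): the Registry hunk above only reflows a `Belt.Array.reduce` call across several lines, presumably via the rescript 10 formatter; the fold itself is unchanged. For reference, `Belt.Array.reduce(xs, init, f)` folds left over an array:

// Standalone example, unrelated to the repo's types.
// Belt.Array.reduce takes the array, an initial accumulator, and an (acc, item) => acc function.
let sum = Belt.Array.reduce([1, 2, 3], 0, (acc, x) => acc + x) // 6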

View File

@@ -34,6 +34,7 @@ module Prepare = {
let n2 = map->Belt.Map.String.getExn(arg2)
Ok([n1, n2])
}
| _ => Error(impossibleErrorString)
}
@@ -45,6 +46,7 @@ module Prepare = {
let n3 = map->Belt.Map.String.getExn(arg3)
Ok([n1, n2, n3])
}
| _ => Error(impossibleErrorString)
}
}

View File

@@ -44,4 +44,4 @@ let removeResult = ({namespace} as bindings: t): t => {
let locals = ({namespace}: t): Reducer_T.namespace => namespace
-let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace: namespace, parent: None}
+let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace, parent: None}

View File

@@ -6,7 +6,7 @@ let createContext = (stdLib: Reducer_Namespace.t, environment: Reducer_T.environ
{
frameStack: list{},
bindings: stdLib->Reducer_Bindings.fromNamespace->Reducer_Bindings.extend,
-environment: environment,
+environment,
inFunction: None,
}
}

View File

@@ -123,6 +123,7 @@ let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
)
(result, context)
}
| _ => RENotAFunction(lambda->Reducer_Value.toString)->throwFrom(expression, context)
}
}

View File

@@ -23,8 +23,8 @@ let make = (): t => list{}
let extend = (t: t, name: string, location: option<Reducer_Peggy_Parse.location>) =>
t->Belt.List.add({
-name: name,
-location: location,
+name,
+location,
})
// this is useful for SyntaxErrors

View File

@@ -43,10 +43,10 @@ let makeLambda = (
FnLambda({
// context: bindings,
-name: name,
+name,
body: lambda,
-parameters: parameters,
-location: location,
+parameters,
+location,
})
}
@@ -54,8 +54,8 @@ let makeLambda = (
let makeFFILambda = (name: string, body: Reducer_T.lambdaBody): t => FnBuiltin({
// Note: current bindings could be accidentally exposed here through context (compare with native lambda implementation above, where we override them with local bindings).
// But FunctionRegistry API is too limited for that to matter. Please take care not to violate that in the future by accident.
-body: body,
-name: name,
+body,
+name,
})
// this function doesn't scale to FunctionRegistry's polymorphic functions

View File

@@ -113,7 +113,7 @@ let nodeToAST = (node: node) => {
| _ => raise(UnsupportedPeggyNodeType(node["type"]))
}
-{location: node["location"], content: content}
+{location: node["location"], content}
}
let nodeIdentifierToAST = (node: nodeIdentifier) => {

View File

@@ -68,7 +68,7 @@ let rec fromNode = (node: Parse.node): expression => {
}
{
-ast: ast,
-content: content,
+ast,
+content,
}
}

View File

@@ -216,6 +216,7 @@ let tryRunWithResult = (
project->setResult(sourceId, Error(error))
Error(error)
}
| Ok(_prevResult) => {
project->doLinkAndRun(sourceId)
project->getResultOption(sourceId)->Belt.Option.getWithDefault(rPrevResult)

View File

@@ -6,7 +6,7 @@ type t = T.t
let emptyItem = (sourceId: string): projectItem => {
source: "",
-sourceId: sourceId,
+sourceId,
rawParse: None,
expression: None,
continuation: Reducer_Namespace.make(),
@@ -76,7 +76,7 @@ let resetIncludes = (r: t): t => {
}
let setSource = (r: t, source: T.sourceArgumentType): t =>
-{...r, source: source}->resetIncludes->touchSource
+{...r, source}->resetIncludes->touchSource
let setRawParse = (r: t, rawParse: T.rawParseArgumentType): t =>
{...r, rawParse: Some(rawParse)}->touchRawParse
@@ -86,7 +86,7 @@ let setExpression = (r: t, expression: T.expressionArgumentType): t =>
let setContinuation = (r: t, continuation: T.continuationArgumentType): t => {
...r,
-continuation: continuation,
+continuation,
}
let setResult = (r: t, result: T.resultArgumentType): t => {
@@ -110,24 +110,23 @@ let getPastChain = (this: t): array<string> => {
Js.Array2.concat(getDirectIncludes(this), getContinues(this))
}
-let setContinues = (this: t, continues: array<string>): t =>
-{...this, continues: continues}->touchSource
+let setContinues = (this: t, continues: array<string>): t => {...this, continues}->touchSource
let removeContinues = (this: t): t => {...this, continues: []}->touchSource
let setIncludes = (this: t, includes: T.includesType): t => {
...this,
-includes: includes,
+includes,
}
let setImportAsVariables = (this: t, includeAsVariables: T.importAsVariablesType): t => {
...this,
-includeAsVariables: includeAsVariables,
+includeAsVariables,
}
let setDirectImports = (this: t, directIncludes: array<string>): t => {
...this,
-directIncludes: directIncludes,
+directIncludes,
}
let parseIncludes = (this: t): t => {
@@ -144,9 +143,9 @@ let parseIncludes = (this: t): t => {
->Belt.Array.map(((_variable, file)) => file)
{
...this,
-includes: includes,
-includeAsVariables: includeAsVariables,
-directIncludes: directIncludes,
+includes,
+includeAsVariables,
+directIncludes,
}
}
}
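
Note (illustrative, not part of the diff): the ProjectItem setters above use ReScript's immutable record update, `{...record, field}`, which copies the record and replaces only the named fields; with punning, `field` alone stands for `field: field`. A minimal sketch with a made-up `item` type:

// Sketch only; `item` is a made-up stand-in for the project item record.
// Each setter returns a fresh record; the original is left untouched.
type item = {source: string, continues: array<string>}
let setSource = (r: item, source: string): item => {...r, source}
let setContinues = (r: item, continues: array<string>): item => {...r, continues}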

View File

@@ -54,6 +54,7 @@ module Message = {
}
answer
}
| REMacroNotFound(macro) => `Macro not found: ${macro}`
| RENotAFunction(valueString) => `${valueString} is not a function`
| RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
@@ -93,8 +94,8 @@ type t = {
exception SqException(t)
let fromMessageWithFrameStack = (message: Message.t, frameStack: Reducer_FrameStack.t): t => {
-message: message,
-frameStack: frameStack,
+message,
+frameStack,
}
// this shouldn't be used much, since frame stack will be empty

View File

@@ -18,6 +18,7 @@ let stdLib: Reducer_T.namespace = {
| None => REArrayIndexNotFound("Array index not found", index)->SqError.Message.throw
}
}
| [IEvRecord(dict), IEvString(sIndex)] =>
switch Belt.Map.String.get(dict, sIndex) {
| Some(value) => value

View File

@@ -9,13 +9,13 @@ type declaration<'a> = {
module ContinuousFloatArg = {
let make = (min: float, max: float): arg => {
-Float({min: min, max: max})
+Float({min, max})
}
}
module ContinuousTimeArg = {
let make = (min: Js.Date.t, max: Js.Date.t): arg => {
-Date({min: min, max: max})
+Date({min, max})
}
}
@@ -33,7 +33,7 @@ module Arg = {
}
let make = (fn: 'a, args: array<arg>): declaration<'a> => {
-{fn: fn, args: args}
+{fn, args}
}
let toString = (r: declaration<'a>, fnToString): string => {

View File

@@ -85,8 +85,8 @@ module T = {
}
let square = mapX(x => x ** 2.0)
let zip = ({xs, ys}: t) => Belt.Array.zip(xs, ys)
-let fromArray = ((xs, ys)): t => {xs: xs, ys: ys}
-let fromArrays = (xs, ys): t => {xs: xs, ys: ys}
+let fromArray = ((xs, ys)): t => {xs, ys}
+let fromArrays = (xs, ys): t => {xs, ys}
let accumulateYs = (fn, p: t) => fromArray((p.xs, E.A.accumulate(fn, p.ys)))
let concat = (t1: t, t2: t) => {
let cxs = Array.concat(list{t1.xs, t2.xs})
@@ -142,7 +142,7 @@ module T = {
}
let make = (~xs: array<float>, ~ys: array<float>) => {
-let attempt: t = {xs: xs, ys: ys}
+let attempt: t = {xs, ys}
switch Validator.validate(attempt) {
| Some(error) => Error(error)
| None => Ok(attempt)
@@ -452,6 +452,7 @@ module PointwiseCombination = {
let _ = Js.Array.push(fn(y1, y2), newYs)
let _ = Js.Array.push(x, newXs)
}
| None => ()
}
}
@@ -558,7 +559,7 @@ module Range = {
(xs[x + 1] -. xs[x]) *. ((ys[x] +. ys[x + 1]) /. 2.) +. cumulativeY[x], // dx // (1/2) * (avgY)
)
}
-Some({xs: xs, ys: cumulativeY})
+Some({xs, ys: cumulativeY})
}
let derivative = mapYsBasedOnRanges(delta_y_over_delta_x)

View File

@@ -3037,6 +3037,11 @@
"@react-hook/passive-layout-effect" "^1.2.0"
"@react-hook/resize-observer" "^1.2.1"
+"@rescript/std@^10.0.0":
+version "10.0.0"
+resolved "https://registry.yarnpkg.com/@rescript/std/-/std-10.0.0.tgz#11996296739d7f0d2949283c93b4d14e9ed4589d"
+integrity sha512-DFwX5vWASZtvjFdqar2VIadvmy2ZBPTnPI2A9EKEkvNR93OUoZygOfvhRaueIQtlS4f9X50E3v2awI9JJG+JsQ==
"@rescript/std@^9.1.4":
version "9.1.4"
resolved "https://registry.yarnpkg.com/@rescript/std/-/std-9.1.4.tgz#94971cb504b10d36d470618fa1c6f0a2d03a6b9b"
@@ -16340,10 +16345,10 @@ rescript-js-map@^1.1.0:
dependencies:
rescript-js-iterator "^1.1.0"
-rescript@^9.1.4:
-version "9.1.4"
-resolved "https://registry.yarnpkg.com/rescript/-/rescript-9.1.4.tgz#1eb126f98d6c16942c0bf0df67c050198e580515"
-integrity sha512-aXANK4IqecJzdnDpJUsU6pxMViCR5ogAxzuqS0mOr8TloMnzAjJFu63fjD6LCkWrKAhlMkFFzQvVQYaAaVkFXw==
+rescript@^10.0.0:
+version "10.0.0"
+resolved "https://registry.yarnpkg.com/rescript/-/rescript-10.0.0.tgz#8460bc6f7d94bc580eac02d7c7efdf0a470916b8"
+integrity sha512-LhNg/4+0j8NvoFeslgAeYLlzUwkq6kR6l6v8BnZ61VDTxopK2l96uT1lq5lv1aMxzMDynvE2qnX0zalre+6XxA==
resize-observer-polyfill@^1.5.1:
version "1.5.1"