Compare commits: develop...plot-funct

6 commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 76fe461363 |  |
|  | 8d612f75f0 |  |
|  | c060304161 |  |
|  | 56913bc95e |  |
|  | 2dfb57240e |  |
|  | 62f735efcb |  |
.github/workflows/ci.yml (vendored): 2 changed lines

@@ -35,8 +35,6 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-        with:
-          fetch-depth: 2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2
        with:
@@ -1 +0,0 @@
-See the [Changelog.mdx page](./packages/website/docs/Changelog.mdx) for the changelog.
@@ -65,12 +65,6 @@ turbo run build --filter=@quri/squiggle-components
 
 You can also run specific npm scripts for the package you're working on. See `packages/*/README.md` for the details.
 
-# NixOS users
-
-This repository requires the use of bundled binaries from node_modules, which
-are not linked statically. The easiest way to get them working is to enable
-[nix-ld](https://github.com/Mic92/nix-ld).
-
 # Contributing
 
 See `CONTRIBUTING.md`.
@@ -30,6 +30,16 @@ rec {
       patchelf --replace-needed libstdc++.so.6 $THE_SO linux/ninja.exe && echo "- replaced needed for linux/ninja.exe"
     '';
   };
+  bisect_ppx = {
+    buildInputs = common.which;
+    postInstall = ''
+      echo "PATCHELF'ING BISECT_PPX EXECUTABLE"
+      THE_LD=$(patchelf --print-interpreter $(which mkdir))
+      patchelf --set-interpreter $THE_LD bin/linux/ppx
+      patchelf --set-interpreter $THE_LD bin/linux/bisect-ppx-report
+      cp bin/linux/ppx ppx
+    '';
+  };
   gentype = {
     postInstall = ''
       mv gentype.exe ELFLESS-gentype.exe
nixos.sh (new executable file): 18 added lines

@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+# This script is only relevant if you're rolling nixos.
+
+# Esy (a bisect_ppx dependency/build tool) is borked on nixos without using an FHS shell. https://github.com/esy/esy/issues/858
+# We need to patchelf rescript executables. https://github.com/NixOS/nixpkgs/issues/107375
+set -x
+
+fhsShellName="squiggle-fhs-development"
+fhsShellDotNix="{pkgs ? import <nixpkgs> {} }: (pkgs.buildFHSUserEnv { name = \"${fhsShellName}\"; targetPkgs = pkgs: [pkgs.yarn pkgs.glibc]; runScript = \"yarn\"; }).env"
+nix-shell - <<<"$fhsShellDotNix"
+
+theLd=$(patchelf --print-interpreter $(which mkdir))
+patchelf --set-interpreter $theLd ./node_modules/gentype/gentype.exe
+patchelf --set-interpreter $theLd ./node_modules/rescript/linux/*.exe
+patchelf --set-interpreter $theLd ./node_modules/bisect_ppx/ppx
+patchelf --set-interpreter $theLd ./node_modules/bisect_ppx/bisect-ppx-report
+theSo=$(find /nix/store/*$fhsShellName*/lib64 -name libstdc++.so.6 | head -n 1)
+patchelf --replace-needed libstdc++.so.6 $theSo ./node_modules/rescript/linux/ninja.exe
@@ -20,30 +20,3 @@ Runs compilation in the current directory and all of its subdirectories.
 ### `npx squiggle-cli-experimental watch`
 
 Watches `.squiggleU` files in the current directory (and subdirectories) and rebuilds them when they are saved. Note that this will _not_ rebuild files when their dependencies are changed, just when they are changed directly.
-
-## Further instructions
-
-The above requires having node, npm and npx. To install the first two, see [here](https://nodejs.org/en/), to install npx, run:
-
-```
-npm install -g npx
-```
-
-Alternatively, you can run the following without the need for npx:
-
-```
-npm install squiggle-cli-experimental
-node node_modules/squiggle-cli-experimental/index.js compile
-```
-
-or you can add a script to your `package.json`, like:
-
-```
-...
-scripts: {
-  "compile": "squiggle-cli-experimental compile"
-}
-...
-```
-
-This can be run with `npm run compile`. `npm` knows how to reach into the node_modules directly, so it's not necessary to specify that.
@@ -13,7 +13,7 @@
   },
   "license": "MIT",
   "dependencies": {
-    "chalk": "^5.1.0",
+    "chalk": "^5.0.1",
     "chokidar": "^3.5.3",
     "commander": "^9.4.1",
     "fs": "^0.0.1-security",
@@ -12,11 +12,11 @@
     "@react-hook/size": "^2.1.2",
     "@types/uuid": "^8.3.4",
     "clsx": "^1.2.1",
-    "framer-motion": "^7.5.3",
+    "framer-motion": "^7.5.1",
     "lodash": "^4.17.21",
     "react": "^18.1.0",
     "react-ace": "^10.1.0",
-    "react-hook-form": "^7.37.0",
+    "react-hook-form": "^7.36.1",
     "react-use": "^17.4.0",
     "react-vega": "^7.6.0",
     "uuid": "^9.0.0",
@@ -41,7 +41,7 @@
     "@testing-library/user-event": "^14.4.3",
     "@types/jest": "^27.5.0",
     "@types/lodash": "^4.14.186",
-    "@types/node": "^18.8.3",
+    "@types/node": "^18.8.0",
     "@types/react": "^18.0.21",
     "@types/styled-components": "^5.1.26",
     "@types/uuid": "^8.3.4",
@@ -49,8 +49,8 @@
     "canvas": "^2.10.1",
     "cross-env": "^7.0.3",
     "jest": "^29.0.3",
-    "jest-environment-jsdom": "^29.1.2",
+    "jest-environment-jsdom": "^29.0.3",
-    "jsdom": "^20.0.1",
+    "jsdom": "^20.0.0",
     "mini-css-extract-plugin": "^2.6.1",
     "postcss-cli": "^10.0.0",
     "postcss-import": "^15.0.0",
@@ -60,11 +60,11 @@
     "react-scripts": "^5.0.1",
     "style-loader": "^3.3.1",
     "tailwindcss": "^3.1.8",
-    "ts-jest": "^29.0.3",
+    "ts-jest": "^29.0.2",
     "ts-loader": "^9.4.1",
     "tsconfig-paths-webpack-plugin": "^4.0.0",
     "typescript": "^4.8.4",
-    "web-vitals": "^3.0.3",
+    "web-vitals": "^3.0.2",
     "webpack": "^5.74.0",
     "webpack-cli": "^4.10.0",
     "webpack-dev-server": "^4.11.1"
@@ -24,7 +24,7 @@ export const Alert: React.FC<{
   children,
 }) => {
   return (
-    <div className={clsx("rounded-md p-4", backgroundColor)} role="status">
+    <div className={clsx("rounded-md p-4", backgroundColor)}>
       <div className="flex">
         <Icon
           className={clsx("h-5 w-5 flex-shrink-0", iconColor)}
@@ -55,7 +55,10 @@ export const CodeEditor: FC<CodeEditorProps> = ({
       editorProps={{
         $blockScrolling: true,
       }}
-      setOptions={{}}
+      setOptions={{
+        enableBasicAutocompletion: false,
+        enableLiveAutocompletion: false,
+      }}
      commands={[
        {
          name: "submit",
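
For context on the `setOptions` change above: react-ace forwards that object to Ace's option setter, so the plot-funct side is explicitly opting out of both autocompletion modes. Below is a minimal, self-contained sketch of the same configuration outside `CodeEditor.tsx`; the component name and `value` prop are illustrative, not taken from the diff.

```typescript
import * as React from "react";
import AceEditor from "react-ace";

// Hypothetical standalone editor mirroring only the setOptions part of the diff.
export const PlainAceEditor: React.FC<{ value: string }> = ({ value }) => (
  <AceEditor
    value={value}
    setOptions={{
      enableBasicAutocompletion: false, // same opt-outs as the new CodeEditor props
      enableLiveAutocompletion: false,
    }}
  />
);
```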
@@ -3,8 +3,9 @@ import {
   SqDistribution,
   result,
   SqDistributionError,
+  LabeledDistribution,
   resultMap,
-  SqRecord,
+  SqPlot,
   environment,
   SqDistributionTag,
 } from "@quri/squiggle-lang";
@@ -17,7 +18,6 @@ import {
   DistributionChartSpecOptions,
 } from "../lib/distributionSpecBuilder";
 import { NumberShower } from "./NumberShower";
-import { Plot, parsePlot } from "../lib/plotParser";
 import { flattenResult } from "../lib/utility";
 import { hasMassBelowZero } from "../lib/distributionUtils";
 
@@ -28,27 +28,15 @@ export type DistributionPlottingSettings = {
 } & DistributionChartSpecOptions;
 
 export type DistributionChartProps = {
-  plot: Plot;
   environment: environment;
   width?: number;
   height: number;
   xAxisType?: "number" | "dateTime";
-} & DistributionPlottingSettings;
-
-export function defaultPlot(distribution: SqDistribution): Plot {
-  return { distributions: [{ name: "default", distribution }] };
-}
-
-export function makePlot(record: SqRecord): Plot | void {
-  const plotResult = parsePlot(record);
-  if (plotResult.tag === "Ok") {
-    return plotResult.value;
-  }
-}
-
+} & DistributionPlottingSettings &
+  ({ plot: SqPlot } | { distribution: SqDistribution });
+
 export const DistributionChart: React.FC<DistributionChartProps> = (props) => {
   const {
-    plot,
     environment,
     height,
     showSummary,
@@ -57,8 +45,14 @@ export const DistributionChart: React.FC<DistributionChartProps> = (props) => {
     actions = false,
   } = props;
   const [sized] = useSize((size) => {
-    const shapes = flattenResult(
-      plot.distributions.map((x) =>
+    let distributions: LabeledDistribution[];
+    if ("plot" in props) {
+      distributions = props.plot.getDistributions();
+    } else {
+      distributions = [{ name: "default", distribution: props.distribution }];
+    }
+    let shapes = flattenResult(
+      distributions.map((x) =>
        resultMap(x.distribution.pointSet(environment), (pointSet) => ({
          name: x.name,
          // color: x.color, // not supported yet
@@ -77,7 +71,7 @@ export const DistributionChart: React.FC<DistributionChartProps> = (props) => {
 
     // if this is a sample set, include the samples
     const samples: number[] = [];
-    for (const { distribution } of plot?.distributions) {
+    for (const { distribution } of distributions) {
       if (distribution.tag === SqDistributionTag.SampleSet) {
         samples.push(...distribution.value());
       }
@@ -126,9 +120,9 @@ export const DistributionChart: React.FC<DistributionChartProps> = (props) => {
           />
         )}
         <div className="flex justify-center">
-          {showSummary && plot.distributions.length === 1 && (
+          {showSummary && distributions.length === 1 && (
             <SummaryTable
-              distribution={plot.distributions[0].distribution}
+              distribution={distributions[0].distribution}
              environment={environment}
            />
          )}
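
A minimal sketch of how callers satisfy the new `DistributionChartProps` union: the component now takes either a `plot: SqPlot` or a bare `distribution: SqDistribution`, which it wraps as a single `"default"` entry per the branch above. The wrapper component and the `env`/`sqPlot`/`sqDist` inputs are hypothetical, and the remaining plotting settings are omitted for brevity; only the prop names and types come from the diff.

```typescript
import * as React from "react";
import { SqDistribution, SqPlot, environment } from "@quri/squiggle-lang";
import { DistributionChart } from "./DistributionChart";

// Hypothetical wrapper; the real callers are ExpressionViewer and FunctionChart1Dist.
export const DistributionChartExamples: React.FC<{
  env: environment;
  sqPlot: SqPlot;
  sqDist: SqDistribution;
}> = ({ env, sqPlot, sqDist }) => (
  <div>
    {/* A Plot value produced by the language (possibly several labeled distributions) */}
    <DistributionChart plot={sqPlot} environment={env} height={200} showSummary={true} />
    {/* A bare distribution; internally becomes [{ name: "default", distribution }] */}
    <DistributionChart distribution={sqDist} environment={env} height={50} showSummary={false} />
  </div>
);
```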
@@ -15,7 +15,6 @@ import * as percentilesSpec from "../vega-specs/spec-percentiles.json";
 import {
   DistributionChart,
   DistributionPlottingSettings,
-  defaultPlot,
 } from "./DistributionChart";
 import { NumberShower } from "./NumberShower";
 import { ErrorAlert } from "./Alert";
@@ -184,7 +183,7 @@ export const FunctionChart1Dist: React.FC<FunctionChart1DistProps> = ({
     mouseItem.tag === "Ok" &&
     mouseItem.value.tag === SqValueTag.Distribution ? (
       <DistributionChart
-        plot={defaultPlot(mouseItem.value.value)}
+        distribution={mouseItem.value.value}
        environment={environment}
        width={400}
        height={50}
@@ -194,7 +193,7 @@ export const FunctionChart1Dist: React.FC<FunctionChart1DistProps> = ({
 
   let getPercentilesMemoized = React.useMemo(
     () => getPercentiles({ chartSettings, fn, environment }),
-    [environment, fn]
+    [chartSettings, environment, fn]
   );
 
   return (
@@ -1,10 +1,5 @@
 import * as React from "react";
-import {
-  SqValue,
-  environment,
-  SqProject,
-  defaultEnvironment,
-} from "@quri/squiggle-lang";
+import { SqValue, environment, SqProject } from "@quri/squiggle-lang";
 import { useSquiggle } from "../lib/hooks";
 import { SquiggleViewer } from "./SquiggleViewer";
 import { JsImports } from "../lib/jsImports";
@@ -71,6 +66,7 @@ type ProjectExecutionProps = {
 };
 const defaultOnChange = () => {};
 const defaultImports: JsImports = {};
+const defaultContinues: string[] = [];
 
 export const splitSquiggleChartSettings = (props: SquiggleChartProps) => {
   const {
@@ -124,15 +120,24 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
       width,
       height = 200,
       enableLocalSettings = false,
-      continues,
-      project,
-      environment,
+      continues = defaultContinues,
     } = props;
 
+    const p = React.useMemo(() => {
+      if (props.project) {
+        return props.project;
+      } else {
+        const p = SqProject.create();
+        if (props.environment) {
+          p.setEnvironment(props.environment);
+        }
+        return p;
+      }
+    }, [props.project, props.environment]);
+
     const resultAndBindings = useSquiggle({
-      environment,
       continues,
-      project,
+      project: p,
      code,
      jsImports,
      onChange,
@@ -148,9 +153,7 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
         height={height}
         distributionPlotSettings={distributionPlotSettings}
         chartSettings={chartSettings}
-        environment={
-          project ? project.getEnvironment() : environment ?? defaultEnvironment
-        }
+        environment={p.getEnvironment()}
        enableLocalSettings={enableLocalSettings}
      />
    );
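
A sketch of the two ways to drive `SquiggleChart` after this change: hand it an externally managed `SqProject` (whose environment then wins), or pass only an `environment` and let the memoized fallback project above pick it up. The `sampleCount`/`xyPointLength` fields in the environment literal are an assumption about squiggle-lang's `environment` type, not something shown in this diff, and the source name is invented.

```typescript
import * as React from "react";
import { SqProject } from "@quri/squiggle-lang";
import { SquiggleChart } from "./SquiggleChart";

const project = SqProject.create();
project.setSource("depend", "x = 1"); // source name chosen for illustration

export const Charts: React.FC = () => (
  <div>
    {/* Reuse a shared project; `continues` pulls in the "depend" source */}
    <SquiggleChart code="x + 1" project={project} continues={["depend"]} />
    {/* No project: the component creates one and applies this environment */}
    <SquiggleChart
      code="normal(0, 1)"
      environment={{ sampleCount: 1000, xyPointLength: 1000 }}
    />
  </div>
);
```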
@@ -16,7 +16,7 @@ const WrappedCodeEditor: React.FC<{
   setCode: (code: string) => void;
   errorLocations?: SqLocation[];
 }> = ({ code, setCode, errorLocations }) => (
-  <div className="border border-grey-200 p-2 m-4" data-testid="squiggle-editor">
+  <div className="border border-grey-200 p-2 m-4">
     <CodeEditor
       value={code}
       onChange={setCode}
@@ -54,13 +54,17 @@ export const SquiggleEditor: React.FC<SquiggleEditorProps> = (props) => {
     width,
     height = 200,
     enableLocalSettings = false,
-    continues,
-    project,
   } = props;
 
+  const project = React.useMemo(() => {
+    const p = SqProject.create();
+    if (environment) {
+      p.setEnvironment(environment);
+    }
+    return p;
+  }, [environment]);
+
   const resultAndBindings = useSquiggle({
-    environment,
-    continues,
     code,
     project,
     jsImports,
@@ -182,7 +182,7 @@ const RunControls: React.FC<{
   const CurrentPlayIcon = isRunning ? RefreshIcon : PlayIcon;
 
   return (
-    <div className="flex space-x-1 items-center" data-testid="autorun-controls">
+    <div className="flex space-x-1 items-center">
       {autorunMode ? null : (
         <button onClick={run}>
           <CurrentPlayIcon
@@ -251,8 +251,6 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
   onSettingsChange,
   showEditor = true,
   showShareButton = false,
-  continues,
-  project,
 }) => {
   const [code, setCode] = useMaybeControlledValue({
     value: controlledCode,
@@ -307,10 +305,16 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
     executionId,
   } = useRunnerState(code);
 
+  const project = React.useMemo(() => {
+    const p = SqProject.create();
+    if (environment) {
+      p.setEnvironment(environment);
+    }
+    return p;
+  }, [environment]);
+
   const resultAndBindings = useSquiggle({
-    environment,
-    continues,
-    code: renderedCode,
+    code,
     project,
     jsImports: imports,
     executionId,
@@ -351,7 +355,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
   const errorLocations = getErrorLocations(resultAndBindings.result);
 
   const firstTab = vars.showEditor ? (
-    <div className="border border-slate-200" data-testid="squiggle-editor">
+    <div className="border border-slate-200">
       <CodeEditor
         errorLocations={errorLocations}
         value={code}
@@ -403,9 +407,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
         >
           {tabs}
         </div>
-        <div className="w-1/2 p-2 pl-4" data-testid="playground-result">
-          {squiggleChart}
-        </div>
+        <div className="w-1/2 p-2 pl-4">{squiggleChart}</div>
       </div>
     </div>
   );
@@ -1,7 +1,7 @@
 import React, { useContext } from "react";
 import { SqDistributionTag, SqValue, SqValueTag } from "@quri/squiggle-lang";
 import { NumberShower } from "../NumberShower";
-import { DistributionChart, defaultPlot, makePlot } from "../DistributionChart";
+import { DistributionChart } from "../DistributionChart";
 import { FunctionChart } from "../FunctionChart";
 import clsx from "clsx";
 import { VariableBox } from "./VariableBox";
@@ -104,7 +104,7 @@ export const ExpressionViewer: React.FC<Props> = ({ value, width }) => {
         {(settings) => {
           return (
             <DistributionChart
-              plot={defaultPlot(value.value)}
+              distribution={value.value}
              environment={settings.environment}
              {...settings.distributionPlotSettings}
              height={settings.height}
@@ -219,63 +219,61 @@ export const ExpressionViewer: React.FC<Props> = ({ value, width }) => {
         </VariableBox>
       );
     }
+    case SqValueTag.Plot:
+      const plot = value.value;
+      return (
+        <VariableBox
+          value={value}
+          heading="Plot"
+          renderSettingsMenu={({ onChange }) => {
+            let disableLogX = plot.getDistributions().some((x) => {
+              let pointSet = x.distribution.pointSet(
+                getMergedSettings(value.location).environment
+              );
+              return (
+                pointSet.tag === "Ok" &&
+                hasMassBelowZero(pointSet.value.asShape())
+              );
+            });
+            return (
+              <ItemSettingsMenu
+                value={value}
+                onChange={onChange}
+                disableLogX={disableLogX}
+                withFunctionSettings={false}
+              />
+            );
+          }}
+        >
+          {(settings) => {
+            return (
+              <DistributionChart
+                plot={plot}
+                environment={settings.environment}
+                {...settings.distributionPlotSettings}
+                height={settings.height}
+                width={width}
+              />
+            );
+          }}
+        </VariableBox>
+      );
     case SqValueTag.Record:
-      const plot = makePlot(value.value);
-      if (plot) {
-        return (
-          <VariableBox
-            value={value}
-            heading="Plot"
-            renderSettingsMenu={({ onChange }) => {
-              let disableLogX = plot.distributions.some((x) => {
-                let pointSet = x.distribution.pointSet(
-                  getMergedSettings(value.location).environment
-                );
-                return (
-                  pointSet.tag === "Ok" &&
-                  hasMassBelowZero(pointSet.value.asShape())
-                );
-              });
-              return (
-                <ItemSettingsMenu
-                  value={value}
-                  onChange={onChange}
-                  disableLogX={disableLogX}
-                  withFunctionSettings={false}
-                />
-              );
-            }}
-          >
-            {(settings) => {
-              return (
-                <DistributionChart
-                  plot={plot}
-                  environment={settings.environment}
-                  {...settings.distributionPlotSettings}
-                  height={settings.height}
-                  width={width}
-                />
-              );
-            }}
-          </VariableBox>
-        );
-      } else {
-        return (
-          <VariableList value={value} heading="Record">
-            {(_) =>
-              value.value
-                .entries()
-                .map(([key, r]) => (
-                  <ExpressionViewer
-                    key={key}
-                    value={r}
-                    width={width !== undefined ? width - 20 : width}
-                  />
-                ))
-            }
-          </VariableList>
-        );
-      }
+      return (
+        <VariableList value={value} heading="Record">
+          {(_) =>
+            value.value
+              .entries()
+              .map(([key, r]) => (
+                <ExpressionViewer
+                  key={key}
+                  value={r}
+                  width={width !== undefined ? width - 20 : width}
+                />
+              ))
+          }
+        </VariableList>
+      );
     case SqValueTag.Array:
       return (
         <VariableList value={value} heading="Array">
@@ -45,7 +45,7 @@ export const VariableBox: React.FC<VariableBoxProps> = ({
     : location.path.items[location.path.items.length - 1];
 
   return (
-    <div role={isTopLevel ? "status" : undefined}>
+    <div>
       <header className="inline-flex space-x-1">
         <Tooltip text={heading}>
           <span
@@ -4,18 +4,16 @@ import {
   SqProject,
   SqRecord,
   SqValue,
-  environment,
 } from "@quri/squiggle-lang";
 import { useEffect, useMemo } from "react";
 import { JsImports, jsImportsToSquiggleCode } from "../jsImports";
 import * as uuid from "uuid";
 
 type SquiggleArgs = {
-  environment?: environment;
   code: string;
   executionId?: number;
   jsImports?: JsImports;
-  project?: SqProject;
+  project: SqProject;
   continues?: string[];
   onChange?: (expr: SqValue | undefined, sourceName: string) => void;
 };
@@ -29,25 +27,15 @@ const importSourceName = (sourceName: string) => "imports-" + sourceName;
 const defaultContinues = [];
 
 export const useSquiggle = (args: SquiggleArgs): ResultAndBindings => {
-  const project = useMemo(() => {
-    if (args.project) {
-      return args.project;
-    } else {
-      const p = SqProject.create();
-      if (args.environment) {
-        p.setEnvironment(args.environment);
-      }
-      return p;
-    }
-  }, [args.project, args.environment]);
-
   const sourceName = useMemo(() => uuid.v4(), []);
 
-  const env = project.getEnvironment();
+  const env = args.project.getEnvironment();
   const continues = args.continues || defaultContinues;
 
   const result = useMemo(
     () => {
+      const project = args.project;
+
       project.setSource(sourceName, args.code);
       let fullContinues = continues;
       if (args.jsImports && Object.keys(args.jsImports).length) {
@@ -71,7 +59,7 @@ export const useSquiggle = (args: SquiggleArgs): ResultAndBindings => {
       args.executionId,
       sourceName,
       continues,
-      project,
+      args.project,
       env,
     ]
   );
@@ -87,11 +75,11 @@ export const useSquiggle = (args: SquiggleArgs): ResultAndBindings => {
 
   useEffect(() => {
     return () => {
-      project.removeSource(sourceName);
-      if (project.getSource(importSourceName(sourceName)))
-        project.removeSource(importSourceName(sourceName));
+      args.project.removeSource(sourceName);
+      if (args.project.getSource(importSourceName(sourceName)))
+        args.project.removeSource(importSourceName(sourceName));
     };
-  }, [project, sourceName]);
+  }, [args.project, sourceName]);
 
   return result;
 };
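
Since `project` is now a required argument, a caller that previously passed only `environment` has to build the project itself, exactly as the components above now do. A minimal sketch of that pattern; the wrapper hook name is invented for illustration, and the import path is the one SquiggleChart uses in this repo.

```typescript
import { useMemo } from "react";
import { SqProject, environment } from "@quri/squiggle-lang";
import { useSquiggle } from "../lib/hooks";

// Hypothetical convenience wrapper reproducing the old environment-only behaviour.
export function useSquiggleWithEnvironment(code: string, env?: environment) {
  const project = useMemo(() => {
    const p = SqProject.create();
    if (env) {
      p.setEnvironment(env);
    }
    return p;
  }, [env]);

  return useSquiggle({ code, project });
}
```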
Deleted file (all 83 lines removed):

@@ -1,83 +0,0 @@
-import * as yup from "yup";
-import {
-  SqValue,
-  SqValueTag,
-  SqDistribution,
-  result,
-  SqRecord,
-} from "@quri/squiggle-lang";
-
-export type LabeledDistribution = {
-  name: string;
-  distribution: SqDistribution;
-  color?: string;
-};
-
-export type Plot = {
-  distributions: LabeledDistribution[];
-};
-
-function error<a, b>(err: b): result<a, b> {
-  return { tag: "Error", value: err };
-}
-
-function ok<a, b>(x: a): result<a, b> {
-  return { tag: "Ok", value: x };
-}
-
-const schema = yup
-  .object()
-  .noUnknown()
-  .strict()
-  .shape({
-    distributions: yup
-      .array()
-      .required()
-      .of(
-        yup.object().required().shape({
-          name: yup.string().required(),
-          distribution: yup.mixed().required(),
-        })
-      ),
-  });
-
-type JsonObject =
-  | string
-  | { [key: string]: JsonObject }
-  | JsonObject[]
-  | SqDistribution;
-
-function toJson(val: SqValue): JsonObject {
-  if (val.tag === SqValueTag.String) {
-    return val.value;
-  } else if (val.tag === SqValueTag.Record) {
-    return toJsonRecord(val.value);
-  } else if (val.tag === SqValueTag.Array) {
-    return val.value.getValues().map(toJson);
-  } else if (val.tag === SqValueTag.Distribution) {
-    return val.value;
-  } else {
-    throw new Error("Could not parse object of type " + val.tag);
-  }
-}
-
-function toJsonRecord(val: SqRecord): JsonObject {
-  let recordObject: JsonObject = {};
-  val.entries().forEach(([key, value]) => (recordObject[key] = toJson(value)));
-  return recordObject;
-}
-
-export function parsePlot(record: SqRecord): result<Plot, string> {
-  try {
-    const plotRecord = schema.validateSync(toJsonRecord(record));
-    if (plotRecord.distributions) {
-      return ok({ distributions: plotRecord.distributions.map((x) => x) });
-    } else {
-      // I have no idea why yup's typings thinks this is possible
-      return error("no distributions field. Should never get here");
-    }
-  } catch (e) {
-    const message = e instanceof Error ? e.message : "Unknown error";
-    return error(message);
-  }
-}
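
For reference, the record shape the deleted `parsePlot` accepted (after `toJsonRecord` flattened the Squiggle record) is sketched below; on the plot-funct side this parsing appears to move into squiggle-lang behind `SqPlot.getDistributions()`. The `dist` placeholder is hypothetical.

```typescript
import { SqDistribution } from "@quri/squiggle-lang";

declare const dist: SqDistribution; // stand-in for a distribution value from the language

// Matches the yup schema above: a required `distributions` array of { name, distribution }.
const plotRecord = {
  distributions: [
    { name: "default", distribution: dist },
    // `color` was declared on LabeledDistribution but not listed in the schema
  ],
};
```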
Deleted file (all 55 lines removed):

@@ -1,55 +0,0 @@
-import { render, screen, waitFor, within } from "@testing-library/react";
-import userEvent from "@testing-library/user-event";
-import * as React from "react";
-import "@testing-library/jest-dom";
-import { SquigglePlayground } from "../src/index";
-
-test("Autorun is default", async () => {
-  render(<SquigglePlayground code="70*30" />);
-  await waitFor(() =>
-    expect(screen.getByTestId("playground-result")).toHaveTextContent("2100")
-  );
-});
-
-test("Autorun can be switched off", async () => {
-  const user = userEvent.setup();
-  render(<SquigglePlayground code="70*30" />);
-
-  expect(screen.getByTestId("autorun-controls")).toHaveTextContent("Autorun");
-
-  await waitFor(() =>
-    expect(screen.getByTestId("playground-result")).toHaveTextContent("2100")
-  );
-
-  await user.click(screen.getByText("Autorun")); // disable
-  expect(screen.getByTestId("autorun-controls")).toHaveTextContent("Paused");
-  expect(screen.getByTestId("autorun-controls")).not.toHaveTextContent(
-    "Autorun"
-  );
-
-  await user.click(screen.getByText("Paused")); // enable autorun again
-  expect(screen.getByTestId("autorun-controls")).toHaveTextContent("Autorun");
-
-  // we should replace the code here, but it's hard to update react-ace state via user events: https://github.com/securingsincity/react-ace/issues/923
-  // ...or replace react-ace with something else
-
-  // TODO:
-
-  /*
-  const editor = screen
-    .getByTestId("squiggle-editor")
-    .querySelector(".ace_editor") as HTMLElement;
-  editor.focus();
-  // await user.clear(editor);
-  await userEvent.paste("40*40"); // https://github.com/securingsincity/react-ace/issues/923#issuecomment-755502696
-  screen.debug(editor);
-
-  // this makes the tests slower, but it's hard to test otherwise that the code _didn't_ execute
-  await new Promise((r) => setTimeout(r, 300));
-  expect(screen.getByTestId("playground-result")).toHaveTextContent("2100"); // still the old value
-
-  await waitFor(() =>
-    expect(screen.getByTestId("playground-result")).toHaveTextContent("1600")
-  );
-  */
-});
@@ -1,14 +1,9 @@
-import { render, screen } from "@testing-library/react";
+import { render } from "@testing-library/react";
 import React from "react";
 import "@testing-library/jest-dom";
-import {
-  SquiggleChart,
-  SquiggleEditor,
-  SquigglePlayground,
-} from "../src/index";
-import { SqProject } from "@quri/squiggle-lang";
+import { SquiggleChart } from "../src/index";
 
-test("Chart logs nothing on render", async () => {
+test("Logs nothing on render", async () => {
   const { unmount } = render(<SquiggleChart code={"normal(0, 1)"} />);
   unmount();
 
@@ -16,38 +11,3 @@ test("Chart logs nothing on render", async () => {
   expect(console.warn).not.toBeCalled();
   expect(console.error).not.toBeCalled();
 });
-
-test("Editor logs nothing on render", async () => {
-  const { unmount } = render(<SquiggleEditor code={"normal(0, 1)"} />);
-  unmount();
-
-  expect(console.log).not.toBeCalled();
-  expect(console.warn).not.toBeCalled();
-  expect(console.error).not.toBeCalled();
-});
-
-test("Project dependencies work in editors", async () => {
-  const project = SqProject.create();
-
-  render(<SquiggleEditor code={"x = 1"} project={project} />);
-  const source = project.getSourceIds()[0];
-  const { container } = render(
-    <SquiggleEditor code={"x + 1"} project={project} continues={[source]} />
-  );
-  expect(container).toHaveTextContent("2");
-});
-
-test("Project dependencies work in playgrounds", async () => {
-  const project = SqProject.create();
-  project.setSource("depend", "x = 1");
-
-  render(
-    <SquigglePlayground
-      code={"x + 1"}
-      project={project}
-      continues={["depend"]}
-    />
-  );
-  // We must await here because SquigglePlayground loads results asynchronously
-  expect(await screen.findByRole("status")).toHaveTextContent("2");
-});
@@ -3,7 +3,7 @@ lib
 *.bs.js
 *.gen.tsx
 .nyc_output/
-coverage/
+_coverage/
 .cache/
 Reducer_Peggy_GeneratedParser.js
 ReducerProject_IncludeParser.js
@@ -32,29 +32,25 @@ describe("dotSubtract", () => {
   */
   Skip.test("mean of normal minus exponential (property)", () => {
     assert_(
-      property2(
-        float_(),
-        floatRange(1e-5, 1e5),
-        (mean, rate) => {
+      property2(float_(), floatRange(1e-5, 1e5), (mean, rate) => {
        // We limit ourselves to stdev=1 so that the integral is trivial
        let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
          ~env,
          mkNormal(mean, 1.0),
          mkExponential(rate),
        )
        let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
        // according to algebra or random variables,
        let meanAnalytical =
          mean -.
          SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
            "On trusted input this should never happen",
          )
        switch meanResult {
        | Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
        | Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
        }
-        },
-      ),
+      }),
     )
     pass
   })
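
The `meanAnalytical` this property test compares against is simply the difference of the two input means, using the standard fact that an exponential distribution with rate \( \lambda \) has mean \( 1/\lambda \):

$$
\text{meanAnalytical} \;=\; \mathbb{E}[\mathcal{N}(\mu,\,1)] \;-\; \mathbb{E}[\mathrm{Exp}(\lambda)] \;=\; \mu - \frac{1}{\lambda},
$$

with \( \mu = \texttt{mean} \) and \( \lambda = \texttt{rate} \). The property passes when the computed mean of the pointwise difference is within 1% relative error of this value, or when the operation reports `DivisionByZeroError`.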
@ -40,60 +40,51 @@ let algebraicPower = algebraicPower(~env)
|
||||||
|
|
||||||
describe("(Algebraic) addition of distributions", () => {
|
describe("(Algebraic) addition of distributions", () => {
|
||||||
describe("mean", () => {
|
describe("mean", () => {
|
||||||
test(
|
test("normal(mean=5) + normal(mean=20)", () => {
|
||||||
"normal(mean=5) + normal(mean=20)",
|
normalDist5
|
||||||
() => {
|
->algebraicAdd(normalDist20)
|
||||||
normalDist5
|
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||||
->algebraicAdd(normalDist20)
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn("Expected float", _)
|
||||||
|
->expect
|
||||||
|
->toBe(Some(2.5e1))
|
||||||
|
})
|
||||||
|
|
||||||
|
test("uniform(low=9, high=10) + beta(alpha=2, beta=5)", () => {
|
||||||
|
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
||||||
|
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
||||||
|
let received =
|
||||||
|
uniformDist
|
||||||
|
->algebraicAdd(betaDist)
|
||||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||||
->E.R2.fmap(run)
|
->E.R2.fmap(run)
|
||||||
->E.R2.fmap(toFloat)
|
->E.R2.fmap(toFloat)
|
||||||
->E.R.toExn("Expected float", _)
|
->E.R.toExn("Expected float", _)
|
||||||
->expect
|
switch received {
|
||||||
->toBe(Some(2.5e1))
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
},
|
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||||
)
|
// sometimes it works with ~digits=2.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean)
|
||||||
test(
|
}
|
||||||
"uniform(low=9, high=10) + beta(alpha=2, beta=5)",
|
})
|
||||||
() => {
|
test("beta(alpha=2, beta=5) + uniform(low=9, high=10)", () => {
|
||||||
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
||||||
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
||||||
let received =
|
let received =
|
||||||
uniformDist
|
betaDist
|
||||||
->algebraicAdd(betaDist)
|
->algebraicAdd(uniformDist)
|
||||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||||
->E.R2.fmap(run)
|
->E.R2.fmap(run)
|
||||||
->E.R2.fmap(toFloat)
|
->E.R2.fmap(toFloat)
|
||||||
->E.R.toExn("Expected float", _)
|
->E.R.toExn("Expected float", _)
|
||||||
switch received {
|
switch received {
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||||
// sometimes it works with ~digits=2.
|
// sometimes it works with ~digits=2.
|
||||||
| Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean)
|
| Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean)
|
||||||
}
|
}
|
||||||
},
|
})
|
||||||
)
|
|
||||||
test(
|
|
||||||
"beta(alpha=2, beta=5) + uniform(low=9, high=10)",
|
|
||||||
() => {
|
|
||||||
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
|
||||||
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
|
||||||
let received =
|
|
||||||
betaDist
|
|
||||||
->algebraicAdd(uniformDist)
|
|
||||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
|
||||||
->E.R2.fmap(run)
|
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toExn("Expected float", _)
|
|
||||||
switch received {
|
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
|
||||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
|
||||||
// sometimes it works with ~digits=2.
|
|
||||||
| Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean)
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
})
|
})
|
||||||
describe("pdf", () => {
|
describe("pdf", () => {
|
||||||
// TEST IS WRONG. SEE STDEV ADDITION EXPRESSION.
|
// TEST IS WRONG. SEE STDEV ADDITION EXPRESSION.
|
||||||
|
@ -131,282 +122,247 @@ describe("(Algebraic) addition of distributions", () => {
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
test(
|
test("(normal(mean=10) + normal(mean=10)).pdf(1.9e1)", () => {
|
||||||
"(normal(mean=10) + normal(mean=10)).pdf(1.9e1)",
|
let received =
|
||||||
() => {
|
normalDist20
|
||||||
let received =
|
->Ok
|
||||||
normalDist20
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
|
||||||
->Ok
|
->E.R2.fmap(run)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(run)
|
->E.R.toOption
|
||||||
->E.R2.fmap(toFloat)
|
->E.O.flatten
|
||||||
->E.R.toOption
|
let calculated =
|
||||||
->E.O.flatten
|
normalDist10
|
||||||
let calculated =
|
->algebraicAdd(normalDist10)
|
||||||
normalDist10
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
|
||||||
->algebraicAdd(normalDist10)
|
->E.R2.fmap(run)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(run)
|
->E.R.toOption
|
||||||
->E.R2.fmap(toFloat)
|
->E.O.flatten
|
||||||
->E.R.toOption
|
switch received {
|
||||||
->E.O.flatten
|
| None =>
|
||||||
switch received {
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
| None =>
|
->expect
|
||||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
->toBe("never")
|
||||||
->expect
|
| Some(x) =>
|
||||||
->toBe("never")
|
switch calculated {
|
||||||
| Some(x) =>
|
|
||||||
switch calculated {
|
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
|
||||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
test(
|
|
||||||
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)",
|
|
||||||
() => {
|
|
||||||
let received =
|
|
||||||
uniformDist
|
|
||||||
->algebraicAdd(betaDist)
|
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
|
|
||||||
->E.R2.fmap(run)
|
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toExn("Expected float", _)
|
|
||||||
switch received {
|
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
|
||||||
// sometimes it works with ~digits=4.
|
|
||||||
// This value was calculated by a python script
|
|
||||||
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
|
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
)
|
})
|
||||||
test(
|
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)", () => {
|
||||||
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)",
|
let received =
|
||||||
() => {
|
uniformDist
|
||||||
let received =
|
->algebraicAdd(betaDist)
|
||||||
betaDist
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
|
||||||
->algebraicAdd(uniformDist)
|
->E.R2.fmap(run)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(run)
|
->E.R.toExn("Expected float", _)
|
||||||
->E.R2.fmap(toFloat)
|
switch received {
|
||||||
->E.R.toExn("Expected float", _)
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
switch received {
|
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
// sometimes it works with ~digits=4.
|
||||||
// This is nondeterministic.
|
// This value was calculated by a python script
|
||||||
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
|
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
|
||||||
}
|
}
|
||||||
},
|
})
|
||||||
)
|
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)", () => {
|
||||||
|
let received =
|
||||||
|
betaDist
|
||||||
|
->algebraicAdd(uniformDist)
|
||||||
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn("Expected float", _)
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
|
||||||
|
}
|
||||||
|
})
|
||||||
})
|
})
|
||||||
describe("cdf", () => {
|
describe("cdf", () => {
|
||||||
testAll(
|
testAll("(normal(mean=5) + normal(mean=5)).cdf (imprecise)", list{6e0, 8e0, 1e1, 1.2e1}, x => {
|
||||||
"(normal(mean=5) + normal(mean=5)).cdf (imprecise)",
|
let received =
|
||||||
list{6e0, 8e0, 1e1, 1.2e1},
|
normalDist10
|
||||||
x => {
|
->Ok
|
||||||
let received =
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
|
||||||
normalDist10
|
->E.R2.fmap(run)
|
||||||
->Ok
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
|
->E.R.toOption
|
||||||
->E.R2.fmap(run)
|
->E.O.flatten
|
||||||
->E.R2.fmap(toFloat)
|
let calculated =
|
||||||
->E.R.toOption
|
normalDist5
|
||||||
->E.O.flatten
|
->algebraicAdd(normalDist5)
|
||||||
let calculated =
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
|
||||||
normalDist5
|
->E.R2.fmap(run)
|
||||||
->algebraicAdd(normalDist5)
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
|
->E.R.toOption
|
||||||
->E.R2.fmap(run)
|
->E.O.flatten
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toOption
|
|
||||||
->E.O.flatten
|
|
||||||
|
|
||||||
switch received {
|
switch received {
|
||||||
| None =>
|
| None =>
|
||||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
->expect
|
->expect
|
||||||
->toBe("never")
|
->toBe("never")
|
||||||
| Some(x) =>
|
| Some(x) =>
|
||||||
switch calculated {
|
switch calculated {
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
|
||||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
test(
|
|
||||||
"(normal(mean=10) + normal(mean=10)).cdf(1.25e1)",
|
|
||||||
() => {
|
|
||||||
let received =
|
|
||||||
normalDist20
|
|
||||||
->Ok
|
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
|
|
||||||
->E.R2.fmap(run)
|
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toOption
|
|
||||||
->E.O.flatten
|
|
||||||
let calculated =
|
|
||||||
normalDist10
|
|
||||||
->algebraicAdd(normalDist10)
|
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
|
|
||||||
->E.R2.fmap(run)
|
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toOption
|
|
||||||
->E.O.flatten
|
|
||||||
switch received {
|
|
||||||
| None =>
|
|
||||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
|
||||||
->expect
|
|
||||||
->toBe("never")
|
|
||||||
| Some(x) =>
|
|
||||||
switch calculated {
|
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
|
||||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
test(
|
|
||||||
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)",
|
|
||||||
() => {
|
|
||||||
let received =
|
|
||||||
uniformDist
|
|
||||||
->algebraicAdd(betaDist)
|
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
|
|
||||||
->E.R2.fmap(run)
|
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toExn("Expected float", _)
|
|
||||||
switch received {
|
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
|
||||||
// The value was calculated externally using a python script
|
|
||||||
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
|
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
)
|
})
|
||||||
test(
|
test("(normal(mean=10) + normal(mean=10)).cdf(1.25e1)", () => {
|
||||||
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)",
|
let received =
|
||||||
() => {
|
normalDist20
|
||||||
let received =
|
->Ok
|
||||||
betaDist
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
|
||||||
->algebraicAdd(uniformDist)
|
->E.R2.fmap(run)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(run)
|
->E.R.toOption
|
||||||
->E.R2.fmap(toFloat)
|
->E.O.flatten
|
||||||
->E.R.toExn("Expected float", _)
|
let calculated =
|
||||||
switch received {
|
normalDist10
|
||||||
|
->algebraicAdd(normalDist10)
|
||||||
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
switch received {
|
||||||
|
| None =>
|
||||||
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
|
->expect
|
||||||
|
->toBe("never")
|
||||||
|
| Some(x) =>
|
||||||
|
switch calculated {
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
|
||||||
// The value was calculated externally using a python script
|
|
||||||
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
|
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
)
|
})
|
||||||
|
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)", () => {
|
||||||
|
let received =
|
||||||
|
uniformDist
|
||||||
|
->algebraicAdd(betaDist)
|
||||||
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn("Expected float", _)
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||||
|
// The value was calculated externally using a python script
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)", () => {
|
||||||
|
let received =
|
||||||
|
betaDist
|
||||||
|
->algebraicAdd(uniformDist)
|
||||||
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn("Expected float", _)
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||||
|
// The value was calculated externally using a python script
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
|
||||||
|
}
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("inv", () => {
|
describe("inv", () => {
|
||||||
testAll(
|
testAll("(normal(mean=5) + normal(mean=5)).inv (imprecise)", list{5e-2, 4.2e-3, 9e-3}, x => {
|
||||||
"(normal(mean=5) + normal(mean=5)).inv (imprecise)",
|
let received =
|
||||||
list{5e-2, 4.2e-3, 9e-3},
|
normalDist10
|
||||||
x => {
|
->Ok
|
||||||
let received =
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
|
||||||
normalDist10
|
->E.R2.fmap(run)
|
||||||
->Ok
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
|
->E.R.toOption
|
||||||
->E.R2.fmap(run)
|
->E.O.flatten
|
||||||
->E.R2.fmap(toFloat)
|
let calculated =
|
||||||
->E.R.toOption
|
normalDist5
|
||||||
->E.O.flatten
|
->algebraicAdd(normalDist5)
|
||||||
let calculated =
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
|
||||||
normalDist5
|
->E.R2.fmap(run)
|
||||||
->algebraicAdd(normalDist5)
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
|
->E.R.toOption
|
||||||
->E.R2.fmap(run)
|
->E.O.flatten
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toOption
|
|
||||||
->E.O.flatten
|
|
||||||
|
|
||||||
switch received {
|
switch received {
|
||||||
| None =>
|
| None =>
|
||||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
->expect
|
->expect
|
||||||
->toBe("never")
|
->toBe("never")
|
||||||
| Some(x) =>
|
| Some(x) =>
|
||||||
switch calculated {
|
switch calculated {
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
|
||||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
test(
|
|
||||||
"(normal(mean=10) + normal(mean=10)).inv(1e-1)",
|
|
||||||
() => {
|
|
||||||
let received =
|
|
||||||
normalDist20
|
|
||||||
->Ok
|
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
|
|
||||||
->E.R2.fmap(run)
|
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toOption
|
|
||||||
->E.O.flatten
|
|
||||||
let calculated =
|
|
||||||
normalDist10
|
|
||||||
->algebraicAdd(normalDist10)
|
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
|
|
||||||
->E.R2.fmap(run)
|
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toOption
|
|
||||||
->E.O.flatten
|
|
||||||
switch received {
|
|
||||||
| None =>
|
|
||||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
|
||||||
->expect
|
|
||||||
->toBe("never")
|
|
||||||
| Some(x) =>
|
|
||||||
switch calculated {
|
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
|
||||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
test(
|
|
||||||
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)",
|
|
||||||
() => {
|
|
||||||
let received =
|
|
||||||
uniformDist
|
|
||||||
->algebraicAdd(betaDist)
|
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
|
|
||||||
->E.R2.fmap(run)
|
|
||||||
->E.R2.fmap(toFloat)
|
|
||||||
->E.R.toExn("Expected float", _)
|
|
||||||
switch received {
|
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
||||||
// sometimes it works with ~digits=2.
|
|
||||||
| Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0)
|
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
)
|
})
|
||||||
test(
|
test("(normal(mean=10) + normal(mean=10)).inv(1e-1)", () => {
|
||||||
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)",
|
let received =
|
||||||
() => {
|
normalDist20
|
||||||
let received =
|
->Ok
|
||||||
betaDist
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
|
||||||
->algebraicAdd(uniformDist)
|
->E.R2.fmap(run)
|
||||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
|
->E.R2.fmap(toFloat)
|
||||||
->E.R2.fmap(run)
|
->E.R.toOption
|
||||||
->E.R2.fmap(toFloat)
|
->E.O.flatten
|
||||||
->E.R.toExn("Expected float", _)
|
let calculated =
|
||||||
switch received {
|
normalDist10
|
||||||
|
->algebraicAdd(normalDist10)
|
||||||
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
switch received {
|
||||||
|
| None =>
|
||||||
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
|
->expect
|
||||||
|
->toBe("never")
|
||||||
|
| Some(x) =>
|
||||||
|
switch calculated {
|
||||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
||||||
// sometimes it works with ~digits=2.
|
|
||||||
| Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0)
|
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
)
|
})
|
||||||
|
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)", () => {
|
||||||
|
let received =
|
||||||
|
uniformDist
|
||||||
|
->algebraicAdd(betaDist)
|
||||||
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn("Expected float", _)
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||||
|
// sometimes it works with ~digits=2.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)", () => {
|
||||||
|
let received =
|
||||||
|
betaDist
|
||||||
|
->algebraicAdd(uniformDist)
|
||||||
|
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn("Expected float", _)
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||||
|
// sometimes it works with ~digits=2.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0)
|
||||||
|
}
|
||||||
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -87,22 +87,14 @@ describe("Means are invariant", () => {
  let testAddInvariant = (t1, t2) =>
    E.R.liftM2(testAdditionMean, t1, t2)->E.R.toExn("Means were not invariant", _)

  testAll("with two of the same distribution", distributions, dist => {
    testAddInvariant(dist, dist)
  })

  testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
    let (dist1, dist2) = dists
    testAddInvariant(dist1, dist2)
  })

  testAll(
    "with two different distributions in swapped order",

@ -124,22 +116,14 @@ describe("Means are invariant", () => {
  let testSubtractInvariant = (t1, t2) =>
    E.R.liftM2(testSubtractionMean, t1, t2)->E.R.toExn("Means were not invariant", _)

  testAll("with two of the same distribution", distributions, dist => {
    testSubtractInvariant(dist, dist)
  })

  testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
    let (dist1, dist2) = dists
    testSubtractInvariant(dist1, dist2)
  })

  testAll(
    "with two different distributions in swapped order",

@ -161,22 +145,14 @@ describe("Means are invariant", () => {
  let testMultiplicationInvariant = (t1, t2) =>
    E.R.liftM2(testMultiplicationMean, t1, t2)->E.R.toExn("Means were not invariant", _)

  testAll("with two of the same distribution", distributions, dist => {
    testMultiplicationInvariant(dist, dist)
  })

  testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
    let (dist1, dist2) = dists
    testMultiplicationInvariant(dist1, dist2)
  })

  testAll(
    "with two different distributions in swapped order",

@ -17,9 +17,10 @@ describe("klDivergence: continuous -> continuous -> float", () => {
    let answer =
      uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
    let prediction =
      uniformMakeR(
        lowPrediction,
        highPrediction,
      )->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
    // integral along the support of the answer of answer.pdf(x) times log of prediction.pdf(x) divided by answer.pdf(x) dx
    let analyticalKl = Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))
    let kl = E.R.liftJoin2(klDivergence, prediction, answer)

@ -182,9 +183,9 @@ describe("combineAlongSupportOfSecondArgument0", () => {
    let answer =
      uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
    let prediction =
      uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
        s,
      ))
    let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
    let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)

@ -3,7 +3,7 @@ open Expect
open TestHelpers

// TODO: use Normal.make (but preferably after teh new validation dispatch is in)
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))

describe("(Symbolic) normalize", () => {
  testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {

@ -47,7 +47,10 @@ describe("(Symbolic) mean", () => {
    tup => {
      let (low, medium, high) = tup
      let meanValue = run(
        FromDist(
          #ToFloat(#Mean),
          DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
        ),
      )
      meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
    },

@ -60,7 +63,7 @@ describe("(Symbolic) mean", () => {
    tup => {
      let (alpha, beta) = tup
      let meanValue = run(
        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
      )
      meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
    },

@ -81,8 +84,8 @@ describe("(Symbolic) mean", () => {
      let (mean, stdev) = tup
      let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
      let meanValue =
        betaDistribution->E.R2.fmap(d =>
          run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic))
        )
      switch meanValue {
      | Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)

@ -97,7 +100,7 @@ describe("(Symbolic) mean", () => {
    tup => {
      let (mu, sigma) = tup
      let meanValue = run(
        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
      )
      meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
    },

@ -109,7 +112,7 @@ describe("(Symbolic) mean", () => {
    tup => {
      let (low, high) = tup
      let meanValue = run(
        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
      )
      meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
    },

@ -33,18 +33,12 @@ describe("Bindings", () => {
    let value2 = Reducer_T.IEvNumber(5.)
    let extendedBindings = bindings->Bindings.extend->Bindings.set("value", value2)

    test("get on extended", () => {
      expect(extendedBindings->Bindings.get("value")) == Some(value2)
    })

    test("get on original", () => {
      expect(bindings->Bindings.get("value")) == Some(value)
    })
  })
})

@ -40,23 +40,14 @@ describe("Namespace", () => {

    let nsMerged = Namespace.mergeMany([ns, ns1, ns2])

    test("merge many 1", () => {
      expect(nsMerged->Namespace.get("x1")) == Some(x1)
    })
    test("merge many 2", () => {
      expect(nsMerged->Namespace.get("x4")) == Some(x4)
    })
    test("merge many 3", () => {
      expect(nsMerged->Namespace.get("value")) == Some(value)
    })
  })
})

@ -75,32 +75,29 @@ describe("Peggy to Expression", () => {
    testToExpression("false ? 1 : 0", "false ? (1) : (0)", ~v="0", ())
    testToExpression("true ? 1 : false ? 2 : 0", "true ? (1) : (false ? (2) : (0))", ~v="1", ()) // nested ternary
    testToExpression("false ? 1 : false ? 2 : 0", "false ? (1) : (false ? (2) : (0))", ~v="0", ()) // nested ternary
    describe("ternary bindings", () => {
      testToExpression(
        // expression binding
        "f(a) = a > 5 ? 1 : 0; f(6)",
        "f = {|a| {(larger)(a, 5) ? (1) : (0)}}; (f)(6)",
        ~v="1",
        (),
      )
      testToExpression(
        // when true binding
        "f(a) = a > 5 ? a : 0; f(6)",
        "f = {|a| {(larger)(a, 5) ? (a) : (0)}}; (f)(6)",
        ~v="6",
        (),
      )
      testToExpression(
        // when false binding
        "f(a) = a < 5 ? 1 : a; f(6)",
        "f = {|a| {(smaller)(a, 5) ? (1) : (a)}}; (f)(6)",
        ~v="6",
        (),
      )
    })
  })

  describe("if then else", () => {

@ -22,7 +22,7 @@ let expectEvalError = (code: string) =>
  Expression.BackCompatible.evaluateString(code)
  ->Reducer_Value.toStringResult
  ->expect
  ->toMatch("Error\(")

let testParseToBe = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))
let testDescriptionParseToBe = (desc, expr, answer) =>

@ -37,16 +37,14 @@ describe("eval", () => {
  test("index", () => expectEvalToBe("r = {a: 1}; r.a", "Ok(1)"))
  test("index", () => expectEvalToBe("r = {a: 1}; r.b", "Error(Record property not found: b)"))
  testEvalError("{a: 1}.b") // invalid syntax
  test("always the same property ending", () =>
    expectEvalToBe(
      `{
      a: 1,
      b: 2,
    }`,
      "Ok({a: 1,b: 2})",
    )
  )
})

@ -101,7 +99,7 @@ describe("stacktraces", () => {

    expect(
      error,
    )->toBe(`Error: There are function matches for add(), but with different arguments: [add(number, number)]; [add(date, duration)]; [add(duration, duration)]; [add(distribution, number)]; [add(number, distribution)]; [add(distribution, distribution)]
Stack trace:
  f at line 4, column 5
  g at line 6, column 12

@ -11,34 +11,32 @@ describe("ReducerProject Tutorial", () => {
  /*
Case "Running a single source".
*/
  test("run", () => {
    /* Let's start with running a single source and getting Result as well as the Bindings
    First you need to create a project. A project is a collection of sources.
    Project takes care of the dependencies between the sources, correct compilation and run order.
    You can run any source in the project. It will be compiled and run if it hasn't happened already; otherwise already existing results will be presented.
    The dependencies will be automatically compiled and run. So you don't need to worry about that in a multi source project.
    In summary you issue a run command on the whole project or on a specific source to ensure that there is a result for that source.
    */
    let project = Project.createProject()
    /* Every source has a name. This is used for debugging, dependencies and error messages. */
    project->Project.setSource("main", "1 + 2")
    /* Let's run "main" source. */
    project->Project.run("main")
    /* Now you have a result for "main" source.
    Running one by one is necessary for UI to navigate among the sources and to see the results by source.
    And you're free to run any source you want.
    You will look at the results of this source and you don't want to run the others if not required.
    */

    /* However, you could also run the whole project.
    If you have all the sources, you can always run the whole project.
    Dependencies and recompiling on demand will be taken care of by the project.
    */
    project->Project.runAll

    /* Either with run or runAll you executed the project.
    You can get the result of a specific source by calling getResult for that source.
    You can get the bindings of a specific source by calling getBindings for that source.
    If there is any runtime error, getResult will return the error.

@ -46,59 +44,49 @@ Case "Running a single source".
    Note that getResult returns None if the source has not been run.
    Getting None means you have forgotten to run the source.
    */
    let result = project->Project.getResult("main")
    let bindings = project->Project.getBindings("main")

    /* Let's display the result and bindings */
    (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
      ("Ok(3)", "{}")
    /* You've got 3 with empty bindings. */
  })

  test("run summary", () => {
    let project = Project.createProject()
    project->Project.setSource("main", "1 + 2")
    project->Project.runAll
    let result = project->Project.getResult("main")
    let bindings = project->Project.getBindings("main")
    /* Now you have external bindings and external result. */
    (
      result->Reducer_Value.toStringResult,
      bindings->Reducer_T.IEvRecord->Reducer_Value.toString,
    )->expect == ("Ok(3)", "{}")
  })

  test("run with an environment", () => {
    /* Running the source code like above allows you to set a custom environment */
    let project = Project.createProject()

    /* Optional. Set your custom environment anytime before running */
    project->Project.setEnvironment(Reducer_Context.defaultEnvironment)

    project->Project.setSource("main", "1 + 2")
    project->Project.runAll
    let result = project->Project.getResult("main")
    let _bindings = project->Project.getBindings("main")
    result->Reducer_Value.toStringResult->expect == "Ok(3)"
  })

  test("shortcut", () => {
    /* If you are running single source without includes and you don't need a custom environment, you can use the shortcut. */
    /* Examples above was to prepare you for the multi source tutorial. */
    let (result, bindings) = Project.evaluate("1+2")
    (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
      ("Ok(3)", "{}")
  })
})
})

@ -10,104 +10,95 @@ describe("ReducerProject Tutorial", () => {
  describe("Multi source", () => {
    /*
Case "Running multiple sources" */
    test("Chaining", () => {
      let project = Project.createProject()
      /* This time let's add 3 sources and chain them together */
      project->Project.setSource("source1", "x=1")

      project->Project.setSource("source2", "y=x+1")
      /* To run, source2 depends on source1 */
      project->Project.setContinues("source2", ["source1"])

      project->Project.setSource("source3", "z=y+1")
      /* To run, source3 depends on source2 */
      project->Project.setContinues("source3", ["source2"])

      /* Now we can run the project */
      project->Project.runAll

      /* And let's check the result and bindings of source3 */
      let result3 = project->Project.getResult("source3")
      let bindings3 = project->Project.getBindings("source3")

      (result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
        ("Ok(())", "{z: 3}")
    })

    test("Depending", () => {
      /* Instead of chaining the sources, we could have a dependency tree */
      /* The point here is that any source can depend on multiple sources */
      let project = Project.createProject()

      /* This time source1 and source2 are not depending on anything */
      project->Project.setSource("source1", "x=1")
      project->Project.setSource("source2", "y=2")

      project->Project.setSource("source3", "z=x+y")
      /* To run, source3 depends on source1 and source3 together */
      project->Project.setContinues("source3", ["source1", "source2"])

      /* Now we can run the project */
      project->Project.runAll

      /* And let's check the result and bindings of source3 */
      let result3 = project->Project.getResult("source3")
      let bindings3 = project->Project.getBindings("source3")

      (result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
        ("Ok(())", "{z: 3}")
    })

    test("Intro to including", () => {
      /* Though it would not be practical for a storybook,
      let's write the same project above with includes.
      You will see that parsing includes is setting the dependencies the same way as before. */
      let project = Project.createProject()

      /* This time source1 and source2 are not depending on anything */
      project->Project.setSource("source1", "x=1")
      project->Project.setSource("source2", "y=2")

      project->Project.setSource(
        "source3",
        `
        #include "source1"
        #include "source2"
        z=x+y`,
      )
      /* We need to parse the includes to set the dependencies */
      project->Project.parseIncludes("source3")

      /* Now we can run the project */
      project->Project.runAll

      /* And let's check the result and bindings of source3
      This time you are getting all the variables because we are including the other sources
      Behind the scenes parseIncludes is setting the dependencies */
      let result3 = project->Project.getResult("source3")
      let bindings3 = project->Project.getBindings("source3")

      (result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
        ("Ok(())", "{z: 3}")
      /*
      Doing it like this is too verbose for a storybook
      But I hope you have seen the relation of setContinues and parseIncludes */
      /*
      Dealing with includes needs more.
      - There are parse errors
      - There are cyclic includes
      - And the depended source1 and source2 is not already there in the project
      - If you knew the includes before hand there would not be point of the include directive.
      More on those on the next section. */
    })
  })
})

@ -24,106 +24,93 @@ Here we will finally proceed to a real life scenario. */
    )
    /* We need to parse includes after changing the source */
    project->Project.parseIncludes("main")
    test("getDependencies", () => {
      /* Parse includes has set the dependencies */
      project->Project.getDependencies("main")->expect == ["common"]
      /* If there were no includes than there would be no dependencies */
      /* However if there was a syntax error at includes then would be no dependencies also */
      /* Therefore looking at dependencies is not the right way to load includes */
      /* getDependencies does not distinguish between setContinues or parseIncludes */
    })
    test("getIncludes", () => {
      /* Parse includes has set the includes */
      switch project->Project.getIncludes("main") {
      | Ok(includes) => includes->expect == ["common"]
      | Error(err) => err->SqError.toString->fail
      }
      /* If the includes cannot be parsed then you get a syntax error.
      Otherwise you get the includes.
      If there is no syntax error then you can load that file and use setSource to add it to the project.
      And so on recursively... */
    })
    test("getDependents", () => {
      /* For any reason, you are able to query what other sources
      include or depend on the current source.
      But you don't need to use this to execute the projects.
      It is provided for completeness of information. */
      project->Project.getDependents("main")->expect == []
      /* Nothing is depending on or including main */
    })

    describe("Real Like", () => {
      /* Now let's look at recursive and possibly cyclic includes */
      /* There is no function provided to load the include files.
      Because we have no idea if will it be an ordinary function or will it use promises.
      Therefore one has to write a function to load sources recursively and and setSources
      while checking for dependencies */

      /* Let's make a dummy loader */
      let loadSource = (sourceName: string) =>
        switch sourceName {
        | "source1" => "x=1"
        | "source2" => `
          #include "source1"
          y=2`
        | "source3" => `
          #include "source2"
          z=3`
        | _ => `source ${sourceName} not found`->Js.Exn.raiseError
        }

      /* let's recursively load the sources */
      let rec loadIncludesRecursively = (project, sourceName, visited) => {
        if visited->Js.Array2.includes(sourceName) {
          /* Oh we have already visited this source. There is an include cycle */
          "Cyclic include ${sourceName}"->Js.Exn.raiseError
        } else {
          let newVisited = Js.Array2.copy(visited)
          let _ = newVisited->Js.Array2.push(sourceName)
          /* Let's parse the includes and dive into them */
          Project.parseIncludes(project, sourceName)
          let rIncludes = project->Project.getIncludes(sourceName)
          switch rIncludes {
          /* Maybe there is an include syntax error */
          | Error(err) => err->SqError.toString->Js.Exn.raiseError

          | Ok(includes) =>
            includes->Belt.Array.forEach(newIncludeName => {
              /* We have got one of the new includes.
              Let's load it and add it to the project */
              let newSource = loadSource(newIncludeName)
              project->Project.setSource(newIncludeName, newSource)
              /* The new source is loaded and added to the project. */
              /* Of course the new source might have includes too. */
              /* Let's recursively load them */
              project->loadIncludesRecursively(newIncludeName, newVisited)
            })
          }
        }
      }
      /* As we have a fake source loader and a recursive include handler,
      We can not set up a real project */

      /* * Here starts our real life project! * */

      let project = Project.createProject()

      project->Project.setSource(
        "main",
        `
        #include "source1"
        #include "source2"
        #include "source3"

@ -131,43 +118,37 @@ Here we will finally proceed to a real life scenario. */
        b = doubleX
        a
        `,
      )
      /* Setting source requires parsing and loading the includes recursively */
      project->loadIncludesRecursively("main", []) // Not visited yet

      /* Let's salt it more. Let's have another source in the project which also has includes */
      /* doubleX includes source1 which is eventually included by main as well */
      project->Project.setSource(
        "doubleX",
        `
        #include "source1"
        doubleX = x * 2
        `,
      )
      project->loadIncludesRecursively("doubleX", [])
      /* Remember, any time you set a source, you need to load includes recursively */

      /* As doubleX is not included by main, it is not loaded recursively.
      So we link it to the project as a dependency */
      project->Project.setContinues("main", ["doubleX"])

      /* Let's run the project */
      project->Project.runAll
      let result = project->Project.getResult("main")
      let bindings = project->Project.getBindings("main")
      /* And see the result and bindings.. */
      test("recursive includes", () => {
        (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
          ("Ok(6)", "{a: 6,b: 2}")
        /* Everything as expected */
      })
    })
  })

  describe("Includes myFile as myVariable", () => {

@ -182,20 +163,14 @@ Here we will finally proceed to a real life scenario. */
      `,
    )
    Project.parseIncludes(project, "main")
    test("getDependencies", () => {
      Project.getDependencies(project, "main")->expect == ["common"]
    })
    test("getIncludes", () => {
      switch Project.getIncludes(project, "main") {
      | Ok(includes) => includes->expect == ["common"]
      | Error(err) => err->SqError.toString->fail
      }
    })
  })
})

@ -30,9 +30,8 @@ describe("ReducerProject Tutorial", () => {
  })

  test("userResults", () => {
    let userResultsAsString = Belt.Array.map(userResults, aResult =>
      aResult->Reducer_Value.toStringResult
    )
    userResultsAsString->expect == ["Ok(2)", "Ok(4)", "Ok(6)", "Ok(8)", "Ok(10)"]
  })

@ -99,19 +99,15 @@ describe("FunctionRegistry Library", () => {
  })

  describe("Fn auto-testing", () => {
    testAll("tests of validity", examples, r => {
      expectEvalToBeOk(r)
    })

    testAll(
      "tests of type",
      E.A.to_list(
        FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(((fn, _)) =>
          E.O.isSome(fn.output)
        ),
      ),
      ((fn, example)) => {

@ -0,0 +1,20 @@
open Jest
open Reducer_TestHelpers

describe("Plot Library", () => {
  testEvalToBe(
    `Plot.dist({
      show: [{
        name: "normal",
        value: normal(0, 1)
      }, {
        name: "lognormal",
        value: 1 to 2
      }, {
        name: "constant",
        value: 3
      }]
    })`,
    "Ok(Plot showing normal,lognormal,constant)",
  )
})

@ -45,12 +45,12 @@ let toExtDist: option<DistributionTypes.genericDist> => DistributionTypes.genericDist
let unpackFloat = x => x->toFloat->toExtFloat
let unpackDist = y => y->toDist->toExtDist

let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))
let mkExponential = rate => DistributionTypes.Symbolic(#Exponential({rate: rate}))
let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low: low, high: high}))
let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local: local, scale: scale}))
let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
let mkDelta = x => DistributionTypes.Symbolic(#Float(x))

let normalMake = SymbolicDist.Normal.make

@ -25,6 +25,7 @@
  ],
  "suffix": ".bs.js",
  "namespace": true,
  "bs-dependencies": ["bisect_ppx"],
  "bs-dev-dependencies": [
    "@glennsl/rescript-jest",
    "rescript-fast-check",

@ -44,5 +45,8 @@
  "refmt": 3,
  "warnings": {
    "number": "+A-42-48-9-30-4"
  },
  "ppx-flags": [
    ["../../node_modules/bisect_ppx/ppx", "--exclude-files", ".*_test\\.res$$"]
  ]
}

@ -2,6 +2,9 @@
module.exports = {
  preset: "ts-jest",
  testEnvironment: "node",
  setupFilesAfterEnv: [
    "<rootDir>/../../node_modules/bisect_ppx/src/runtime/js/jest.bs.js",
  ],
  testPathIgnorePatterns: [
    ".*Fixtures.bs.js",
    "/node_modules/",

@ -22,8 +22,12 @@
    "test:rescript": "jest --modulePathIgnorePatterns=__tests__/TS/*",
    "test:watch": "jest --watchAll",
    "test:fnRegistry": "jest __tests__/SquiggleLibrary/SquiggleLibrary_FunctionRegistryLibrary_test.bs.js",
    "coverage:rescript:local": "rm -f *.coverage && yarn clean && BISECT_ENABLE=yes yarn build && yarn test:rescript && bisect-ppx-report html",
    "coverage:ts:local": "yarn clean && yarn build && nyc --reporter=lcov yarn test:ts",
    "coverage:rescript": "yarn clean && BISECT_ENABLE=yes yarn build:rescript && yarn test:rescript && bisect-ppx-report send-to Codecov",
    "coverage:ts": "yarn coverage:ts:local && codecov",
    "coverage": "yarn coverage:ts && yarn coverage:rescript",
    "coverage:local": "yarn coverage:ts:local && yarn coverage:rescript:local",
    "lint:rescript": "./lint.sh",
    "lint:prettier": "prettier --check .",
    "lint": "yarn lint:rescript && yarn lint:prettier",

@ -39,7 +43,7 @@
  ],
  "author": "Quantified Uncertainty Research Institute",
  "dependencies": {
    "@rescript/std": "^9.1.4",
    "@stdlib/stats": "^0.0.13",
    "jstat": "^1.9.5",
    "lodash": "^4.17.21",

@ -48,20 +52,24 @@
  },
  "devDependencies": {
    "@glennsl/rescript-jest": "^0.9.2",
    "@istanbuljs/nyc-config-typescript": "^1.0.2",
    "@types/jest": "^27.5.0",
    "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
    "bisect_ppx": "^2.7.1",
    "chalk": "^5.0.1",
    "codecov": "^3.8.3",
    "fast-check": "^3.1.4",
    "gentype": "^4.5.0",
    "jest": "^27.5.1",
    "moduleserve": "^0.9.1",
    "nyc": "^15.1.0",
    "peggy": "^2.0.1",
    "prettier": "^2.7.1",
    "reanalyze": "^2.23.0",
    "rescript": "^9.1.4",
    "rescript-fast-check": "^1.1.1",
    "rescript-js-map": "^1.1.0",
    "ts-jest": "^27.1.4",
    "ts-loader": "^9.4.1",
    "ts-node": "^10.9.1",
    "typescript": "^4.8.4",

packages/squiggle-lang/src/js/SqPlot.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
+import * as RSPlot from "../rescript/ForTS/ForTS_SquiggleValue/ForTS_SquiggleValue_Plot.gen";
+import { SqDistribution, wrapDistribution } from "./SqDistribution";
+import { SqValueLocation } from "./SqValueLocation";
+
+type T = RSPlot.squiggleValue_Plot;
+
+export type LabeledDistribution = {
+  name: string;
+  distribution: SqDistribution;
+};
+
+export class SqPlot {
+  constructor(private _value: T, public location: SqValueLocation) {}
+
+  getDistributions(): LabeledDistribution[] {
+    return this._value.distributions.map((v: RSPlot.labeledDistribution) => ({
+      ...v,
+      distribution: wrapDistribution(v.distribution),
+    }));
+  }
+
+  toString() {
+    return RSPlot.toString(this._value);
+  }
+}
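For orientation, here is a minimal TypeScript sketch of how a consumer might read the new plot value out of an evaluation result. It assumes the existing @quri/squiggle-lang TS surface (the SqValue union and SqValueTag enum exported from src/js/index.ts); only SqPlotValue, SqPlot.getDistributions() and LabeledDistribution come from this branch, the rest is illustrative.

import { SqValue, SqValueTag, LabeledDistribution } from "@quri/squiggle-lang";

// Extract the labeled distributions from a value, or an empty list if the
// value is not a plot. `value.value` is the SqPlot wrapper added in SqPlot.ts.
function plotDistributions(value: SqValue): LabeledDistribution[] {
  if (value.tag !== SqValueTag.Plot) {
    return [];
  }
  return value.value.getDistributions();
}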
@@ -4,6 +4,7 @@ import { wrapDistribution } from "./SqDistribution";
 import { SqLambda } from "./SqLambda";
 import { SqLambdaDeclaration } from "./SqLambdaDeclaration";
 import { SqRecord } from "./SqRecord";
+import { SqPlot } from "./SqPlot";
 import { SqArray } from "./SqArray";
 import { SqValueLocation } from "./SqValueLocation";

@@ -91,6 +92,14 @@ export class SqNumberValue extends SqAbstractValue {
 }
 }

+export class SqPlotValue extends SqAbstractValue {
+tag = Tag.Plot as const;
+
+get value() {
+return new SqPlot(this.valueMethod(RSValue.getPlot), this.location);
+}
+}
+
 export class SqRecordValue extends SqAbstractValue {
 tag = Tag.Record as const;

@@ -131,6 +140,7 @@ const tagToClass = {
 [Tag.Distribution]: SqDistributionValue,
 [Tag.Lambda]: SqLambdaValue,
 [Tag.Number]: SqNumberValue,
+[Tag.Plot]: SqPlotValue,
 [Tag.Record]: SqRecordValue,
 [Tag.String]: SqStringValue,
 [Tag.TimeDuration]: SqTimeDurationValue,
@@ -148,6 +158,7 @@ export type SqValue =
 | SqLambdaValue
 | SqNumberValue
 | SqRecordValue
+| SqPlotValue
 | SqStringValue
 | SqTimeDurationValue
 | SqVoidValue;
@@ -6,6 +6,7 @@ export { result } from "../rescript/ForTS/ForTS_Result_tag";
 export { SqDistribution, SqDistributionTag } from "./SqDistribution";
 export { SqDistributionError } from "./SqDistributionError";
 export { SqRecord } from "./SqRecord";
+export { SqPlot, LabeledDistribution } from "./SqPlot";
 export { SqLambda } from "./SqLambda";
 export { SqProject };
 export { SqValue, SqValueTag };
@@ -14,7 +15,7 @@ export {
 defaultEnvironment,
 } from "../rescript/ForTS/ForTS_Distribution/ForTS_Distribution.gen";
 export { SqError, SqFrame, SqLocation } from "./SqError";
-export { SqShape } from "./SqPointSetDist";
+export { SqShape, SqPoint } from "./SqPointSetDist";

 export { resultMap } from "./types";
@@ -141,7 +141,6 @@ let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
 Js.log2("Console log requested: ", dist)
 Dist(dist)
 }
-
 | #ToDist(Normalize) => dist->GenericDist.normalize->Dist
 | #ToScore(LogScore(answer, prior)) =>
 GenericDist.Score.logScore(~estimate=dist, ~answer, ~prior, ~env)
@@ -99,7 +99,6 @@ let toFloatOperation = (
 }
 }
 }
-
 | (#Stdev | #Variance | #Mode) as op =>
 switch t {
 | SampleSet(s) =>
@@ -130,7 +129,7 @@ let toPointSet = (
 SampleSetDist.toPointSetDist(
 ~samples=r,
 ~samplingInputs={
-sampleCount,
+sampleCount: sampleCount,
 outputXYPoints: xyPointLength,
 pointSetDistLength: xyPointLength,
 kernelWidth: None,
@@ -428,7 +427,6 @@ module AlgebraicCombination = {
 ~toSampleSetFn,
 )
 }
-
 | (None, AsMonteCarlo) =>
 StrategyCallOnValidatedInputs.monteCarlo(toSampleSetFn, arithmeticOperation, t1, t2)
 | (None, AsSymbolic) =>
@@ -445,7 +443,6 @@ module AlgebraicCombination = {
 )}`
 Error(RequestedStrategyInvalidError(errString))
 }
-
 | Some(convOp) => StrategyCallOnValidatedInputs.convolution(toPointSetFn, convOp, t1, t2)
 }
 }
@@ -69,7 +69,7 @@ let toDiscretePointMassesFromTriangulars = (
 ()
 }

-{n: n - 2, masses, means, variances}
+{n: n - 2, masses: masses, means: means, variances: variances}
 } else {
 for i in 1 to n - 2 {
 // area of triangle = width * height / 2
@@ -91,7 +91,7 @@ let toDiscretePointMassesFromTriangulars = (
 ) |> ignore
 ()
 }
-{n: n - 2, masses, means, variances}
+{n: n - 2, masses: masses, means: means, variances: variances}
 }
 }

@@ -184,7 +184,7 @@ let toDiscretePointMassesFromDiscrete = (s: PointSetTypes.xyShape): pointMassesW
 let means: array<float> = Belt.Array.makeBy(n, i => xs[i])
 let variances: array<float> = Belt.Array.makeBy(n, _ => 0.0)

-{n, masses, means, variances}
+{n: n, masses: masses, means: means, variances: variances}
 }

 type argumentPosition = First | Second
@@ -45,16 +45,16 @@ module Analysis = {
 let getShape = (t: t) => t.xyShape
 let interpolation = (t: t) => t.interpolation
 let make = (~interpolation=#Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
-xyShape,
+xyShape: xyShape,
-interpolation,
+interpolation: interpolation,
-integralSumCache,
+integralSumCache: integralSumCache,
-integralCache,
+integralCache: integralCache,
 }
 let shapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): t => {
 xyShape: fn(xyShape),
-interpolation,
+interpolation: interpolation,
-integralSumCache,
+integralSumCache: integralSumCache,
-integralCache,
+integralCache: integralCache,
 }
 let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
 let oShapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): option<
@@ -135,10 +135,10 @@ let shapeFn = (fn, t: t) => t |> getShape |> fn

 let updateIntegralSumCache = (integralSumCache, t: t): t => {
 ...t,
-integralSumCache,
+integralSumCache: integralSumCache,
 }

-let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache}
+let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache: integralCache}

 let sum = (
 ~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
@@ -4,14 +4,14 @@ open Distributions
 type t = PointSetTypes.discreteShape

 let make = (~integralSumCache=None, ~integralCache=None, xyShape): t => {
-xyShape,
+xyShape: xyShape,
-integralSumCache,
+integralSumCache: integralSumCache,
-integralCache,
+integralCache: integralCache,
 }
 let shapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): t => {
 xyShape: fn(xyShape),
-integralSumCache,
+integralSumCache: integralSumCache,
-integralCache,
+integralCache: integralCache,
 }
 let getShape = (t: t) => t.xyShape
 let oShapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): option<t> =>
@@ -63,12 +63,12 @@ let reduce = (

 let updateIntegralSumCache = (integralSumCache, t: t): t => {
 ...t,
-integralSumCache,
+integralSumCache: integralSumCache,
 }

 let updateIntegralCache = (integralCache, t: t): t => {
 ...t,
-integralCache,
+integralCache: integralCache,
 }

 /* This multiples all of the data points together and creates a new discrete distribution from the results.
@@ -4,10 +4,10 @@ open Distributions

 type t = PointSetTypes.mixedShape
 let make = (~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete): t => {
-continuous,
+continuous: continuous,
-discrete,
+discrete: discrete,
-integralSumCache,
+integralSumCache: integralSumCache,
-integralCache,
+integralCache: integralCache,
 }

 let totalLength = (t: t): int => {
@@ -35,7 +35,7 @@ let toDiscrete = ({discrete}: t) => Some(discrete)

 let updateIntegralCache = (integralCache, t: t): t => {
 ...t,
-integralCache,
+integralCache: integralCache,
 }

 let combinePointwise = (
@@ -79,8 +79,8 @@ module MixedPoint = {
 type t = mixedPoint
 let toContinuousValue = (t: t) => t.continuous
 let toDiscreteValue = (t: t) => t.discrete
-let makeContinuous = (continuous: float): t => {continuous, discrete: 0.0}
+let makeContinuous = (continuous: float): t => {continuous: continuous, discrete: 0.0}
-let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete}
+let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete: discrete}

 let fmap = (fn: float => float, t: t) => {
 continuous: fn(t.continuous),
@@ -7,7 +7,7 @@ module Normal = {
 type t = normal
 let make = (mean: float, stdev: float): result<symbolicDist, string> =>
 stdev > 0.0
-? Ok(#Normal({mean, stdev}))
+? Ok(#Normal({mean: mean, stdev: stdev}))
 : Error("Standard deviation of normal distribution must be larger than 0")
 let pdf = (x, t: t) => Jstat.Normal.pdf(x, t.mean, t.stdev)
 let cdf = (x, t: t) => Jstat.Normal.cdf(x, t.mean, t.stdev)
@@ -15,7 +15,7 @@ module Normal = {
 let from90PercentCI = (low, high) => {
 let mean = E.A.Floats.mean([low, high])
 let stdev = (high -. low) /. (2. *. normal95confidencePoint)
-#Normal({mean, stdev})
+#Normal({mean: mean, stdev: stdev})
 }
 let inv = (p, t: t) => Jstat.Normal.inv(p, t.mean, t.stdev)
 let sample = (t: t) => Jstat.Normal.sample(t.mean, t.stdev)
@@ -25,12 +25,12 @@ module Normal = {
 let add = (n1: t, n2: t) => {
 let mean = n1.mean +. n2.mean
 let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
-#Normal({mean, stdev})
+#Normal({mean: mean, stdev: stdev})
 }
 let subtract = (n1: t, n2: t) => {
 let mean = n1.mean -. n2.mean
 let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
-#Normal({mean, stdev})
+#Normal({mean: mean, stdev: stdev})
 }

 // TODO: is this useful here at all? would need the integral as well ...
@@ -38,7 +38,7 @@ module Normal = {
 let mean =
 (n1.mean *. n2.stdev ** 2. +. n2.mean *. n1.stdev ** 2.) /. (n1.stdev ** 2. +. n2.stdev ** 2.)
 let stdev = 1. /. (1. /. n1.stdev ** 2. +. 1. /. n2.stdev ** 2.)
-#Normal({mean, stdev})
+#Normal({mean: mean, stdev: stdev})
 }

 let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
@@ -88,7 +88,7 @@ module Cauchy = {
 type t = cauchy
 let make = (local, scale): result<symbolicDist, string> =>
 scale > 0.0
-? Ok(#Cauchy({local, scale}))
+? Ok(#Cauchy({local: local, scale: scale}))
 : Error("Cauchy distribution scale parameter must larger than 0.")
 let pdf = (x, t: t) => Jstat.Cauchy.pdf(x, t.local, t.scale)
 let cdf = (x, t: t) => Jstat.Cauchy.cdf(x, t.local, t.scale)
@@ -102,7 +102,7 @@ module Triangular = {
 type t = triangular
 let make = (low, medium, high): result<symbolicDist, string> =>
 low < medium && medium < high
-? Ok(#Triangular({low, medium, high}))
+? Ok(#Triangular({low: low, medium: medium, high: high}))
 : Error("Triangular values must be increasing order.")
 let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
 let cdf = (x, t: t) => Jstat.Triangular.cdf(x, t.low, t.high, t.medium)
@@ -116,7 +116,7 @@ module Beta = {
 type t = beta
 let make = (alpha, beta) =>
 alpha > 0.0 && beta > 0.0
-? Ok(#Beta({alpha, beta}))
+? Ok(#Beta({alpha: alpha, beta: beta}))
 : Error("Beta distribution parameters must be positive")
 let pdf = (x, t: t) => Jstat.Beta.pdf(x, t.alpha, t.beta)
 let cdf = (x, t: t) => Jstat.Beta.cdf(x, t.alpha, t.beta)
@@ -150,7 +150,7 @@ module Lognormal = {
 type t = lognormal
 let make = (mu, sigma) =>
 sigma > 0.0
-? Ok(#Lognormal({mu, sigma}))
+? Ok(#Lognormal({mu: mu, sigma: sigma}))
 : Error("Lognormal standard deviation must be larger than 0")
 let pdf = (x, t: t) => Jstat.Lognormal.pdf(x, t.mu, t.sigma)
 let cdf = (x, t: t) => Jstat.Lognormal.cdf(x, t.mu, t.sigma)
@@ -164,7 +164,7 @@ module Lognormal = {
 let logHigh = Js.Math.log(high)
 let mu = E.A.Floats.mean([logLow, logHigh])
 let sigma = (logHigh -. logLow) /. (2.0 *. normal95confidencePoint)
-#Lognormal({mu, sigma})
+#Lognormal({mu: mu, sigma: sigma})
 }
 let fromMeanAndStdev = (mean, stdev) => {
 // https://math.stackexchange.com/questions/2501783/parameters-of-a-lognormal-distribution
@@ -174,7 +174,7 @@ module Lognormal = {
 let meanSquared = mean ** 2.
 let mu = 2. *. Js.Math.log(mean) -. 0.5 *. Js.Math.log(variance +. meanSquared)
 let sigma = Js.Math.sqrt(Js.Math.log(variance /. meanSquared +. 1.))
-Ok(#Lognormal({mu, sigma}))
+Ok(#Lognormal({mu: mu, sigma: sigma}))
 } else {
 Error("Lognormal standard deviation must be larger than 0")
 }
@@ -184,14 +184,14 @@ module Lognormal = {
 // https://wikiless.org/wiki/Log-normal_distribution?lang=en#Multiplication_and_division_of_independent,_log-normal_random_variables
 let mu = l1.mu +. l2.mu
 let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
-#Lognormal({mu, sigma})
+#Lognormal({mu: mu, sigma: sigma})
 }
 let divide = (l1, l2) => {
 let mu = l1.mu -. l2.mu
 // We believe the ratiands will have covariance zero.
 // See here https://stats.stackexchange.com/questions/21735/what-are-the-mean-and-variance-of-the-ratio-of-two-lognormal-variables for details
 let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
-#Lognormal({mu, sigma})
+#Lognormal({mu: mu, sigma: sigma})
 }
 let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
 switch operation {
@@ -220,7 +220,7 @@ module Lognormal = {
 module Uniform = {
 type t = uniform
 let make = (low, high) =>
-high > low ? Ok(#Uniform({low, high})) : Error("High must be larger than low")
+high > low ? Ok(#Uniform({low: low, high: high})) : Error("High must be larger than low")

 let pdf = (x, t: t) => Jstat.Uniform.pdf(x, t.low, t.high)
 let cdf = (x, t: t) => Jstat.Uniform.cdf(x, t.low, t.high)
@@ -239,7 +239,9 @@ module Uniform = {
 module Logistic = {
 type t = logistic
 let make = (location, scale) =>
-scale > 0.0 ? Ok(#Logistic({location, scale})) : Error("Scale must be positive")
+scale > 0.0
+? Ok(#Logistic({location: location, scale: scale}))
+: Error("Scale must be positive")

 let pdf = (x, t: t) => Stdlib.Logistic.pdf(x, t.location, t.scale)
 let cdf = (x, t: t) => Stdlib.Logistic.cdf(x, t.location, t.scale)
@@ -283,7 +285,7 @@ module Gamma = {
 let make = (shape: float, scale: float) => {
 if shape > 0. {
 if scale > 0. {
-Ok(#Gamma({shape, scale}))
+Ok(#Gamma({shape: shape, scale: scale}))
 } else {
 Error("scale must be larger than 0")
 }
@@ -541,6 +543,6 @@ module T = {
 | _ =>
 let xs = interpolateXs(~xSelection, d, sampleCount)
 let ys = xs |> E.A.fmap(x => pdf(x, d))
-Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs, ys}))
+Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs: xs, ys: ys}))
 }
 }
@@ -23,7 +23,7 @@ let makeFn = (
 name: string,
 inputs: array<frType>,
 fn: array<Reducer_T.value> => result<Reducer_T.value, errorMessage>,
-) => makeFnMany(name, [{inputs, fn}])
+) => makeFnMany(name, [{inputs: inputs, fn: fn}])

 let library = [
 Make.ff2f(~name="add", ~fn=(x, y) => x +. y, ()), // infix + (see Reducer/Reducer_Peggy/helpers.ts)
@@ -62,7 +62,6 @@ let library = [
 let answer = Js.String2.concat(a, b)
 answer->Reducer_T.IEvString->Ok
 }
-
 | _ => Error(impossibleError)
 }
 }),
@@ -73,7 +72,6 @@ let library = [
 let _ = Js.Array2.pushMany(a, b)
 a->Reducer_T.IEvArray->Ok
 }
-
 | _ => Error(impossibleError)
 }
 }),
@@ -83,7 +81,6 @@ let library = [
 Js.log(value->Reducer_Value.toString)
 value->Ok
 }
-
 | _ => Error(impossibleError)
 }
 }),
@@ -93,7 +90,6 @@ let library = [
 Js.log(`${label}: ${value->Reducer_Value.toString}`)
 value->Ok
 }
-
 | _ => Error(impossibleError)
 }
 }),
@@ -135,13 +135,11 @@ module Integration = {
 let wrappedResult = result->Reducer_T.IEvNumber->Ok
 wrappedResult
 }
-
 | (Error(b), _) => Error(b)
 | (_, Error(b)) => Error(b)
 }
 resultWithOuterPoints
 }
-
 | Error(b) =>
 ("Integration error 2 in Danger.integrate. It's possible that your function doesn't return a number, try definining auxiliaryFunction(x) = mean(yourFunction(x)) and integrate auxiliaryFunction instead." ++
 "Original error: " ++
@@ -364,7 +362,6 @@ module DiminishingReturns = {
 result[indexOfBiggestDMR] = value
 Ok(result)
 }
-
 | Error(b) => Error(b)
 }

@@ -374,12 +371,10 @@ module DiminishingReturns = {
 }
 Ok(newAcc)
 }
-
 | Error(b) => Error(b)
 }
 newAccWrapped
 }
-
 | Error(b) => Error(b)
 }
 })
@@ -432,12 +427,10 @@ module DiminishingReturns = {
 )
 result
 }
-
 | Error(b) => Error(b)
 }
 result
 }
-
 | _ =>
 "Error in Danger.diminishingMarginalReturnsForTwoFunctions"
 ->SqError.Message.REOther
@@ -20,7 +20,6 @@ module Declaration = {
 ->E.A.R.firstErrorOrOpen
 ->E.R2.fmap(args => Reducer_T.IEvDeclaration(Declaration.make(lambda, args)))
 }
-
 | Error(r) => Error(r)
 | Ok(_) => Error(impossibleErrorString)
 }
@@ -140,7 +140,6 @@ module Old = {
 | Error(err) => error(err)
 }
 }
-
 | Some(IEvNumber(_))
 | Some(IEvDistribution(_)) =>
 switch parseDistributionArray(args) {
@@ -193,7 +192,6 @@ module Old = {
 }
 Helpers.toFloatFn(fn, dist, ~env)
 }
-
 | ("integralSum", [IEvDistribution(dist)]) => Helpers.toFloatFn(#IntegralSum, dist, ~env)
 | ("toString", [IEvDistribution(dist)]) => Helpers.toStringFn(ToString, dist, ~env)
 | ("sparkline", [IEvDistribution(dist)]) =>
packages/squiggle-lang/src/rescript/FR/FR_Plot.res (new file, 146 lines)
@@ -0,0 +1,146 @@
+open FunctionRegistry_Core
+open FunctionRegistry_Helpers
+
+let nameSpace = "Plot"
+
+module FnApp = {
+  type fnApp<'a> = {
+    result: Reducer_T.value => result<'a, SqError.Message.t>,
+    typeRequired: frType,
+  }
+
+  let fmap = (f: 'a => 'b, m: fnApp<'a>): fnApp<'b> => {
+    {
+      result: (a: Reducer_T.value) => E.R.fmap(f, m.result(a)),
+      typeRequired: m.typeRequired,
+    }
+  }
+
+  module Record = {
+    type t<'a> = {
+      result: Reducer_T.map => result<'a, SqError.Message.t>,
+      typesRequired: array<(string, frType)>,
+    }
+
+    let getField = (key: string, parser: fnApp<'a>): t<'a> => {
+      let func = (a: Reducer_T.map) =>
+        switch Belt.Map.String.get(a, key) {
+        | Some(x) => parser.result(x)
+        | None => Error(impossibleError)
+        }
+      {result: func, typesRequired: [(key, parser.typeRequired)]}
+    }
+
+    let merge = (m1: t<'a>, m2: t<'b>): t<('a, 'b)> => {
+      {
+        result: (a: Reducer_T.map) => E.R.merge(m1.result(a), m2.result(a)),
+        typesRequired: Belt.Array.concat(m1.typesRequired, m2.typesRequired),
+      }
+    }
+
+    let fmap = (f: 'a => 'b, m: t<'a>): t<'b> => {
+      {
+        result: (a: Reducer_T.map) => E.R.fmap(f, m.result(a)),
+        typesRequired: m.typesRequired,
+      }
+    }
+
+    let app = (m1: t<'a => 'b>, m2: t<'a>): t<'b> => {
+      {
+        result: (a: Reducer_T.map) =>
+          E.R.merge(m1.result(a), m2.result(a))->E.R2.fmap(((f, x)) => f(x)),
+        typesRequired: Belt.Array.concat(m1.typesRequired, m2.typesRequired),
+      }
+    }
+  }
+
+  let getString: fnApp<string> = {
+    let func = (a: Reducer_T.value) =>
+      switch a {
+      | IEvString(s) => Ok(s)
+      | _ => Error(impossibleError)
+      }
+    {result: func, typeRequired: FRTypeString}
+  }
+
+  let getArray = (child: fnApp<'a>): fnApp<array<'a>> => {
+    let func = (a: Reducer_T.value) =>
+      switch a {
+      | IEvArray(x) => x->E.A2.fmap(child.result)->E.A.R.firstErrorOrOpen
+      | _ => Error(impossibleError)
+      }
+    {result: func, typeRequired: FRTypeArray(child.typeRequired)}
+  }
+  let getRecord = (recMonad: Record.t<'a>): fnApp<'a> => {
+    let func = (a: Reducer_T.value) =>
+      switch a {
+      | IEvRecord(s) => recMonad.result(s)
+      | _ => Error(impossibleError)
+      }
+    {result: func, typeRequired: FRTypeRecord(recMonad.typesRequired)}
+  }
+
+  let getDistOrNumber: fnApp<GenericDist.t> = {
+    let func = (a: Reducer_T.value) =>
+      switch a {
+      | IEvDistribution(s) => Ok(s)
+      | IEvNumber(s) => Ok(GenericDist.fromFloat(s))
+      | _ => Error(impossibleError)
+      }
+    {result: func, typeRequired: FRTypeDistOrNumber}
+  }
+
+  let oneArgDef = (
+    name: string,
+    arg1: fnApp<'a>,
+    def: 'a => result<Reducer_T.value, SqError.Message.t>,
+  ): FnDefinition.t =>
+    FnDefinition.make(
+      ~name,
+      ~inputs=[arg1.typeRequired],
+      ~run=(inputs, _, _) => {
+        E.R.bind(arg1.result(inputs[0]), def)
+      },
+      (),
+    )
+}
+
+module Internals = {
+  let makeLabeledDistribution = (
+    name: string,
+    distribution: GenericDist.t,
+  ): Reducer_T.labeledDistribution => {name: name, distribution: distribution}
+
+  let getLabeledDistribution: FnApp.fnApp<Reducer_T.labeledDistribution> = {
+    makeLabeledDistribution
+    ->FnApp.Record.fmap(FnApp.Record.getField("name", FnApp.getString))
+    ->FnApp.Record.app(FnApp.Record.getField("value", FnApp.getDistOrNumber))
+    ->FnApp.getRecord
+  }
+
+  let makePlot = (show: array<Reducer_T.labeledDistribution>): Reducer_T.plotValue => {
+    distributions: show,
+  }
+
+  let parsePlotValue: FnApp.fnApp<Reducer_T.plotValue> = {
+    makePlot
+    ->FnApp.Record.fmap(FnApp.Record.getField("show", FnApp.getArray(getLabeledDistribution)))
+    ->FnApp.getRecord
+  }
+}
+
+let library = [
+  Function.make(
+    ~name="dist",
+    ~nameSpace,
+    ~requiresNamespace=true,
+    ~output=EvtPlot,
+    ~examples=[
+      `Plot.dist({show: [{name: "Control", value: 1 to 2}, {name: "Treatment", value: 1.5 to 2.5}]}) `,
+    ],
+    ~definitions=[
+      FnApp.oneArgDef("dist", Internals.parsePlotValue, (a: Reducer_T.plotValue) => Ok(IEvPlot(a))),
+    ],
+    (),
+  ),
+]
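A side note on the shape of FR_Plot.res: FnApp and FnApp.Record form a small applicative parser for function-registry records (getField / merge / fmap / app), used to pull the `{show: [{name, value}]}` argument apart while also accumulating the frType signature used for error messages. A rough TypeScript transliteration of that idea follows, with invented names and a simplified Result type, purely for illustration.

type Result<A> = { ok: true; value: A } | { ok: false; error: string };

type FieldParser<A> = {
  // parse one record, producing A or an error
  parse: (record: Record<string, unknown>) => Result<A>;
  // accumulated (field, type) requirements, kept for signatures and error messages
  typesRequired: [string, string][];
};

// getField builds a parser for a single named field (analogue of FnApp.Record.getField).
const getField = <A>(
  key: string,
  parseValue: (v: unknown) => Result<A>
): FieldParser<A> => ({
  parse: (record) =>
    key in record
      ? parseValue(record[key])
      : { ok: false, error: `missing field ${key}` },
  typesRequired: [[key, "unknown"]],
});

// merge runs two parsers over the same record and pairs their results,
// concatenating the type requirements (analogue of FnApp.Record.merge).
const merge = <A, B>(a: FieldParser<A>, b: FieldParser<B>): FieldParser<[A, B]> => ({
  parse: (record) => {
    const ra = a.parse(record);
    if (!ra.ok) return ra;
    const rb = b.parse(record);
    if (!rb.ok) return rb;
    return { ok: true, value: [ra.value, rb.value] };
  },
  typesRequired: [...a.typesRequired, ...b.typesRequired],
});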
@@ -19,7 +19,6 @@ let inputsToDist = (inputs: array<Reducer_T.value>, xyShapeToPointSetDist) => {
 | _ => impossibleError->SqError.Message.throw
 }
 }
-
 | _ => impossibleError->SqError.Message.throw
 }
 )
@@ -6,6 +6,7 @@ type error = SqError.t //use
 type squiggleValue_Declaration = ForTS_SquiggleValue_Declaration.squiggleValue_Declaration //use
 type squiggleValue_Distribution = ForTS_SquiggleValue_Distribution.squiggleValue_Distribution //use
 type squiggleValue_Lambda = ForTS_SquiggleValue_Lambda.squiggleValue_Lambda //use
+@genType type squiggleValue_Plot = Reducer_T.plotValue //use

 // Return values are kept as they are if they are JavaScript types.

@@ -30,6 +31,9 @@ external svtLambda_: string = "Lambda"
 @module("./ForTS_SquiggleValue_tag") @scope("squiggleValueTag")
 external svtNumber_: string = "Number"

+@module("./ForTS_SquiggleValue_tag") @scope("squiggleValueTag")
+external svtPlot_: string = "Plot"
+
 @module("./ForTS_SquiggleValue_tag") @scope("squiggleValueTag")
 external svtRecord_: string = "Record"

@@ -57,6 +61,7 @@ let getTag = (variant: squiggleValue): squiggleValueTag =>
 | IEvDistribution(_) => svtDistribution_->castEnum
 | IEvLambda(_) => svtLambda_->castEnum
 | IEvNumber(_) => svtNumber_->castEnum
+| IEvPlot(_) => svtPlot_->castEnum
 | IEvRecord(_) => svtRecord_->castEnum
 | IEvString(_) => svtString_->castEnum
 | IEvTimeDuration(_) => svtTimeDuration_->castEnum
@@ -122,6 +127,13 @@ let getNumber = (variant: squiggleValue): option<float> =>
 | _ => None
 }

+@genType
+let getPlot = (variant: squiggleValue): option<squiggleValue_Plot> =>
+switch variant {
+| IEvPlot(value) => value->Some
+| _ => None
+}
+
 @genType
 let getRecord = (variant: squiggleValue): option<squiggleValue_Record> =>
 switch variant {
(new file)
@@ -0,0 +1,6 @@
+type squiggleValue = ForTS_SquiggleValue.squiggleValue //use
+@genType type squiggleValue_Plot = ForTS_SquiggleValue.squiggleValue_Plot //re-export recursive type
+@genType type labeledDistribution = Reducer_T.labeledDistribution // use
+
+@genType
+let toString = (v: squiggleValue_Plot) => Reducer_Value.toStringPlot(v)
@@ -6,6 +6,7 @@ export enum squiggleValueTag {
 Distribution = "Distribution",
 Lambda = "Lambda",
 Number = "Number",
+Plot = "Plot",
 Record = "Record",
 String = "String",
 TimeDuration = "TimeDuration",
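Since Plot joins the tag enum, TypeScript switches over squiggleValueTag gain a new case. A small hedged illustration (the import path points at the generated ForTS_SquiggleValue_tag module; the descriptions are made up):

import { squiggleValueTag } from "./ForTS_SquiggleValue_tag";

function describeTag(tag: squiggleValueTag): string {
  switch (tag) {
    case squiggleValueTag.Plot:
      return "a plot of labeled distributions";
    case squiggleValueTag.Distribution:
      return "a single distribution";
    default:
      // the remaining tags stringify to their own names
      return tag;
  }
}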
@@ -1,4 +1,5 @@
 type internalExpressionValueType = Reducer_Value.internalExpressionValueType
+let valueTypeToString = Reducer_Value.valueTypeToString
 type errorMessage = SqError.Message.t

 /*
@@ -61,7 +62,6 @@ module FRType = {
 let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
 `{${r->E.A2.fmap(input)->E.A2.joinWith(", ")}}`
 }
-
 | FRTypeArray(r) => `list(${toString(r)})`
 | FRTypeLambda => `lambda`
 | FRTypeString => `string`
@@ -133,9 +133,9 @@ module FnDefinition = {
 }

 let make = (~name, ~inputs, ~run, ()): t => {
-name,
+name: name,
-inputs,
+inputs: inputs,
-run,
+run: run,
 }
 }

@@ -161,14 +161,14 @@ module Function = {
 ~isExperimental=false,
 (),
 ): t => {
-name,
+name: name,
-nameSpace,
+nameSpace: nameSpace,
-definitions,
+definitions: definitions,
-output,
+output: output,
 examples: examples->E.O2.default([]),
-isExperimental,
+isExperimental: isExperimental,
-requiresNamespace,
+requiresNamespace: requiresNamespace,
-description,
+description: description,
 }

 let toJson = (t: t): functionJson => {
@@ -204,19 +204,15 @@ module Registry = {
 fn.requiresNamespace ? [] : [def.name],
 ]->E.A.concatMany

-names->Belt.Array.reduce(
-acc,
-(acc, name) => {
-switch acc->Belt.Map.String.get(name) {
-| Some(fns) => {
-let _ = fns->Js.Array2.push(def) // mutates the array, no need to update acc
-acc
-}
-
-| None => acc->Belt.Map.String.set(name, [def])
-}
-},
-)
+names->Belt.Array.reduce(acc, (acc, name) => {
+switch acc->Belt.Map.String.get(name) {
+| Some(fns) => {
+let _ = fns->Js.Array2.push(def) // mutates the array, no need to update acc
+acc
+}
+| None => acc->Belt.Map.String.set(name, [def])
+}
+})
 })
 )
 }
@@ -250,7 +246,6 @@ module Registry = {
 | None => REOther(showNameMatchDefinitions())->Error
 }
 }
-
 | None => RESymbolNotFound(fnName)->Error
 }
 }
@@ -34,7 +34,6 @@ module Prepare = {
 let n2 = map->Belt.Map.String.getExn(arg2)
 Ok([n1, n2])
 }
-
 | _ => Error(impossibleErrorString)
 }

@@ -46,7 +45,6 @@ module Prepare = {
 let n3 = map->Belt.Map.String.getExn(arg3)
 Ok([n1, n2, n3])
 }
-
 | _ => Error(impossibleErrorString)
 }
 }
@@ -1,18 +1,19 @@
 let fnList = Belt.Array.concatMany([
 FR_Builtin.library,
+FR_Danger.library,
+FR_Date.library,
 FR_Dict.library,
 FR_Dist.library,
-FR_Danger.library,
 FR_Fn.library,
-FR_Sampleset.library,
-FR_List.library,
-FR_Number.library,
-FR_Pointset.library,
-FR_Scoring.library,
 FR_GenericDist.library,
-FR_Units.library,
-FR_Date.library,
+FR_List.library,
 FR_Math.library,
+FR_Number.library,
+FR_Plot.library,
+FR_Pointset.library,
+FR_Sampleset.library,
+FR_Scoring.library,
+FR_Units.library,
 ])

 let registry = FunctionRegistry_Core.Registry.make(fnList)

@@ -44,4 +44,4 @@ let removeResult = ({namespace} as bindings: t): t => {

 let locals = ({namespace}: t): Reducer_T.namespace => namespace

-let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace, parent: None}
+let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace: namespace, parent: None}
@@ -6,7 +6,7 @@ let createContext = (stdLib: Reducer_Namespace.t, environment: Reducer_T.environ
 {
 frameStack: list{},
 bindings: stdLib->Reducer_Bindings.fromNamespace->Reducer_Bindings.extend,
-environment,
+environment: environment,
 inFunction: None,
 }
 }
@@ -123,7 +123,6 @@ let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
 )
 (result, context)
 }
-
 | _ => RENotAFunction(lambda->Reducer_Value.toString)->throwFrom(expression, context)
 }
 }
@@ -23,8 +23,8 @@ let make = (): t => list{}

 let extend = (t: t, name: string, location: option<Reducer_Peggy_Parse.location>) =>
 t->Belt.List.add({
-name,
+name: name,
-location,
+location: location,
 })

 // this is useful for SyntaxErrors
@@ -43,10 +43,10 @@ let makeLambda = (

 FnLambda({
 // context: bindings,
-name,
+name: name,
 body: lambda,
-parameters,
+parameters: parameters,
-location,
+location: location,
 })
 }

@@ -54,8 +54,8 @@ let makeLambda = (
 let makeFFILambda = (name: string, body: Reducer_T.lambdaBody): t => FnBuiltin({
 // Note: current bindings could be accidentally exposed here through context (compare with native lambda implementation above, where we override them with local bindings).
 // But FunctionRegistry API is too limited for that to matter. Please take care not to violate that in the future by accident.
-body,
+body: body,
-name,
+name: name,
 })

 // this function doesn't scale to FunctionRegistry's polymorphic functions
@@ -113,7 +113,7 @@ let nodeToAST = (node: node) => {
 | _ => raise(UnsupportedPeggyNodeType(node["type"]))
 }

-{location: node["location"], content}
+{location: node["location"], content: content}
 }

 let nodeIdentifierToAST = (node: nodeIdentifier) => {
@@ -68,7 +68,7 @@ let rec fromNode = (node: Parse.node): expression => {
 }

 {
-ast,
+ast: ast,
-content,
+content: content,
 }
 }
@@ -9,10 +9,12 @@ type rec value =
 | IEvDistribution(DistributionTypes.genericDist)
 | IEvLambda(lambdaValue)
 | IEvNumber(float)
+| IEvPlot(plotValue)
 | IEvRecord(map)
 | IEvString(string)
 | IEvTimeDuration(float)
 | IEvVoid

 @genType.opaque and arrayValue = array<value>
 @genType.opaque and map = Belt.Map.String.t<value>
 and lambdaBody = (array<value>, context, reducerFn) => value
@@ -66,4 +68,12 @@ and context = {

 and reducerFn = (expression, context) => (value, context)

+@genType and plotValue = {distributions: array<labeledDistribution>}
+
+@genType
+and labeledDistribution = {
+name: string,
+distribution: DistributionTypes.genericDist,
+}
+
 let topFrameName = "<top>"
@@ -14,6 +14,7 @@ let rec toString = (aValue: T.value) =>
 | IEvDistribution(dist) => toStringDistribution(dist)
 | IEvLambda(lambdaValue) => toStringLambda(lambdaValue)
 | IEvNumber(aNumber) => toStringNumber(aNumber)
+| IEvPlot(aPlot) => toStringPlot(aPlot)
 | IEvRecord(aMap) => aMap->toStringRecord
 | IEvString(aString) => toStringString(aString)
 | IEvTimeDuration(t) => toStringTimeDuration(t)
@@ -35,6 +36,10 @@ and toStringLambda = (lambdaValue: T.lambdaValue) => {
 }
 }
 and toStringNumber = aNumber => Js.String.make(aNumber)
+and toStringPlot = aPlot => {
+let chartNames = E.A.fmap((x: Reducer_T.labeledDistribution) => x.name, aPlot.distributions)
+`Plot showing ${Js.Array2.toString(chartNames)}`
+}
 and toStringRecord = aMap => aMap->toStringMap
 and toStringString = aString => `'${aString}'`
 and toStringSymbol = aString => `:${aString}`
@@ -59,6 +64,7 @@ let toStringWithType = (aValue: T.value) =>
 | IEvDistribution(_) => `Distribution::${toString(aValue)}`
 | IEvLambda(_) => `Lambda::${toString(aValue)}`
 | IEvNumber(_) => `Number::${toString(aValue)}`
+| IEvPlot(_) => `Plot::${toString(aValue)}`
 | IEvRecord(_) => `Record::${toString(aValue)}`
 | IEvString(_) => `String::${toString(aValue)}`
 | IEvTimeDuration(_) => `Date::${toString(aValue)}`
@@ -91,6 +97,7 @@ type internalExpressionValueType =
 | EvtDistribution
 | EvtLambda
 | EvtNumber
+| EvtPlot
 | EvtRecord
 | EvtString
 | EvtTimeDuration
@@ -109,6 +116,7 @@ let valueToValueType = (value: T.value) =>
 | IEvDistribution(_) => EvtDistribution
 | IEvLambda(_) => EvtLambda
 | IEvNumber(_) => EvtNumber
+| IEvPlot(_) => EvtPlot
 | IEvRecord(_) => EvtRecord
 | IEvString(_) => EvtString
 | IEvTimeDuration(_) => EvtTimeDuration
@@ -129,6 +137,7 @@ let valueTypeToString = (valueType: internalExpressionValueType): string =>
 | EvtDistribution => `Distribution`
 | EvtLambda => `Lambda`
 | EvtNumber => `Number`
+| EvtPlot => `Plot`
 | EvtRecord => `Record`
 | EvtString => `String`
 | EvtTimeDuration => `Duration`
@ -216,7 +216,6 @@ let tryRunWithResult = (
|
||||||
project->setResult(sourceId, Error(error))
|
project->setResult(sourceId, Error(error))
|
||||||
Error(error)
|
Error(error)
|
||||||
}
|
}
|
||||||
|
|
||||||
| Ok(_prevResult) => {
|
| Ok(_prevResult) => {
|
||||||
project->doLinkAndRun(sourceId)
|
project->doLinkAndRun(sourceId)
|
||||||
project->getResultOption(sourceId)->Belt.Option.getWithDefault(rPrevResult)
|
project->getResultOption(sourceId)->Belt.Option.getWithDefault(rPrevResult)
|
||||||
|
|
|
@@ -6,7 +6,7 @@ type t = T.t

  let emptyItem = (sourceId: string): projectItem => {
  source: "",
- sourceId,
+ sourceId: sourceId,
  rawParse: None,
  expression: None,
  continuation: Reducer_Namespace.make(),
@@ -76,7 +76,7 @@ let resetIncludes = (r: t): t => {
  }

  let setSource = (r: t, source: T.sourceArgumentType): t =>
- {...r, source}->resetIncludes->touchSource
+ {...r, source: source}->resetIncludes->touchSource

  let setRawParse = (r: t, rawParse: T.rawParseArgumentType): t =>
  {...r, rawParse: Some(rawParse)}->touchRawParse
@@ -86,7 +86,7 @@ let setExpression = (r: t, expression: T.expressionArgumentType): t =>

  let setContinuation = (r: t, continuation: T.continuationArgumentType): t => {
  ...r,
- continuation,
+ continuation: continuation,
  }

  let setResult = (r: t, result: T.resultArgumentType): t => {
@@ -110,23 +110,24 @@ let getPastChain = (this: t): array<string> => {
  Js.Array2.concat(getDirectIncludes(this), getContinues(this))
  }

- let setContinues = (this: t, continues: array<string>): t => {...this, continues}->touchSource
+ let setContinues = (this: t, continues: array<string>): t =>
+ {...this, continues: continues}->touchSource

  let removeContinues = (this: t): t => {...this, continues: []}->touchSource

  let setIncludes = (this: t, includes: T.includesType): t => {
  ...this,
- includes,
+ includes: includes,
  }

  let setImportAsVariables = (this: t, includeAsVariables: T.importAsVariablesType): t => {
  ...this,
- includeAsVariables,
+ includeAsVariables: includeAsVariables,
  }

  let setDirectImports = (this: t, directIncludes: array<string>): t => {
  ...this,
- directIncludes,
+ directIncludes: directIncludes,
  }

  let parseIncludes = (this: t): t => {
@@ -143,9 +144,9 @@ let parseIncludes = (this: t): t => {
  ->Belt.Array.map(((_variable, file)) => file)
  {
  ...this,
- includes,
+ includes: includes,
- includeAsVariables,
+ includeAsVariables: includeAsVariables,
- directIncludes,
+ directIncludes: directIncludes,
  }
  }
  }

@@ -54,7 +54,6 @@ module Message = {
  }
  answer
  }
-
  | REMacroNotFound(macro) => `Macro not found: ${macro}`
  | RENotAFunction(valueString) => `${valueString} is not a function`
  | RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
@@ -94,8 +93,8 @@ type t = {
  exception SqException(t)

  let fromMessageWithFrameStack = (message: Message.t, frameStack: Reducer_FrameStack.t): t => {
- message,
+ message: message,
- frameStack,
+ frameStack: frameStack,
  }

  // this shouldn't be used much, since frame stack will be empty

@@ -18,7 +18,6 @@ let stdLib: Reducer_T.namespace = {
  | None => REArrayIndexNotFound("Array index not found", index)->SqError.Message.throw
  }
  }
-
  | [IEvRecord(dict), IEvString(sIndex)] =>
  switch Belt.Map.String.get(dict, sIndex) {
  | Some(value) => value

@@ -9,13 +9,13 @@ type declaration<'a> = {

  module ContinuousFloatArg = {
  let make = (min: float, max: float): arg => {
- Float({min, max})
+ Float({min: min, max: max})
  }
  }

  module ContinuousTimeArg = {
  let make = (min: Js.Date.t, max: Js.Date.t): arg => {
- Date({min, max})
+ Date({min: min, max: max})
  }
  }

@@ -33,7 +33,7 @@ module Arg = {
  }

  let make = (fn: 'a, args: array<arg>): declaration<'a> => {
- {fn, args}
+ {fn: fn, args: args}
  }

  let toString = (r: declaration<'a>, fnToString): string => {

@@ -85,8 +85,8 @@ module T = {
  }
  let square = mapX(x => x ** 2.0)
  let zip = ({xs, ys}: t) => Belt.Array.zip(xs, ys)
- let fromArray = ((xs, ys)): t => {xs, ys}
+ let fromArray = ((xs, ys)): t => {xs: xs, ys: ys}
- let fromArrays = (xs, ys): t => {xs, ys}
+ let fromArrays = (xs, ys): t => {xs: xs, ys: ys}
  let accumulateYs = (fn, p: t) => fromArray((p.xs, E.A.accumulate(fn, p.ys)))
  let concat = (t1: t, t2: t) => {
  let cxs = Array.concat(list{t1.xs, t2.xs})
@@ -142,7 +142,7 @@ module T = {
  }

  let make = (~xs: array<float>, ~ys: array<float>) => {
- let attempt: t = {xs, ys}
+ let attempt: t = {xs: xs, ys: ys}
  switch Validator.validate(attempt) {
  | Some(error) => Error(error)
  | None => Ok(attempt)
@@ -452,7 +452,6 @@ module PointwiseCombination = {
  let _ = Js.Array.push(fn(y1, y2), newYs)
  let _ = Js.Array.push(x, newXs)
  }
-
  | None => ()
  }
  }
@@ -559,7 +558,7 @@ module Range = {
  (xs[x + 1] -. xs[x]) *. ((ys[x] +. ys[x + 1]) /. 2.) +. cumulativeY[x], // dx // (1/2) * (avgY)
  )
  }
- Some({xs, ys: cumulativeY})
+ Some({xs: xs, ys: cumulativeY})
  }

  let derivative = mapYsBasedOnRanges(delta_y_over_delta_x)

@@ -332,7 +332,7 @@ truncateRight: (distribution, right: number) => distribution
  **Examples**

  ```javascript
- truncateRight(normal(5, 2), 6);
+ truncateLeft(normal(5, 2), 6);
  ```

  ### klDivergence

19 packages/website/docs/Api/Plot.md Normal file
@@ -0,0 +1,19 @@
+ ---
+ sidebar_position: 8
+ title: Plot
+ ---
+
+ Plot objects can be created to make plots of different kinds. If you wish to plot
+ multiple distributions simultaneously, you can use `Plot.dist`.
+
+ **Example**
+
+ ### dist
+
+ ```
+ Plot.dist({show: list({name: string, value: distribution|number})})
+ ```
+
+ ```js
+ Plot.dist({show: [{name: "normal", value: normal(0, 1)}, {name: "lognormal", value: 2 to 3}]})
+ ```

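For readers skimming the new page, here is a hedged sketch of how `Plot.dist` could sit inside a fuller Squiggle model. It follows the signature added above, but the variable names and distributions are illustrative rather than taken from this PR:

```js
// Hypothetical model: label two distributions and render them on a single chart.
prior = normal(5, 2)
forecast = 3 to 8 // Squiggle range syntax, as in the "2 to 3" example above
Plot.dist({
  show: [
    { name: "prior", value: prior },
    { name: "forecast", value: forecast }
  ]
})
```

Since a Squiggle program evaluates to its last expression, the plot value returned by `Plot.dist(...)` is what a rendering component would receive.
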
@@ -1,20 +0,0 @@
- ---
- title: Changelog
- ---
-
- ## 0.5.0
-
- - Performance improvements:
-   - Interpreter is now 5x-20x faster on code written in Squiggle
-   - SampleSet to PointSet conversions are 2x faster
-   - cdf function on SampleSets is 30x faster
-   - overall speedup is about 2x on average on real code written in Squiggle
- - 50% smaller bundle size for [@quri/squiggle-lang](https://www.npmjs.com/package/@quri/squiggle-lang); 20% smaller bundle size for [@quri/squiggle-components](https://www.npmjs.com/package/@quri/squiggle-components).
-
- ### Breaking changes
-
- Some rarely used math functions got removed or moved to the `Math` namespace.
-
- For example, `cos(x)` is now `Math.cos(x)`, and `atanh(x)` doesn't exist.
-
- If your code is now failing with `<function> is not defined` for anything from [this list](https://mathjs.org/docs/reference/functions.html), try adding `Math.` prefix first, and then complain on [Github issues](https://github.com/quantified-uncertainty/squiggle/issues).

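The migration the removed changelog text describes amounts to a one-line change in user code. A minimal before/after sketch (the model itself is hypothetical; only the `cos` to `Math.cos` rename comes from the changelog text above):

```js
// Before 0.5.0 (hypothetical): angle = cos(1.5)
// From 0.5.0 on, per the note above:
angle = Math.cos(1.5)
angle
```
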
@@ -3,11 +3,9 @@ title: How to import squiggle files into `.mdx` documents
  sidebar_position: 5
  ---

- :::caution Proof of concept
+ import { SquiggleEditorWithImportedBindings } from "../../src/components/SquiggleEditor";

- The following usage pattern is currently broken. We expect to bring it back in some form in 0.5.1 or 0.5.2 release.
+ _Proof of concept_

- :::
-
  ## Consider the following squiggle file

@@ -32,3 +30,10 @@ import { SquiggleEditorWithImportedBindings } from "../../src/components/Squiggl
  ```

  Notice, you need to wrap the export of `@quri/squiggle-components` in custom code for dynamicism, please view `packages/website/src/components/` in github for details.
+
+ Which would then look exactly like
+
+ <SquiggleEditorWithImportedBindings
+   defaultCode={"f(z)"}
+   bindingsImportUrl={"/estimates/demo.squiggle"}
+ />

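The hunk above tells readers to wrap the `@quri/squiggle-components` export in custom code for dynamism but only points at `packages/website/src/components/`. A hedged sketch of what such a wrapper might look like in a Docusaurus site follows; the component name and props come from the diff, while the `BrowserOnly` lazy-require pattern, the file name, and the exact export of `@quri/squiggle-components` are assumptions, not code from this PR:

```js
// Hypothetical packages/website/src/components/SquiggleEditor.jsx
import React from "react";
import BrowserOnly from "@docusaurus/BrowserOnly";

export function SquiggleEditorWithImportedBindings(props) {
  return (
    // Render only in the browser so the editor never runs during the static build/SSR.
    <BrowserOnly fallback={<div>Loading...</div>}>
      {() => {
        // Required lazily, so Docusaurus's node-side build never imports the package.
        const Lib = require("@quri/squiggle-components");
        const Editor = Lib.SquiggleEditorWithImportedBindings;
        return <Editor {...props} />;
      }}
    </BrowserOnly>
  );
}
```

With a wrapper along these lines, the `<SquiggleEditorWithImportedBindings defaultCode={...} bindingsImportUrl={...} />` usage shown in the added lines can be used directly inside an `.mdx` page.
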
@@ -61,11 +61,6 @@ const sidebars = {
  },
  ],
  },
- {
-   type: "doc",
-   id: "Changelog",
-   label: "Changelog",
- },
  ],

  // But you can create a sidebar manually

@@ -22,6 +22,7 @@
  "benchmark/**/*.bs.js",
  "src/rescript/**/*.js",
  "src/rescript/**/*.gen.tsx",
+ "../../node_modules/bisect_ppx/**/*.bs.js",
  "dist/**"
  ]
  },
@@ -30,10 +31,10 @@
  "outputs": []
  },
  "bundle": {
- "dependsOn": ["build"]
+ "dependsOn": ["^build", "build"]
  },
  "coverage": {
- "dependsOn": ["build"]
+ "cache": false
  }
  }
  }