Merge branch 'develop' into reducer-dev

Commit 7b5fd2b101
.github/workflows/ci.yml (vendored): 2 changes

@@ -68,6 +68,8 @@ jobs:
         working-directory: packages/squiggle-lang
     steps:
       - uses: actions/checkout@v2
+        with:
+          fetch-depth: 2
       - name: Install dependencies from monorepo level
         run: cd ../../ && yarn
       - name: Build rescript codebase

.github/workflows/codeql-analysis.yml (vendored): 6 changes

@@ -12,12 +12,6 @@
 name: "CodeQL"

 on:
-  push:
-    branches:
-      - master
-      - production
-      - staging
-      - develop
   schedule:
     - cron: "42 19 * * 0"

@@ -40,8 +40,6 @@ the packages can be found in `packages`.
 - `packages/website` is the main descriptive website for squiggle,
   it is hosted at `squiggle-language.com`.

-The playground depends on the components library which then depends on the language. This means that if you wish to work on the components library, you will need to build (no need to bundle) the language, and as of this writing playground doesn't really work.
-
 # Develop

 For any project in the repo, begin by running `yarn` in the top level

@@ -5,6 +5,8 @@

 This package contains the react components for squiggle. These can be used either as a library or hosted as a [storybook](https://storybook.js.org/).

+The `@quri/squiggle-components` package offers several components and utilities for people who want to embed Squiggle components into websites.
+
 # Usage in a `react` project

 For example, in a fresh `create-react-app` project

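For orientation while reading the diff, here is a minimal sketch of that embedding. It assumes `SquiggleChart` is exported from the package root, and the prop values are illustrative; the props correspond to the `SquiggleChartProps` interface changed later in this diff.

```jsx
import { SquiggleChart } from "@quri/squiggle-components";

export function Example() {
  // squiggleString is ordinary Squiggle source; showSummary is the prop
  // introduced by this commit, and showControls toggles the scale checkboxes.
  return (
    <SquiggleChart
      squiggleString="normal(5, 2) * beta(2, 3)"
      height={150}
      showSummary={true}
      showControls={false}
    />
  );
}
```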
@@ -1,6 +1,6 @@
 {
   "name": "@quri/squiggle-components",
-  "version": "0.2.19",
+  "version": "0.2.20",
   "license": "MIT",
   "dependencies": {
     "@quri/squiggle-lang": "^0.2.8",
@@ -10,14 +10,14 @@
     "react-ace": "^10.1.0",
     "react-dom": "^18.1.0",
     "react-use": "^17.3.2",
-    "react-vega": "^7.5.0",
+    "react-vega": "^7.5.1",
     "styled-components": "^5.3.5",
     "vega": "^5.22.1",
     "vega-embed": "^6.20.6",
     "vega-lite": "^5.2.0"
   },
   "devDependencies": {
-    "@babel/plugin-proposal-private-property-in-object": "^7.16.7",
+    "@babel/plugin-proposal-private-property-in-object": "^7.17.12",
     "@storybook/addon-actions": "^6.4.22",
     "@storybook/addon-essentials": "^6.4.22",
     "@storybook/addon-links": "^6.4.22",
@@ -28,12 +28,12 @@
     "@storybook/react": "^6.4.22",
     "@testing-library/jest-dom": "^5.16.4",
     "@testing-library/react": "^13.2.0",
-    "@testing-library/user-event": "^14.1.1",
+    "@testing-library/user-event": "^14.2.0",
     "@types/jest": "^27.5.0",
     "@types/lodash": "^4.14.182",
-    "@types/node": "^17.0.31",
-    "@types/react": "^18.0.3",
-    "@types/react-dom": "^18.0.2",
+    "@types/node": "^17.0.34",
+    "@types/react": "^18.0.9",
+    "@types/react-dom": "^18.0.4",
     "@types/styled-components": "^5.1.24",
     "@types/webpack": "^5.28.0",
     "cross-env": "^7.0.3",
@@ -43,9 +43,9 @@
     "tsconfig-paths-webpack-plugin": "^3.5.2",
     "typescript": "^4.6.3",
     "web-vitals": "^2.1.4",
-    "webpack": "^5.72.0",
+    "webpack": "^5.72.1",
     "webpack-cli": "^4.9.2",
-    "webpack-dev-server": "^4.8.1"
+    "webpack-dev-server": "^4.9.0"
   },
   "scripts": {
     "start": "cross-env REACT_APP_FAST_REFRESH=false && start-storybook -p 6006 -s public",

@@ -1,7 +1,11 @@
 import * as React from "react";
 import _ from "lodash";
-import type { Distribution } from "@quri/squiggle-lang";
-import { distributionErrorToString } from "@quri/squiggle-lang";
+import {
+  Distribution,
+  result,
+  distributionError,
+  distributionErrorToString,
+} from "@quri/squiggle-lang";
 import { Vega, VisualizationSpec } from "react-vega";
 import * as chartSpecification from "../vega-specs/spec-distributions.json";
 import { ErrorBox } from "./ErrorBox";
@@ -13,11 +17,14 @@ import {
   expYScale,
 } from "./DistributionVegaScales";
 import styled from "styled-components";
+import { NumberShower } from "./NumberShower";

 type DistributionChartProps = {
   distribution: Distribution;
   width?: number;
   height: number;
+  /** Whether to show a summary of means, stdev, percentiles etc */
+  showSummary: boolean;
   /** Whether to show the user graph controls (scale etc) */
   showControls?: boolean;
 };
@@ -25,6 +32,7 @@ type DistributionChartProps = {
 export const DistributionChart: React.FC<DistributionChartProps> = ({
   distribution,
   height,
+  showSummary,
   width,
   showControls = false,
 }: DistributionChartProps) => {
@@ -37,7 +45,7 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
       shape.value.continuous.some((x) => x.x <= 0) ||
       shape.value.discrete.some((x) => x.x <= 0);
     let spec = buildVegaSpec(isLogX, isExpY);
-    let widthProp = width ? width - 20 : size.width - 10;
+    let widthProp = width ? width : size.width;

     // Check whether we should disable the checkbox
     var logCheckbox = (
@@ -58,21 +66,22 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
     }

     var result = (
-      <div>
+      <ChartContainer width={widthProp + "px"}>
         <Vega
           spec={spec}
           data={{ con: shape.value.continuous, dis: shape.value.discrete }}
-          width={widthProp}
+          width={widthProp - 10}
           height={height}
           actions={false}
         />
+        {showSummary && <SummaryTable distribution={distribution} />}
         {showControls && (
           <div>
             {logCheckbox}
             <CheckBox label="Exp Y scale" value={isExpY} onChange={setExpY} />
           </div>
         )}
-      </div>
+      </ChartContainer>
     );
   } else {
     var result = (
@@ -87,6 +96,12 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
   return sized;
 };

+type ChartContainerProps = { width: string };
+
+let ChartContainer = styled.div<ChartContainerProps>`
+  width: ${(props) => props.width};
+`;
+
 function buildVegaSpec(isLogX: boolean, isExpY: boolean): VisualizationSpec {
   return {
     ...chartSpecification,
@@ -128,3 +143,90 @@ export const CheckBox = ({
     </span>
   );
 };
+
+type SummaryTableProps = {
+  distribution: Distribution;
+};
+
+const Table = styled.table`
+  margin-left: auto;
+  margin-right: auto;
+  border-collapse: collapse;
+  text-align: center;
+  border-style: hidden;
+`;
+
+const TableHead = styled.thead`
+  border-bottom: 1px solid rgb(141 149 167);
+`;
+
+const TableHeadCell = styled.th`
+  border-right: 1px solid rgb(141 149 167);
+  border-left: 1px solid rgb(141 149 167);
+  padding: 0.3em;
+`;
+
+const TableBody = styled.tbody``;
+
+const Row = styled.tr``;
+
+const Cell = styled.td`
+  padding: 0.3em;
+  border-right: 1px solid rgb(141 149 167);
+  border-left: 1px solid rgb(141 149 167);
+`;
+
+const SummaryTable: React.FC<SummaryTableProps> = ({
+  distribution,
+}: SummaryTableProps) => {
+  let mean = distribution.mean();
+  let p5 = distribution.inv(0.05);
+  let p10 = distribution.inv(0.1);
+  let p25 = distribution.inv(0.25);
+  let p50 = distribution.inv(0.5);
+  let p75 = distribution.inv(0.75);
+  let p90 = distribution.inv(0.9);
+  let p95 = distribution.inv(0.95);
+  let unwrapResult = (
+    x: result<number, distributionError>
+  ): React.ReactNode => {
+    if (x.tag === "Ok") {
+      return <NumberShower number={x.value} />;
+    } else {
+      return (
+        <ErrorBox heading="Distribution Error">
+          {distributionErrorToString(x.value)}
+        </ErrorBox>
+      );
+    }
+  };
+
+  return (
+    <Table>
+      <TableHead>
+        <Row>
+          <TableHeadCell>{"Mean"}</TableHeadCell>
+          <TableHeadCell>{"5%"}</TableHeadCell>
+          <TableHeadCell>{"10%"}</TableHeadCell>
+          <TableHeadCell>{"25%"}</TableHeadCell>
+          <TableHeadCell>{"50%"}</TableHeadCell>
+          <TableHeadCell>{"75%"}</TableHeadCell>
+          <TableHeadCell>{"90%"}</TableHeadCell>
+          <TableHeadCell>{"95%"}</TableHeadCell>
+        </Row>
+      </TableHead>
+      <TableBody>
+        <Row>
+          <Cell>{unwrapResult(mean)}</Cell>
+          <Cell>{unwrapResult(p5)}</Cell>
+          <Cell>{unwrapResult(p10)}</Cell>
+          <Cell>{unwrapResult(p25)}</Cell>
+          <Cell>{unwrapResult(p50)}</Cell>
+          <Cell>{unwrapResult(p75)}</Cell>
+          <Cell>{unwrapResult(p90)}</Cell>
+          <Cell>{unwrapResult(p95)}</Cell>
+        </Row>
+      </TableBody>
+    </Table>
+  );
+};

@@ -1,18 +1,26 @@
 import * as React from "react";
 import _ from "lodash";
 import type { Spec } from "vega";
-import type { Distribution, errorValue, result } from "@quri/squiggle-lang";
+import {
+  Distribution,
+  result,
+  lambdaValue,
+  environment,
+  runForeign,
+  squiggleExpression,
+  errorValue,
+  errorValueToString,
+} from "@quri/squiggle-lang";
 import { createClassFromSpec } from "react-vega";
 import * as percentilesSpec from "../vega-specs/spec-percentiles.json";
 import { DistributionChart } from "./DistributionChart";
+import { NumberShower } from "./NumberShower";
 import { ErrorBox } from "./ErrorBox";

 let SquigglePercentilesChart = createClassFromSpec({
   spec: percentilesSpec as Spec,
 });

-type distPlusFn = (a: number) => result<Distribution, errorValue>;
-
 const _rangeByCount = (start: number, stop: number, count: number) => {
   const step = (stop - start) / (count - 1);
   const items = _.range(start, stop, step);
@@ -27,89 +35,177 @@ function unwrap<a, b>(x: result<a, b>): a {
     throw Error("FAILURE TO UNWRAP");
   }
 }
+
+export type FunctionChartSettings = {
+  start: number;
+  stop: number;
+  count: number;
+};

-function mapFilter<a, b>(xs: a[], f: (x: a) => b | undefined): b[] {
-  let initial: b[] = [];
-  return xs.reduce((previous, current) => {
-    let value: b | undefined = f(current);
-    if (value !== undefined) {
-      return previous.concat([value]);
-    } else {
-      return previous;
-    }
-  }, initial);
+interface FunctionChartProps {
+  fn: lambdaValue;
+  chartSettings: FunctionChartSettings;
+  environment: environment;
 }

-export const FunctionChart: React.FC<{
-  distPlusFn: distPlusFn;
-  diagramStart: number;
-  diagramStop: number;
-  diagramCount: number;
-}> = ({ distPlusFn, diagramStart, diagramStop, diagramCount }) => {
+type percentiles = {
+  x: number;
+  p1: number;
+  p5: number;
+  p10: number;
+  p20: number;
+  p30: number;
+  p40: number;
+  p50: number;
+  p60: number;
+  p70: number;
+  p80: number;
+  p90: number;
+  p95: number;
+  p99: number;
+}[];
+
+type errors = _.Dictionary<
+  {
+    x: number;
+    value: string;
+  }[]
+>;
+
+type point = { x: number; value: result<Distribution, string> };
+
+let getPercentiles = ({ chartSettings, fn, environment }) => {
+  let chartPointsToRender = _rangeByCount(
+    chartSettings.start,
+    chartSettings.stop,
+    chartSettings.count
+  );
+
+  let chartPointsData: point[] = chartPointsToRender.map((x) => {
+    let result = runForeign(fn, [x], environment);
+    if (result.tag === "Ok") {
+      if (result.value.tag == "distribution") {
+        return { x, value: { tag: "Ok", value: result.value.value } };
+      } else {
+        return {
+          x,
+          value: {
+            tag: "Error",
+            value:
+              "Cannot currently render functions that don't return distributions",
+          },
+        };
+      }
+    } else {
+      return {
+        x,
+        value: { tag: "Error", value: errorValueToString(result.value) },
+      };
+    }
+  });
+
+  let initialPartition: [
+    { x: number; value: Distribution }[],
+    { x: number; value: string }[]
+  ] = [[], []];
+
+  let [functionImage, errors] = chartPointsData.reduce((acc, current) => {
+    if (current.value.tag === "Ok") {
+      acc[0].push({ x: current.x, value: current.value.value });
+    } else {
+      acc[1].push({ x: current.x, value: current.value.value });
+    }
+    return acc;
+  }, initialPartition);
+
+  let groupedErrors: errors = _.groupBy(errors, (x) => x.value);
+
+  let percentiles: percentiles = functionImage.map(({ x, value }) => {
+    // We convert it to to a pointSet distribution first, so that in case its a sample set
+    // distribution, it doesn't internally convert it to a pointSet distribution for every
+    // single inv() call.
+    let toPointSet: Distribution = unwrap(value.toPointSet());
+    return {
+      x: x,
+      p1: unwrap(toPointSet.inv(0.01)),
+      p5: unwrap(toPointSet.inv(0.05)),
+      p10: unwrap(toPointSet.inv(0.1)),
+      p20: unwrap(toPointSet.inv(0.2)),
+      p30: unwrap(toPointSet.inv(0.3)),
+      p40: unwrap(toPointSet.inv(0.4)),
+      p50: unwrap(toPointSet.inv(0.5)),
+      p60: unwrap(toPointSet.inv(0.6)),
+      p70: unwrap(toPointSet.inv(0.7)),
+      p80: unwrap(toPointSet.inv(0.8)),
+      p90: unwrap(toPointSet.inv(0.9)),
+      p95: unwrap(toPointSet.inv(0.95)),
+      p99: unwrap(toPointSet.inv(0.99)),
+    };
+  });
+
+  return { percentiles, errors: groupedErrors };
+};
+
+export const FunctionChart: React.FC<FunctionChartProps> = ({
+  fn,
+  chartSettings,
+  environment,
+}: FunctionChartProps) => {
   let [mouseOverlay, setMouseOverlay] = React.useState(0);
-  function handleHover(...args) {
-    setMouseOverlay(args[1]);
+  function handleHover(_name: string, value: unknown) {
+    setMouseOverlay(value as number);
   }
   function handleOut() {
     setMouseOverlay(NaN);
   }
   const signalListeners = { mousemove: handleHover, mouseout: handleOut };
-  let mouseItem = distPlusFn(mouseOverlay);
+  let mouseItem: result<squiggleExpression, errorValue> = !!mouseOverlay
+    ? runForeign(fn, [mouseOverlay], environment)
+    : {
+        tag: "Error",
+        value: {
+          tag: "REExpectedType",
+          value: "Hover x-coordinate returned NaN. Expected a number.",
+        },
+      };
   let showChart =
-    mouseItem.tag === "Ok" ? (
+    mouseItem.tag === "Ok" && mouseItem.value.tag == "distribution" ? (
       <DistributionChart
-        distribution={mouseItem.value}
+        distribution={mouseItem.value.value}
         width={400}
         height={140}
+        showSummary={false}
       />
     ) : (
       <></>
     );
-  let data1 = _rangeByCount(diagramStart, diagramStop, diagramCount);
-  let valueData = mapFilter(data1, (x) => {
-    let result = distPlusFn(x);
-    if (result.tag === "Ok") {
-      return { x: x, value: result.value };
-    }
-  }).map(({ x, value }) => {
-    return {
-      x: x,
-      p1: unwrap(value.inv(0.01)),
-      p5: unwrap(value.inv(0.05)),
-      p10: unwrap(value.inv(0.12)),
-      p20: unwrap(value.inv(0.2)),
-      p30: unwrap(value.inv(0.3)),
-      p40: unwrap(value.inv(0.4)),
-      p50: unwrap(value.inv(0.5)),
-      p60: unwrap(value.inv(0.6)),
-      p70: unwrap(value.inv(0.7)),
-      p80: unwrap(value.inv(0.8)),
-      p90: unwrap(value.inv(0.9)),
-      p95: unwrap(value.inv(0.95)),
-      p99: unwrap(value.inv(0.99)),
-    };
-  });

-  let errorData = mapFilter(data1, (x) => {
-    let result = distPlusFn(x);
-    if (result.tag === "Error") {
-      return { x: x, error: result.value };
-    }
-  });
-  let error2 = _.groupBy(errorData, (x) => x.error);
+  let getPercentilesMemoized = React.useMemo(
+    () => getPercentiles({ chartSettings, fn, environment }),
+    [environment, fn]
+  );
   return (
     <>
       <SquigglePercentilesChart
-        data={{ facet: valueData }}
+        data={{ facet: getPercentilesMemoized.percentiles }}
         actions={false}
         signalListeners={signalListeners}
       />
       {showChart}
-      {_.keysIn(error2).map((k) => (
-        <ErrorBox heading={k}>
-          {`Values: [${error2[k].map((r) => r.x.toFixed(2)).join(",")}]`}
-        </ErrorBox>
+      {_.entries(getPercentilesMemoized.errors).map(
+        ([errorName, errorPoints]) => (
+          <ErrorBox key={errorName} heading={errorName}>
+            Values:{" "}
+            {errorPoints
+              .map((r, i) => <NumberShower key={i} number={r.x} />)
+              .reduce((a, b) => (
+                <>
+                  {a}, {b}
+                </>
               ))}
+          </ErrorBox>
+        )
+      )}
     </>
   );
 };

@@ -6,14 +6,16 @@ import {
   errorValueToString,
   squiggleExpression,
   bindings,
-  samplingParams,
+  environment,
   jsImports,
   defaultImports,
   defaultBindings,
+  defaultEnvironment,
 } from "@quri/squiggle-lang";
 import { NumberShower } from "./NumberShower";
 import { DistributionChart } from "./DistributionChart";
 import { ErrorBox } from "./ErrorBox";
+import { FunctionChart, FunctionChartSettings } from "./FunctionChart";

 const variableBox = {
   Component: styled.div`
@@ -36,7 +38,7 @@ const variableBox = {
 interface VariableBoxProps {
   heading: string;
   children: React.ReactNode;
-  showTypes?: boolean;
+  showTypes: boolean;
 }

 export const VariableBox: React.FC<VariableBoxProps> = ({
@@ -54,7 +56,7 @@ export const VariableBox: React.FC<VariableBoxProps> = ({
       </variableBox.Component>
     );
   } else {
-    return <>{children}</>;
+    return <div>{children}</div>;
   }
 };

@@ -65,18 +67,27 @@ export interface SquiggleItemProps {
   expression: squiggleExpression;
   width?: number;
   height: number;
+  /** Whether to show a summary of statistics for distributions */
+  showSummary: boolean;
   /** Whether to show type information */
-  showTypes?: boolean;
+  showTypes: boolean;
   /** Whether to show users graph controls (scale etc) */
-  showControls?: boolean;
+  showControls: boolean;
+  /** Settings for displaying functions */
+  chartSettings: FunctionChartSettings;
+  /** Environment for further function executions */
+  environment: environment;
 }

 const SquiggleItem: React.FC<SquiggleItemProps> = ({
   expression,
   width,
   height,
+  showSummary,
   showTypes = false,
   showControls = false,
+  chartSettings,
+  environment,
 }: SquiggleItemProps) => {
   switch (expression.tag) {
     case "number":
@@ -103,6 +114,7 @@ const SquiggleItem: React.FC<SquiggleItemProps> = ({
             distribution={expression.value}
             height={height}
             width={width}
+            showSummary={showSummary}
             showControls={showControls}
           />
         </VariableBox>
@@ -136,13 +148,17 @@ const SquiggleItem: React.FC<SquiggleItemProps> = ({
     case "array":
       return (
         <VariableBox heading="Array" showTypes={showTypes}>
-          {expression.value.map((r) => (
+          {expression.value.map((r, i) => (
             <SquiggleItem
+              key={i}
               expression={r}
               width={width !== undefined ? width - 20 : width}
               height={50}
               showTypes={showTypes}
               showControls={showControls}
+              chartSettings={chartSettings}
+              environment={environment}
+              showSummary={showSummary}
             />
           ))}
         </VariableBox>
@@ -151,30 +167,38 @@ const SquiggleItem: React.FC<SquiggleItemProps> = ({
       return (
         <VariableBox heading="Record" showTypes={showTypes}>
           {Object.entries(expression.value).map(([key, r]) => (
-            <>
+            <div key={key}>
               <RecordKeyHeader>{key}</RecordKeyHeader>
               <SquiggleItem
                 expression={r}
                 width={width !== undefined ? width - 20 : width}
                 height={50}
                 showTypes={showTypes}
+                showSummary={showSummary}
                 showControls={showControls}
+                chartSettings={chartSettings}
+                environment={environment}
               />
-            </>
+            </div>
           ))}
         </VariableBox>
       );
     case "arraystring":
       return (
         <VariableBox heading="Array String" showTypes={showTypes}>
-          {expression.value.map((r) => `"${r}"`)}
+          {expression.value.map((r) => `"${r}"`).join(", ")}
         </VariableBox>
       );
     case "lambda":
       return (
-        <ErrorBox heading="No Viewer">
-          There is no viewer currently available for function types.
-        </ErrorBox>
+        <FunctionChart
+          fn={expression.value}
+          chartSettings={chartSettings}
+          environment={{
+            sampleCount: environment.sampleCount / 10,
+            xyPointLength: environment.xyPointLength / 10,
+          }}
+        />
       );
   }
 };
@@ -185,15 +209,9 @@ export interface SquiggleChartProps {
   /** If the output requires monte carlo sampling, the amount of samples */
   sampleCount?: number;
   /** The amount of points returned to draw the distribution */
-  outputXYPoints?: number;
-  kernelWidth?: number;
-  pointDistLength?: number;
-  /** If the result is a function, where the function starts */
-  diagramStart?: number;
-  /** If the result is a function, where the function ends */
-  diagramStop?: number;
-  /** If the result is a function, how many points along the function it samples */
-  diagramCount?: number;
+  environment?: environment;
+  /** If the result is a function, where the function starts, ends and the amount of stops */
+  chartSettings?: FunctionChartSettings;
   /** When the environment changes */
   onChange?(expr: squiggleExpression): void;
   /** CSS width of the element */
@@ -203,6 +221,8 @@ export interface SquiggleChartProps {
   bindings?: bindings;
   /** JS imported parameters */
   jsImports?: jsImports;
+  /** Whether to show a summary of the distirbution */
+  showSummary?: boolean;
   /** Whether to show type information about returns, default false */
   showTypes?: boolean;
   /** Whether to show graph controls (scale etc)*/
@@ -215,28 +235,23 @@ const ChartWrapper = styled.div`
     "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
 `;

+let defaultChartSettings = { start: 0, stop: 10, count: 20 };
+
 export const SquiggleChart: React.FC<SquiggleChartProps> = ({
   squiggleString = "",
-  sampleCount = 1000,
-  outputXYPoints = 1000,
+  environment,
   onChange = () => {},
   height = 60,
   bindings = defaultBindings,
   jsImports = defaultImports,
+  showSummary = false,
   width,
   showTypes = false,
   showControls = false,
+  chartSettings = defaultChartSettings,
 }: SquiggleChartProps) => {
-  let samplingInputs: samplingParams = {
-    sampleCount: sampleCount,
-    xyPointLength: outputXYPoints,
-  };
-  let expressionResult = run(
-    squiggleString,
-    bindings,
-    samplingInputs,
-    jsImports
-  );
+  let expressionResult = run(squiggleString, bindings, environment, jsImports);
+  let e = environment ? environment : defaultEnvironment;
   let internal: JSX.Element;
   if (expressionResult.tag === "Ok") {
     let expression = expressionResult.value;
@@ -246,8 +261,11 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = ({
         expression={expression}
         width={width}
         height={height}
+        showSummary={showSummary}
         showTypes={showTypes}
         showControls={showControls}
+        chartSettings={chartSettings}
+        environment={e}
       />
     );
   } else {

@@ -5,7 +5,7 @@ import { CodeEditor } from "./CodeEditor";
 import styled from "styled-components";
 import type {
   squiggleExpression,
-  samplingParams,
+  environment,
   bindings,
   jsImports,
 } from "@quri/squiggle-lang";
@@ -21,11 +21,7 @@ export interface SquiggleEditorProps {
   /** The input string for squiggle */
   initialSquiggleString?: string;
   /** If the output requires monte carlo sampling, the amount of samples */
-  sampleCount?: number;
-  /** The amount of points returned to draw the distribution */
-  outputXYPoints?: number;
-  kernelWidth?: number;
-  pointDistLength?: number;
+  environment?: environment;
   /** If the result is a function, where the function starts */
   diagramStart?: number;
   /** If the result is a function, where the function ends */
@@ -43,7 +39,9 @@ export interface SquiggleEditorProps {
   /** Whether to show detail about types of the returns, default false */
   showTypes?: boolean;
   /** Whether to give users access to graph controls */
-  showControls: boolean;
+  showControls?: boolean;
+  /** Whether to show a summary table */
+  showSummary?: boolean;
 }

 const Input = styled.div`
@@ -55,20 +53,23 @@ const Input = styled.div`
 export let SquiggleEditor: React.FC<SquiggleEditorProps> = ({
   initialSquiggleString = "",
   width,
-  sampleCount,
-  outputXYPoints,
-  kernelWidth,
-  pointDistLength,
-  diagramStart,
-  diagramStop,
-  diagramCount,
+  environment,
+  diagramStart = 0,
+  diagramStop = 10,
+  diagramCount = 20,
   onChange,
   bindings = defaultBindings,
   jsImports = defaultImports,
   showTypes = false,
   showControls = false,
+  showSummary = false,
 }: SquiggleEditorProps) => {
   let [expression, setExpression] = React.useState(initialSquiggleString);
+  let chartSettings = {
+    start: diagramStart,
+    stop: diagramStop,
+    count: diagramCount,
+  };
   return (
     <div>
       <Input>
@@ -82,19 +83,15 @@ export let SquiggleEditor: React.FC<SquiggleEditorProps> = ({
       </Input>
       <SquiggleChart
         width={width}
+        environment={environment}
         squiggleString={expression}
-        sampleCount={sampleCount}
-        outputXYPoints={outputXYPoints}
-        kernelWidth={kernelWidth}
-        pointDistLength={pointDistLength}
-        diagramStart={diagramStart}
-        diagramStop={diagramStop}
-        diagramCount={diagramCount}
+        chartSettings={chartSettings}
         onChange={onChange}
         bindings={bindings}
         jsImports={jsImports}
         showTypes={showTypes}
         showControls={showControls}
+        showSummary={showSummary}
       />
     </div>
   );
@@ -136,11 +133,7 @@ export interface SquigglePartialProps {
   /** The input string for squiggle */
   initialSquiggleString?: string;
   /** If the output requires monte carlo sampling, the amount of samples */
-  sampleCount?: number;
-  /** The amount of points returned to draw the distribution */
-  outputXYPoints?: number;
-  kernelWidth?: number;
-  pointDistLength?: number;
+  environment?: environment;
   /** If the result is a function, where the function starts */
   diagramStart?: number;
   /** If the result is a function, where the function ends */
@@ -161,14 +154,9 @@ export let SquigglePartial: React.FC<SquigglePartialProps> = ({
   initialSquiggleString = "",
   onChange,
   bindings = defaultBindings,
-  sampleCount = 1000,
-  outputXYPoints = 1000,
+  environment,
   jsImports = defaultImports,
 }: SquigglePartialProps) => {
-  let samplingInputs: samplingParams = {
-    sampleCount: sampleCount,
-    xyPointLength: outputXYPoints,
-  };
   let [expression, setExpression] = React.useState(initialSquiggleString);
   let [error, setError] = React.useState<string | null>(null);

@@ -176,7 +164,7 @@ export let SquigglePartial: React.FC<SquigglePartialProps> = ({
     let squiggleResult = runPartial(
       expression,
       bindings,
-      samplingInputs,
+      environment,
       jsImports
     );
     if (squiggleResult.tag == "Ok") {

@@ -4,6 +4,11 @@ import ReactDOM from "react-dom";
 import { SquiggleChart } from "./SquiggleChart";
 import CodeEditor from "./CodeEditor";
 import styled from "styled-components";
+import {
+  defaultBindings,
+  environment,
+  defaultImports,
+} from "@quri/squiggle-lang";

 interface FieldFloatProps {
   label: string;
@@ -40,18 +45,11 @@ function FieldFloat(Props: FieldFloatProps) {
   );
 }

-interface Props {
-  initialSquiggleString?: string;
-  height?: number;
-  showTypes?: boolean;
-  showControls?: boolean;
-}
-
-interface Props2 {
+interface ShowBoxProps {
   height: number;
 }

-const ShowBox = styled.div<Props2>`
+const ShowBox = styled.div<ShowBoxProps>`
   border: 1px solid #eee;
   border-radius: 2px;
   height: ${(props) => props.height};
@@ -76,12 +74,26 @@ const Row = styled.div`
 `;
 const Col = styled.div``;

-let SquigglePlayground: FC<Props> = ({
+interface PlaygroundProps {
+  /** The initial squiggle string to put in the playground */
+  initialSquiggleString?: string;
+  /** How many pixels high is the playground */
+  height?: number;
+  /** Whether to show the types of outputs in the playground */
+  showTypes?: boolean;
+  /** Whether to show the log scale controls in the playground */
+  showControls?: boolean;
+  /** Whether to show the summary table in the playground */
+  showSummary?: boolean;
+}
+
+let SquigglePlayground: FC<PlaygroundProps> = ({
   initialSquiggleString = "",
   height = 300,
   showTypes = false,
   showControls = false,
-}: Props) => {
+  showSummary = false,
+}: PlaygroundProps) => {
   let [squiggleString, setSquiggleString] = useState(initialSquiggleString);
   let [sampleCount, setSampleCount] = useState(1000);
   let [outputXYPoints, setOutputXYPoints] = useState(1000);
@@ -89,6 +101,15 @@ let SquigglePlayground: FC<Props> = ({
   let [diagramStart, setDiagramStart] = useState(0);
   let [diagramStop, setDiagramStop] = useState(10);
   let [diagramCount, setDiagramCount] = useState(20);
+  let chartSettings = {
+    start: diagramStart,
+    stop: diagramStop,
+    count: diagramCount,
+  };
+  let env: environment = {
+    sampleCount: sampleCount,
+    xyPointLength: outputXYPoints,
+  };
   return (
     <ShowBox height={height}>
       <Row>
@@ -105,15 +126,14 @@ let SquigglePlayground: FC<Props> = ({
           <Display maxHeight={height - 3}>
             <SquiggleChart
               squiggleString={squiggleString}
-              sampleCount={sampleCount}
-              outputXYPoints={outputXYPoints}
-              diagramStart={diagramStart}
-              diagramStop={diagramStop}
-              diagramCount={diagramCount}
-              pointDistLength={pointDistLength}
+              environment={env}
+              chartSettings={chartSettings}
               height={150}
               showTypes={showTypes}
               showControls={showControls}
+              bindings={defaultBindings}
+              jsImports={defaultImports}
+              showSummary={showSummary}
             />
           </Display>
         </Col>
@@ -122,7 +142,7 @@ let SquigglePlayground: FC<Props> = ({
   );
 };
 export default SquigglePlayground;
-export function renderSquigglePlaygroundToDom(props: Props) {
+export function renderSquigglePlaygroundToDom(props: PlaygroundProps) {
   let parent = document.createElement("div");
   ReactDOM.render(<SquigglePlayground {...props} />, parent);
   return parent;

@@ -9,3 +9,5 @@ import SquigglePlayground, {
   renderSquigglePlaygroundToDom,
 } from "./components/SquigglePlayground";
 export { SquigglePlayground, renderSquigglePlaygroundToDom };
+
+export { mergeBindings } from "@quri/squiggle-lang";

@@ -153,6 +153,20 @@ to allow large and small numbers being printed cleanly.
   </Story>
 </Canvas>

+## Functions
+
+<Canvas>
+  <Story
+    name="Function"
+    args={{
+      squiggleString: "foo(t) = normal(t,2)*normal(5,3); foo",
+      width,
+    }}
+  >
+    {Template.bind({})}
+  </Story>
+</Canvas>
+
 ## Records

 <Canvas>

@@ -48,7 +48,7 @@
         "value": 0
       },
       "fill": {
-        "signal": "{gradient: 'linear', x1: 1, y1: 1, x2: 0, y2: 1, stops: [ {offset: 0.0, color: '#4C78A8'}] }"
+        "value": "#4C78A8"
       },
       "interpolate": {
         "value": "monotone"

@@ -13,6 +13,10 @@ For instance, in a javascript project, you can
 yarn add @quri/squiggle-lang
 ```

+The `@quri/squiggle-lang` package exports a single function, `run`, which given
+a string of Squiggle code, will execute the code and return any exports and the
+environment created from the squiggle code.
+
 ```js
 import { run } from "@quri/squiggle-lang";
 run(
@@ -22,6 +26,16 @@ run(

 **However, for most use cases you'll prefer to use our [library of react components](https://www.npmjs.com/package/@quri/squiggle-components)**, and let your app transitively depend on `@quri/squiggle-lang`.

+`run` has two optional arguments. The first optional argument allows you to set
+sampling settings for Squiggle when representing distributions. The second optional
+argument allows you to pass an environment previously created by another `run`
+call. Passing this environment will mean that all previously declared variables
+in the previous environment will be made available.
+
+The return type of `run` is a bit complicated, and comes from auto generated `js`
+code that comes from rescript. We highly recommend using typescript when using
+this library to help navigate the return type.
+
 # Build for development

 We assume that you ran `yarn` at the monorepo level.

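As a quick illustration of the tagged result described above (a sketch only: the Squiggle expression is made up, and `errorValueToString` is used the same way the react components in this diff use it):

```js
import { run, errorValueToString } from "@quri/squiggle-lang";

const result = run("normal(5, 2) + uniform(0, 1)");

if (result.tag === "Ok") {
  // result.value is the evaluated squiggleExpression
  console.log(result.value);
} else {
  // result.value is an errorValue describing what went wrong
  console.log(errorValueToString(result.value));
}
```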
@@ -0,0 +1,57 @@
+open Jest
+open Expect
+open TestHelpers
+open FastCheck
+open Arbitrary
+open Property.Sync
+
+describe("dotSubtract", () => {
+  test("mean of normal minus exponential (unit)", () => {
+    let mean = 0.0
+    let rate = 10.0
+    exception MeanFailed
+    let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
+      ~env,
+      mkNormal(mean, 1.0),
+      mkExponential(rate),
+    )
+    let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
+    let meanAnalytical =
+      mean -.
+      SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
+        "On trusted input this should never happen",
+      )
+    switch meanResult {
+    | Ok(meanValue) => meanValue->expect->toBeCloseTo(meanAnalytical)
+    | Error(_) => raise(MeanFailed)
+    }
+  })
+  /*
+  It seems like this test should work, and it's plausible that
+  there's some bug in `pointwiseSubtract`
+  */
+  Skip.test("mean of normal minus exponential (property)", () => {
+    assert_(
+      property2(float_(), floatRange(1e-5, 1e5), (mean, rate) => {
+        // We limit ourselves to stdev=1 so that the integral is trivial
+        let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
+          ~env,
+          mkNormal(mean, 1.0),
+          mkExponential(rate),
+        )
+        let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
+        // according to algebra or random variables,
+        let meanAnalytical =
+          mean -.
+          SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
+            "On trusted input this should never happen",
+          )
+        switch meanResult {
+        | Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
+        | Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
+        }
+      }),
+    )
+    pass
+  })
+})

@@ -11,4 +11,15 @@ let triangularDist: DistributionTypes.genericDist = Symbolic(
 )
 let exponentialDist: DistributionTypes.genericDist = Symbolic(#Exponential({rate: 2.0}))
 let uniformDist: DistributionTypes.genericDist = Symbolic(#Uniform({low: 9.0, high: 10.0}))
+let uniformDist2: DistributionTypes.genericDist = Symbolic(#Uniform({low: 8.0, high: 11.0}))
 let floatDist: DistributionTypes.genericDist = Symbolic(#Float(1e1))
+
+exception KlFailed
+exception MixtureFailed
+let float1 = 1.0
+let float2 = 2.0
+let float3 = 3.0
+let {mkDelta} = module(TestHelpers)
+let point1 = mkDelta(float1)
+let point2 = mkDelta(float2)
+let point3 = mkDelta(float3)

@ -0,0 +1,228 @@
|
||||||
|
open Jest
|
||||||
|
open Expect
|
||||||
|
open TestHelpers
|
||||||
|
open GenericDist_Fixtures
|
||||||
|
|
||||||
|
// integral from low to high of 1 / (high - low) log(normal(mean, stdev)(x) / (1 / (high - low))) dx
|
||||||
|
let klNormalUniform = (mean, stdev, low, high): float =>
|
||||||
|
-.Js.Math.log((high -. low) /. Js.Math.sqrt(2.0 *. MagicNumbers.Math.pi *. stdev ** 2.0)) +.
|
||||||
|
1.0 /.
|
||||||
|
stdev ** 2.0 *.
|
||||||
|
(mean ** 2.0 -. (high +. low) *. mean +. (low ** 2.0 +. high *. low +. high ** 2.0) /. 3.0)
|
||||||
|
|
||||||
|
describe("klDivergence: continuous -> continuous -> float", () => {
|
||||||
|
let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
|
||||||
|
|
||||||
|
let testUniform = (lowAnswer, highAnswer, lowPrediction, highPrediction) => {
|
||||||
|
test("of two uniforms is equal to the analytic expression", () => {
|
||||||
|
let answer =
|
||||||
|
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||||
|
let prediction =
|
||||||
|
uniformMakeR(
|
||||||
|
lowPrediction,
|
||||||
|
highPrediction,
|
||||||
|
)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||||
|
// integral along the support of the answer of answer.pdf(x) times log of prediction.pdf(x) divided by answer.pdf(x) dx
|
||||||
|
let analyticalKl = Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))
|
||||||
|
let kl = E.R.liftJoin2(klDivergence, prediction, answer)
|
||||||
|
switch kl {
|
||||||
|
| Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=7)
|
||||||
|
| Error(err) => {
|
||||||
|
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||||
|
raise(KlFailed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
// The pair on the right (the answer) can be wider than the pair on the left (the prediction), but not the other way around.
|
||||||
|
testUniform(0.0, 1.0, -1.0, 2.0)
|
||||||
|
testUniform(0.0, 1.0, 0.0, 2.0) // equal left endpoints
|
||||||
|
testUniform(0.0, 1.0, -1.0, 1.0) // equal rightendpoints
|
||||||
|
testUniform(0.0, 1e1, 0.0, 1e1) // equal (klDivergence = 0)
|
||||||
|
// testUniform(-1.0, 1.0, 0.0, 2.0)
|
||||||
|
|
||||||
|
test("of two normals is equal to the formula", () => {
|
||||||
|
// This test case comes via Nuño https://github.com/quantified-uncertainty/squiggle/issues/433
|
||||||
|
let mean1 = 4.0
|
||||||
|
let mean2 = 1.0
|
||||||
|
let stdev1 = 4.0
|
||||||
|
let stdev2 = 1.0
|
||||||
|
|
||||||
|
let prediction =
|
||||||
|
normalMakeR(mean1, stdev1)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||||
|
let answer = normalMakeR(mean2, stdev2)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||||
|
// https://stats.stackexchange.com/questions/7440/kl-divergence-between-two-univariate-gaussians
|
||||||
|
let analyticalKl =
|
||||||
|
Js.Math.log(stdev1 /. stdev2) +.
|
||||||
|
(stdev2 ** 2.0 +. (mean2 -. mean1) ** 2.0) /. (2.0 *. stdev1 ** 2.0) -. 0.5
|
||||||
|
let kl = E.R.liftJoin2(klDivergence, prediction, answer)
|
||||||
|
|
||||||
|
switch kl {
|
||||||
|
| Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=3)
|
||||||
|
| Error(err) => {
|
||||||
|
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||||
|
raise(KlFailed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
test("of a normal and a uniform is equal to the formula", () => {
|
||||||
|
let prediction = normalDist10
|
||||||
|
let answer = uniformDist
|
||||||
|
let kl = klDivergence(prediction, answer)
|
||||||
|
let analyticalKl = klNormalUniform(10.0, 2.0, 9.0, 10.0)
|
||||||
|
switch kl {
|
||||||
|
| Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=1)
|
||||||
|
| Error(err) => {
|
||||||
|
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||||
|
raise(KlFailed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("klDivergence: discrete -> discrete -> float", () => {
|
||||||
|
let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
|
||||||
|
let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
|
||||||
|
let a' = [(point1, 1e0), (point2, 1e0)]->mixture->run
|
||||||
|
let b' = [(point1, 1e0), (point2, 1e0), (point3, 1e0)]->mixture->run
|
||||||
|
let (a, b) = switch (a', b') {
|
||||||
|
| (Dist(a''), Dist(b'')) => (a'', b'')
|
||||||
|
| _ => raise(MixtureFailed)
|
||||||
|
}
|
||||||
|
test("agrees with analytical answer when finite", () => {
|
||||||
|
let prediction = b
|
||||||
|
let answer = a
|
||||||
|
let kl = klDivergence(prediction, answer)
|
||||||
|
// Sigma_{i \in 1..2} 0.5 * log(0.5 / 0.33333)
|
||||||
|
let analyticalKl = Js.Math.log(3.0 /. 2.0)
|
||||||
|
switch kl {
|
||||||
|
| Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=7)
|
||||||
|
| Error(err) =>
|
||||||
|
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||||
|
raise(KlFailed)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("returns infinity when infinite", () => {
|
||||||
|
let prediction = a
|
||||||
|
let answer = b
|
||||||
|
let kl = klDivergence(prediction, answer)
|
||||||
|
switch kl {
|
||||||
|
| Ok(kl') => kl'->expect->toEqual(infinity)
|
||||||
|
| Error(err) =>
|
||||||
|
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||||
|
raise(KlFailed)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
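The discrete case above is a plain sum over the answer's support. A hedged TypeScript sketch of that sum (the types and name are illustrative):

// Sum over the answer's points of p_answer * log(p_answer / p_prediction).
function klDiscrete(prediction: Map<number, number>, answer: Map<number, number>): number {
  let total = 0;
  for (const [x, pAnswer] of answer) {
    const pPrediction = prediction.get(x) ?? 0;
    if (pPrediction === 0) return Infinity; // the "returns infinity when infinite" case
    total += pAnswer * Math.log(pAnswer / pPrediction);
  }
  return total;
}
// Two equally weighted points against three: 2 * 0.5 * Math.log(0.5 / (1 / 3)) ≈ Math.log(1.5),
// the analyticalKl in the finite test above.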
describe("klDivergence: mixed -> mixed -> float", () => {
|
||||||
|
let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
|
||||||
|
let mixture' = a => DistributionTypes.DistributionOperation.Mixture(a)
|
||||||
|
let mixture = a => {
|
||||||
|
let dist' = a->mixture'->run
|
||||||
|
switch dist' {
|
||||||
|
| Dist(dist) => dist
|
||||||
|
| _ => raise(MixtureFailed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let a = [(point1, 1.0), (uniformDist, 1.0)]->mixture
|
||||||
|
let b = [(point1, 1.0), (floatDist, 1.0), (normalDist10, 1.0)]->mixture
|
||||||
|
let c = [(point1, 1.0), (point2, 1.0), (point3, 1.0), (uniformDist, 1.0)]->mixture
|
||||||
|
let d =
|
||||||
|
[(point1, 1.0), (point2, 1.0), (point3, 1.0), (floatDist, 1.0), (uniformDist2, 1.0)]->mixture
|
||||||
|
|
||||||
|
test("finite klDivergence produces correct answer", () => {
|
||||||
|
let prediction = b
|
||||||
|
let answer = a
|
||||||
|
let kl = klDivergence(prediction, answer)
|
||||||
|
// high = 10; low = 9; mean = 10; stdev = 2
|
||||||
|
let analyticalKlContinuousPart = klNormalUniform(10.0, 2.0, 9.0, 10.0) /. 2.0
|
||||||
|
let analyticalKlDiscretePart = 1.0 /. 2.0 *. Js.Math.log(2.0 /. 1.0)
|
||||||
|
switch kl {
|
||||||
|
| Ok(kl') =>
|
||||||
|
kl'->expect->toBeSoCloseTo(analyticalKlContinuousPart +. analyticalKlDiscretePart, ~digits=1)
|
||||||
|
| Error(err) =>
|
||||||
|
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||||
|
raise(KlFailed)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("returns infinity when infinite", () => {
|
||||||
|
let prediction = a
|
||||||
|
let answer = b
|
||||||
|
let kl = klDivergence(prediction, answer)
|
||||||
|
switch kl {
|
||||||
|
| Ok(kl') => kl'->expect->toEqual(infinity)
|
||||||
|
| Error(err) =>
|
||||||
|
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||||
|
raise(KlFailed)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("finite klDivergence produces correct answer", () => {
|
||||||
|
let prediction = d
|
||||||
|
let answer = c
|
||||||
|
let kl = klDivergence(prediction, answer)
|
||||||
|
let analyticalKlContinuousPart = Js.Math.log((11.0 -. 8.0) /. (10.0 -. 9.0)) /. 4.0 // 4 = length of c' array
|
||||||
|
let analyticalKlDiscretePart = 3.0 /. 4.0 *. Js.Math.log(4.0 /. 3.0)
|
||||||
|
switch kl {
|
||||||
|
| Ok(kl') =>
|
||||||
|
kl'->expect->toBeSoCloseTo(analyticalKlContinuousPart +. analyticalKlDiscretePart, ~digits=1)
|
||||||
|
| Error(err) =>
|
||||||
|
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||||
|
raise(KlFailed)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("combineAlongSupportOfSecondArgument0", () => {
|
||||||
|
// This tests the version of the function that we're NOT using. Haven't deleted the test in case we use the code later.
|
||||||
|
test("test on two uniforms", _ => {
|
||||||
|
let combineAlongSupportOfSecondArgument = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument0
|
||||||
|
let lowAnswer = 0.0
|
||||||
|
let highAnswer = 1.0
|
||||||
|
let lowPrediction = 0.0
|
||||||
|
let highPrediction = 2.0
|
||||||
|
|
||||||
|
let answer =
|
||||||
|
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||||
|
let prediction =
|
||||||
|
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
|
||||||
|
s,
|
||||||
|
))
|
||||||
|
let answerWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), answer)
|
||||||
|
let predictionWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), prediction)
|
||||||
|
|
||||||
|
let interpolator = XYShape.XtoY.continuousInterpolator(#Stepwise, #UseZero)
|
||||||
|
let integrand = PointSetDist_Scoring.KLDivergence.integrand
|
||||||
|
|
||||||
|
let result = switch (answerWrapped, predictionWrapped) {
|
||||||
|
| (Ok(Dist(PointSet(Continuous(a)))), Ok(Dist(PointSet(Continuous(b))))) =>
|
||||||
|
Some(combineAlongSupportOfSecondArgument(integrand, interpolator, a.xyShape, b.xyShape))
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
result
|
||||||
|
->expect
|
||||||
|
->toEqual(
|
||||||
|
Some(
|
||||||
|
Ok({
|
||||||
|
xs: [
|
||||||
|
0.0,
|
||||||
|
MagicNumbers.Epsilon.ten,
|
||||||
|
2.0 *. MagicNumbers.Epsilon.ten,
|
||||||
|
1.0 -. MagicNumbers.Epsilon.ten,
|
||||||
|
1.0,
|
||||||
|
1.0 +. MagicNumbers.Epsilon.ten,
|
||||||
|
],
|
||||||
|
ys: [
|
||||||
|
-0.34657359027997264,
|
||||||
|
-0.34657359027997264,
|
||||||
|
-0.34657359027997264,
|
||||||
|
-0.34657359027997264,
|
||||||
|
-0.34657359027997264,
|
||||||
|
infinity,
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
|
@ -0,0 +1,38 @@
open Jest
open Expect
open TestHelpers

describe("Scale logarithm", () => {
/* These tests may not be important, because scalelog isn't normalized
The first one may be failing for a number of reasons.
*/
Skip.test("mean of the base e scalar logarithm of an exponential(10)", () => {
let rate = 10.0
let scalelog = DistributionOperation.Constructors.scaleLogarithm(
~env,
mkExponential(rate),
MagicNumbers.Math.e,
)

let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), scalelog)
// expected value of log of exponential distribution.
let meanAnalytical = Js.Math.log(rate) +. 1.0
switch meanResult {
| Ok(meanValue) => meanValue->expect->toBeCloseTo(meanAnalytical)
| Error(err) => err->expect->toBe(DistributionTypes.OperationError(DivisionByZeroError))
}
})
let low = 10.0
let high = 100.0
let scalelog = DistributionOperation.Constructors.scaleLogarithm(~env, mkUniform(low, high), 2.0)

test("mean of the base 2 scalar logarithm of a uniform(10, 100)", () => {
//For uniform pdf `_ => 1 / (b - a)`, the expected value of log of uniform is `integral from a to b of x * log(1 / (b -a)) dx`
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), scalelog)
let meanAnalytical = -.Js.Math.log2(high -. low) /. 2.0 *. (high ** 2.0 -. low ** 2.0) // -. Js.Math.log2(high -. low)
switch meanResult {
| Ok(meanValue) => meanValue->expect->toBeCloseTo(meanAnalytical)
| Error(err) => err->expect->toEqual(DistributionTypes.OperationError(NegativeInfinityError))
}
})
})
@ -17,6 +17,10 @@ describe("builtin", () => {
testEval("1-1", "Ok(0)")
testEval("2>1", "Ok(true)")
testEval("concat('a','b')", "Ok('ab')")
testEval(
"addOne(t)=t+1; toInternalSampleArray(mapSamples(fromSamples([1,2,3,4,5,6]), addOne))",
"Ok([2,3,4,5,6,7])",
)
})

describe("builtin exception", () => {
@ -31,6 +31,9 @@ describe("eval on distribution functions", () => {
testEval("mean(normal(5,2))", "Ok(5)")
testEval("mean(lognormal(1,2))", "Ok(20.085536923187668)")
testEval("mean(gamma(5,5))", "Ok(25)")
testEval("mean(bernoulli(0.2))", "Ok(0.2)")
testEval("mean(bernoulli(0.8))", "Ok(0.8)")
testEval("mean(logistic(5,1))", "Ok(5)")
})
describe("toString", () => {
testEval("toString(normal(5,2))", "Ok('Normal(5,2)')")
@ -1,4 +1,9 @@
import { Distribution, resultMap, defaultBindings } from "../../src/js/index";
import {
Distribution,
resultMap,
defaultBindings,
mergeBindings,
} from "../../src/js/index";
import { testRun, testRunPartial } from "./TestHelpers";

function Ok<b>(x: b) {
@ -66,6 +71,17 @@ describe("Partials", () => {
value: 10,
});
});
test("Can merge bindings from three partials", () => {
let bindings1 = testRunPartial(`x = 1`);
let bindings2 = testRunPartial(`y = 2`);
let bindings3 = testRunPartial(`z = 3`);
expect(
testRun(`x + y + z`, mergeBindings([bindings1, bindings2, bindings3]))
).toEqual({
tag: "number",
value: 6,
});
});
});

describe("JS Imports", () => {
@ -51,6 +51,7 @@ let mkExponential = rate => DistributionTypes.Symbolic(#Exponential({rate: rate}
let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low: low, high: high}))
let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local: local, scale: scale}))
let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
let mkDelta = x => DistributionTypes.Symbolic(#Float(x))

let normalMake = SymbolicDist.Normal.make
let betaMake = SymbolicDist.Beta.make
@ -60,3 +61,13 @@ let cauchyMake = SymbolicDist.Cauchy.make
let lognormalMake = SymbolicDist.Lognormal.make
let triangularMake = SymbolicDist.Triangular.make
let floatMake = SymbolicDist.Float.make

let fmapGenDist = symbdistres => E.R.fmap(s => DistributionTypes.Symbolic(s), symbdistres)
let normalMakeR = (mean, stdev) => fmapGenDist(SymbolicDist.Normal.make(mean, stdev))
let betaMakeR = (alpha, beta) => fmapGenDist(SymbolicDist.Beta.make(alpha, beta))
let exponentialMakeR = rate => fmapGenDist(SymbolicDist.Exponential.make(rate))
let uniformMakeR = (low, high) => fmapGenDist(SymbolicDist.Uniform.make(low, high))
let cauchyMakeR = (local, rate) => fmapGenDist(SymbolicDist.Cauchy.make(local, rate))
let lognormalMakeR = (mu, sigma) => fmapGenDist(SymbolicDist.Lognormal.make(mu, sigma))
let triangularMakeR = (low, mode, high) =>
fmapGenDist(SymbolicDist.Triangular.make(low, mode, high))
@ -38,19 +38,6 @@ describe("XYShapes", () => {
)
})

describe("logScorePoint", () => {
makeTest("When identical", XYShape.logScorePoint(30, pointSetDist1, pointSetDist1), Some(0.0))
makeTest(
"When similar",
XYShape.logScorePoint(30, pointSetDist1, pointSetDist2),
Some(1.658971191043856),
)
makeTest(
"When very different",
XYShape.logScorePoint(30, pointSetDist1, pointSetDist3),
Some(210.3721280423322),
)
})

describe("integrateWithTriangles", () =>
makeTest(
"integrates correctly",
@ -20,7 +20,8 @@
],
"suffix": ".bs.js",
"namespace": true,
"bs-dependencies": ["@glennsl/rescript-jest", "bisect_ppx"],
"bs-dependencies": ["bisect_ppx"],
"bs-dev-dependencies": ["@glennsl/rescript-jest", "rescript-fast-check"],
"gentypeconfig": {
"language": "typescript",
"module": "commonjs",
@ -1,6 +1,6 @@
{
"name": "@quri/squiggle-lang",
"version": "0.2.8",
"version": "0.2.9",
"homepage": "https://squiggle-language.com",
"license": "MIT",
"scripts": {
@ -10,16 +10,16 @@
"build:typescript": "tsc",
"bundle": "webpack",
"start": "rescript build -w -with-deps",
"clean": "rescript clean && rm -r dist",
"clean": "rescript clean && rm -rf dist",
"test:reducer": "jest __tests__/Reducer*/",
"benchmark": "ts-node benchmark/conversion_tests.ts",
"test": "jest",
"test:ts": "jest __tests__/TS/",
"test:rescript": "jest --modulePathIgnorePatterns=__tests__/TS/*",
"test:watch": "jest --watchAll",
"coverage:rescript": "rm -f *.coverage; yarn clean; BISECT_ENABLE=yes yarn build; yarn test:rescript; bisect-ppx-report html",
"coverage:rescript": "rm -f *.coverage && yarn clean && BISECT_ENABLE=yes yarn build && yarn test:rescript && bisect-ppx-report html",
"coverage:ts": "yarn clean; yarn build; nyc --reporter=lcov yarn test:ts",
"coverage:ts": "yarn clean && yarn build && nyc --reporter=lcov yarn test:ts",
"coverage:rescript:ci": "yarn clean; BISECT_ENABLE=yes yarn build; yarn test:rescript; bisect-ppx-report send-to Codecov",
"coverage:rescript:ci": "yarn clean && BISECT_ENABLE=yes yarn build:rescript && yarn test:rescript && bisect-ppx-report send-to Codecov",
"coverage:ts:ci": "yarn coverage:ts && codecov",
"lint:rescript": "./lint.sh",
"lint:prettier": "prettier --check .",
@ -34,34 +34,34 @@
"Rescript"
],
"author": "Quantified Uncertainty Research Institute",
"license": "MIT",
"dependencies": {
"rescript": "^9.1.4",
"@stdlib/stats": "^0.0.13",
"jstat": "^1.9.5",
"mathjs": "^10.5.2",
"pdfast": "^0.2.0",
"mathjs": "^10.5.0"
"rescript": "^9.1.4"
},
"devDependencies": {
"bisect_ppx": "^2.7.1",
"lodash": "^4.17.21",
"rescript-fast-check": "^1.1.1",
"@glennsl/rescript-jest": "^0.9.0",
"@istanbuljs/nyc-config-typescript": "^1.0.2",
"@types/jest": "^27.5.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"bisect_ppx": "^2.7.1",
"chalk": "^5.0.1",
"codecov": "^3.8.3",
"fast-check": "^2.25.0",
"gentype": "^4.3.0",
"jest": "^27.5.1",
"lodash": "^4.17.21",
"moduleserve": "^0.9.1",
"nyc": "^15.1.0",
"reanalyze": "^2.19.0",
"rescript-fast-check": "^1.1.1",
"ts-jest": "^27.1.4",
"ts-loader": "^9.3.0",
"ts-node": "^10.7.0",
"typescript": "^4.6.3",
"webpack": "^5.72.0",
"webpack": "^5.72.1",
"webpack-cli": "^4.9.2"
},
"source": "./src/js/index.ts",
@ -35,7 +35,7 @@ import {
Constructors_pointwiseSubtract,
Constructors_pointwiseLogarithm,
Constructors_pointwisePower,
} from "../rescript/Distributions/DistributionOperation/DistributionOperation.gen";
} from "../rescript/Distributions/DistributionOperation.gen";

export type point = { x: number; y: number };
@ -1,6 +1,5 @@
import * as _ from "lodash";
import {
samplingParams,
environment,
defaultEnvironment,
evaluatePartialUsingExternalBindings,
@ -8,6 +7,7 @@ import {
externalBindings,
expressionValue,
errorValue,
foreignFunctionInterface,
} from "../rescript/TypescriptInterface.gen";
export {
makeSampleSetDist,
@ -15,25 +15,31 @@ export {
distributionErrorToString,
distributionError,
} from "../rescript/TypescriptInterface.gen";
export type {
export type { errorValue, externalBindings as bindings, jsImports };
samplingParams,
errorValue,
externalBindings as bindings,
jsImports,
};
import {
jsValueToBinding,
jsValueToExpressionValue,
jsValue,
rescriptExport,
squiggleExpression,
convertRawToTypescript,
lambdaValue,
} from "./rescript_interop";
import { result, resultMap, tag, tagged } from "./types";
import { Distribution, shape } from "./distribution";

export { Distribution, squiggleExpression, result, resultMap, shape };
export {
Distribution,
squiggleExpression,
result,
resultMap,
shape,
lambdaValue,
environment,
defaultEnvironment,
};

export let defaultSamplingInputs: samplingParams = {
export let defaultSamplingInputs: environment = {
sampleCount: 10000,
xyPointLength: 10000,
};
@ -48,7 +54,7 @@ export function run(
let i = imports ? imports : defaultImports;
let e = environment ? environment : defaultEnvironment;
let res: result<expressionValue, errorValue> = evaluateUsingOptions(
{ externalBindings: mergeImports(b, i), environment: e },
{ externalBindings: mergeImportsWithBindings(b, i), environment: e },
squiggleString
);
return resultMap(res, (x) => createTsExport(x, e));
@ -67,12 +73,26 @@ export function runPartial(
return evaluatePartialUsingExternalBindings(
squiggleString,
mergeImports(b, i),
mergeImportsWithBindings(b, i),
e
);
}

function mergeImports(
export function runForeign(
fn: lambdaValue,
args: jsValue[],
environment?: environment
): result<squiggleExpression, errorValue> {
let e = environment ? environment : defaultEnvironment;
let res: result<expressionValue, errorValue> = foreignFunctionInterface(
fn,
args.map(jsValueToExpressionValue),
e
);
return resultMap(res, (x) => createTsExport(x, e));
}

function mergeImportsWithBindings(
bindings: externalBindings,
imports: jsImports
): externalBindings {
@ -90,6 +110,12 @@ type jsImports = { [key: string]: jsValue };
export let defaultImports: jsImports = {};
export let defaultBindings: externalBindings = {};

export function mergeBindings(
allBindings: externalBindings[]
): externalBindings {
return allBindings.reduce((acc, x) => ({ ...acc, ...x }));
}
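mergeBindings is a plain left-to-right object merge, so later partials win on key collisions. A minimal TypeScript sketch of the same semantics on ordinary objects (the values below are placeholders):

const mergeLike = <T extends object>(all: T[]): T =>
  all.reduce((acc, x) => ({ ...acc, ...x }));
// mergeLike([{ x: 1 }, { y: 2 }, { x: 3 }]) -> { x: 3, y: 2 }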
function createTsExport(
x: expressionValue,
environment: environment
@ -1,5 +1,6 @@
import * as _ from "lodash";
import {
expressionValue,
mixedShape,
sampleSetDist,
genericDist,
|
| tagged<"number", number>
| tagged<"record", { [key: string]: squiggleExpression }>;

export { lambdaValue };

export function convertRawToTypescript(
result: rescriptExport,
environment: environment
|
@ -168,3 +171,21 @@ export function jsValueToBinding(value: jsValue): rescriptExport {
|
||||||
return { TAG: 7, _0: _.mapValues(value, jsValueToBinding) };
|
return { TAG: 7, _0: _.mapValues(value, jsValueToBinding) };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function jsValueToExpressionValue(value: jsValue): expressionValue {
|
||||||
|
if (typeof value === "boolean") {
|
||||||
|
return { tag: "EvBool", value: value as boolean };
|
||||||
|
} else if (typeof value === "string") {
|
||||||
|
return { tag: "EvString", value: value as string };
|
||||||
|
} else if (typeof value === "number") {
|
||||||
|
return { tag: "EvNumber", value: value as number };
|
||||||
|
} else if (Array.isArray(value)) {
|
||||||
|
return { tag: "EvArray", value: value.map(jsValueToExpressionValue) };
|
||||||
|
} else {
|
||||||
|
// Record
|
||||||
|
return {
|
||||||
|
tag: "EvRecord",
|
||||||
|
value: _.mapValues(value, jsValueToExpressionValue),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
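A few illustrative inputs and the tagged values jsValueToExpressionValue produces for them, following the branches above:

// jsValueToExpressionValue(3)          -> { tag: "EvNumber", value: 3 }
// jsValueToExpressionValue("ab")       -> { tag: "EvString", value: "ab" }
// jsValueToExpressionValue([true, 1])  -> { tag: "EvArray", value: [{ tag: "EvBool", value: true }, { tag: "EvNumber", value: 1 }] }
// jsValueToExpressionValue({ p: 0.5 }) -> { tag: "EvRecord", value: { p: { tag: "EvNumber", value: 0.5 } } }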
@ -10,14 +10,15 @@ type env = {
}

let defaultEnv = {
sampleCount: 10000,
sampleCount: MagicNumbers.Environment.defaultSampleCount,
xyPointLength: 10000,
xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
}

type outputType =
| Dist(genericDist)
| Float(float)
| String(string)
| FloatArray(array<float>)
| Bool(bool)
| GenDistError(error)
@ -128,7 +129,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
let fromDistFn = (
subFnName: DistributionTypes.DistributionOperation.fromDist,
dist: genericDist,
) => {
): outputType => {
let response = switch subFnName {
| ToFloat(distToFloatOperation) =>
GenericDist.toFloatOperation(dist, ~toPointSetFn, ~distToFloatOperation)
|
Dist(dist)
}
| ToDist(Normalize) => dist->GenericDist.normalize->Dist
| ToScore(KLDivergence(t2)) =>
GenericDist.Score.klDivergence(dist, t2, ~toPointSetFn)
->E.R2.fmap(r => Float(r))
->OutputLocal.fromResult
| ToScore(LogScore(answer, prior)) =>
GenericDist.Score.logScoreWithPointResolution(
~prediction=dist,
~answer,
~prior,
~toPointSetFn,
)
->E.R2.fmap(r => Float(r))
->OutputLocal.fromResult
| ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
| ToDist(Truncate(leftCutoff, rightCutoff)) =>
GenericDist.truncate(~toPointSetFn, ~leftCutoff, ~rightCutoff, dist, ())
|
->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
->E.R2.fmap(r => Dist(PointSet(r)))
->OutputLocal.fromResult
| ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
dist
->GenericDist.pointwiseCombinationFloat(
~toPointSetFn,
~algebraicCombination=#LogarithmWithThreshold(eps),
~f,
)
->E.R2.fmap(r => Dist(r))
->OutputLocal.fromResult
| ToDist(Scale(#Logarithm, f)) =>
dist
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Logarithm, ~f)
@ -248,6 +271,13 @@ module Constructors = {
let pdf = (~env, dist, f) => C.pdf(dist, f)->run(~env)->toFloatR
let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
let klDivergence = (~env, dist1, dist2) => C.klDivergence(dist1, dist2)->run(~env)->toFloatR
let logScoreWithPointResolution = (
~env,
~prediction: DistributionTypes.genericDist,
~answer: float,
~prior: option<DistributionTypes.genericDist>,
) => C.logScoreWithPointResolution(~prediction, ~answer, ~prior)->run(~env)->toFloatR
let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
let fromSamples = (~env, xs) => C.fromSamples(xs)->run(~env)->toDistR
@ -266,6 +296,8 @@ module Constructors = {
let algebraicLogarithm = (~env, dist1, dist2) =>
C.algebraicLogarithm(dist1, dist2)->run(~env)->toDistR
let algebraicPower = (~env, dist1, dist2) => C.algebraicPower(dist1, dist2)->run(~env)->toDistR
let scalePower = (~env, dist, n) => C.scalePower(dist, n)->run(~env)->toDistR
let scaleLogarithm = (~env, dist, n) => C.scaleLogarithm(dist, n)->run(~env)->toDistR
let pointwiseAdd = (~env, dist1, dist2) => C.pointwiseAdd(dist1, dist2)->run(~env)->toDistR
let pointwiseMultiply = (~env, dist1, dist2) =>
C.pointwiseMultiply(dist1, dist2)->run(~env)->toDistR
@ -14,6 +14,7 @@ type outputType =
| Dist(genericDist)
| Float(float)
| String(string)
| FloatArray(array<float>)
| Bool(bool)
| GenDistError(error)
@ -60,6 +61,15 @@ module Constructors: {
@genType
let isNormalized: (~env: env, genericDist) => result<bool, error>
@genType
let klDivergence: (~env: env, genericDist, genericDist) => result<float, error>
@genType
let logScoreWithPointResolution: (
~env: env,
~prediction: genericDist,
~answer: float,
~prior: option<genericDist>,
) => result<float, error>
@genType
let toPointSet: (~env: env, genericDist) => result<genericDist, error>
@genType
let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
@ -86,6 +96,10 @@ module Constructors: {
@genType
let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let scaleLogarithm: (~env: env, genericDist, float) => result<genericDist, error>
@genType
let scalePower: (~env: env, genericDist, float) => result<genericDist, error>
@genType
let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
@ -37,6 +37,7 @@ module Error = {
| LogarithmOfDistributionError(s) => `Logarithm of input error: ${s}`
| SampleSetError(TooFewSamples) => "Too Few Samples"
| SampleSetError(NonNumericInput(err)) => `Found a non-number in input: ${err}`
| SampleSetError(OperationError(err)) => Operation.Error.toString(err)
| OperationError(err) => Operation.Error.toString(err)
| PointSetConversionError(err) => SampleSetDist.pointsetConversionErrorToString(err)
| SparklineError(err) => PointSetTypes.sparklineErrorToString(err)
@ -72,6 +73,7 @@ module DistributionOperation = {
type toScaleFn = [
| #Power
| #Logarithm
| #LogarithmWithThreshold(float)
]

type toDist =
@ -90,9 +92,12 @@ module DistributionOperation = {
| ToString
| ToSparkline(int)

type toScore = KLDivergence(genericDist) | LogScore(float, option<genericDist>)

type fromDist =
| ToFloat(toFloat)
| ToDist(toDist)
| ToScore(toScore)
| ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
| ToString(toString)
| ToBool(toBool)
@ -115,6 +120,8 @@ module DistributionOperation = {
| ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
| ToFloat(#Sample) => `sample`
| ToFloat(#IntegralSum) => `integralSum`
| ToScore(KLDivergence(_)) => `klDivergence`
| ToScore(LogScore(x, _)) => `logScore against ${E.Float.toFixed(x)}`
| ToDist(Normalize) => `normalize`
| ToDist(ToPointSet) => `toPointSet`
| ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
@ -122,6 +129,8 @@ module DistributionOperation = {
| ToDist(Inspect) => `inspect`
| ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
| ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
| ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
`scaleLogWithThreshold(${E.Float.toFixed(r)}, epsilon=${E.Float.toFixed(eps)})`
| ToString(ToString) => `toString`
| ToString(ToSparkline(n)) => `toSparkline(${E.I.toString(n)})`
| ToBool(IsNormalized) => `isNormalized`
@ -153,8 +162,17 @@ module Constructors = {
let fromSamples = (xs): t => FromSamples(xs)
let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
let inspect = (dist): t => FromDist(ToDist(Inspect), dist)
let klDivergence = (dist1, dist2): t => FromDist(ToScore(KLDivergence(dist2)), dist1)
let logScoreWithPointResolution = (~prediction, ~answer, ~prior): t => FromDist(
ToScore(LogScore(answer, prior)),
prediction,
)
let scalePower = (dist, n): t => FromDist(ToDist(Scale(#Power, n)), dist)
let scaleLogarithm = (dist, n): t => FromDist(ToDist(Scale(#Logarithm, n)), dist)
let scaleLogarithmWithThreshold = (dist, n, eps): t => FromDist(
ToDist(Scale(#LogarithmWithThreshold(eps), n)),
dist,
)
let toString = (dist): t => FromDist(ToString(ToString), dist)
let toSparkline = (dist, n): t => FromDist(ToString(ToSparkline(n)), dist)
let algebraicAdd = (dist1, dist2: genericDist): t => FromDist(
@ -59,6 +59,46 @@ let integralEndY = (t: t): float =>
let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7

module Score = {
let klDivergence = (prediction, answer, ~toPointSetFn: toPointSetFn): result<float, error> => {
let pointSets = E.R.merge(toPointSetFn(prediction), toPointSetFn(answer))
pointSets |> E.R2.bind(((predi, ans)) =>
PointSetDist.T.klDivergence(predi, ans)->E.R2.errMap(x => DistributionTypes.OperationError(x))
)
}

let logScoreWithPointResolution = (
~prediction: DistributionTypes.genericDist,
~answer: float,
~prior: option<DistributionTypes.genericDist>,
~toPointSetFn: toPointSetFn,
): result<float, error> => {
switch prior {
| Some(prior') =>
E.R.merge(toPointSetFn(prior'), toPointSetFn(prediction))->E.R.bind(((
prior'',
prediction'',
)) =>
PointSetDist.T.logScoreWithPointResolution(
~prediction=prediction'',
~answer,
~prior=prior''->Some,
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
)
| None =>
prediction
->toPointSetFn
->E.R.bind(x =>
PointSetDist.T.logScoreWithPointResolution(
~prediction=x,
~answer,
~prior=None,
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
)
}
}
}

let toFloatOperation = (
t,
~toPointSetFn: toPointSetFn,
@ -384,14 +424,12 @@ let pointwiseCombinationFloat = (
~algebraicCombination: Operation.algebraicOperation,
~f: float,
): result<t, error> => {
let m = switch algebraicCombination {
let executeCombination = arithOp =>
| #Add | #Subtract => Error(DistributionTypes.DistributionVerticalShiftIsInvalid)
| (#Multiply | #Divide | #Power | #Logarithm) as arithmeticOperation =>
toPointSetFn(t)->E.R.bind(t => {
//TODO: Move to PointSet codebase
let fn = (secondary, main) => Operation.Scale.toFn(arithmeticOperation, main, secondary)
let fn = (secondary, main) => Operation.Scale.toFn(arithOp, main, secondary)
let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(arithmeticOperation)
let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(arithOp)
let integralCacheFn = Operation.Scale.toIntegralCacheFn(arithmeticOperation)
let integralCacheFn = Operation.Scale.toIntegralCacheFn(arithOp)
PointSetDist.T.mapYResult(
~integralSumCacheFn=integralSumCacheFn(f),
~integralCacheFn=integralCacheFn(f),
@ -399,6 +437,11 @@ let pointwiseCombinationFloat = (
t,
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
})
let m = switch algebraicCombination {
| #Add | #Subtract => Error(DistributionTypes.DistributionVerticalShiftIsInvalid)
| (#Multiply | #Divide | #Power | #Logarithm) as arithmeticOperation =>
executeCombination(arithmeticOperation)
| #LogarithmWithThreshold(eps) => executeCombination(#LogarithmWithThreshold(eps))
}
m->E.R2.fmap(r => DistributionTypes.PointSet(r))
}
@ -23,6 +23,16 @@ let toFloatOperation: (
~distToFloatOperation: DistributionTypes.DistributionOperation.toFloat,
) => result<float, error>

module Score: {
let klDivergence: (t, t, ~toPointSetFn: toPointSetFn) => result<float, error>
let logScoreWithPointResolution: (
~prediction: t,
~answer: float,
~prior: option<t>,
~toPointSetFn: toPointSetFn,
) => result<float, error>
}

@genType
let toPointSet: (
t,
@ -86,6 +86,7 @@ let stepwiseToLinear = (t: t): t =>
// Note: This results in a distribution with as many points as the sum of those in t1 and t2.
let combinePointwise = (
~combiner=XYShape.PointwiseCombination.combine,
~integralSumCachesFn=(_, _) => None,
~distributionType: PointSetTypes.distributionType=#PDF,
fn: (float, float) => result<float, Operation.Error.t>,
@ -119,7 +120,7 @@ let combinePointwise = (
let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation)

XYShape.PointwiseCombination.combine(fn, interpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
combiner(fn, interpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
make(~integralSumCache=combinedIntegralSum, x)
)
}
@ -269,11 +270,26 @@ module T = Dist({
}
let variance = (t: t): float =>
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)

let klDivergence = (prediction: t, answer: t) => {
let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
PointSetDist_Scoring.KLDivergence.integrand,
prediction.xyShape,
answer.xyShape,
)
newShape->E.R2.fmap(x => x->make->integralEndY)
}
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)
}
})

let isNormalized = (t: t): bool => {
let areaUnderIntegral = t |> updateIntegralCache(Some(T.integral(t))) |> T.integralEndY
areaUnderIntegral < 1. +. 1e-7 && areaUnderIntegral > 1. -. 1e-7
areaUnderIntegral < 1. +. MagicNumbers.Epsilon.seven &&
areaUnderIntegral > 1. -. MagicNumbers.Epsilon.seven
}

let downsampleEquallyOverX = (length, t): t =>
@ -33,29 +33,22 @@ let shapeFn = (fn, t: t) => t |> getShape |> fn
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY

let combinePointwise = (
~combiner=XYShape.PointwiseCombination.combine,
~integralSumCachesFn=(_, _) => None,
fn,
~fn=(a, b) => Ok(a +. b),
t1: PointSetTypes.discreteShape,
t2: PointSetTypes.discreteShape,
): result<PointSetTypes.discreteShape, 'e> => {
let combinedIntegralSum = Common.combineIntegralSums(
// let combinedIntegralSum = Common.combineIntegralSums(
integralSumCachesFn,
// integralSumCachesFn,
t1.integralSumCache,
// t1.integralSumCache,
t2.integralSumCache,
// t2.integralSumCache,
)
// )

// TODO: does it ever make sense to pointwise combine the integrals here?
// It could be done for pointwise additions, but is that ever needed?

make(
combiner(fn, XYShape.XtoY.discreteInterpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
~integralSumCache=combinedIntegralSum,
XYShape.PointwiseCombination.combine(
fn,
XYShape.XtoY.discreteInterpolator,
t1.xyShape,
t2.xyShape,
)->E.R.toExn("Addition operation should never fail", _),
)->Ok
}

let reduce = (
@ -63,7 +56,7 @@ let reduce = (
fn: (float, float) => result<float, 'e>,
discreteShapes: array<PointSetTypes.discreteShape>,
): result<t, 'e> => {
let merge = combinePointwise(~integralSumCachesFn, fn)
let merge = combinePointwise(~integralSumCachesFn, ~fn)
discreteShapes |> E.A.R.foldM(merge, empty)
}
@ -228,4 +221,15 @@ module T = Dist({
let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
}

let klDivergence = (prediction: t, answer: t) => {
combinePointwise(
~fn=PointSetDist_Scoring.KLDivergence.integrand,
prediction,
answer,
)->E.R2.fmap(integralEndY)
}
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
Error(Operation.NotYetImplemented)
}
})
@ -33,6 +33,12 @@ module type dist = {
let mean: t => float
let variance: t => float
let klDivergence: (t, t) => result<float, Operation.Error.t>
let logScoreWithPointResolution: (
~prediction: t,
~answer: float,
~prior: option<t>,
) => result<float, Operation.Error.t>
}

module Dist = (T: dist) => {
@ -55,6 +61,8 @@ module Dist = (T: dist) => {
let mean = T.mean
let variance = T.variance
let integralEndY = T.integralEndY
let klDivergence = T.klDivergence
let logScoreWithPointResolution = T.logScoreWithPointResolution

let updateIntegralCache = T.updateIntegralCache
@ -36,6 +36,47 @@ let updateIntegralCache = (integralCache, t: t): t => {
integralCache: integralCache,
}

let combinePointwise = (
~integralSumCachesFn=(_, _) => None,
~integralCachesFn=(_, _) => None,
fn: (float, float) => result<float, 'e>,
t1: t,
t2: t,
): result<t, 'e> => {
let reducedDiscrete =
[t1, t2]
|> E.A.fmap(toDiscrete)
|> E.A.O.concatSomes
|> Discrete.reduce(~integralSumCachesFn, fn)
|> E.R.toExn("Theoretically unreachable state")

let reducedContinuous =
[t1, t2]
|> E.A.fmap(toContinuous)
|> E.A.O.concatSomes
|> Continuous.reduce(~integralSumCachesFn, fn)

let combinedIntegralSum = Common.combineIntegralSums(
integralSumCachesFn,
t1.integralSumCache,
t2.integralSumCache,
)

let combinedIntegral = Common.combineIntegrals(
integralCachesFn,
t1.integralCache,
t2.integralCache,
)
reducedContinuous->E.R2.fmap(continuous =>
make(
~integralSumCache=combinedIntegralSum,
~integralCache=combinedIntegral,
~discrete=reducedDiscrete,
~continuous,
)
)
}

module T = Dist({
type t = PointSetTypes.mixedShape
type integral = PointSetTypes.continuousShape
@ -259,6 +300,15 @@ module T = Dist({
| _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
}
}

let klDivergence = (prediction: t, answer: t) => {
let klDiscretePart = Discrete.T.klDivergence(prediction.discrete, answer.discrete)
let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
}
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
Error(Operation.NotYetImplemented)
}
})

let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
@ -86,7 +86,7 @@ let combinePointwise = (
| (Discrete(m1), Discrete(m2)) =>
Discrete.combinePointwise(
~integralSumCachesFn,
fn,
~fn,
m1,
m2,
)->E.R2.fmap(x => PointSetTypes.Discrete(x))
@ -195,6 +195,23 @@ module T = Dist({
| Discrete(m) => Discrete.T.variance(m)
| Continuous(m) => Continuous.T.variance(m)
}

let klDivergence = (prediction: t, answer: t) =>
switch (prediction, answer) {
| (Continuous(t1), Continuous(t2)) => Continuous.T.klDivergence(t1, t2)
| (Discrete(t1), Discrete(t2)) => Discrete.T.klDivergence(t1, t2)
| (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
}

let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
switch (prior, prediction) {
| (Some(Continuous(t1)), Continuous(t2)) =>
Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=t1->Some)
| (None, Continuous(t2)) =>
Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=None)
| _ => Error(Operation.NotYetImplemented)
}
}
})

let pdf = (f: float, t: t) => {
@ -0,0 +1,46 @@
module KLDivergence = {
let logFn = Js.Math.log // base e
let integrand = (predictionElement: float, answerElement: float): result<
float,
Operation.Error.t,
> =>
// We decided that negative infinity, not an error at answerElement = 0.0, is a desirable value.
if answerElement == 0.0 {
Ok(0.0)
} else if predictionElement == 0.0 {
Ok(infinity)
} else {
let quot = predictionElement /. answerElement
quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answerElement *. logFn(quot))
}
}

module LogScoreWithPointResolution = {
let logFn = Js.Math.log
let score = (
~priorPdf: option<float => float>,
~predictionPdf: float => float,
~answer: float,
): result<float, Operation.Error.t> => {
let numerator = answer->predictionPdf
if numerator < 0.0 {
Operation.PdfInvalidError->Error
} else if numerator == 0.0 {
infinity->Ok
} else {
-.(
switch priorPdf {
| None => numerator->logFn
| Some(f) => {
let priorDensityOfAnswer = f(answer)
if priorDensityOfAnswer == 0.0 {
neg_infinity
} else {
(numerator /. priorDensityOfAnswer)->logFn
}
}
}
)->Ok
}
}
}
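A hedged TypeScript restatement of KLDivergence.integrand above, convenient for checking the edge cases by hand (the Result-typed errors are simplified to a thrown exception here):

function klIntegrand(predictionElement: number, answerElement: number): number {
  if (answerElement === 0) return 0;            // no answer mass -> no contribution
  if (predictionElement === 0) return Infinity; // answer mass where the prediction has none
  const quot = predictionElement / answerElement;
  if (quot < 0) throw new Error("ComplexNumberError: negative density ratio");
  return -answerElement * Math.log(quot);
}
// Integrating or summing klIntegrand over the answer's support gives the klDivergence values
// asserted in the tests earlier in this diff.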
@ -1,12 +1,14 @@
@genType
module Error = {
@genType
type sampleSetError = TooFewSamples | NonNumericInput(string)
type sampleSetError =
TooFewSamples | NonNumericInput(string) | OperationError(Operation.operationError)

let sampleSetErrorToString = (err: sampleSetError): string =>
switch err {
| TooFewSamples => "Too few samples when constructing sample set"
| NonNumericInput(err) => `Found a non-number in input: ${err}`
| OperationError(err) => Operation.Error.toString(err)
}

@genType
@ -16,6 +18,8 @@ module Error = {
switch err {
| TooFewSamplesForConversionToPointSet => "Too Few Samples to convert to point set"
}

let fromOperationError = e => OperationError(e)
}

include Error
@ -83,6 +87,14 @@ let sampleN = (t: t, n) => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let samplesMap = (~fn: float => result<float, Operation.Error.t>, t: t): result<
|
||||||
|
t,
|
||||||
|
sampleSetError,
|
||||||
|
> => {
|
||||||
|
let samples = T.get(t)->E.A2.fmap(fn)
|
||||||
|
E.A.R.firstErrorOrOpen(samples)->E.R2.errMap(Error.fromOperationError) |> E.R2.bind(make)
|
||||||
|
}
|
||||||
|
|
||||||
//TODO: Figure out what to do if distributions are different lengths. ``zip`` is kind of inelegant for this.
|
//TODO: Figure out what to do if distributions are different lengths. ``zip`` is kind of inelegant for this.
|
||||||
let map2 = (~fn: (float, float) => result<float, Operation.Error.t>, ~t1: t, ~t2: t): result<
|
let map2 = (~fn: (float, float) => result<float, Operation.Error.t>, ~t1: t, ~t2: t): result<
|
||||||
t,
|
t,
|
||||||
|
@ -96,7 +108,7 @@ let map2 = (~fn: (float, float) => result<float, Operation.Error.t>, ~t1: t, ~t2
|
||||||
// I could prove this to the type system (say, creating a {first: float, second: float, ..., fifth: float, rest: array<float>}
|
// I could prove this to the type system (say, creating a {first: float, second: float, ..., fifth: float, rest: array<float>}
|
||||||
// But doing so would take too much time, so I'll leave it as an assertion
|
// But doing so would take too much time, so I'll leave it as an assertion
|
||||||
E.A.R.firstErrorOrOpen(samples)->E.R2.fmap(x =>
|
E.A.R.firstErrorOrOpen(samples)->E.R2.fmap(x =>
|
||||||
E.R.toExn("Input of samples should be larger than 5", make(x))
|
E.R.toExnFnString(Error.sampleSetErrorToString, make(x))
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
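Since `samplesMap` and `map2` both lean on `E.A.R.firstErrorOrOpen`, here is a minimal, self-contained sketch of that collapse-to-first-error pattern, written directly against Belt; the helper name and the `safeLog` example are illustrative, not part of the `E` module itself:

// Sketch only: collapse array<result<'a, 'e>> into result<array<'a>, 'e>,
// keeping the first error encountered. This mirrors what E.A.R.firstErrorOrOpen
// is used for above, but is written against Belt directly.
let firstErrorOrOpen = (xs: array<result<'a, 'e>>): result<array<'a>, 'e> =>
  xs->Belt.Array.reduce(Ok([]), (acc, x) =>
    switch (acc, x) {
    | (Ok(arr), Ok(v)) => Ok(Belt.Array.concat(arr, [v]))
    | (Error(e), _) => Error(e)
    | (_, Error(e)) => Error(e)
    }
  )

// Example: mapping a fallible per-sample function, as samplesMap does.
let safeLog = (x: float) => x > 0.0 ? Ok(Js.Math.log(x)) : Error("log of non-positive sample")
let mapped = [1.0, 2.7, 3.1]->Belt.Array.map(safeLog)->firstErrorOrOpen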
@@ -216,6 +216,50 @@ module Uniform = {
   }
 }

+module Logistic = {
+  type t = logistic
+  let make = (location, scale) =>
+    scale > 0.0
+      ? Ok(#Logistic({location: location, scale: scale}))
+      : Error("Scale must be positive")
+
+  let pdf = (x, t: t) => Stdlib.Logistic.pdf(x, t.location, t.scale)
+  let cdf = (x, t: t) => Stdlib.Logistic.cdf(x, t.location, t.scale)
+  let inv = (p, t: t) => Stdlib.Logistic.quantile(p, t.location, t.scale)
+  let sample = (t: t) => {
+    let s = Uniform.sample({low: 0.0, high: 1.0})
+    inv(s, t)
+  }
+  let mean = (t: t) => Ok(Stdlib.Logistic.mean(t.location, t.scale))
+  let toString = ({location, scale}: t) => j`Logistic($location,$scale)`
+}
+
+module Bernoulli = {
+  type t = bernoulli
+  let make = p =>
+    p >= 0.0 && p <= 1.0
+      ? Ok(#Bernoulli({p: p}))
+      : Error("Bernoulli parameter must be between 0 and 1")
+  let pmf = (x, t: t) => Stdlib.Bernoulli.pmf(x, t.p)
+
+  //Bernoulli is a discrete distribution, so it doesn't really have a pdf().
+  //We fake this for now with the pmf function, but this should be fixed at some point.
+  let pdf = (x, t: t) => Stdlib.Bernoulli.pmf(x, t.p)
+  let cdf = (x, t: t) => Stdlib.Bernoulli.cdf(x, t.p)
+  let inv = (p, t: t) => Stdlib.Bernoulli.quantile(p, t.p)
+  let mean = (t: t) => Ok(Stdlib.Bernoulli.mean(t.p))
+  let min = (t: t) => t.p == 1.0 ? 1.0 : 0.0
+  let max = (t: t) => t.p == 0.0 ? 0.0 : 1.0
+  let sample = (t: t) => {
+    let s = Uniform.sample({low: 0.0, high: 1.0})
+    inv(s, t)
+  }
+  let toString = ({p}: t) => j`Bernoulli($p)`
+  let toPointSetDist = ({p}: t): PointSetTypes.pointSetDist => Discrete(
+    Discrete.make(~integralSumCache=Some(1.0), {xs: [0.0, 1.0], ys: [1.0 -. p, p]}),
+  )
+}
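Both `sample` implementations above are plain inverse-transform sampling: draw u from Uniform(0, 1) and push it through the quantile. A small standalone sketch, using the textbook logistic quantile as a stand-in for the `Stdlib.Logistic.quantile` binding (not the binding itself):

// Inverse-transform sampling sketch: u ~ Uniform(0, 1), then apply the inverse CDF.
let logisticQuantile = (p: float, location: float, scale: float): float =>
  location +. scale *. Js.Math.log(p /. (1.0 -. p))

let sampleLogistic = (location, scale) => {
  let u = Js.Math.random() // plays the role of Uniform.sample({low: 0.0, high: 1.0})
  logisticQuantile(u, location, scale)
}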
 module Gamma = {
   type t = gamma
   let make = (shape: float, scale: float) => {
@@ -252,6 +296,9 @@ module Float = {
   let mean = (t: t) => Ok(t)
   let sample = (t: t) => t
   let toString = (t: t) => j`Delta($t)`
+  let toPointSetDist = (t: t): PointSetTypes.pointSetDist => Discrete(
+    Discrete.make(~integralSumCache=Some(1.0), {xs: [t], ys: [1.0]}),
+  )
 }

 module From90thPercentile = {
@@ -275,9 +322,11 @@ module T = {
     | #Cauchy(n) => Cauchy.pdf(x, n)
     | #Gamma(n) => Gamma.pdf(x, n)
     | #Lognormal(n) => Lognormal.pdf(x, n)
+    | #Logistic(n) => Logistic.pdf(x, n)
     | #Uniform(n) => Uniform.pdf(x, n)
     | #Beta(n) => Beta.pdf(x, n)
     | #Float(n) => Float.pdf(x, n)
+    | #Bernoulli(n) => Bernoulli.pdf(x, n)
     }

   let cdf = (x, dist) =>
@@ -287,10 +336,12 @@ module T = {
     | #Exponential(n) => Exponential.cdf(x, n)
     | #Cauchy(n) => Cauchy.cdf(x, n)
     | #Gamma(n) => Gamma.cdf(x, n)
+    | #Logistic(n) => Logistic.cdf(x, n)
     | #Lognormal(n) => Lognormal.cdf(x, n)
     | #Uniform(n) => Uniform.cdf(x, n)
     | #Beta(n) => Beta.cdf(x, n)
     | #Float(n) => Float.cdf(x, n)
+    | #Bernoulli(n) => Bernoulli.cdf(x, n)
     }

   let inv = (x, dist) =>
@@ -300,10 +351,12 @@ module T = {
     | #Exponential(n) => Exponential.inv(x, n)
     | #Cauchy(n) => Cauchy.inv(x, n)
     | #Gamma(n) => Gamma.inv(x, n)
+    | #Logistic(n) => Logistic.inv(x, n)
     | #Lognormal(n) => Lognormal.inv(x, n)
     | #Uniform(n) => Uniform.inv(x, n)
     | #Beta(n) => Beta.inv(x, n)
     | #Float(n) => Float.inv(x, n)
+    | #Bernoulli(n) => Bernoulli.inv(x, n)
     }

   let sample: symbolicDist => float = x =>
@@ -313,10 +366,12 @@ module T = {
     | #Exponential(n) => Exponential.sample(n)
     | #Cauchy(n) => Cauchy.sample(n)
     | #Gamma(n) => Gamma.sample(n)
+    | #Logistic(n) => Logistic.sample(n)
     | #Lognormal(n) => Lognormal.sample(n)
     | #Uniform(n) => Uniform.sample(n)
     | #Beta(n) => Beta.sample(n)
     | #Float(n) => Float.sample(n)
+    | #Bernoulli(n) => Bernoulli.sample(n)
     }

   let doN = (n, fn) => {
@@ -336,10 +391,12 @@ module T = {
     | #Cauchy(n) => Cauchy.toString(n)
     | #Normal(n) => Normal.toString(n)
     | #Gamma(n) => Gamma.toString(n)
+    | #Logistic(n) => Logistic.toString(n)
     | #Lognormal(n) => Lognormal.toString(n)
     | #Uniform(n) => Uniform.toString(n)
     | #Beta(n) => Beta.toString(n)
     | #Float(n) => Float.toString(n)
+    | #Bernoulli(n) => Bernoulli.toString(n)
     }

   let min: symbolicDist => float = x =>
@@ -349,8 +406,10 @@ module T = {
     | #Cauchy(n) => Cauchy.inv(minCdfValue, n)
     | #Normal(n) => Normal.inv(minCdfValue, n)
     | #Lognormal(n) => Lognormal.inv(minCdfValue, n)
+    | #Logistic(n) => Logistic.inv(minCdfValue, n)
     | #Gamma(n) => Gamma.inv(minCdfValue, n)
     | #Uniform({low}) => low
+    | #Bernoulli(n) => Bernoulli.min(n)
     | #Beta(n) => Beta.inv(minCdfValue, n)
     | #Float(n) => n
     }
@@ -363,7 +422,9 @@ module T = {
     | #Normal(n) => Normal.inv(maxCdfValue, n)
     | #Gamma(n) => Gamma.inv(maxCdfValue, n)
     | #Lognormal(n) => Lognormal.inv(maxCdfValue, n)
+    | #Logistic(n) => Logistic.inv(maxCdfValue, n)
     | #Beta(n) => Beta.inv(maxCdfValue, n)
+    | #Bernoulli(n) => Bernoulli.max(n)
     | #Uniform({high}) => high
     | #Float(n) => n
     }
@@ -376,8 +437,10 @@ module T = {
     | #Normal(n) => Normal.mean(n)
     | #Lognormal(n) => Lognormal.mean(n)
     | #Beta(n) => Beta.mean(n)
+    | #Logistic(n) => Logistic.mean(n)
     | #Uniform(n) => Uniform.mean(n)
     | #Gamma(n) => Gamma.mean(n)
+    | #Bernoulli(n) => Bernoulli.mean(n)
     | #Float(n) => Float.mean(n)
     }
@@ -396,8 +459,9 @@ module T = {
     | (#ByWeight, #Uniform(n)) =>
       // In `ByWeight mode, uniform distributions get special treatment because we need two x's
       // on either side for proper rendering (just left and right of the discontinuities).
-      let dx = 0.00001 *. (n.high -. n.low)
-      [n.low -. dx, n.low +. dx, n.high -. dx, n.high +. dx]
+      let distance = n.high -. n.low
+      let dx = MagicNumbers.Epsilon.ten *. distance
+      [n.low -. dx, n.low, n.low +. dx, n.high -. dx, n.high, n.high +. dx]
     | (#ByWeight, _) =>
       let ys = E.A.Floats.range(minCdfValue, maxCdfValue, n)
       ys |> E.A.fmap(y => inv(y, dist))
@@ -452,7 +516,8 @@ module T = {
     d: symbolicDist,
   ): PointSetTypes.pointSetDist =>
     switch d {
-    | #Float(v) => Discrete(Discrete.make(~integralSumCache=Some(1.0), {xs: [v], ys: [1.0]}))
+    | #Float(v) => Float.toPointSetDist(v)
+    | #Bernoulli(v) => Bernoulli.toPointSetDist(v)
     | _ =>
       let xs = interpolateXs(~xSelection, d, sampleCount)
       let ys = xs |> E.A.fmap(x => pdf(x, d))
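To make the `#ByWeight` change concrete, this is roughly the x grid now produced for a uniform distribution, assuming `MagicNumbers.Epsilon.ten` is the 1e-10 defined in the MagicNumbers hunk below; illustrative only, not part of the diff:

// Illustrative: the six x's now produced for #Uniform({low, high}) in #ByWeight mode.
let uniformByWeightXs = (low: float, high: float): array<float> => {
  let epsilonTen = 1e-10 // stands in for MagicNumbers.Epsilon.ten
  let dx = epsilonTen *. (high -. low)
  [low -. dx, low, low +. dx, high -. dx, high, high +. dx]
}
// uniformByWeightXs(0.0, 1.0) adds points just outside, at, and just inside each discontinuity.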
@@ -36,6 +36,13 @@ type gamma = {
   scale: float,
 }

+type logistic = {
+  location: float,
+  scale: float,
+}
+
+type bernoulli = {p: float}
+
 @genType
 type symbolicDist = [
   | #Normal(normal)
@@ -47,6 +54,8 @@ type symbolicDist = [
   | #Triangular(triangular)
   | #Gamma(gamma)
   | #Float(float)
+  | #Bernoulli(bernoulli)
+  | #Logistic(logistic)
 ]

 type analyticalSimplificationResult = [
@@ -6,11 +6,13 @@ module Math = {
 module Epsilon = {
   let ten = 1e-10
   let seven = 1e-7
+  let five = 1e-5
 }

 module Environment = {
   let defaultXYPointLength = 1000
   let defaultSampleCount = 10000
+  let sparklineLength = 20
 }

 module OpCost = {
@@ -101,6 +101,18 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
     rMappedList->Result.map(mappedList => mappedList->Belt.List.toArray->EvArray)
   }

+  let doMapSampleSetDist = (sampleSetDist: SampleSetDist.t, aLambdaValue) => {
+    let fn = r =>
+      switch Lambda.doLambdaCall(aLambdaValue, list{EvNumber(r)}, environment, reducer) {
+      | Ok(EvNumber(f)) => Ok(f)
+      | _ => Error(Operation.SampleMapNeedsNtoNFunction)
+      }
+    switch SampleSetDist.samplesMap(~fn, sampleSetDist) {
+    | Ok(r) => Ok(EvDistribution(SampleSet(r)))
+    | Error(r) => Error(REDistributionError(SampleSetError(r)))
+    }
+  }
+
   let doReduceArray = (aValueArray, initialValue, aLambdaValue) => {
     aValueArray->Belt.Array.reduce(Ok(initialValue), (rAcc, elem) =>
       rAcc->Result.flatMap(acc =>
@@ -130,6 +142,8 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
   | ("keep", [EvArray(aValueArray), EvLambda(aLambdaValue)]) =>
     doKeepArray(aValueArray, aLambdaValue)
   | ("map", [EvArray(aValueArray), EvLambda(aLambdaValue)]) => doMapArray(aValueArray, aLambdaValue)
+  | ("mapSamples", [EvDistribution(SampleSet(dist)), EvLambda(aLambdaValue)]) =>
+    doMapSampleSetDist(dist, aLambdaValue)
   | ("reduce", [EvArray(aValueArray), initialValue, EvLambda(aLambdaValue)]) =>
     doReduceArray(aValueArray, initialValue, aLambdaValue)
   | ("reduceReverse", [EvArray(aValueArray), initialValue, EvLambda(aLambdaValue)]) =>
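The `mapSamples` guard above insists the user lambda maps a number to a number; a stripped-down sketch of that check, with hypothetical names (`mapResult`, `checkedSampleMap`) rather than the reducer's own types:

// Sketch of the guard used by doMapSampleSetDist: a non-number result
// becomes the SampleMapNeedsNtoNFunction error rather than a crash.
type mapResult = MapNumber(float) | MapOther
let checkedSampleMap = (call: float => mapResult, r: float): result<float, string> =>
  switch call(r) {
  | MapNumber(f) => Ok(f)
  | MapOther => Error("SampleMap needs a function that converts a number to a number")
  }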
@@ -4,6 +4,7 @@ type errorValue =
   | REArrayIndexNotFound(string, int)
   | REAssignmentExpected
   | REDistributionError(DistributionTypes.error)
+  | REOperationError(Operation.operationError)
   | REExpressionExpected
   | REFunctionExpected(string)
   | REJavaScriptExn(option<string>, option<string>) // Javascript Exception
@@ -29,6 +30,7 @@ let errorToString = err =>
   | REExpressionExpected => "Expression expected"
   | REFunctionExpected(msg) => `Function expected: ${msg}`
   | REDistributionError(err) => `Distribution Math Error: ${DistributionTypes.Error.toString(err)}`
+  | REOperationError(err) => `Math Error: ${Operation.Error.toString(err)}`
   | REJavaScriptExn(omsg, oname) => {
       let answer = "JS Exception:"
       let answer = switch oname {
@@ -14,7 +14,7 @@ let eArray = anArray => anArray->BExpressionValue.EvArray->BExpressionT.EValue
 let eArrayString = anArray => anArray->BExpressionValue.EvArrayString->BExpressionT.EValue

 let eBindings = (anArray: array<(string, BExpressionValue.expressionValue)>) =>
-  anArray->Js.Dict.fromArray->EvRecord->BExpressionT.EValue
+  anArray->Js.Dict.fromArray->BExpressionValue.EvRecord->BExpressionT.EValue

 let eBool = aBool => aBool->BExpressionValue.EvBool->BExpressionT.EValue
@@ -1,12 +1,5 @@
 module ExpressionValue = ReducerInterface_ExpressionValue
-type expressionValue = ReducerInterface_ExpressionValue.expressionValue
+type expressionValue = ExpressionValue.expressionValue

-let defaultEnv: DistributionOperation.env = {
-  sampleCount: MagicNumbers.Environment.defaultSampleCount,
-  xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
-}
-
-let runGenericOperation = DistributionOperation.run(~env=defaultEnv)

 module Helpers = {
   let arithmeticMap = r =>
@@ -39,37 +32,44 @@ module Helpers = {
   let toFloatFn = (
     fnCall: DistributionTypes.DistributionOperation.toFloat,
     dist: DistributionTypes.genericDist,
+    ~env: DistributionOperation.env,
   ) => {
     FromDist(DistributionTypes.DistributionOperation.ToFloat(fnCall), dist)
-    ->runGenericOperation
+    ->DistributionOperation.run(~env)
     ->Some
   }

   let toStringFn = (
     fnCall: DistributionTypes.DistributionOperation.toString,
     dist: DistributionTypes.genericDist,
+    ~env: DistributionOperation.env,
   ) => {
     FromDist(DistributionTypes.DistributionOperation.ToString(fnCall), dist)
-    ->runGenericOperation
+    ->DistributionOperation.run(~env)
     ->Some
   }

   let toBoolFn = (
     fnCall: DistributionTypes.DistributionOperation.toBool,
     dist: DistributionTypes.genericDist,
+    ~env: DistributionOperation.env,
   ) => {
     FromDist(DistributionTypes.DistributionOperation.ToBool(fnCall), dist)
-    ->runGenericOperation
+    ->DistributionOperation.run(~env)
     ->Some
   }

-  let toDistFn = (fnCall: DistributionTypes.DistributionOperation.toDist, dist) => {
+  let toDistFn = (
+    fnCall: DistributionTypes.DistributionOperation.toDist,
+    dist,
+    ~env: DistributionOperation.env,
+  ) => {
     FromDist(DistributionTypes.DistributionOperation.ToDist(fnCall), dist)
-    ->runGenericOperation
+    ->DistributionOperation.run(~env)
     ->Some
   }

-  let twoDiststoDistFn = (direction, arithmetic, dist1, dist2) => {
+  let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: DistributionOperation.env) => {
     FromDist(
       DistributionTypes.DistributionOperation.ToDistCombination(
         direction,
@@ -77,8 +77,9 @@ module Helpers = {
         #Dist(dist2),
       ),
       dist1,
-    )->runGenericOperation
+    )->DistributionOperation.run(~env)
   }

   let parseNumber = (args: expressionValue): Belt.Result.t<float, string> =>
     switch args {
     | EvNumber(x) => Ok(x)
@@ -103,22 +104,43 @@ module Helpers = {
   let mixtureWithGivenWeights = (
     distributions: array<DistributionTypes.genericDist>,
     weights: array<float>,
+    ~env: DistributionOperation.env,
   ): DistributionOperation.outputType =>
     E.A.length(distributions) == E.A.length(weights)
-      ? Mixture(Belt.Array.zip(distributions, weights))->runGenericOperation
+      ? Mixture(Belt.Array.zip(distributions, weights))->DistributionOperation.run(~env)
       : GenDistError(
           ArgumentError("Error, mixture call has different number of distributions and weights"),
         )

   let mixtureWithDefaultWeights = (
     distributions: array<DistributionTypes.genericDist>,
+    ~env: DistributionOperation.env,
   ): DistributionOperation.outputType => {
     let length = E.A.length(distributions)
     let weights = Belt.Array.make(length, 1.0 /. Belt.Int.toFloat(length))
-    mixtureWithGivenWeights(distributions, weights)
+    mixtureWithGivenWeights(distributions, weights, ~env)
   }

-  let mixture = (args: array<expressionValue>): DistributionOperation.outputType =>
+  let mixture = (
+    args: array<expressionValue>,
+    ~env: DistributionOperation.env,
+  ): DistributionOperation.outputType => {
+    let error = (err: string): DistributionOperation.outputType =>
+      err->DistributionTypes.ArgumentError->GenDistError
+    switch args {
+    | [EvArray(distributions)] =>
+      switch parseDistributionArray(distributions) {
+      | Ok(distrs) => mixtureWithDefaultWeights(distrs, ~env)
+      | Error(err) => error(err)
+      }
+    | [EvArray(distributions), EvArray(weights)] =>
+      switch (parseDistributionArray(distributions), parseNumberArray(weights)) {
+      | (Ok(distrs), Ok(wghts)) => mixtureWithGivenWeights(distrs, wghts, ~env)
+      | (Error(err), Ok(_)) => error(err)
+      | (Ok(_), Error(err)) => error(err)
+      | (Error(err1), Error(err2)) => error(`${err1}|${err2}`)
+      }
+    | _ =>
       switch E.A.last(args) {
       | Some(EvArray(b)) => {
           let weights = parseNumberArray(b)
@@ -126,17 +148,33 @@ module Helpers = {
             Belt.Array.slice(args, ~offset=0, ~len=E.A.length(args) - 1),
           )
           switch E.R.merge(distributions, weights) {
-          | Ok(d, w) => mixtureWithGivenWeights(d, w)
+          | Ok(d, w) => mixtureWithGivenWeights(d, w, ~env)
-          | Error(err) => GenDistError(ArgumentError(err))
+          | Error(err) => error(err)
          }
        }
       | Some(EvNumber(_))
       | Some(EvDistribution(_)) =>
         switch parseDistributionArray(args) {
-        | Ok(distributions) => mixtureWithDefaultWeights(distributions)
+        | Ok(distributions) => mixtureWithDefaultWeights(distributions, ~env)
-        | Error(err) => GenDistError(ArgumentError(err))
+        | Error(err) => error(err)
+        }
+      | _ => error("Last argument of mx must be array or distribution")
+      }
+    }
+  }

+  let klDivergenceWithPrior = (
+    prediction: DistributionTypes.genericDist,
+    answer: DistributionTypes.genericDist,
+    prior: DistributionTypes.genericDist,
+    env: DistributionOperation.env,
+  ) => {
+    let term1 = DistributionOperation.Constructors.klDivergence(~env, prediction, answer)
+    let term2 = DistributionOperation.Constructors.klDivergence(~env, prior, answer)
+    switch E.R.merge(term1, term2)->E.R2.fmap(((a, b)) => a -. b) {
+    | Ok(x) => x->DistributionOperation.Float->Some
+    | Error(_) => None
+    }
   }
-  | _ => GenDistError(ArgumentError("Last argument of mx must be array or distribution"))
 }
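Read as formulas (the same content as the two helpers above): mixtureWithDefaultWeights uses w_i = 1/n for n component distributions, and

  \mathrm{klDivergenceWithPrior}(p, a, \pi) \;=\; \mathrm{klDivergence}(p, a) \;-\; \mathrm{klDivergence}(\pi, a),

where klDivergence is the ToScore(KLDivergence) pathway introduced earlier; the subtraction scores the prediction relative to how well the prior already matched the answer. If either term errors, the helper returns None.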
@@ -144,6 +182,7 @@ module SymbolicConstructors = {
   let oneFloat = name =>
     switch name {
     | "exponential" => Ok(SymbolicDist.Exponential.make)
+    | "bernoulli" => Ok(SymbolicDist.Bernoulli.make)
     | _ => Error("Unreachable state")
     }

@@ -153,6 +192,7 @@ module SymbolicConstructors = {
     | "uniform" => Ok(SymbolicDist.Uniform.make)
     | "beta" => Ok(SymbolicDist.Beta.make)
     | "lognormal" => Ok(SymbolicDist.Lognormal.make)
+    | "logistic" => Ok(SymbolicDist.Logistic.make)
     | "cauchy" => Ok(SymbolicDist.Cauchy.make)
     | "gamma" => Ok(SymbolicDist.Gamma.make)
     | "to" => Ok(SymbolicDist.From90thPercentile.make)
@@ -174,19 +214,27 @@ module SymbolicConstructors = {
   }
 }

-let dispatchToGenericOutput = (call: ExpressionValue.functionCall, _environment): option<
-  DistributionOperation.outputType,
-> => {
+let dispatchToGenericOutput = (
+  call: ExpressionValue.functionCall,
+  env: DistributionOperation.env,
+): option<DistributionOperation.outputType> => {
   let (fnName, args) = call
   switch (fnName, args) {
-  | ("exponential" as fnName, [EvNumber(f)]) =>
+  | (("exponential" | "bernoulli") as fnName, [EvNumber(f)]) =>
     SymbolicConstructors.oneFloat(fnName)
     ->E.R.bind(r => r(f))
     ->SymbolicConstructors.symbolicResultToOutput
   | ("delta", [EvNumber(f)]) =>
     SymbolicDist.Float.makeSafe(f)->SymbolicConstructors.symbolicResultToOutput
   | (
-    ("normal" | "uniform" | "beta" | "lognormal" | "cauchy" | "gamma" | "to") as fnName,
+    ("normal"
+    | "uniform"
+    | "beta"
+    | "lognormal"
+    | "cauchy"
+    | "gamma"
+    | "to"
+    | "logistic") as fnName,
     [EvNumber(f1), EvNumber(f2)],
   ) =>
     SymbolicConstructors.twoFloat(fnName)
@@ -196,13 +244,16 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall, _environment)
     SymbolicConstructors.threeFloat(fnName)
     ->E.R.bind(r => r(f1, f2, f3))
     ->SymbolicConstructors.symbolicResultToOutput
-  | ("sample", [EvDistribution(dist)]) => Helpers.toFloatFn(#Sample, dist)
-  | ("mean", [EvDistribution(dist)]) => Helpers.toFloatFn(#Mean, dist)
-  | ("integralSum", [EvDistribution(dist)]) => Helpers.toFloatFn(#IntegralSum, dist)
-  | ("toString", [EvDistribution(dist)]) => Helpers.toStringFn(ToString, dist)
-  | ("toSparkline", [EvDistribution(dist)]) => Helpers.toStringFn(ToSparkline(20), dist)
+  | ("sample", [EvDistribution(dist)]) => Helpers.toFloatFn(#Sample, dist, ~env)
+  | ("sampleN", [EvDistribution(dist), EvNumber(n)]) =>
+    Some(FloatArray(GenericDist.sampleN(dist, Belt.Int.fromFloat(n))))
+  | ("mean", [EvDistribution(dist)]) => Helpers.toFloatFn(#Mean, dist, ~env)
+  | ("integralSum", [EvDistribution(dist)]) => Helpers.toFloatFn(#IntegralSum, dist, ~env)
+  | ("toString", [EvDistribution(dist)]) => Helpers.toStringFn(ToString, dist, ~env)
+  | ("toSparkline", [EvDistribution(dist)]) =>
+    Helpers.toStringFn(ToSparkline(MagicNumbers.Environment.sparklineLength), dist, ~env)
   | ("toSparkline", [EvDistribution(dist), EvNumber(n)]) =>
-    Helpers.toStringFn(ToSparkline(Belt.Float.toInt(n)), dist)
+    Helpers.toStringFn(ToSparkline(Belt.Float.toInt(n)), dist, ~env)
   | ("exp", [EvDistribution(a)]) =>
     // https://mathjs.org/docs/reference/functions/exp.html
     Helpers.twoDiststoDistFn(
@@ -210,56 +261,96 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall, _environment)
       "pow",
       GenericDist.fromFloat(MagicNumbers.Math.e),
       a,
+      ~env,
     )->Some
-  | ("normalize", [EvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist)
-  | ("isNormalized", [EvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist)
-  | ("toPointSet", [EvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist)
+  | ("normalize", [EvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist, ~env)
+  | ("klDivergence", [EvDistribution(prediction), EvDistribution(answer)]) =>
+    Some(DistributionOperation.run(FromDist(ToScore(KLDivergence(answer)), prediction), ~env))
+  | ("klDivergence", [EvDistribution(prediction), EvDistribution(answer), EvDistribution(prior)]) =>
+    Helpers.klDivergenceWithPrior(prediction, answer, prior, env)
+  | (
+    "logScoreWithPointAnswer",
+    [EvDistribution(prediction), EvNumber(answer), EvDistribution(prior)],
+  )
+  | (
+    "logScoreWithPointAnswer",
+    [EvDistribution(prediction), EvDistribution(Symbolic(#Float(answer))), EvDistribution(prior)],
+  ) =>
+    DistributionOperation.run(
+      FromDist(ToScore(LogScore(answer, prior->Some)), prediction),
+      ~env,
+    )->Some
+  | ("logScoreWithPointAnswer", [EvDistribution(prediction), EvNumber(answer)])
+  | (
+    "logScoreWithPointAnswer",
+    [EvDistribution(prediction), EvDistribution(Symbolic(#Float(answer)))],
+  ) =>
+    DistributionOperation.run(FromDist(ToScore(LogScore(answer, None)), prediction), ~env)->Some
+  | ("isNormalized", [EvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist, ~env)
+  | ("toPointSet", [EvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist, ~env)
   | ("scaleLog", [EvDistribution(dist)]) =>
-    Helpers.toDistFn(Scale(#Logarithm, MagicNumbers.Math.e), dist)
+    Helpers.toDistFn(Scale(#Logarithm, MagicNumbers.Math.e), dist, ~env)
-  | ("scaleLog10", [EvDistribution(dist)]) => Helpers.toDistFn(Scale(#Logarithm, 10.0), dist)
+  | ("scaleLog10", [EvDistribution(dist)]) => Helpers.toDistFn(Scale(#Logarithm, 10.0), dist, ~env)
   | ("scaleLog", [EvDistribution(dist), EvNumber(float)]) =>
-    Helpers.toDistFn(Scale(#Logarithm, float), dist)
+    Helpers.toDistFn(Scale(#Logarithm, float), dist, ~env)
+  | ("scaleLogWithThreshold", [EvDistribution(dist), EvNumber(base), EvNumber(eps)]) =>
+    Helpers.toDistFn(Scale(#LogarithmWithThreshold(eps), base), dist, ~env)
   | ("scalePow", [EvDistribution(dist), EvNumber(float)]) =>
-    Helpers.toDistFn(Scale(#Power, float), dist)
+    Helpers.toDistFn(Scale(#Power, float), dist, ~env)
   | ("scaleExp", [EvDistribution(dist)]) =>
-    Helpers.toDistFn(Scale(#Power, MagicNumbers.Math.e), dist)
+    Helpers.toDistFn(Scale(#Power, MagicNumbers.Math.e), dist, ~env)
-  | ("cdf", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Cdf(float), dist)
+  | ("cdf", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Cdf(float), dist, ~env)
-  | ("pdf", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Pdf(float), dist)
+  | ("pdf", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Pdf(float), dist, ~env)
-  | ("inv", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Inv(float), dist)
+  | ("inv", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Inv(float), dist, ~env)
   | ("toSampleSet", [EvDistribution(dist), EvNumber(float)]) =>
-    Helpers.toDistFn(ToSampleSet(Belt.Int.fromFloat(float)), dist)
+    Helpers.toDistFn(ToSampleSet(Belt.Int.fromFloat(float)), dist, ~env)
   | ("toSampleSet", [EvDistribution(dist)]) =>
-    Helpers.toDistFn(ToSampleSet(MagicNumbers.Environment.defaultSampleCount), dist)
+    Helpers.toDistFn(ToSampleSet(env.sampleCount), dist, ~env)
+  | ("toInternalSampleArray", [EvDistribution(SampleSet(dist))]) =>
+    Some(FloatArray(SampleSetDist.T.get(dist)))
   | ("fromSamples", [EvArray(inputArray)]) => {
       let _wrapInputErrors = x => SampleSetDist.NonNumericInput(x)
       let parsedArray = Helpers.parseNumberArray(inputArray)->E.R2.errMap(_wrapInputErrors)
       switch parsedArray {
-      | Ok(array) => runGenericOperation(FromSamples(array))
+      | Ok(array) => DistributionOperation.run(FromSamples(array), ~env)
       | Error(e) => GenDistError(SampleSetError(e))
       }->Some
     }
-  | ("inspect", [EvDistribution(dist)]) => Helpers.toDistFn(Inspect, dist)
+  | ("inspect", [EvDistribution(dist)]) => Helpers.toDistFn(Inspect, dist, ~env)
   | ("truncateLeft", [EvDistribution(dist), EvNumber(float)]) =>
-    Helpers.toDistFn(Truncate(Some(float), None), dist)
+    Helpers.toDistFn(Truncate(Some(float), None), dist, ~env)
   | ("truncateRight", [EvDistribution(dist), EvNumber(float)]) =>
-    Helpers.toDistFn(Truncate(None, Some(float)), dist)
+    Helpers.toDistFn(Truncate(None, Some(float)), dist, ~env)
   | ("truncate", [EvDistribution(dist), EvNumber(float1), EvNumber(float2)]) =>
-    Helpers.toDistFn(Truncate(Some(float1), Some(float2)), dist)
+    Helpers.toDistFn(Truncate(Some(float1), Some(float2)), dist, ~env)
-  | ("mx" | "mixture", args) => Helpers.mixture(args)->Some
+  | ("mx" | "mixture", args) => Helpers.mixture(args, ~env)->Some
   | ("log", [EvDistribution(a)]) =>
     Helpers.twoDiststoDistFn(
       Algebraic(AsDefault),
       "log",
       a,
       GenericDist.fromFloat(MagicNumbers.Math.e),
+      ~env,
     )->Some
   | ("log10", [EvDistribution(a)]) =>
-    Helpers.twoDiststoDistFn(Algebraic(AsDefault), "log", a, GenericDist.fromFloat(10.0))->Some
+    Helpers.twoDiststoDistFn(
+      Algebraic(AsDefault),
+      "log",
+      a,
+      GenericDist.fromFloat(10.0),
+      ~env,
+    )->Some
   | ("unaryMinus", [EvDistribution(a)]) =>
-    Helpers.twoDiststoDistFn(Algebraic(AsDefault), "multiply", a, GenericDist.fromFloat(-1.0))->Some
+    Helpers.twoDiststoDistFn(
+      Algebraic(AsDefault),
+      "multiply",
+      a,
+      GenericDist.fromFloat(-1.0),
+      ~env,
+    )->Some
   | (("add" | "multiply" | "subtract" | "divide" | "pow" | "log") as arithmetic, [_, _] as args) =>
     Helpers.catchAndConvertTwoArgsToDists(args)->E.O2.fmap(((fst, snd)) =>
-      Helpers.twoDiststoDistFn(Algebraic(AsDefault), arithmetic, fst, snd)
+      Helpers.twoDiststoDistFn(Algebraic(AsDefault), arithmetic, fst, snd, ~env)
     )
   | (
     ("dotAdd"
@@ -270,7 +361,7 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall, _environment)
     [_, _] as args,
   ) =>
     Helpers.catchAndConvertTwoArgsToDists(args)->E.O2.fmap(((fst, snd)) =>
-      Helpers.twoDiststoDistFn(Pointwise, arithmetic, fst, snd)
+      Helpers.twoDiststoDistFn(Pointwise, arithmetic, fst, snd, ~env)
     )
   | ("dotExp", [EvDistribution(a)]) =>
     Helpers.twoDiststoDistFn(
@@ -278,6 +369,7 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall, _environment)
       "dotPow",
       GenericDist.fromFloat(MagicNumbers.Math.e),
       a,
+      ~env,
     )->Some
   | _ => None
   }
@@ -292,6 +384,7 @@ let genericOutputToReducerValue = (o: DistributionOperation.outputType): result<
   | Float(d) => Ok(EvNumber(d))
   | String(d) => Ok(EvString(d))
   | Bool(d) => Ok(EvBool(d))
+  | FloatArray(d) => Ok(EvArray(d |> E.A.fmap(r => ReducerInterface_ExpressionValue.EvNumber(r))))
   | GenDistError(err) => Error(REDistributionError(err))
   }
@@ -1,4 +1,3 @@
-let defaultEnv: DistributionOperation.env
 let dispatch: (
   ReducerInterface_ExpressionValue.functionCall,
   ReducerInterface_ExpressionValue.environment,
@@ -77,10 +77,13 @@ let distributionErrorToString = DistributionTypes.Error.toString
 type lambdaValue = ReducerInterface_ExpressionValue.lambdaValue

 @genType
-let defaultSamplingEnv = ReducerInterface_GenericDistribution.defaultEnv
+let defaultSamplingEnv = DistributionOperation.defaultEnv

 @genType
 type environment = ReducerInterface_ExpressionValue.environment

 @genType
 let defaultEnvironment = ReducerInterface_ExpressionValue.defaultEnvironment
+
+@genType
+let foreignFunctionInterface = Reducer.foreignFunctionInterface
@@ -235,13 +235,16 @@ module R = {
     | Ok(a) => f(a)
     | Error(err) => Error(err)
     }

   let toExn = (msg: string, x: result<'a, 'b>): 'a =>
     switch x {
     | Ok(r) => r
     | Error(_) => raise(Assertion(msg))
     }
+
+  let toExnFnString = (errorToStringFn, o) =>
+    switch o {
+    | Ok(r) => r
+    | Error(r) => raise(Assertion(errorToStringFn(r)))
+    }
+
   let default = (default, res: Belt.Result.t<'a, 'b>) =>
     switch res {
     | Ok(r) => r
@@ -607,6 +610,9 @@ module A = {
   let filter = Js.Array.filter
   let joinWith = Js.Array.joinWith

+  let all = (p: 'a => bool, xs: array<'a>): bool => length(filter(p, xs)) == length(xs)
+  let any = (p: 'a => bool, xs: array<'a>): bool => length(filter(p, xs)) > 0
+
   module O = {
     let concatSomes = (optionals: array<option<'a>>): array<'a> =>
       optionals
@@ -617,6 +623,19 @@ module A = {
       | Some(o) => o
       | None => []
       }
+    // Returns `None` if any element is `None`
+    let rec arrSomeToSomeArr = (optionals: array<option<'a>>): option<array<'a>> => {
+      let optionals' = optionals->Belt.List.fromArray
+      switch optionals' {
+      | list{} => []->Some
+      | list{x, ...xs} =>
+        switch x {
+        | Some(_) => xs->Belt.List.toArray->arrSomeToSomeArr
+        | None => None
+        }
+      }
+    }
+    let firstSome = x => Belt.Array.getBy(x, O.isSome)
   }

 module R = {
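A quick illustration of the two new predicates, assuming they are reached as `E.A.all` / `E.A.any` from outside the module; the expected values follow directly from the filter-based definitions above:

// all(p, xs): every element satisfies p; any(p, xs): at least one does.
let allPositive = E.A.all(x => x > 0.0, [1.0, 2.0, 3.0]) // expected: true
let anyNegative = E.A.any(x => x < 0.0, [1.0, 2.0, 3.0]) // expected: false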
@@ -8,6 +8,7 @@ type algebraicOperation = [
   | #Divide
   | #Power
   | #Logarithm
+  | #LogarithmWithThreshold(float)
 ]

 type convolutionOperation = [
@@ -18,7 +19,7 @@ type convolutionOperation = [

 @genType
 type pointwiseOperation = [#Add | #Multiply | #Power]
-type scaleOperation = [#Multiply | #Power | #Logarithm | #Divide]
+type scaleOperation = [#Multiply | #Power | #Logarithm | #LogarithmWithThreshold(float) | #Divide]
 type distToFloatOperation = [
   | #Pdf(float)
   | #Cdf(float)
@@ -35,7 +36,7 @@ module Convolution = {
     | #Add => Some(#Add)
     | #Subtract => Some(#Subtract)
     | #Multiply => Some(#Multiply)
-    | #Divide | #Power | #Logarithm => None
+    | #Divide | #Power | #Logarithm | #LogarithmWithThreshold(_) => None
     }

   let canDoAlgebraicOperation = (op: algebraicOperation): bool =>
@@ -52,6 +53,11 @@ module Convolution = {
 type operationError =
   | DivisionByZeroError
   | ComplexNumberError
+  | InfinityError
+  | NegativeInfinityError
+  | SampleMapNeedsNtoNFunction
+  | PdfInvalidError
+  | NotYetImplemented // should be removed when `klDivergence` for mixed and discrete is implemented.

 @genType
 module Error = {
@@ -62,6 +68,11 @@ module Error = {
     switch err {
     | DivisionByZeroError => "Cannot divide by zero"
     | ComplexNumberError => "Operation returned complex result"
+    | InfinityError => "Operation returned positive infinity"
+    | NegativeInfinityError => "Operation returned negative infinity"
+    | SampleMapNeedsNtoNFunction => "SampleMap needs a function that converts a number to a number"
+    | PdfInvalidError => "This Pdf is invalid"
+    | NotYetImplemented => "This pathway is not yet implemented"
     }
 }

@@ -86,6 +97,8 @@ let logarithm = (a: float, b: float): result<float, Error.t> =>
     Ok(0.)
   } else if a > 0.0 && b > 0.0 {
     Ok(log(a) /. log(b))
+  } else if a == 0.0 {
+    Error(NegativeInfinityError)
   } else {
     Error(ComplexNumberError)
   }
@@ -102,6 +115,12 @@ module Algebraic = {
     | #Power => power(a, b)
     | #Divide => divide(a, b)
     | #Logarithm => logarithm(a, b)
+    | #LogarithmWithThreshold(eps) =>
+      if a < eps {
+        Ok(0.0)
+      } else {
+        logarithm(a, b)
+      }
     }

   let toString = x =>
@@ -112,6 +131,7 @@ module Algebraic = {
     | #Power => "**"
     | #Divide => "/"
     | #Logarithm => "log"
+    | #LogarithmWithThreshold(_) => "log"
     }

   let format = (a, b, c) => b ++ (" " ++ (toString(a) ++ (" " ++ c)))
@@ -151,6 +171,12 @@ module Scale = {
     | #Divide => divide(a, b)
     | #Power => power(a, b)
     | #Logarithm => logarithm(a, b)
+    | #LogarithmWithThreshold(eps) =>
+      if a < eps {
+        Ok(0.0)
+      } else {
+        logarithm(a, b)
+      }
     }

   let format = (operation: t, value, scaleBy) =>
@@ -159,14 +185,14 @@ module Scale = {
     | #Divide => j`verticalDivide($value, $scaleBy) `
     | #Power => j`verticalPower($value, $scaleBy) `
     | #Logarithm => j`verticalLog($value, $scaleBy) `
+    | #LogarithmWithThreshold(eps) => j`verticalLog($value, $scaleBy, epsilon=$eps) `
     }

   let toIntegralSumCacheFn = x =>
     switch x {
     | #Multiply => (a, b) => Some(a *. b)
     | #Divide => (a, b) => Some(a /. b)
-    | #Power => (_, _) => None
-    | #Logarithm => (_, _) => None
+    | #Power | #Logarithm | #LogarithmWithThreshold(_) => (_, _) => None
     }

   let toIntegralCacheFn = x =>
@@ -175,6 +201,7 @@ module Scale = {
     | #Divide => (_, _) => None
     | #Power => (_, _) => None
     | #Logarithm => (_, _) => None
+    | #LogarithmWithThreshold(_) => (_, _) => None
     }
 }
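Taken together, the logarithm-related branches above amount to the following (only the branches visible in this hunk are restated; the initial Ok(0.) case keeps whatever condition it had before):

  \log_b(a) = \begin{cases} \mathrm{Ok}\!\left(\ln a / \ln b\right) & a > 0,\ b > 0 \\ \mathrm{Error(NegativeInfinityError)} & a = 0 \\ \mathrm{Error(ComplexNumberError)} & \text{otherwise} \end{cases}
  \qquad
  \log_b^{\varepsilon}(a) = \begin{cases} 0 & a < \varepsilon \\ \log_b(a) & \text{otherwise} \end{cases}

where \log_b^{\varepsilon} is the new #LogarithmWithThreshold(\varepsilon) case used by both the Algebraic and Scale modules.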
packages/squiggle-lang/src/rescript/Utility/Stdlib.res (new file, 40 lines)
@@ -0,0 +1,40 @@
+module Bernoulli = {
+  @module external cdf: (float, float) => float = "@stdlib/stats/base/dists/bernoulli/cdf"
+  let cdf = cdf
+
+  @module external pmf: (float, float) => float = "@stdlib/stats/base/dists/bernoulli/pmf"
+  let pmf = pmf
+
+  @module external quantile: (float, float) => float = "@stdlib/stats/base/dists/bernoulli/quantile"
+  let quantile = quantile
+
+  @module external mean: float => float = "@stdlib/stats/base/dists/bernoulli/mean"
+  let mean = mean
+
+  @module external stdev: float => float = "@stdlib/stats/base/dists/bernoulli/stdev"
+  let stdev = stdev
+
+  @module external variance: float => float = "@stdlib/stats/base/dists/bernoulli/variance"
+  let variance = variance
+}
+
+module Logistic = {
+  @module external cdf: (float, float, float) => float = "@stdlib/stats/base/dists/logistic/cdf"
+  let cdf = cdf
+
+  @module external pdf: (float, float, float) => float = "@stdlib/stats/base/dists/logistic/pdf"
+  let pdf = pdf
+
+  @module
+  external quantile: (float, float, float) => float = "@stdlib/stats/base/dists/logistic/quantile"
+  let quantile = quantile
+
+  @module external mean: (float, float) => float = "@stdlib/stats/base/dists/logistic/mean"
+  let mean = mean
+
+  @module external stdev: (float, float) => float = "@stdlib/stats/base/dists/logistic/stdev"
+  let stdev = stdev
+
+  @module external variance: (float, float) => float = "@stdlib/stats/base/dists/logistic/variance"
+  let variance = variance
+}
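Two sanity checks one could write against these bindings; the expected values are the textbook ones (the logistic CDF at its location parameter is 0.5, and Bernoulli(p) has mean p), and the call shapes simply mirror the external signatures above:

// Assumes the argument order exposed by the externals: value first, then distribution parameters.
let halfway = Stdlib.Logistic.cdf(0.0, 0.0, 1.0) // expected: 0.5
let bernoulliMean = Stdlib.Bernoulli.mean(0.25) // expected: 0.25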
@@ -96,7 +96,21 @@ module T = {
   let fromZippedArray = (pairs: array<(float, float)>): t => pairs |> Belt.Array.unzip |> fromArray
   let equallyDividedXs = (t: t, newLength) => E.A.Floats.range(minX(t), maxX(t), newLength)
   let toJs = (t: t) => {"xs": t.xs, "ys": t.ys}
+  let filterYValues = (fn, t: t): t => t |> zip |> E.A.filter(((_, y)) => fn(y)) |> fromZippedArray
+  let filterOkYs = (xs: array<float>, ys: array<result<float, 'b>>): t => {
+    let n = E.A.length(xs) // Assume length(xs) == length(ys)
+    let newXs = []
+    let newYs = []
+    for i in 0 to n - 1 {
+      switch ys[i] {
+      | Ok(y) =>
+        let _ = Js.Array.push(xs[i], newXs)
+        let _ = Js.Array.push(y, newYs)
+      | Error(_) => ()
+      }
+    }
+    {xs: newXs, ys: newYs}
+  }
   module Validator = {
     let fnName = "XYShape validate"
     let notSortedError = (p: string): error => NotSorted(p)
@@ -376,6 +390,128 @@ module PointwiseCombination = {
   }
 `)
+
+  /*
+  This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
+  */
+  let combineAlongSupportOfSecondArgument0: (
+    (float, float) => result<float, Operation.Error.t>,
+    interpolator,
+    T.t,
+    T.t,
+  ) => result<T.t, Operation.Error.t> = (fn, interpolator, t1, t2) => {
+    let newYs = []
+    let newXs = []
+    let (l1, l2) = (E.A.length(t1.xs), E.A.length(t2.xs))
+    let (i, j) = (ref(0), ref(0))
+    let minX = t2.xs[0]
+    let maxX = t2.xs[l2 - 1]
+    while j.contents < l2 - 1 && i.contents < l1 - 1 {
+      let someTuple = {
+        let x1 = t1.xs[i.contents + 1]
+        let x2 = t2.xs[j.contents + 1]
+        if (
+          /* if t1 has to catch up to t2 */
+          i.contents < l1 - 1 && j.contents < l2 && x1 < x2 && minX <= x1 && x2 <= maxX
+        ) {
+          i := i.contents + 1
+          let x = x1
+          let y1 = t1.ys[i.contents]
+          let y2 = interpolator(t2, j.contents, x)
+          Some((x, y1, y2))
+        } else if (
+          /* if t2 has to catch up to t1 */
+          i.contents < l1 && j.contents < l2 - 1 && x1 > x2 && x2 >= minX && maxX >= x1
+        ) {
+          j := j.contents + 1
+          let x = x2
+          let y1 = interpolator(t1, i.contents, x)
+          let y2 = t2.ys[j.contents]
+          Some((x, y1, y2))
+        } else if (
+          /* move both ahead if they are equal */
+          i.contents < l1 - 1 && j.contents < l2 - 1 && x1 == x2 && x1 >= minX && maxX >= x2
+        ) {
+          i := i.contents + 1
+          j := j.contents + 1
+          let x = x1
+          let y1 = t1.ys[i.contents]
+          let y2 = t2.ys[j.contents]
+          Some((x, y1, y2))
+        } else {
+          i := i.contents + 1
+          None
+        }
+      }
+      switch someTuple {
+      | Some((x, y1, y2)) => {
+          let _ = Js.Array.push(fn(y1, y2), newYs)
+          let _ = Js.Array.push(x, newXs)
+        }
+      | None => ()
+      }
+    }
+    T.filterOkYs(newXs, newYs)->Ok
+  }
+
+  /* *Dead code*: Nuño wrote this function to try to increase precision, but it didn't work.
+  If another traveler comes through with a similar idea, we hope this implementation will help them.
+  By "enrich" we mean to increase granularity.
+  */
+  let enrichXyShape = (t: T.t): T.t => {
+    let defaultEnrichmentFactor = 10
+    let length = E.A.length(t.xs)
+    let points =
+      length < MagicNumbers.Environment.defaultXYPointLength
+        ? defaultEnrichmentFactor * MagicNumbers.Environment.defaultXYPointLength / length
+        : defaultEnrichmentFactor
+
+    let getInBetween = (x1: float, x2: float): array<float> => {
+      if abs_float(x1 -. x2) < 2.0 *. MagicNumbers.Epsilon.seven {
+        [x1]
+      } else {
+        let newPointsArray = Belt.Array.makeBy(points - 1, i => i)
+        // don't repeat the x2 point, it will be gotten in the next iteration.
+        let result = Js.Array.mapi((pos, i) =>
+          if i == 0 {
+            x1
+          } else {
+            let points' = Belt.Float.fromInt(points)
+            let pos' = Belt.Float.fromInt(pos)
+            x1 *. (points' -. pos') /. points' +. x2 *. pos' /. points'
+          }
+        , newPointsArray)
+        result
+      }
+    }
+    let newXsUnflattened = Js.Array.mapi(
+      (x, i) => i < length - 2 ? getInBetween(x, t.xs[i + 1]) : [x],
+      t.xs,
+    )
+    let newXs = Belt.Array.concatMany(newXsUnflattened)
+    let newYs = E.A.fmap(x => XtoY.linear(x, t), newXs)
+    {xs: newXs, ys: newYs}
+  }
+
+  // This function is used for klDivergence
+  let combineAlongSupportOfSecondArgument: (
+    (float, float) => result<float, Operation.Error.t>,
+    T.t,
+    T.t,
+  ) => result<T.t, Operation.Error.t> = (fn, prediction, answer) => {
+    let combineWithFn = (answerX: float, i: int) => {
+      let answerY = answer.ys[i]
+      let predictionY = XtoY.linear(answerX, prediction)
+      fn(predictionY, answerY)
+    }
+    let newYsWithError = Js.Array.mapi((x, i) => combineWithFn(x, i), answer.xs)
+    let newYsOrError = E.A.R.firstErrorOrOpen(newYsWithError)
+    let result = switch newYsOrError {
+    | Ok(a) => Ok({xs: answer.xs, ys: a})
+    | Error(b) => Error(b)
+    }
+
+    result
+  }
let addCombine = (interpolator: interpolator, t1: T.t, t2: T.t): T.t =>
|
let addCombine = (interpolator: interpolator, t1: T.t, t2: T.t): T.t =>
|
||||||
combine((a, b) => Ok(a +. b), interpolator, t1, t2)->E.R.toExn(
|
combine((a, b) => Ok(a +. b), interpolator, t1, t2)->E.R.toExn(
|
||||||
"Add operation should never fail",
|
"Add operation should never fail",
|
||||||
|
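For readers who find the added ReScript hard to follow in diff form, here is a rough TypeScript sketch of the core idea behind `combineAlongSupportOfSecondArgument` above: walk the answer's support, interpolate the prediction at each answer x, and apply the binary function pointwise. The names and types are illustrative only, and the `result`/error plumbing of the real code is omitted.

```typescript
// Illustrative sketch only (not the package's API): combine a prediction shape
// with an answer shape along the answer's support, mirroring the ReScript
// combineAlongSupportOfSecondArgument above. Error handling is omitted.
type XYShape = { xs: number[]; ys: number[] };

// Linear interpolation of a shape's y value at x; assumes xs is sorted ascending.
function linearY(shape: XYShape, x: number): number {
  const { xs, ys } = shape;
  if (x <= xs[0]) return ys[0];
  if (x >= xs[xs.length - 1]) return ys[ys.length - 1];
  const j = xs.findIndex((xi) => xi >= x); // first index with xs[j] >= x, so j >= 1
  const t = (x - xs[j - 1]) / (xs[j] - xs[j - 1]);
  return ys[j - 1] * (1 - t) + ys[j] * t;
}

// Evaluate fn(prediction(x), answer(x)) at every x in the answer's support.
function combinePointwiseOnAnswerSupport(
  fn: (predictionY: number, answerY: number) => number,
  prediction: XYShape,
  answer: XYShape
): XYShape {
  const ys = answer.xs.map((x, i) => fn(linearY(prediction, x), answer.ys[i]));
  return { xs: answer.xs, ys };
}
```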
@@ -467,7 +603,7 @@ module Range = {
   // TODO: I think this isn't needed by any functions anymore.
   let stepsToContinuous = t => {
     // TODO: It would be nicer if this the diff didn't change the first element, and also maybe if there were a more elegant way of doing this.
-    let diff = T.xTotalRange(t) |> (r => r *. 0.00001)
+    let diff = T.xTotalRange(t) |> (r => r *. MagicNumbers.Epsilon.five)
     let items = switch E.A.toRanges(Belt.Array.zip(t.xs, t.ys)) {
     | Ok(items) =>
       Some(

@@ -489,25 +625,6 @@ module Range = {
   }
 }
-
-let pointLogScore = (prediction, answer) =>
-  switch answer {
-  | 0. => 0.0
-  | answer => answer *. Js.Math.log2(Js.Math.abs_float(prediction /. answer))
-  }
-
-let logScorePoint = (sampleCount, t1, t2) =>
-  PointwiseCombination.combineEvenXs(
-    ~fn=pointLogScore,
-    ~xToYSelection=XtoY.linear,
-    sampleCount,
-    t1,
-    t2,
-  )
-  |> Range.integrateWithTriangles
-  |> E.O.fmap(T.accumulateYs(\"+."))
-  |> E.O.fmap(Pairs.last)
-  |> E.O.fmap(Pairs.y)

 module Analysis = {
   let getVarianceDangerously = (t: 't, mean: 't => float, getMeanOfSquares: 't => float): float => {
     let meanSquared = mean(t) ** 2.0
@@ -1,6 +1,6 @@
 ---
 title: "Known Bugs"
-sidebar_position: 6
+sidebar_position: 1
 ---

 import { SquiggleEditor } from "../../src/components/SquiggleEditor";
@@ -1,9 +1,8 @@
 ---
-sidebar_position: 4
+title: Future Features
+sidebar_position: 3
 ---

-# Future Features
-
 Squiggle is still very early. The main first goal is to become stable. This means having a clean codebase, having decent test coverage, and having a syntax we are reasonably confident in. Later on, there are many other features that will be interesting to explore.

 ## Programming Language Features
@@ -1,7 +1,8 @@
 ---
-sidebar_position: 6
+sidebar_position: 2
 title: Gallery
 ---

 - [Adjusting probabilities for the passage of time](https://www.lesswrong.com/s/rDe8QE5NvXcZYzgZ3/p/j8o6sgRerE3tqNWdj) by Nuño Sempere
 - [GiveWell's GiveDirectly cost effectiveness analysis](https://observablehq.com/@hazelfire/givewells-givedirectly-cost-effectiveness-analysis) by Sam Nolan
+- [Astronomical Waste](https://observablehq.com/@quinn-dougherty/waste)
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 5
+sidebar_position: 4
 title: Three Formats of Distributions
 author: Ozzie Gooen
 date: 02-19-2022
@@ -1,6 +1,6 @@
 ---
 title: "Distribution Creation"
-sidebar_position: 8
+sidebar_position: 2
 ---

 import TOCInline from "@theme/TOCInline";
@@ -72,6 +72,8 @@ If both values are above zero, a `lognormal` distribution is used. If not, a `no

 `mixture(...distributions: Distribution[], weights?: number[])`
 `mx(...distributions: Distribution[], weights?: number[])`
+`mixture(distributions: Distributions[], weights?: number[])`
+`mx(distributions: Distributions[], weights?: number[])`

 The `mixture` function combines multiple distributions to create a mixture. You can optionally pass in a list of proportional weights.

@@ -85,6 +87,9 @@ The `mixture` function combines multiple distributions to create a mixture. You can
 <TabItem value="ex3" label="With Continuous and Discrete Inputs">
   <SquiggleEditor initialSquiggleString="mixture(1 to 5, 8 to 10, 1, 3, 20)" />
 </TabItem>
+<TabItem value="ex4" label="Array of Distributions Input">
+  <SquiggleEditor initialSquiggleString="mx([1 to 2, exponential(1)], [1,1])" />
+</TabItem>
 </Tabs>

 ### Arguments
@@ -1,6 +1,6 @@
 ---
 title: "Functions Reference"
-sidebar_position: 7
+sidebar_position: 3
 ---

 import { SquiggleEditor } from "../../src/components/SquiggleEditor";
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 2
+sidebar_position: 1
 title: Language Basics
 ---


@@ -49,5 +49,6 @@ ozzie_estimate(1) * nuno_estimate(1, 1)`}

 ## See more

-- [Functions reference](https://squiggle-language.com/docs/Features/Functions)
-- [Gallery](https://squiggle-language.com/docs/Discussions/Gallery)
+- [Distribution creation](./Distributions)
+- [Functions reference](./Functions)
+- [Gallery](../Discussions/Gallery)
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 3
+sidebar_position: 4
 title: Node Packages
 ---

@@ -12,25 +12,13 @@ Types are available for both packages.

 ## Squiggle Language

-The `@quri/squiggle-lang` package exports a single function, `run`, which given
-a string of Squiggle code, will execute the code and return any exports and the
-environment created from the squiggle code.
-
-`run` has two optional arguments. The first optional argument allows you to set
-sampling settings for Squiggle when representing distributions. The second optional
-argument allows you to pass an environment previously created by another `run`
-call. Passing this environment will mean that all previously declared variables
-in the previous environment will be made available.
-
-The return type of `run` is a bit complicated, and comes from auto generated `js`
-code that comes from rescript. We highly recommend using typescript when using
-this library to help navigate the return type.
+[_See `README.md` in Github_](https://github.com/quantified-uncertainty/squiggle/tree/develop/packages/squiggle-lang#use-the-npm-package)

 ## Squiggle Components

-The `@quri/squiggle-components` package offers several components and utilities
-for people who want to embed Squiggle components into websites. This documentation
-uses `@quri/squiggle-components` frequently.
+[_See `README.md` in Github_](https://github.com/quantified-uncertainty/squiggle/tree/develop/packages/components#usage-in-a-react-project)
+
+This documentation uses `@quri/squiggle-components` frequently.

 We host [a storybook](https://squiggle-components.netlify.app/) with details
 and usage of each of the components made available.
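The prose removed in the hunk above is still the clearest description of `@quri/squiggle-lang`'s `run` in this commit: it takes a string of Squiggle code plus optional sampling settings and an optional previous environment. A hedged TypeScript sketch of that description follows; the export style, argument order, and the `environment` field name are assumptions taken from the removed prose, not from the package's published types, so check the linked README for the real signature.

```typescript
// Sketch based on the removed prose above, not on the package's documented types.
// Assumptions: `run` is a named export, its optional arguments are
// (samplingSettings, previousEnvironment) in that order, and the result exposes
// the created environment as `environment`.
import { run } from "@quri/squiggle-lang";

// First call: evaluate some Squiggle code with default sampling settings.
const first = run("x = normal(5, 2); x + 1");

// Second call: pass the environment from the first call so that `x` is still
// in scope, as the removed paragraph describes.
const second = run("x * 2", undefined, first.environment);

console.log(second);
```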
@@ -7,10 +7,10 @@ Squiggle is an _estimation language_, and a syntax for _calculating and expressi

 ## Get started

-- [Gallery](https://www.squiggle-language.com/docs/Discussions/Gallery)
-- [Squiggle playground](https://squiggle-language.com/playground)
-- [Language basics](https://www.squiggle-language.com/docs/Features/Language)
-- [Squiggle functions source of truth](https://www.squiggle-language.com/docs/Features/Functions)
-- [Known bugs](https://www.squiggle-language.com/docs/Discussions/Bugs)
+- [Gallery](./Discussions/Gallery)
+- [Squiggle playground](/playground)
+- [Language basics](./Features/Language)
+- [Squiggle functions source of truth](./docs/Features/Functions)
+- [Known bugs](./Discussions/Bugs)
 - [Original lesswrong sequence](https://www.lesswrong.com/s/rDe8QE5NvXcZYzgZ3)
 - [Author your squiggle models as Observable notebooks](https://observablehq.com/@hazelfire/squiggle)
@@ -9,8 +9,8 @@ const path = require("path");

 /** @type {import('@docusaurus/types').Config} */
 const config = {
-  title: "Squiggle (alpha)",
-  tagline: "Estimation language for forecasters",
+  title: "Squiggle",
+  tagline: "An estimation language for forecasters",
   url: "https://squiggle-language.com",
   baseUrl: "/",
   onBrokenLinks: "throw",
@@ -12,11 +12,11 @@
     "format": "prettier --write ."
   },
   "dependencies": {
-    "@docusaurus/core": "2.0.0-beta.18",
-    "@docusaurus/preset-classic": "2.0.0-beta.18",
-    "@quri/squiggle-components": "0.2.9",
+    "@docusaurus/core": "2.0.0-beta.20",
+    "@docusaurus/preset-classic": "2.0.0-beta.20",
+    "@quri/squiggle-components": "^0.2.20",
     "clsx": "^1.1.1",
-    "prism-react-renderer": "^1.2.1",
+    "prism-react-renderer": "^1.3.3",
     "react": "^18.1.0",
     "react-dom": "^18.1.0",
     "remark-math": "^3",
@@ -12,6 +12,9 @@ function HomepageHeader() {
     <header className={clsx("hero hero--primary", styles.heroBanner)}>
       <div className="container">
         <h1 className="hero__title">{siteConfig.title}</h1>
+        <p className="hero__subtitle">
+          <i>Early access</i>
+        </p>
         <p className="hero__subtitle">{siteConfig.tagline}</p>
         <div className={styles.buttons}></div>
       </div>
@@ -1,7 +0,0 @@
----
-title: Markdown page example
----
-
-# Markdown page example
-
-You don't need React to write simple standalone pages.
@@ -10,7 +10,11 @@ export default function PlaygroundPage() {
         maxWidth: 2000,
       }}
     >
-      <SquigglePlayground initialSquiggleString="normal(0,1)" height={700} />
+      <SquigglePlayground
+        initialSquiggleString="normal(0,1)"
+        height={700}
+        showTypes={true}
+      />
     </div>
   </Layout>
  );
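As a usage note, the `SquigglePlayground` props touched in the hunk above (`initialSquiggleString`, `height`, `showTypes`) are the same ones you would pass when embedding the component in your own React app. The sketch below assumes the component is exported from `@quri/squiggle-components`; the import path and export name are assumptions, while the props mirror the diff.

```tsx
// Sketch: embedding the playground in an arbitrary React app. The import path
// and export name are assumptions; the props mirror the playground page above.
import React from "react";
import { SquigglePlayground } from "@quri/squiggle-components";

export function EmbeddedPlayground() {
  return (
    <div style={{ maxWidth: 2000 }}>
      <SquigglePlayground
        initialSquiggleString="normal(0, 1)"
        height={700}
        showTypes={true}
      />
    </div>
  );
}
```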