Merge branch 'develop' into component-stats
commit 19ebc322ad
@@ -1,9 +1,9 @@
[![npm version](https://badge.fury.io/js/@quri%2Fsquiggle-components.svg)](https://www.npmjs.com/package/@quri/squiggle-components)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/quantified-uncertainty/squiggle/blob/develop/LICENSE)

# Squiggle Components
# Squiggle components

This package contains all the components for squiggle. These can be used either as a library or hosted as a [storybook](https://storybook.js.org/).
This package contains the react components for squiggle. These can be used either as a library or hosted as a [storybook](https://storybook.js.org/).

# Usage in a `react` project

@@ -17,7 +17,10 @@ Add to `App.js`:

```jsx
import { SquiggleEditor } from "@quri/squiggle-components";
<SquiggleEditor initialSquiggleString="x = beta(3, 10); x + 20" />;
<SquiggleEditor
  initialSquiggleString="x = beta($alpha, 10); x + $shift"
  jsImports={{ alpha: 3, shift: 20 }}
/>;
```
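This diff also threads two new display flags, `showTypes` and `showControls`, through `SquiggleEditorProps` (see the component changes further down). A hedged usage sketch based only on those prop names as they appear in this diff (their defaults appear to be `false`):

```jsx
import { SquiggleEditor } from "@quri/squiggle-components";

// Sketch only: showTypes displays type headings for returned values,
// showControls exposes the log-X / exp-Y scale checkboxes added to DistributionChart.
<SquiggleEditor
  initialSquiggleString="x = beta(3, 10); x + 20"
  showTypes={true}
  showControls={true}
/>;
```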
# Build storybook for development

@@ -38,9 +41,3 @@ Run a development server
```sh
yarn start
```

And build artefacts for production,

```sh
yarn build # builds storybook app
```
|
@@ -1,15 +1,16 @@
{
  "name": "@quri/squiggle-components",
  "version": "0.2.15",
  "version": "0.2.20",
  "license": "MIT",
  "dependencies": {
    "@quri/squiggle-lang": "^0.2.7",
    "@quri/squiggle-lang": "^0.2.8",
    "@react-hook/size": "^2.1.2",
    "lodash": "^4.17.21",
    "react": "^18.1.0",
    "react-ace": "10.1.0",
    "react-ace": "^10.1.0",
    "react-dom": "^18.1.0",
    "react-vega": "^7.5.0",
    "react-use": "^17.3.2",
    "react-vega": "^7.5.1",
    "styled-components": "^5.3.5",
    "vega": "^5.22.1",
    "vega-embed": "^6.20.6",
@@ -26,34 +27,34 @@
    "@storybook/preset-create-react-app": "^4.1.0",
    "@storybook/react": "^6.4.22",
    "@testing-library/jest-dom": "^5.16.4",
    "@testing-library/react": "^13.1.1",
    "@testing-library/react": "^13.2.0",
    "@testing-library/user-event": "^14.1.1",
    "@types/jest": "^27.4.0",
    "@types/jest": "^27.5.0",
    "@types/lodash": "^4.14.182",
    "@types/node": "^17.0.29",
    "@types/node": "^17.0.31",
    "@types/react": "^18.0.3",
    "@types/react-dom": "^18.0.2",
    "@types/styled-components": "^5.1.24",
    "@types/webpack": "^5.28.0",
    "cross-env": "^7.0.3",
    "react-scripts": "5.0.1",
    "react-scripts": "^5.0.1",
    "style-loader": "^3.3.1",
    "ts-loader": "^9.2.9",
    "ts-loader": "^9.3.0",
    "tsconfig-paths-webpack-plugin": "^3.5.2",
    "typescript": "^4.6.3",
    "web-vitals": "^2.1.4",
    "webpack": "^5.72.0",
    "webpack-cli": "^4.9.2",
    "webpack-dev-server": "^4.8.1"
    "webpack-dev-server": "^4.9.0"
  },
  "scripts": {
    "start": "cross-env REACT_APP_FAST_REFRESH=false && start-storybook -p 6006 -s public",
    "build": "tsc -b && build-storybook -s public",
    "build:package": "tsc -b",
    "bundle": "webpack",
    "all": "yarn bundle && yarn build",
    "lint": "prettier --check .",
    "format": "prettier --write ."
    "format": "prettier --write .",
    "prepack": "yarn bundle && tsc -b"
  },
  "eslintConfig": {
    "extends": [
@@ -87,7 +88,6 @@
    "@types/react": "17.0.43"
  },
  "source": "./src/index.ts",
  "browser": "dist/bundle.js",
  "main": "dist/src/index.js",
  "types": "dist/src/index.d.ts"
  "main": "./dist/src/index.js",
  "types": "./dist/src/index.d.ts"
}

File diff suppressed because one or more lines are too long
@@ -1,56 +1,140 @@
|
|||
import * as React from "react";
|
||||
import _ from "lodash";
|
||||
import type { Spec } from "vega";
|
||||
import {
|
||||
Distribution,
|
||||
result,
|
||||
distributionError,
|
||||
distributionErrorToString,
|
||||
} from "@quri/squiggle-lang";
|
||||
import { createClassFromSpec } from "react-vega";
|
||||
import { Vega, VisualizationSpec } from "react-vega";
|
||||
import * as chartSpecification from "../vega-specs/spec-distributions.json";
|
||||
import { ErrorBox } from "./ErrorBox";
|
||||
import { useSize } from "react-use";
|
||||
import {
|
||||
linearXScale,
|
||||
logXScale,
|
||||
linearYScale,
|
||||
expYScale,
|
||||
} from "./DistributionVegaScales";
|
||||
import styled from "styled-components";
|
||||
|
||||
let SquiggleVegaChart = createClassFromSpec({
|
||||
spec: chartSpecification as Spec,
|
||||
});
|
||||
|
||||
type DistributionChartProps = {
|
||||
distribution: Distribution;
|
||||
width: number;
|
||||
width?: number;
|
||||
height: number;
|
||||
/** Whether to show a summary of means, stdev, percentiles etc */
|
||||
showSummary: boolean;
|
||||
/** Whether to show the user graph controls (scale etc) */
|
||||
showControls?: boolean;
|
||||
};
|
||||
|
||||
export const DistributionChart: React.FC<DistributionChartProps> = ({
|
||||
distribution,
|
||||
width,
|
||||
height,
|
||||
showSummary,
|
||||
width,
|
||||
showControls = false,
|
||||
}: DistributionChartProps) => {
|
||||
let [isLogX, setLogX] = React.useState(false);
|
||||
let [isExpY, setExpY] = React.useState(false);
|
||||
let shape = distribution.pointSet();
|
||||
if (shape.tag === "Ok") {
|
||||
let widthProp = width ? width - 20 : undefined;
|
||||
return (
|
||||
<>
|
||||
<SquiggleVegaChart
|
||||
data={{ con: shape.value.continuous, dis: shape.value.discrete }}
|
||||
width={widthProp}
|
||||
height={height}
|
||||
actions={false}
|
||||
/>
|
||||
{showSummary ? <SummaryTable distribution={distribution} /> : <></>}
|
||||
</>
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<ErrorBox heading="Distribution Error">
|
||||
{distributionErrorToString(shape.value)}
|
||||
</ErrorBox>
|
||||
);
|
||||
}
|
||||
const [sized, _] = useSize((size) => {
|
||||
if (shape.tag === "Ok") {
|
||||
let massBelow0 =
|
||||
shape.value.continuous.some((x) => x.x <= 0) ||
|
||||
shape.value.discrete.some((x) => x.x <= 0);
|
||||
let spec = buildVegaSpec(isLogX, isExpY);
|
||||
let widthProp = width ? width - 20 : size.width - 10;
|
||||
|
||||
// Check whether we should disable the checkbox
|
||||
var logCheckbox = (
|
||||
<CheckBox label="Log X scale" value={isLogX} onChange={setLogX} />
|
||||
);
|
||||
if (massBelow0) {
|
||||
logCheckbox = (
|
||||
<CheckBox
|
||||
label="Log X scale"
|
||||
value={isLogX}
|
||||
onChange={setLogX}
|
||||
disabled={true}
|
||||
tooltip={
|
||||
"Your distribution has mass lower than or equal to 0. Log only works on strictly positive values."
|
||||
}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
var result = (
|
||||
<div>
|
||||
<Vega
|
||||
spec={spec}
|
||||
data={{ con: shape.value.continuous, dis: shape.value.discrete }}
|
||||
width={widthProp}
|
||||
height={height}
|
||||
actions={false}
|
||||
/>
|
||||
{showSummary && <SummaryTable distribution={distribution} />}
|
||||
{showControls && (
|
||||
<div>
|
||||
{logCheckbox}
|
||||
<CheckBox label="Exp Y scale" value={isExpY} onChange={setExpY} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
} else {
|
||||
var result = (
|
||||
<ErrorBox heading="Distribution Error">
|
||||
{distributionErrorToString(shape.value)}
|
||||
</ErrorBox>
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
});
|
||||
return sized;
|
||||
};
|
||||
|
||||
function buildVegaSpec(isLogX: boolean, isExpY: boolean): VisualizationSpec {
|
||||
return {
|
||||
...chartSpecification,
|
||||
scales: [
|
||||
isLogX ? logXScale : linearXScale,
|
||||
isExpY ? expYScale : linearYScale,
|
||||
],
|
||||
} as VisualizationSpec;
|
||||
}
|
||||
|
||||
interface CheckBoxProps {
|
||||
label: string;
|
||||
onChange: (x: boolean) => void;
|
||||
value: boolean;
|
||||
disabled?: boolean;
|
||||
tooltip?: string;
|
||||
}
|
||||
|
||||
const Label = styled.label<{ disabled: boolean }>`
|
||||
${(props) => props.disabled && "color: #999;"}
|
||||
`;
|
||||
|
||||
export const CheckBox = ({
|
||||
label,
|
||||
onChange,
|
||||
value,
|
||||
disabled = false,
|
||||
tooltip,
|
||||
}: CheckBoxProps) => {
|
||||
return (
|
||||
<span title={tooltip}>
|
||||
<input
|
||||
type="checkbox"
|
||||
value={value + ""}
|
||||
onChange={() => onChange(!value)}
|
||||
disabled={disabled}
|
||||
/>
|
||||
<Label disabled={disabled}>{label}</Label>
|
||||
</span>
|
||||
);
|
||||
};
|
||||
|
||||
type SummaryTableProps = {
|
||||
|
|
80 packages/components/src/components/DistributionVegaScales.ts Normal file
|
@@ -0,0 +1,80 @@
|
|||
import type { LogScale, LinearScale, PowScale } from "vega";
|
||||
export let linearXScale: LinearScale = {
|
||||
name: "xscale",
|
||||
type: "linear",
|
||||
range: "width",
|
||||
zero: false,
|
||||
nice: false,
|
||||
domain: {
|
||||
fields: [
|
||||
{
|
||||
data: "con",
|
||||
field: "x",
|
||||
},
|
||||
{
|
||||
data: "dis",
|
||||
field: "x",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
export let linearYScale: LinearScale = {
|
||||
name: "yscale",
|
||||
type: "linear",
|
||||
range: "height",
|
||||
zero: true,
|
||||
domain: {
|
||||
fields: [
|
||||
{
|
||||
data: "con",
|
||||
field: "y",
|
||||
},
|
||||
{
|
||||
data: "dis",
|
||||
field: "y",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export let logXScale: LogScale = {
|
||||
name: "xscale",
|
||||
type: "log",
|
||||
range: "width",
|
||||
zero: false,
|
||||
base: 10,
|
||||
nice: false,
|
||||
domain: {
|
||||
fields: [
|
||||
{
|
||||
data: "con",
|
||||
field: "x",
|
||||
},
|
||||
{
|
||||
data: "dis",
|
||||
field: "x",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export let expYScale: PowScale = {
|
||||
name: "yscale",
|
||||
type: "pow",
|
||||
exponent: 0.1,
|
||||
range: "height",
|
||||
zero: true,
|
||||
nice: false,
|
||||
domain: {
|
||||
fields: [
|
||||
{
|
||||
data: "con",
|
||||
field: "y",
|
||||
},
|
||||
{
|
||||
data: "dis",
|
||||
field: "y",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
|
@@ -33,18 +33,29 @@ const variableBox = {
|
|||
`,
|
||||
};
|
||||
|
||||
export const VariableBox: React.FC<{
|
||||
interface VariableBoxProps {
|
||||
heading: string;
|
||||
children: React.ReactNode;
|
||||
}> = ({ heading = "Error", children }) => {
|
||||
return (
|
||||
<variableBox.Component>
|
||||
<variableBox.Heading>
|
||||
<h3>{heading}</h3>
|
||||
</variableBox.Heading>
|
||||
<variableBox.Body>{children}</variableBox.Body>
|
||||
</variableBox.Component>
|
||||
);
|
||||
showTypes?: boolean;
|
||||
}
|
||||
|
||||
export const VariableBox: React.FC<VariableBoxProps> = ({
|
||||
heading = "Error",
|
||||
children,
|
||||
showTypes = false,
|
||||
}: VariableBoxProps) => {
|
||||
if (showTypes) {
|
||||
return (
|
||||
<variableBox.Component>
|
||||
<variableBox.Heading>
|
||||
<h3>{heading}</h3>
|
||||
</variableBox.Heading>
|
||||
<variableBox.Body>{children}</variableBox.Body>
|
||||
</variableBox.Component>
|
||||
);
|
||||
} else {
|
||||
return <div>{children}</div>;
|
||||
}
|
||||
};
|
||||
|
||||
let RecordKeyHeader = styled.h3``;
|
||||
|
@@ -52,10 +63,14 @@ let RecordKeyHeader = styled.h3``;
|
|||
export interface SquiggleItemProps {
|
||||
/** The input string for squiggle */
|
||||
expression: squiggleExpression;
|
||||
width: number;
|
||||
width?: number;
|
||||
height: number;
|
||||
/** Whether to show a summary of statistics for distributions */
|
||||
showSummary: boolean;
|
||||
/** Whether to show type information */
|
||||
showTypes?: boolean;
|
||||
/** Whether to show users graph controls (scale etc) */
|
||||
showControls?: boolean;
|
||||
}
|
||||
|
||||
const SquiggleItem: React.FC<SquiggleItemProps> = ({
|
||||
|
@@ -63,19 +78,24 @@ const SquiggleItem: React.FC<SquiggleItemProps> = ({
|
|||
width,
|
||||
height,
|
||||
showSummary,
|
||||
showTypes = false,
|
||||
showControls = false,
|
||||
}: SquiggleItemProps) => {
|
||||
switch (expression.tag) {
|
||||
case "number":
|
||||
return (
|
||||
<VariableBox heading="Number">
|
||||
<VariableBox heading="Number" showTypes={showTypes}>
|
||||
<NumberShower precision={3} number={expression.value} />
|
||||
</VariableBox>
|
||||
);
|
||||
case "distribution": {
|
||||
let distType = expression.value.type();
|
||||
return (
|
||||
<VariableBox heading={`Distribution (${distType})`}>
|
||||
{distType === "Symbolic" ? (
|
||||
<VariableBox
|
||||
heading={`Distribution (${distType})`}
|
||||
showTypes={showTypes}
|
||||
>
|
||||
{distType === "Symbolic" && showTypes ? (
|
||||
<>
|
||||
<div>{expression.value.toString()}</div>
|
||||
</>
|
||||
|
@@ -87,32 +107,46 @@ const SquiggleItem: React.FC<SquiggleItemProps> = ({
|
|||
height={height}
|
||||
width={width}
|
||||
showSummary={showSummary}
|
||||
showControls={showControls}
|
||||
/>
|
||||
</VariableBox>
|
||||
);
|
||||
}
|
||||
case "string":
|
||||
return (
|
||||
<VariableBox heading="String">{`"${expression.value}"`}</VariableBox>
|
||||
<VariableBox
|
||||
heading="String"
|
||||
showTypes={showTypes}
|
||||
>{`"${expression.value}"`}</VariableBox>
|
||||
);
|
||||
case "boolean":
|
||||
return (
|
||||
<VariableBox heading="Boolean">
|
||||
<VariableBox heading="Boolean" showTypes={showTypes}>
|
||||
{expression.value.toString()}
|
||||
</VariableBox>
|
||||
);
|
||||
case "symbol":
|
||||
return <VariableBox heading="Symbol">{expression.value}</VariableBox>;
|
||||
return (
|
||||
<VariableBox heading="Symbol" showTypes={showTypes}>
|
||||
{expression.value}
|
||||
</VariableBox>
|
||||
);
|
||||
case "call":
|
||||
return <VariableBox heading="Call">{expression.value}</VariableBox>;
|
||||
return (
|
||||
<VariableBox heading="Call" showTypes={showTypes}>
|
||||
{expression.value}
|
||||
</VariableBox>
|
||||
);
|
||||
case "array":
|
||||
return (
|
||||
<VariableBox heading="Array">
|
||||
<VariableBox heading="Array" showTypes={showTypes}>
|
||||
{expression.value.map((r) => (
|
||||
<SquiggleItem
|
||||
expression={r}
|
||||
width={width - 20}
|
||||
width={width !== undefined ? width - 20 : width}
|
||||
height={50}
|
||||
showTypes={showTypes}
|
||||
showControls={showControls}
|
||||
showSummary={showSummary}
|
||||
/>
|
||||
))}
|
||||
|
@@ -120,20 +154,34 @@ const SquiggleItem: React.FC<SquiggleItemProps> = ({
|
|||
);
|
||||
case "record":
|
||||
return (
|
||||
<VariableBox heading="Record">
|
||||
<VariableBox heading="Record" showTypes={showTypes}>
|
||||
{Object.entries(expression.value).map(([key, r]) => (
|
||||
<>
|
||||
<RecordKeyHeader>{key}</RecordKeyHeader>
|
||||
<SquiggleItem
|
||||
expression={r}
|
||||
width={width - 20}
|
||||
width={width !== undefined ? width - 20 : width}
|
||||
height={50}
|
||||
showTypes={showTypes}
|
||||
showSummary={showSummary}
|
||||
showControls={showControls}
|
||||
/>
|
||||
</>
|
||||
))}
|
||||
</VariableBox>
|
||||
);
|
||||
case "arraystring":
|
||||
return (
|
||||
<VariableBox heading="Array String" showTypes={showTypes}>
|
||||
{expression.value.map((r) => `"${r}"`)}
|
||||
</VariableBox>
|
||||
);
|
||||
case "lambda":
|
||||
return (
|
||||
<ErrorBox heading="No Viewer">
|
||||
There is no viewer currently available for function types.
|
||||
</ErrorBox>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
|
@@ -152,8 +200,6 @@ export interface SquiggleChartProps {
|
|||
diagramStop?: number;
|
||||
/** If the result is a function, how many points along the function it samples */
|
||||
diagramCount?: number;
|
||||
/** variables declared before this expression */
|
||||
environment?: unknown;
|
||||
/** When the environment changes */
|
||||
onChange?(expr: squiggleExpression): void;
|
||||
/** CSS width of the element */
|
||||
|
@@ -165,6 +211,10 @@ export interface SquiggleChartProps {
|
|||
jsImports?: jsImports;
|
||||
/** Whether to show a summary of the distribution */
|
||||
showSummary?: boolean;
|
||||
/** Whether to show type information about returns, default false */
|
||||
showTypes?: boolean;
|
||||
/** Whether to show graph controls (scale etc)*/
|
||||
showControls?: boolean;
|
||||
}
|
||||
|
||||
const ChartWrapper = styled.div`
|
||||
|
@@ -181,8 +231,10 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = ({
|
|||
height = 60,
|
||||
bindings = defaultBindings,
|
||||
jsImports = defaultImports,
|
||||
width = NaN,
|
||||
showSummary = false,
|
||||
width,
|
||||
showTypes = false,
|
||||
showControls = false,
|
||||
}: SquiggleChartProps) => {
|
||||
let samplingInputs: samplingParams = {
|
||||
sampleCount: sampleCount,
|
||||
|
@@ -204,6 +256,8 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = ({
|
|||
width={width}
|
||||
height={height}
|
||||
showSummary={showSummary}
|
||||
showTypes={showTypes}
|
||||
showControls={showControls}
|
||||
/>
|
||||
);
|
||||
} else {
|
||||
|
|
|
@@ -32,16 +32,18 @@ export interface SquiggleEditorProps {
|
|||
diagramStop?: number;
|
||||
/** If the result is a function, how many points along the function it samples */
|
||||
diagramCount?: number;
|
||||
/** The environment, other variables that were already declared */
|
||||
environment?: unknown;
|
||||
/** when the environment changes. Used again for notebook magic*/
|
||||
onChange?(expr: squiggleExpression): void;
|
||||
/** The width of the element */
|
||||
width: number;
|
||||
width?: number;
|
||||
/** Previous variable declarations */
|
||||
bindings: bindings;
|
||||
bindings?: bindings;
|
||||
/** JS Imports */
|
||||
jsImports: jsImports;
|
||||
jsImports?: jsImports;
|
||||
/** Whether to show detail about types of the returns, default false */
|
||||
showTypes?: boolean;
|
||||
/** Whether to give users access to graph controls */
|
||||
showControls: boolean;
|
||||
}
|
||||
|
||||
const Input = styled.div`
|
||||
|
@@ -52,7 +54,7 @@ const Input = styled.div`
|
|||
|
||||
export let SquiggleEditor: React.FC<SquiggleEditorProps> = ({
|
||||
initialSquiggleString = "",
|
||||
width = 500,
|
||||
width,
|
||||
sampleCount,
|
||||
outputXYPoints,
|
||||
kernelWidth,
|
||||
|
@@ -61,9 +63,10 @@ export let SquiggleEditor: React.FC<SquiggleEditorProps> = ({
|
|||
diagramStop,
|
||||
diagramCount,
|
||||
onChange,
|
||||
environment,
|
||||
bindings = defaultBindings,
|
||||
jsImports = defaultImports,
|
||||
showTypes = false,
|
||||
showControls = false,
|
||||
}: SquiggleEditorProps) => {
|
||||
let [expression, setExpression] = React.useState(initialSquiggleString);
|
||||
return (
|
||||
|
@@ -87,10 +90,11 @@ export let SquiggleEditor: React.FC<SquiggleEditorProps> = ({
|
|||
diagramStart={diagramStart}
|
||||
diagramStop={diagramStop}
|
||||
diagramCount={diagramCount}
|
||||
environment={environment}
|
||||
onChange={onChange}
|
||||
bindings={bindings}
|
||||
jsImports={jsImports}
|
||||
showTypes={showTypes}
|
||||
showControls={showControls}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
@@ -145,12 +149,12 @@ export interface SquigglePartialProps {
|
|||
diagramCount?: number;
|
||||
/** when the environment changes. Used again for notebook magic*/
|
||||
onChange?(expr: bindings): void;
|
||||
/** The width of the element */
|
||||
width: number;
|
||||
/** Previously declared variables */
|
||||
bindings?: bindings;
|
||||
/** Variables imported from js */
|
||||
jsImports?: jsImports;
|
||||
/** Whether to give users access to graph controls */
|
||||
showControls?: boolean;
|
||||
}
|
||||
|
||||
export let SquigglePartial: React.FC<SquigglePartialProps> = ({
|
||||
|
@@ -166,15 +170,25 @@ export let SquigglePartial: React.FC<SquigglePartialProps> = ({
|
|||
xyPointLength: outputXYPoints,
|
||||
};
|
||||
let [expression, setExpression] = React.useState(initialSquiggleString);
|
||||
let squiggleResult = runPartial(
|
||||
expression,
|
||||
bindings,
|
||||
samplingInputs,
|
||||
jsImports
|
||||
);
|
||||
if (squiggleResult.tag == "Ok") {
|
||||
if (onChange) onChange(squiggleResult.value);
|
||||
}
|
||||
let [error, setError] = React.useState<string | null>(null);
|
||||
|
||||
let runSquiggleAndUpdateBindings = () => {
|
||||
let squiggleResult = runPartial(
|
||||
expression,
|
||||
bindings,
|
||||
samplingInputs,
|
||||
jsImports
|
||||
);
|
||||
if (squiggleResult.tag == "Ok") {
|
||||
if (onChange) onChange(squiggleResult.value);
|
||||
setError(null);
|
||||
} else {
|
||||
setError(errorValueToString(squiggleResult.value));
|
||||
}
|
||||
};
|
||||
|
||||
React.useEffect(runSquiggleAndUpdateBindings, [expression]);
|
||||
|
||||
return (
|
||||
<div>
|
||||
<Input>
|
||||
|
@@ -186,13 +200,7 @@ export let SquigglePartial: React.FC<SquigglePartialProps> = ({
|
|||
height={20}
|
||||
/>
|
||||
</Input>
|
||||
{squiggleResult.tag == "Error" ? (
|
||||
<ErrorBox heading="Error">
|
||||
{errorValueToString(squiggleResult.value)}
|
||||
</ErrorBox>
|
||||
) : (
|
||||
<></>
|
||||
)}
|
||||
{error !== null ? <ErrorBox heading="Error">{error}</ErrorBox> : <></>}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
|
|
@@ -43,6 +43,8 @@ function FieldFloat(Props: FieldFloatProps) {
|
|||
interface Props {
|
||||
initialSquiggleString?: string;
|
||||
height?: number;
|
||||
showTypes?: boolean;
|
||||
showControls?: boolean;
|
||||
}
|
||||
|
||||
interface Props2 {
|
||||
|
@@ -55,10 +57,6 @@ const ShowBox = styled.div<Props2>`
|
|||
height: ${(props) => props.height};
|
||||
`;
|
||||
|
||||
const MyComponent = styled.div`
|
||||
color: ${(props) => props.theme.colors.main};
|
||||
`;
|
||||
|
||||
interface TitleProps {
|
||||
readonly maxHeight: number;
|
||||
}
|
||||
|
@@ -74,13 +72,15 @@ const Display = styled.div<TitleProps>`
|
|||
|
||||
const Row = styled.div`
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
grid-template-columns: 50% 50%;
|
||||
`;
|
||||
const Col = styled.div``;
|
||||
|
||||
let SquigglePlayground: FC<Props> = ({
|
||||
initialSquiggleString = "",
|
||||
height = 300,
|
||||
showTypes = false,
|
||||
showControls = false,
|
||||
}: Props) => {
|
||||
let [squiggleString, setSquiggleString] = useState(initialSquiggleString);
|
||||
let [sampleCount, setSampleCount] = useState(1000);
|
||||
|
@@ -112,6 +112,8 @@ let SquigglePlayground: FC<Props> = ({
|
|||
diagramCount={diagramCount}
|
||||
pointDistLength={pointDistLength}
|
||||
height={150}
|
||||
showTypes={showTypes}
|
||||
showControls={showControls}
|
||||
/>
|
||||
</Display>
|
||||
</Col>
|
||||
|
|
|
@@ -9,3 +9,5 @@ import SquigglePlayground, {
  renderSquigglePlaygroundToDom,
} from "./components/SquigglePlayground";
export { SquigglePlayground, renderSquigglePlaygroundToDom };

export { mergeBindings } from "@quri/squiggle-lang";
51 packages/components/src/stories/SquigglePartial.stories.mdx Normal file
@@ -0,0 +1,51 @@
import { SquigglePartial, SquiggleEditor } from "../components/SquiggleEditor";
import { useState } from "react";
import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";

<Meta title="Squiggle/SquigglePartial" component={SquigglePartial} />

export const Template = (props) => <SquigglePartial {...props} />;

# Squiggle Partial

A Squiggle Partial is an editor that does not return a graph to the user, but
instead returns bindings that can be used by further Squiggle Editors.

<Canvas>
  <Story
    name="Standalone"
    args={{
      initialSquiggleString: "x = normal(5,2)",
    }}
  >
    {Template.bind({})}
  </Story>
</Canvas>

<Canvas>
  <Story
    name="With Editor"
    args={{
      initialPartialString: "x = normal(5,2)",
      initialEditorString: "x",
    }}
  >
    {(props) => {
      let [bindings, setBindings] = useState({});
      return (
        <>
          <SquigglePartial
            {...props}
            initialSquiggleString={props.initialPartialString}
            onChange={setBindings}
          />
          <SquiggleEditor
            {...props}
            initialSquiggleString={props.initialEditorString}
            bindings={bindings}
          />
        </>
      );
    }}
  </Story>
</Canvas>
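Outside Storybook, the same chaining shown in the "With Editor" story would look roughly like the following plain React sketch. Only props that appear above (`initialSquiggleString`, `onChange`, `bindings`) are assumed; the component name `ChainedEditors` is illustrative:

```jsx
import { useState } from "react";
import { SquigglePartial, SquiggleEditor } from "@quri/squiggle-components";

// Sketch: the partial evaluates "x = normal(5,2)" and reports its bindings via
// onChange; the editor then evaluates "x" against those bindings.
export function ChainedEditors() {
  const [bindings, setBindings] = useState({});
  return (
    <>
      <SquigglePartial
        initialSquiggleString="x = normal(5,2)"
        onChange={setBindings}
      />
      <SquiggleEditor initialSquiggleString="x" bindings={bindings} />
    </>
  );
}
```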
@@ -3,7 +3,6 @@
|
|||
"description": "A basic area chart example",
|
||||
"width": 500,
|
||||
"height": 100,
|
||||
"autosize": "fit",
|
||||
"padding": 5,
|
||||
"data": [
|
||||
{
|
||||
|
@@ -13,72 +12,8 @@
|
|||
"name": "dis"
|
||||
}
|
||||
],
|
||||
"signals": [
|
||||
{
|
||||
"name": "xscale",
|
||||
"description": "The transform of the x scale",
|
||||
"value": false,
|
||||
"bind": {
|
||||
"input": "checkbox",
|
||||
"name": "log x scale"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "yscale",
|
||||
"description": "The transform of the y scale",
|
||||
"value": false,
|
||||
"bind": {
|
||||
"input": "checkbox",
|
||||
"name": "log y scale"
|
||||
}
|
||||
}
|
||||
],
|
||||
"scales": [
|
||||
{
|
||||
"name": "xscale",
|
||||
"type": "pow",
|
||||
"exponent": {
|
||||
"signal": "xscale ? 0.1 : 1"
|
||||
},
|
||||
"range": "width",
|
||||
"zero": false,
|
||||
"nice": false,
|
||||
"domain": {
|
||||
"fields": [
|
||||
{
|
||||
"data": "con",
|
||||
"field": "x"
|
||||
},
|
||||
{
|
||||
"data": "dis",
|
||||
"field": "x"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "yscale",
|
||||
"type": "pow",
|
||||
"exponent": {
|
||||
"signal": "yscale ? 0.1 : 1"
|
||||
},
|
||||
"range": "height",
|
||||
"nice": true,
|
||||
"zero": true,
|
||||
"domain": {
|
||||
"fields": [
|
||||
{
|
||||
"data": "con",
|
||||
"field": "y"
|
||||
},
|
||||
{
|
||||
"data": "dis",
|
||||
"field": "y"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"signals": [],
|
||||
"scales": [],
|
||||
"axes": [
|
||||
{
|
||||
"orient": "bottom",
|
||||
|
@@ -113,7 +48,7 @@
|
|||
"value": 0
|
||||
},
|
||||
"fill": {
|
||||
"signal": "{gradient: 'linear', x1: 1, y1: 1, x2: 0, y2: 1, stops: [ {offset: 0.0, color: '#4C78A8'}] }"
|
||||
"value": "#4C78A8"
|
||||
},
|
||||
"interpolate": {
|
||||
"value": "monotone"
|
||||
|
|
|
@@ -3,7 +3,26 @@

# Squiggle language

## Build for development
_An estimation language_

# Use the `npm` package

For instance, in a javascript project, you can

```sh
yarn add @quri/squiggle-lang
```

```js
import { run } from "@quri/squiggle-lang";
run(
  "normal(0, 1) * fromSamples([-3,-2,-1,1,2,3,3,3,4,9])"
).value.value.toSparkline().value;
```
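The `.value.value` chain above assumes evaluation succeeded. Elsewhere in this same diff the React components check the result tag before unwrapping; a hedged sketch of that defensive pattern applied to `run` (the `tag`/`value` shape and `errorValueToString` are taken from the component code in this diff, not from separate API documentation):

```js
import { run, errorValueToString } from "@quri/squiggle-lang";

// Sketch: run(...) returns a tagged result; only unwrap value when the tag is "Ok".
const result = run("normal(0, 1) + 2");
if (result.tag === "Ok") {
  console.log(result.value);
} else {
  console.log(errorValueToString(result.value));
}
```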
**However, for most use cases you'll prefer to use our [library of react components](https://www.npmjs.com/package/@quri/squiggle-components)**, and let your app transitively depend on `@quri/squiggle-lang`.

# Build for development

We assume that you ran `yarn` at the monorepo level.

@@ -23,7 +42,7 @@ yarn test
yarn coverage:rescript; o _coverage/index.html # produces coverage report and opens it in browser
```

## Distributing this package or using this package from other monorepo packages
# Distributing this package or using this package from other monorepo packages

As it says in the other `packages/*/README.md`s, building this package is an essential step of building other packages.
|
|
@@ -0,0 +1,57 @@
|
|||
open Jest
|
||||
open Expect
|
||||
open TestHelpers
|
||||
open FastCheck
|
||||
open Arbitrary
|
||||
open Property.Sync
|
||||
|
||||
describe("dotSubtract", () => {
|
||||
test("mean of normal minus exponential (unit)", () => {
|
||||
let mean = 0.0
|
||||
let rate = 10.0
|
||||
exception MeanFailed
|
||||
let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
|
||||
~env,
|
||||
mkNormal(mean, 1.0),
|
||||
mkExponential(rate),
|
||||
)
|
||||
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
|
||||
let meanAnalytical =
|
||||
mean -.
|
||||
SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
|
||||
"On trusted input this should never happen",
|
||||
)
|
||||
switch meanResult {
|
||||
| Ok(meanValue) => meanValue->expect->toBeCloseTo(meanAnalytical)
|
||||
| Error(_) => raise(MeanFailed)
|
||||
}
|
||||
})
|
||||
/*
|
||||
It seems like this test should work, and it's plausible that
|
||||
there's some bug in `pointwiseSubtract`
|
||||
*/
|
||||
Skip.test("mean of normal minus exponential (property)", () => {
|
||||
assert_(
|
||||
property2(float_(), floatRange(1e-5, 1e5), (mean, rate) => {
|
||||
// We limit ourselves to stdev=1 so that the integral is trivial
|
||||
let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
|
||||
~env,
|
||||
mkNormal(mean, 1.0),
|
||||
mkExponential(rate),
|
||||
)
|
||||
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
|
||||
// according to algebra or random variables,
|
||||
let meanAnalytical =
|
||||
mean -.
|
||||
SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
|
||||
"On trusted input this should never happen",
|
||||
)
|
||||
switch meanResult {
|
||||
| Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
|
||||
| Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
|
||||
}
|
||||
}),
|
||||
)
|
||||
pass
|
||||
})
|
||||
})
|
|
@@ -0,0 +1,112 @@
|
|||
open Jest
|
||||
open Expect
|
||||
open TestHelpers
|
||||
|
||||
describe("kl divergence", () => {
|
||||
let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
|
||||
exception KlFailed
|
||||
|
||||
let testUniform = (lowAnswer, highAnswer, lowPrediction, highPrediction) => {
|
||||
test("of two uniforms is equal to the analytic expression", () => {
|
||||
let answer =
|
||||
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||
let prediction =
|
||||
uniformMakeR(
|
||||
lowPrediction,
|
||||
highPrediction,
|
||||
)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||
// integral along the support of the answer of answer.pdf(x) times log of prediction.pdf(x) divided by answer.pdf(x) dx
|
||||
let analyticalKl = Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))
|
||||
let kl = E.R.liftJoin2(klDivergence, prediction, answer)
|
||||
switch kl {
|
||||
| Ok(kl') => kl'->expect->toBeCloseTo(analyticalKl)
|
||||
| Error(err) => {
|
||||
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||
raise(KlFailed)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
// The pair on the right (the prediction) can be wider than the pair on the left (the answer), but not the other way around.
|
||||
testUniform(0.0, 1.0, -1.0, 2.0)
|
||||
testUniform(0.0, 1.0, 0.0, 2.0) // equal left endpoints
|
||||
testUniform(0.0, 1.0, -1.0, 1.0) // equal right endpoints
|
||||
testUniform(0.0, 1e1, 0.0, 1e1) // equal (klDivergence = 0)
|
||||
// testUniform(-1.0, 1.0, 0.0, 2.0)
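The analytic value that `testUniform` compares against is the standard closed form for the KL divergence between two uniforms when the prediction's support contains the answer's support:

$$
D_{\mathrm{KL}}\!\left(U[a,b] \,\|\, U[c,d]\right)
= \int_a^b \frac{1}{b-a}\,\log\frac{1/(b-a)}{1/(d-c)}\,dx
= \log\frac{d-c}{b-a},
\qquad [a,b] \subseteq [c,d],
$$

which is exactly `Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))` above. If the answer's support is not contained in the prediction's, the divergence is infinite, which is presumably why the wider-answer case stays commented out.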
|
||||
|
||||
test("of two normals is equal to the formula", () => {
|
||||
// This test case comes via Nuño https://github.com/quantified-uncertainty/squiggle/issues/433
|
||||
let mean1 = 4.0
|
||||
let mean2 = 1.0
|
||||
let stdev1 = 4.0
|
||||
let stdev2 = 1.0
|
||||
|
||||
let prediction =
|
||||
normalMakeR(mean1, stdev1)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||
let answer = normalMakeR(mean2, stdev2)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||
// https://stats.stackexchange.com/questions/7440/kl-divergence-between-two-univariate-gaussians
|
||||
let analyticalKl =
|
||||
Js.Math.log(stdev1 /. stdev2) +.
|
||||
(stdev2 ** 2.0 +. (mean2 -. mean1) ** 2.0) /. (2.0 *. stdev1 ** 2.0) -. 0.5
|
||||
let kl = E.R.liftJoin2(klDivergence, prediction, answer)
|
||||
|
||||
switch kl {
|
||||
| Ok(kl') => kl'->expect->toBeCloseTo(analyticalKl)
|
||||
| Error(err) => {
|
||||
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||
raise(KlFailed)
|
||||
}
|
||||
}
|
||||
})
|
||||
})
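The "two normals" case compares against the usual closed form for the KL divergence between univariate Gaussians (the Stack Exchange reference in the comment), here with prediction N(μ₁, σ₁²) and answer N(μ₂, σ₂²):

$$
D_{\mathrm{KL}}\!\left(\mathcal{N}(\mu_2,\sigma_2^2)\,\|\,\mathcal{N}(\mu_1,\sigma_1^2)\right)
= \log\frac{\sigma_1}{\sigma_2}
+ \frac{\sigma_2^2 + (\mu_2-\mu_1)^2}{2\sigma_1^2}
- \frac{1}{2},
$$

which matches `analyticalKl` in the test term for term.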
|
||||
|
||||
describe("combine along support test", () => {
|
||||
// This tests the version of the function that we're NOT using. Haven't deleted the test in case we use the code later.
|
||||
test("combine along support test", _ => {
|
||||
let combineAlongSupportOfSecondArgument = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument0
|
||||
let lowAnswer = 0.0
|
||||
let highAnswer = 1.0
|
||||
let lowPrediction = 0.0
|
||||
let highPrediction = 2.0
|
||||
|
||||
let answer =
|
||||
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
|
||||
let prediction =
|
||||
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
|
||||
s,
|
||||
))
|
||||
let answerWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), answer)
|
||||
let predictionWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), prediction)
|
||||
|
||||
let interpolator = XYShape.XtoY.continuousInterpolator(#Stepwise, #UseZero)
|
||||
let integrand = PointSetDist_Scoring.KLDivergence.integrand
|
||||
|
||||
let result = switch (answerWrapped, predictionWrapped) {
|
||||
| (Ok(Dist(PointSet(Continuous(a)))), Ok(Dist(PointSet(Continuous(b))))) =>
|
||||
Some(combineAlongSupportOfSecondArgument(integrand, interpolator, a.xyShape, b.xyShape))
|
||||
| _ => None
|
||||
}
|
||||
result
|
||||
->expect
|
||||
->toEqual(
|
||||
Some(
|
||||
Ok({
|
||||
xs: [
|
||||
0.0,
|
||||
MagicNumbers.Epsilon.ten,
|
||||
2.0 *. MagicNumbers.Epsilon.ten,
|
||||
1.0 -. MagicNumbers.Epsilon.ten,
|
||||
1.0,
|
||||
],
|
||||
ys: [
|
||||
-0.34657359027997264,
|
||||
-0.34657359027997264,
|
||||
-0.34657359027997264,
|
||||
-0.34657359027997264,
|
||||
-0.34657359027997264,
|
||||
],
|
||||
}),
|
||||
),
|
||||
)
|
||||
})
|
||||
})
|
|
@@ -0,0 +1,38 @@
|
|||
open Jest
|
||||
open Expect
|
||||
open TestHelpers
|
||||
|
||||
describe("Scale logarithm", () => {
|
||||
/* These tests may not be important, because scalelog isn't normalized
|
||||
The first one may be failing for a number of reasons.
|
||||
*/
|
||||
Skip.test("mean of the base e scalar logarithm of an exponential(10)", () => {
|
||||
let rate = 10.0
|
||||
let scalelog = DistributionOperation.Constructors.scaleLogarithm(
|
||||
~env,
|
||||
mkExponential(rate),
|
||||
MagicNumbers.Math.e,
|
||||
)
|
||||
|
||||
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), scalelog)
|
||||
// expected value of log of exponential distribution.
|
||||
let meanAnalytical = Js.Math.log(rate) +. 1.0
|
||||
switch meanResult {
|
||||
| Ok(meanValue) => meanValue->expect->toBeCloseTo(meanAnalytical)
|
||||
| Error(err) => err->expect->toBe(DistributionTypes.OperationError(DivisionByZeroError))
|
||||
}
|
||||
})
|
||||
let low = 10.0
|
||||
let high = 100.0
|
||||
let scalelog = DistributionOperation.Constructors.scaleLogarithm(~env, mkUniform(low, high), 2.0)
|
||||
|
||||
test("mean of the base 2 scalar logarithm of a uniform(10, 100)", () => {
|
||||
//For uniform pdf `_ => 1 / (b - a)`, the expected value of log of uniform is `integral from a to b of x * log(1 / (b -a)) dx`
|
||||
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), scalelog)
|
||||
let meanAnalytical = -.Js.Math.log2(high -. low) /. 2.0 *. (high ** 2.0 -. low ** 2.0) // -. Js.Math.log2(high -. low)
|
||||
switch meanResult {
|
||||
| Ok(meanValue) => meanValue->expect->toBeCloseTo(meanAnalytical)
|
||||
| Error(err) => err->expect->toEqual(DistributionTypes.OperationError(NegativeInfinityError))
|
||||
}
|
||||
})
|
||||
})
|
|
@@ -0,0 +1,142 @@
|
|||
open Jest
|
||||
// open Expect
|
||||
|
||||
open Reducer_Expression_ExpressionBuilder
|
||||
open Reducer_TestMacroHelpers
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
|
||||
let exampleExpression = eNumber(1.)
|
||||
let exampleExpressionY = eSymbol("y")
|
||||
let exampleStatementY = eLetStatement("y", eNumber(1.))
|
||||
let exampleStatementX = eLetStatement("y", eSymbol("x"))
|
||||
let exampleStatementZ = eLetStatement("z", eSymbol("y"))
|
||||
|
||||
// If it is not a macro then it is not expanded
|
||||
testMacro([], exampleExpression, "Ok(1)")
|
||||
|
||||
describe("bindStatement", () => {
|
||||
// A statement is bound by the bindings created by the previous statement
|
||||
testMacro([], eBindStatement(eBindings([]), exampleStatementY), "Ok((:$setBindings {} :y 1))")
|
||||
// Then it answers the bindings for the next statement when reduced
|
||||
testMacroEval([], eBindStatement(eBindings([]), exampleStatementY), "Ok({y: 1})")
|
||||
// Now let's feed a binding to see what happens
|
||||
testMacro(
|
||||
[],
|
||||
eBindStatement(eBindings([("x", EvNumber(2.))]), exampleStatementX),
|
||||
"Ok((:$setBindings {x: 2} :y 2))",
|
||||
)
|
||||
// An expression does not return a binding, thus error
|
||||
testMacro([], eBindStatement(eBindings([]), exampleExpression), "Error(Assignment expected)")
|
||||
// When bindings from previous statement are missing the context is injected. This must be the first statement of a block
|
||||
testMacro(
|
||||
[("z", EvNumber(99.))],
|
||||
eBindStatementDefault(exampleStatementY),
|
||||
"Ok((:$setBindings {z: 99} :y 1))",
|
||||
)
|
||||
})
|
||||
|
||||
describe("bindExpression", () => {
|
||||
// x is simply bound in the expression
|
||||
testMacro([], eBindExpression(eBindings([("x", EvNumber(2.))]), eSymbol("x")), "Ok(2)")
|
||||
// When an let statement is the end expression then bindings are returned
|
||||
testMacro(
|
||||
[],
|
||||
eBindExpression(eBindings([("x", EvNumber(2.))]), exampleStatementY),
|
||||
"Ok((:$exportBindings (:$setBindings {x: 2} :y 1)))",
|
||||
)
|
||||
// Now let's reduce that expression
|
||||
testMacroEval(
|
||||
[],
|
||||
eBindExpression(eBindings([("x", EvNumber(2.))]), exampleStatementY),
|
||||
"Ok({x: 2,y: 1})",
|
||||
)
|
||||
// When bindings are missing the context is injected. This must be the first and last statement of a block
|
||||
testMacroEval(
|
||||
[("z", EvNumber(99.))],
|
||||
eBindExpressionDefault(exampleStatementY),
|
||||
"Ok({y: 1,z: 99})",
|
||||
)
|
||||
})
|
||||
|
||||
describe("block", () => {
|
||||
// Block with a single expression
|
||||
testMacro([], eBlock(list{exampleExpression}), "Ok((:$$bindExpression 1))")
|
||||
testMacroEval([], eBlock(list{exampleExpression}), "Ok(1)")
|
||||
// Block with a single statement
|
||||
testMacro([], eBlock(list{exampleStatementY}), "Ok((:$$bindExpression (:$let :y 1)))")
|
||||
testMacroEval([], eBlock(list{exampleStatementY}), "Ok({y: 1})")
|
||||
// Block with a statement and an expression
|
||||
testMacro(
|
||||
[],
|
||||
eBlock(list{exampleStatementY, exampleExpressionY}),
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$let :y 1)) :y))",
|
||||
)
|
||||
testMacroEval([], eBlock(list{exampleStatementY, exampleExpressionY}), "Ok(1)")
|
||||
// Block with a statement and another statement
|
||||
testMacro(
|
||||
[],
|
||||
eBlock(list{exampleStatementY, exampleStatementZ}),
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$let :y 1)) (:$let :z :y)))",
|
||||
)
|
||||
testMacroEval([], eBlock(list{exampleStatementY, exampleStatementZ}), "Ok({y: 1,z: 1})")
|
||||
// Block inside a block
|
||||
testMacro(
|
||||
[],
|
||||
eBlock(list{eBlock(list{exampleExpression})}),
|
||||
"Ok((:$$bindExpression (:$$block 1)))",
|
||||
)
|
||||
testMacroEval([], eBlock(list{eBlock(list{exampleExpression})}), "Ok(1)")
|
||||
// Block assigned to a variable
|
||||
testMacro(
|
||||
[],
|
||||
eBlock(list{eLetStatement("z", eBlock(list{eBlock(list{exampleExpressionY})}))}),
|
||||
"Ok((:$$bindExpression (:$let :z (:$$block (:$$block :y)))))",
|
||||
)
|
||||
testMacroEval(
|
||||
[],
|
||||
eBlock(list{eLetStatement("z", eBlock(list{eBlock(list{exampleExpressionY})}))}),
|
||||
"Ok({z: :y})",
|
||||
)
|
||||
// Empty block
|
||||
testMacro([], eBlock(list{}), "Ok(:undefined block)") //TODO: should be an error
|
||||
// :$$block (:$$block (:$let :y (:add :x 1)) :y)"
|
||||
testMacro(
|
||||
[],
|
||||
eBlock(list{
|
||||
eBlock(list{
|
||||
eLetStatement("y", eFunction("add", list{eSymbol("x"), eNumber(1.)})),
|
||||
eSymbol("y"),
|
||||
}),
|
||||
}),
|
||||
"Ok((:$$bindExpression (:$$block (:$let :y (:add :x 1)) :y)))",
|
||||
)
|
||||
MyOnly.testMacroEval(
|
||||
[("x", EvNumber(1.))],
|
||||
eBlock(list{
|
||||
eBlock(list{
|
||||
eLetStatement("y", eFunction("add", list{eSymbol("x"), eNumber(1.)})),
|
||||
eSymbol("y"),
|
||||
}),
|
||||
}),
|
||||
"Ok(2)",
|
||||
)
|
||||
})
|
||||
|
||||
describe("lambda", () => {
|
||||
// assign a lambda to a variable
|
||||
let lambdaExpression = eFunction("$$lambda", list{eArrayString(["y"]), exampleExpressionY})
|
||||
testMacro([], lambdaExpression, "Ok(lambda(y=>internal))")
|
||||
// call a lambda
|
||||
let callLambdaExpression = list{lambdaExpression, eNumber(1.)}->ExpressionT.EList
|
||||
testMacro([], callLambdaExpression, "Ok(((:$$lambda [y] :y) 1))")
|
||||
testMacroEval([], callLambdaExpression, "Ok(1)")
|
||||
// Parameters shadow the outer scope
|
||||
testMacroEval([("y", EvNumber(666.))], callLambdaExpression, "Ok(1)")
|
||||
// When not shadowed by the parameters, the outer scope variables are available
|
||||
let lambdaExpression = eFunction(
|
||||
"$$lambda",
|
||||
list{eArrayString(["z"]), eFunction("add", list{eSymbol("y"), eSymbol("z")})},
|
||||
)
|
||||
let callLambdaExpression = eList(list{lambdaExpression, eNumber(1.)})
|
||||
testMacroEval([("y", EvNumber(666.))], callLambdaExpression, "Ok(667)")
|
||||
})
|
|
@@ -0,0 +1,6 @@
open Jest
open Expect

test("dummy", () => {
  expect(true)->toBe(true)
})
@@ -1,5 +1,5 @@
open ReducerInterface.ExpressionValue
module MathJs = Reducer.MathJs
module MathJs = Reducer_MathJs
module ErrorValue = Reducer.ErrorValue

open Jest
|
@@ -1,4 +1,4 @@
|
|||
module Parse = Reducer.MathJs.Parse
|
||||
module Parse = Reducer_MathJs.Parse
|
||||
module Result = Belt.Result
|
||||
|
||||
open Jest
|
||||
|
@@ -18,8 +18,14 @@ module MySkip = {
|
|||
Skip.test(desc, () => expectParseToBe(expr, answer))
|
||||
}
|
||||
|
||||
module MyOnly = {
|
||||
let testParse = (expr, answer) => Only.test(expr, () => expectParseToBe(expr, answer))
|
||||
let testDescriptionParse = (desc, expr, answer) =>
|
||||
Only.test(desc, () => expectParseToBe(expr, answer))
|
||||
}
|
||||
|
||||
describe("MathJs parse", () => {
|
||||
describe("literals operators paranthesis", () => {
|
||||
describe("literals operators parenthesis", () => {
|
||||
testParse("1", "1")
|
||||
testParse("'hello'", "'hello'")
|
||||
testParse("true", "true")
|
||||
|
@@ -40,15 +46,15 @@ describe("MathJs parse", () => {
|
|||
})
|
||||
|
||||
describe("functions", () => {
|
||||
MySkip.testParse("identity(x) = x", "???")
|
||||
MySkip.testParse("identity(x)", "???")
|
||||
testParse("identity(x) = x", "identity = (x) => x")
|
||||
testParse("identity(x)", "identity(x)")
|
||||
})
|
||||
|
||||
describe("arrays", () => {
|
||||
testDescriptionParse("empty", "[]", "[]")
|
||||
testDescriptionParse("define", "[0, 1, 2]", "[0, 1, 2]")
|
||||
testDescriptionParse("define with strings", "['hello', 'world']", "['hello', 'world']")
|
||||
MySkip.testParse("range(0, 4)", "range(0, 4)")
|
||||
testParse("range(0, 4)", "range(0, 4)")
|
||||
testDescriptionParse("index", "([0,1,2])[1]", "([0, 1, 2])[1]")
|
||||
})
|
||||
|
||||
|
@@ -58,11 +64,11 @@ describe("MathJs parse", () => {
|
|||
})
|
||||
|
||||
describe("comments", () => {
|
||||
MySkip.testDescriptionParse("define", "# This is a comment", "???")
|
||||
testDescriptionParse("define", "1 # This is a comment", "1")
|
||||
})
|
||||
|
||||
describe("if statement", () => {
|
||||
// TODO Tertiary operator instead
|
||||
MySkip.testDescriptionParse("define", "if (true) { 1 } else { 0 }", "???")
|
||||
describe("ternary operator", () => {
|
||||
testParse("1 ? 2 : 3", "ternary(1, 2, 3)")
|
||||
testParse("1 ? 2 : 3 ? 4 : 5", "ternary(1, 2, ternary(3, 4, 5))")
|
||||
})
|
||||
})
|
||||
|
|
|
@@ -1,40 +1,31 @@
|
|||
module Expression = Reducer.Expression
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
module ExpressionValue = ReducerInterface.ExpressionValue
|
||||
module ErrorValue = Reducer_ErrorValue
|
||||
|
||||
open Jest
|
||||
open Expect
|
||||
|
||||
let unwrapRecord = rValue =>
|
||||
rValue->Belt.Result.flatMap(value =>
|
||||
switch value {
|
||||
| ExpressionValue.EvRecord(aRecord) => Ok(aRecord)
|
||||
| _ => ErrorValue.RETodo("TODO: External bindings must be returned")->Error
|
||||
}
|
||||
)
|
||||
|
||||
let expectParseToBe = (expr: string, answer: string) =>
|
||||
Reducer.parse(expr)->Expression.toStringResult->expect->toBe(answer)
|
||||
|
||||
let expectParseOuterToBe = (expr: string, answer: string) =>
|
||||
Reducer.parseOuter(expr)->Expression.toStringResult->expect->toBe(answer)
|
||||
|
||||
let expectParsePartialToBe = (expr: string, answer: string) =>
|
||||
Reducer.parsePartial(expr)->Expression.toStringResult->expect->toBe(answer)
|
||||
Reducer.parse(expr)->ExpressionT.toStringResult->expect->toBe(answer)
|
||||
|
||||
let expectEvalToBe = (expr: string, answer: string) =>
|
||||
Reducer.evaluate(expr)->ExpressionValue.toStringResult->expect->toBe(answer)
|
||||
|
||||
let expectEvalBindingsToBe = (expr: string, bindings: Reducer.externalBindings, answer: string) =>
|
||||
Reducer.evaluateUsingExternalBindings(expr, bindings)
|
||||
Reducer.evaluateUsingOptions(expr, ~externalBindings=Some(bindings), ~environment=None)
|
||||
->ExpressionValue.toStringResult
|
||||
->expect
|
||||
->toBe(answer)
|
||||
|
||||
let expectEvalPartialBindingsToBe = (
|
||||
expr: string,
|
||||
bindings: Reducer.externalBindings,
|
||||
answer: string,
|
||||
) =>
|
||||
Reducer.evaluatePartialUsingExternalBindings(expr, bindings)
|
||||
->ExpressionValue.toStringResultRecord
|
||||
->expect
|
||||
->toBe(answer)
|
||||
|
||||
let testParseToBe = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))
|
||||
let testParseOuterToBe = (expr, answer) => test(expr, () => expectParseOuterToBe(expr, answer))
|
||||
let testParsePartialToBe = (expr, answer) => test(expr, () => expectParsePartialToBe(expr, answer))
|
||||
let testDescriptionParseToBe = (desc, expr, answer) =>
|
||||
test(desc, () => expectParseToBe(expr, answer))
|
||||
|
||||
|
@@ -42,34 +33,16 @@ let testEvalToBe = (expr, answer) => test(expr, () => expectEvalToBe(expr, answe
|
|||
let testDescriptionEvalToBe = (desc, expr, answer) => test(desc, () => expectEvalToBe(expr, answer))
|
||||
let testEvalBindingsToBe = (expr, bindingsList, answer) =>
|
||||
test(expr, () => expectEvalBindingsToBe(expr, bindingsList->Js.Dict.fromList, answer))
|
||||
let testEvalPartialBindingsToBe = (expr, bindingsList, answer) =>
|
||||
test(expr, () => expectEvalPartialBindingsToBe(expr, bindingsList->Js.Dict.fromList, answer))
|
||||
|
||||
module MySkip = {
|
||||
let testParseToBe = (expr, answer) => Skip.test(expr, () => expectParseToBe(expr, answer))
|
||||
let testParseOuterToBe = (expr, answer) =>
|
||||
Skip.test(expr, () => expectParseOuterToBe(expr, answer))
|
||||
let testParsePartialToBe = (expr, answer) =>
|
||||
Skip.test(expr, () => expectParsePartialToBe(expr, answer))
|
||||
let testEvalToBe = (expr, answer) => Skip.test(expr, () => expectEvalToBe(expr, answer))
|
||||
let testEvalBindingsToBe = (expr, bindingsList, answer) =>
|
||||
Skip.test(expr, () => expectEvalBindingsToBe(expr, bindingsList->Js.Dict.fromList, answer))
|
||||
let testEvalPartialBindingsToBe = (expr, bindingsList, answer) =>
|
||||
Skip.test(expr, () =>
|
||||
expectEvalPartialBindingsToBe(expr, bindingsList->Js.Dict.fromList, answer)
|
||||
)
|
||||
}
|
||||
module MyOnly = {
|
||||
let testParseToBe = (expr, answer) => Only.test(expr, () => expectParseToBe(expr, answer))
|
||||
let testParseOuterToBe = (expr, answer) =>
|
||||
Only.test(expr, () => expectParseOuterToBe(expr, answer))
|
||||
let testParsePartialToBe = (expr, answer) =>
|
||||
Only.test(expr, () => expectParsePartialToBe(expr, answer))
|
||||
let testEvalToBe = (expr, answer) => Only.test(expr, () => expectEvalToBe(expr, answer))
|
||||
let testEvalBindingsToBe = (expr, bindingsList, answer) =>
|
||||
Only.test(expr, () => expectEvalBindingsToBe(expr, bindingsList->Js.Dict.fromList, answer))
|
||||
let testEvalPartialBindingsToBe = (expr, bindingsList, answer) =>
|
||||
Only.test(expr, () =>
|
||||
expectEvalPartialBindingsToBe(expr, bindingsList->Js.Dict.fromList, answer)
|
||||
)
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,82 @@
|
|||
open Jest
|
||||
open Expect
|
||||
|
||||
module Bindings = Reducer_Expression_Bindings
|
||||
module Expression = Reducer_Expression
|
||||
module ExpressionValue = ReducerInterface_ExpressionValue
|
||||
module ExpressionWithContext = Reducer_ExpressionWithContext
|
||||
module Macro = Reducer_Expression_Macro
|
||||
module T = Reducer_Expression_T
|
||||
|
||||
let testMacro_ = (
|
||||
tester,
|
||||
bindArray: array<(string, ExpressionValue.expressionValue)>,
|
||||
expr: T.expression,
|
||||
expectedCode: string,
|
||||
) => {
|
||||
let bindings = Belt.Map.String.fromArray(bindArray)
|
||||
tester(expr->T.toString, () =>
|
||||
expr
|
||||
->Macro.expandMacroCall(
|
||||
bindings,
|
||||
ExpressionValue.defaultEnvironment,
|
||||
Expression.reduceExpression,
|
||||
)
|
||||
->ExpressionWithContext.toStringResult
|
||||
->expect
|
||||
->toEqual(expectedCode)
|
||||
)
|
||||
}
|
||||
|
||||
let testMacroEval_ = (
|
||||
tester,
|
||||
bindArray: array<(string, ExpressionValue.expressionValue)>,
|
||||
expr: T.expression,
|
||||
expectedValue: string,
|
||||
) => {
|
||||
let bindings = Belt.Map.String.fromArray(bindArray)
|
||||
tester(expr->T.toString, () =>
|
||||
expr
|
||||
->Macro.doMacroCall(bindings, ExpressionValue.defaultEnvironment, Expression.reduceExpression)
|
||||
->ExpressionValue.toStringResult
|
||||
->expect
|
||||
->toEqual(expectedValue)
|
||||
)
|
||||
}
|
||||
|
||||
let testMacro = (
|
||||
bindArray: array<(string, ExpressionValue.expressionValue)>,
|
||||
expr: T.expression,
|
||||
expectedExpr: string,
|
||||
) => testMacro_(test, bindArray, expr, expectedExpr)
|
||||
let testMacroEval = (
|
||||
bindArray: array<(string, ExpressionValue.expressionValue)>,
|
||||
expr: T.expression,
|
||||
expectedValue: string,
|
||||
) => testMacroEval_(test, bindArray, expr, expectedValue)
|
||||
|
||||
module MySkip = {
|
||||
let testMacro = (
|
||||
bindArray: array<(string, ExpressionValue.expressionValue)>,
|
||||
expr: T.expression,
|
||||
expectedExpr: string,
|
||||
) => testMacro_(Skip.test, bindArray, expr, expectedExpr)
|
||||
let testMacroEval = (
|
||||
bindArray: array<(string, ExpressionValue.expressionValue)>,
|
||||
expr: T.expression,
|
||||
expectedValue: string,
|
||||
) => testMacroEval_(Skip.test, bindArray, expr, expectedValue)
|
||||
}
|
||||
|
||||
module MyOnly = {
|
||||
let testMacro = (
|
||||
bindArray: array<(string, ExpressionValue.expressionValue)>,
|
||||
expr: T.expression,
|
||||
expectedExpr: string,
|
||||
) => testMacro_(Only.test, bindArray, expr, expectedExpr)
|
||||
let testMacroEval = (
|
||||
bindArray: array<(string, ExpressionValue.expressionValue)>,
|
||||
expr: T.expression,
|
||||
expectedValue: string,
|
||||
) => testMacroEval_(Only.test, bindArray, expr, expectedValue)
|
||||
}
|
|
@@ -0,0 +1,15 @@
open Jest
open Reducer_TestHelpers

/*
  You can wrap around any expression with inspect(expr) to log the value of that expression.
  This is useful for debugging. inspect(expr) returns the value of expr, but also prints it out.

  There is a second version of inspect that takes a label, which will print out the label and the value.

  inspectPerformace(expr, label) will print out the value of expr, the label, and the time it took to evaluate expr.
*/
describe("Debugging", () => {
  testEvalToBe("inspect(1)", "Ok(1)")
  testEvalToBe("inspect(1, \"one\")", "Ok(1)")
})
@@ -1,60 +1,63 @@
|
|||
// TODO: Reimplement with usual parse
|
||||
open Jest
|
||||
open Reducer_TestHelpers
|
||||
|
||||
describe("Parse for Bindings", () => {
|
||||
testParseOuterToBe("x", "Ok((:$$bindExpression (:$$bindings) :x))")
|
||||
testParseOuterToBe("x+1", "Ok((:$$bindExpression (:$$bindings) (:add :x 1)))")
|
||||
testParseOuterToBe(
|
||||
"y = x+1; y",
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :y (:add :x 1))) :y))",
|
||||
)
|
||||
})
|
||||
// describe("Parse for Bindings", () => {
|
||||
// testParseOuterToBe("x", "Ok((:$$bindExpression (:$$bindings) :x))")
|
||||
// testParseOuterToBe("x+1", "Ok((:$$bindExpression (:$$bindings) (:add :x 1)))")
|
||||
// testParseOuterToBe(
|
||||
// "y = x+1; y",
|
||||
// "Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :y (:add :x 1))) :y))",
|
||||
// )
|
||||
// })
|
||||
|
||||
describe("Parse Partial", () => {
|
||||
testParsePartialToBe(
|
||||
"x",
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) :x) (:$exportVariablesExpression)))",
|
||||
)
|
||||
testParsePartialToBe(
|
||||
"y=x",
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :y :x)) (:$exportVariablesExpression)))",
|
||||
)
|
||||
testParsePartialToBe(
|
||||
"y=x+1",
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :y (:add :x 1))) (:$exportVariablesExpression)))",
|
||||
)
|
||||
testParsePartialToBe(
|
||||
"y = x+1; z = y",
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$$bindStatement (:$$bindings) (:$let :y (:add :x 1))) (:$let :z :y)) (:$exportVariablesExpression)))",
|
||||
)
|
||||
})
|
||||
// describe("Parse Partial", () => {
|
||||
// testParsePartialToBe(
|
||||
// "x",
|
||||
// "Ok((:$$bindExpression (:$$bindStatement (:$$bindings) :x) (:$exportVariablesExpression)))",
|
||||
// )
|
||||
// testParsePartialToBe(
|
||||
// "y=x",
|
||||
// "Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :y :x)) (:$exportVariablesExpression)))",
|
||||
// )
|
||||
// testParsePartialToBe(
|
||||
// "y=x+1",
|
||||
// "Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :y (:add :x 1))) (:$exportVariablesExpression)))",
|
||||
// )
|
||||
// testParsePartialToBe(
|
||||
// "y = x+1; z = y",
|
||||
// "Ok((:$$bindExpression (:$$bindStatement (:$$bindStatement (:$$bindings) (:$let :y (:add :x 1))) (:$let :z :y)) (:$exportVariablesExpression)))",
|
||||
// )
|
||||
// })
|
||||
|
||||
describe("Eval with Bindings", () => {
|
||||
testEvalBindingsToBe("x", list{("x", ExpressionValue.EvNumber(1.))}, "Ok(1)")
|
||||
testEvalBindingsToBe("x+1", list{("x", ExpressionValue.EvNumber(1.))}, "Ok(2)")
|
||||
testParseToBe("y = x+1; y", "Ok((:$$block (:$$block (:$let :y (:add :x 1)) :y)))")
|
||||
testEvalBindingsToBe("y = x+1; y", list{("x", ExpressionValue.EvNumber(1.))}, "Ok(2)")
|
||||
testEvalBindingsToBe("y = x+1", list{("x", ExpressionValue.EvNumber(1.))}, "Ok({x: 1,y: 2})")
|
||||
})
|
||||
|
||||
/*
|
||||
Partial code is a partial code fragment that is cut out from a larger code.
|
||||
Therefore it does not end with an expression.
|
||||
*/
|
||||
describe("Eval Partial", () => {
|
||||
testEvalPartialBindingsToBe(
|
||||
// A partial cannot end with an expression
|
||||
"x",
|
||||
list{("x", ExpressionValue.EvNumber(1.))},
|
||||
"Error(Assignment expected)",
|
||||
)
|
||||
testEvalPartialBindingsToBe("y=x", list{("x", ExpressionValue.EvNumber(1.))}, "Ok({x: 1, y: 1})")
|
||||
testEvalPartialBindingsToBe(
|
||||
"y=x+1",
|
||||
list{("x", ExpressionValue.EvNumber(1.))},
|
||||
"Ok({x: 1, y: 2})",
|
||||
)
|
||||
testEvalPartialBindingsToBe(
|
||||
"y = x+1; z = y",
|
||||
list{("x", ExpressionValue.EvNumber(1.))},
|
||||
"Ok({x: 1, y: 2, z: 2})",
|
||||
)
|
||||
})
|
||||
// describe("Eval Partial", () => {
|
||||
// testEvalPartialBindingsToBe(
|
||||
// // A partial cannot end with an expression
|
||||
// "x",
|
||||
// list{("x", ExpressionValue.EvNumber(1.))},
|
||||
// "Error(Assignment expected)",
|
||||
// )
|
||||
// testEvalPartialBindingsToBe("y=x", list{("x", ExpressionValue.EvNumber(1.))}, "Ok({x: 1,y: 1})")
|
||||
// testEvalPartialBindingsToBe(
|
||||
// "y=x+1",
|
||||
// list{("x", ExpressionValue.EvNumber(1.))},
|
||||
// "Ok({x: 1,y: 2})",
|
||||
// )
|
||||
// testEvalPartialBindingsToBe(
|
||||
// "y = x+1; z = y",
|
||||
// list{("x", ExpressionValue.EvNumber(1.))},
|
||||
// "Ok({x: 1,y: 2,z: 2})",
|
||||
// )
|
||||
// })
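The same partial-to-bindings behaviour is exercised from the JS side by the TypeScript tests further down in this diff. A minimal Jest-style sketch in that style, using the `testRun`/`testRunPartial` helpers imported there (that multi-statement partials work and the exact expected value are assumptions based on the cases above):

```ts
import { testRun, testRunPartial } from "./TestHelpers";

test("a partial yields bindings that can seed a later run", () => {
  // The partial ends with an assignment, so it evaluates to bindings, not a value.
  const bindings = testRunPartial(`x = 1; y = x + 1`);
  // Those bindings can then feed a normal run that does end with an expression.
  expect(testRun(`y + 1`, bindings)).toEqual({ tag: "number", value: 3 });
});
```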
|
||||
|
|
|
@ -0,0 +1,12 @@
open Jest
open Reducer_TestHelpers

describe("Parse function assignment", () => {
  testParseToBe("f(x)=x", "Ok((:$$block (:$let :f (:$$lambda [x] (:$$block :x)))))")
  testParseToBe("f(x)=2*x", "Ok((:$$block (:$let :f (:$$lambda [x] (:$$block (:multiply 2 :x))))))")
  // MathJs does not allow blocks in function definitions
})

describe("Evaluate function assignment", () => {
  testEvalToBe("f(x)=x; f(1)", "Ok(1)")
})
|
|
@ -0,0 +1,77 @@
|
|||
open Jest
|
||||
open Reducer_TestHelpers
|
||||
|
||||
describe("Arity check", () => {
|
||||
testEvalToBe("f(x,y) = x + y; f(1,2)", "Ok(3)")
|
||||
testEvalToBe(
|
||||
"f(x,y) = x + y; f(1)",
|
||||
"Error(2 arguments expected. Instead 1 argument(s) were passed.)",
|
||||
)
|
||||
testEvalToBe(
|
||||
"f(x,y) = x + y; f(1,2,3)",
|
||||
"Error(2 arguments expected. Instead 3 argument(s) were passed.)",
|
||||
)
|
||||
testEvalToBe(
|
||||
"f(x,y)=x+y; f(1,2,3,4)",
|
||||
"Error(2 arguments expected. Instead 4 argument(s) were passed.)",
|
||||
)
|
||||
testEvalToBe(
|
||||
"f(x,y)=x+y; f(1)",
|
||||
"Error(2 arguments expected. Instead 1 argument(s) were passed.)",
|
||||
)
|
||||
testEvalToBe(
|
||||
"f(x,y)=x(y); f(f)",
|
||||
"Error(2 arguments expected. Instead 1 argument(s) were passed.)",
|
||||
)
|
||||
testEvalToBe("f(x)=x; f(f)", "Ok(lambda(x=>internal code))")
|
||||
testEvalToBe(
|
||||
"f(x,y)=x(y); f(z)",
|
||||
"Error(2 arguments expected. Instead 1 argument(s) were passed.)",
|
||||
)
|
||||
})
|
||||
|
||||
describe("symbol not defined", () => {
|
||||
testEvalToBe("f(x)=x(y); f(f)", "Error(y is not defined)")
|
||||
testEvalToBe("f(x)=x; f(f)", "Ok(lambda(x=>internal code))")
|
||||
testEvalToBe("f(x)=x(y); f(z)", "Error(z is not defined)")
|
||||
testEvalToBe("f(x)=x(y); f(2)", "Error(2 is not a function)")
|
||||
testEvalToBe("f(x)=x(1); f(2)", "Error(2 is not a function)")
|
||||
})
|
||||
|
||||
describe("call and bindings", () => {
|
||||
testEvalToBe("f(x)=x+1", "Ok({f: lambda(x=>internal code)})")
|
||||
testEvalToBe("f(x)=x+1; f(1)", "Ok(2)")
|
||||
testEvalToBe("f=1;y=2", "Ok({f: 1,y: 2})")
|
||||
testEvalToBe("f(x)=x+1; y=f(1)", "Ok({f: lambda(x=>internal code),y: 2})")
|
||||
testEvalToBe("f(x)=x+1; y=f(1); f(1)", "Ok(2)")
|
||||
testEvalToBe("f(x)=x+1; y=f(1); z=f(1)", "Ok({f: lambda(x=>internal code),y: 2,z: 2})")
|
||||
testEvalToBe(
|
||||
"f(x)=x+1; g(x)=f(x)+1",
|
||||
"Ok({f: lambda(x=>internal code),g: lambda(x=>internal code)})",
|
||||
)
|
||||
testParseToBe(
|
||||
"f=99; g(x)=f; g(2)",
|
||||
"Ok((:$$block (:$$block (:$let :f 99) (:$let :g (:$$lambda [x] (:$$block :f))) (:g 2))))",
|
||||
)
|
||||
testEvalToBe("f=99; g(x)=f; g(2)", "Ok(99)")
|
||||
testEvalToBe("f(x)=x; g(x)=f(x); g(2)", "Ok(2)")
|
||||
testEvalToBe(
|
||||
"f(x)=x+1; g(x)=f(x)+1; y=g(2)",
|
||||
"Ok({f: lambda(x=>internal code),g: lambda(x=>internal code),y: 4})",
|
||||
)
|
||||
testEvalToBe("f(x)=x+1; g(x)=f(x)+1; g(2)", "Ok(4)")
|
||||
})
|
||||
|
||||
describe("function tricks", () => {
|
||||
testParseToBe(
|
||||
"f(x)=f(y)=2; f(2)",
|
||||
"Ok((:$$block (:$$block (:$let :f (:$$lambda [x] (:$$block (:$let :f (:$$lambda [y] (:$$block 2)))))) (:f 2))))",
|
||||
)
|
||||
testEvalToBe("f(x)=f(y)=2; f(2)", "Ok({f: lambda(y=>internal code),x: 2})")
|
||||
testEvalToBe("y=2;g(x)=y+1;g(2)", "Ok(3)")
|
||||
testEvalToBe("y=2;g(x)=inspect(y)+1", "Ok({g: lambda(x=>internal code),y: 2})")
|
||||
MySkip.testEvalToBe("f(x) = x(x); f(f)", "????") // TODO: Infinite loop. Any solution? Catching proper exception or timeout?
|
||||
MySkip.testEvalToBe("f(x, x)=x+x; f(1,2)", "????") // TODO: Duplicate parameters
|
||||
MySkip.testEvalToBe("myadd(x,y)=x+y; z=[add]; z[0](3,2)", "????") //TODO: to fix with new parser
|
||||
MySkip.testEvalToBe("myaddd(x,y)=x+y; z={x: add}; z.x(3,2)", "????") //TODO: to fix with new parser
|
||||
})
|
|
@ -0,0 +1,16 @@
open Jest
open Reducer_TestHelpers

describe("map reduce", () => {
  testEvalToBe("double(x)=2*x; arr=[1,2,3]; map(arr, double)", "Ok([2,4,6])")
  testEvalToBe("myadd(acc,x)=acc+x; arr=[1,2,3]; reduce(arr, 0, myadd)", "Ok(6)")
  testEvalToBe("change(acc,x)=acc*x+x; arr=[1,2,3]; reduce(arr, 0, change)", "Ok(15)")
  testEvalToBe("change(acc,x)=acc*x+x; arr=[1,2,3]; reduceReverse(arr, 0, change)", "Ok(9)")
  testEvalToBe("arr=[1,2,3]; reverse(arr)", "Ok([3,2,1])")
  testEvalToBe("check(x)=(x==2);arr=[1,2,3]; keep(arr,check)", "Ok([2])")
})

Skip.describe("map reduce (sam)", () => {
  testEvalToBe("addone(x)=x+1; map(2, addone)", "Error???")
  testEvalToBe("addone(x)=x+1; map(2, {x: addone})", "Error???")
})
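The reduce/reduceReverse expectations are easy to verify by hand, assuming a left fold that starts from the given accumulator (which is what the expected values imply). A quick TypeScript check:

```ts
// change(acc, x) = acc * x + x, folded over arr = [1, 2, 3]
const change = (acc: number, x: number) => acc * x + x;
const arr = [1, 2, 3];

arr.reduce(change, 0);                // (0*1+1)=1, (1*2+2)=4, (4*3+3)=15  -> Ok(15)
[...arr].reverse().reduce(change, 0); // (0*3+3)=3, (3*2+2)=8, (8*1+1)=9   -> Ok(9)
```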
|
|
@ -1,12 +1,14 @@
open Jest
open Reducer_TestHelpers

Skip.describe("Parse ternary operator", () => {
  testParseToBe("true ? 'YES' : 'NO'", "Ok('YES')")
  testParseToBe("false ? 'YES' : 'NO'", "Ok('NO')")
describe("Parse ternary operator", () => {
  testParseToBe("true ? 'YES' : 'NO'", "Ok((:$$block (:$$ternary true 'YES' 'NO')))")
})

Skip.describe("Evaluate ternary operator", () => {
describe("Evaluate ternary operator", () => {
  testEvalToBe("true ? 'YES' : 'NO'", "Ok('YES')")
  testEvalToBe("false ? 'YES' : 'NO'", "Ok('NO')")
  testEvalToBe("2 > 1 ? 'YES' : 'NO'", "Ok('YES')")
  testEvalToBe("2 <= 1 ? 'YES' : 'NO'", "Ok('NO')")
  testEvalToBe("1+1 ? 'YES' : 'NO'", "Error(Expected type: Boolean)")
})
|
||||
|
|
|
@ -10,46 +10,39 @@ describe("reducer using mathjs parse", () => {
|
|||
// Those tests toString that we are converting mathjs parse tree to what we need
|
||||
|
||||
describe("expressions", () => {
|
||||
testParseToBe("1", "Ok(1)")
|
||||
testParseToBe("(1)", "Ok(1)")
|
||||
testParseToBe("1+2", "Ok((:add 1 2))")
|
||||
testParseToBe("1+2", "Ok((:add 1 2))")
|
||||
testParseToBe("1+2", "Ok((:add 1 2))")
|
||||
testParseToBe("1+2*3", "Ok((:add 1 (:multiply 2 3)))")
|
||||
testParseToBe("1", "Ok((:$$block 1))")
|
||||
testParseToBe("(1)", "Ok((:$$block 1))")
|
||||
testParseToBe("1+2", "Ok((:$$block (:add 1 2)))")
|
||||
testParseToBe("1+2*3", "Ok((:$$block (:add 1 (:multiply 2 3))))")
|
||||
})
|
||||
describe("arrays", () => {
|
||||
//Note. () is a empty list in Lisp
|
||||
// The only builtin structure in Lisp is list. There are no arrays
|
||||
// [1,2,3] becomes (1 2 3)
|
||||
testDescriptionParseToBe("empty", "[]", "Ok(())")
|
||||
testParseToBe("[1, 2, 3]", "Ok((1 2 3))")
|
||||
testParseToBe("['hello', 'world']", "Ok(('hello' 'world'))")
|
||||
testDescriptionParseToBe("index", "([0,1,2])[1]", "Ok((:$atIndex (0 1 2) (1)))")
|
||||
testDescriptionParseToBe("empty", "[]", "Ok((:$$block ()))")
|
||||
testParseToBe("[1, 2, 3]", "Ok((:$$block (1 2 3)))")
|
||||
testParseToBe("['hello', 'world']", "Ok((:$$block ('hello' 'world')))")
|
||||
testDescriptionParseToBe("index", "([0,1,2])[1]", "Ok((:$$block (:$atIndex (0 1 2) (1))))")
|
||||
})
|
||||
describe("records", () => {
|
||||
testDescriptionParseToBe("define", "{a: 1, b: 2}", "Ok((:$constructRecord (('a' 1) ('b' 2))))")
|
||||
testDescriptionParseToBe(
|
||||
"define",
|
||||
"{a: 1, b: 2}",
|
||||
"Ok((:$$block (:$constructRecord (('a' 1) ('b' 2)))))",
|
||||
)
|
||||
testDescriptionParseToBe(
|
||||
"use",
|
||||
"{a: 1, b: 2}.a",
|
||||
"Ok((:$atIndex (:$constructRecord (('a' 1) ('b' 2))) ('a')))",
|
||||
"Ok((:$$block (:$atIndex (:$constructRecord (('a' 1) ('b' 2))) ('a'))))",
|
||||
)
|
||||
})
|
||||
describe("multi-line", () => {
|
||||
testParseToBe("1; 2", "Ok((:$$bindExpression (:$$bindStatement (:$$bindings) 1) 2))")
|
||||
testParseToBe(
|
||||
"1+1; 2+1",
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:add 1 1)) (:add 2 1)))",
|
||||
)
|
||||
testParseToBe("1; 2", "Ok((:$$block (:$$block 1 2)))")
|
||||
testParseToBe("1+1; 2+1", "Ok((:$$block (:$$block (:add 1 1) (:add 2 1))))")
|
||||
})
|
||||
describe("assignment", () => {
|
||||
testParseToBe(
|
||||
"x=1; x",
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :x 1)) :x))",
|
||||
)
|
||||
testParseToBe(
|
||||
"x=1+1; x+1",
|
||||
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :x (:add 1 1))) (:add :x 1)))",
|
||||
)
|
||||
testParseToBe("x=1; x", "Ok((:$$block (:$$block (:$let :x 1) :x)))")
|
||||
testParseToBe("x=1+1; x+1", "Ok((:$$block (:$$block (:$let :x (:add 1 1)) (:add :x 1))))")
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -70,13 +63,13 @@ describe("eval", () => {
|
|||
})
|
||||
describe("arrays", () => {
|
||||
test("empty array", () => expectEvalToBe("[]", "Ok([])"))
|
||||
testEvalToBe("[1, 2, 3]", "Ok([1, 2, 3])")
|
||||
testEvalToBe("['hello', 'world']", "Ok(['hello', 'world'])")
|
||||
testEvalToBe("[1, 2, 3]", "Ok([1,2,3])")
|
||||
testEvalToBe("['hello', 'world']", "Ok(['hello','world'])")
|
||||
testEvalToBe("([0,1,2])[1]", "Ok(1)")
|
||||
testDescriptionEvalToBe("index not found", "([0,1,2])[10]", "Error(Array index not found: 10)")
|
||||
})
|
||||
describe("records", () => {
|
||||
test("define", () => expectEvalToBe("{a: 1, b: 2}", "Ok({a: 1, b: 2})"))
|
||||
test("define", () => expectEvalToBe("{a: 1, b: 2}", "Ok({a: 1,b: 2})"))
|
||||
test("index", () => expectEvalToBe("{a: 1}.a", "Ok(1)"))
|
||||
test("index not found", () => expectEvalToBe("{a: 1}.b", "Error(Record property not found: b)"))
|
||||
})
|
||||
|
@ -91,7 +84,7 @@ describe("eval", () => {
|
|||
testEvalToBe("x=1; y=x+1; y+1", "Ok(3)")
|
||||
testEvalToBe("1; x=1", "Error(Assignment expected)")
|
||||
testEvalToBe("1; 1", "Error(Assignment expected)")
|
||||
testEvalToBe("x=1; x=1", "Error(Expression expected)")
|
||||
testEvalToBe("x=1; x=1", "Ok({x: 1})")
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -30,6 +30,7 @@ describe("eval on distribution functions", () => {
|
|||
describe("mean", () => {
|
||||
testEval("mean(normal(5,2))", "Ok(5)")
|
||||
testEval("mean(lognormal(1,2))", "Ok(20.085536923187668)")
|
||||
testEval("mean(gamma(5,5))", "Ok(25)")
|
||||
})
|
||||
describe("toString", () => {
|
||||
testEval("toString(normal(5,2))", "Ok('Normal(5,2)')")
|
||||
|
@ -119,27 +120,34 @@ describe("eval on distribution functions", () => {
|
|||
|
||||
describe("parse on distribution functions", () => {
|
||||
describe("power", () => {
|
||||
testParse("normal(5,2) ^ normal(5,1)", "Ok((:pow (:normal 5 2) (:normal 5 1)))")
|
||||
testParse("3 ^ normal(5,1)", "Ok((:pow 3 (:normal 5 1)))")
|
||||
testParse("normal(5,2) ^ 3", "Ok((:pow (:normal 5 2) 3))")
|
||||
testParse("normal(5,2) ^ normal(5,1)", "Ok((:$$block (:pow (:normal 5 2) (:normal 5 1))))")
|
||||
testParse("3 ^ normal(5,1)", "Ok((:$$block (:pow 3 (:normal 5 1))))")
|
||||
testParse("normal(5,2) ^ 3", "Ok((:$$block (:pow (:normal 5 2) 3)))")
|
||||
})
|
||||
describe("subtraction", () => {
|
||||
testParse("10 - normal(5,1)", "Ok((:subtract 10 (:normal 5 1)))")
|
||||
testParse("normal(5,1) - 10", "Ok((:subtract (:normal 5 1) 10))")
|
||||
testParse("10 - normal(5,1)", "Ok((:$$block (:subtract 10 (:normal 5 1))))")
|
||||
testParse("normal(5,1) - 10", "Ok((:$$block (:subtract (:normal 5 1) 10)))")
|
||||
})
|
||||
describe("pointwise arithmetic expressions", () => {
|
||||
testParse(~skip=true, "normal(5,2) .+ normal(5,1)", "Ok((:dotAdd (:normal 5 2) (:normal 5 1)))")
|
||||
testParse(
|
||||
~skip=true,
|
||||
"normal(5,2) .- normal(5,1)",
|
||||
"Ok((:dotSubtract (:normal 5 2) (:normal 5 1)))",
|
||||
"Ok((:$$block (:dotSubtract (:normal 5 2) (:normal 5 1))))",
|
||||
// TODO: !!! returns "Ok((:$$block (:dotPow (:normal 5 2) (:normal 5 1))))"
|
||||
)
|
||||
testParse("normal(5,2) .* normal(5,1)", "Ok((:dotMultiply (:normal 5 2) (:normal 5 1)))")
|
||||
testParse("normal(5,2) ./ normal(5,1)", "Ok((:dotDivide (:normal 5 2) (:normal 5 1)))")
|
||||
testParse("normal(5,2) .^ normal(5,1)", "Ok((:dotPow (:normal 5 2) (:normal 5 1)))")
|
||||
testParse(
|
||||
"normal(5,2) .* normal(5,1)",
|
||||
"Ok((:$$block (:dotMultiply (:normal 5 2) (:normal 5 1))))",
|
||||
)
|
||||
testParse(
|
||||
"normal(5,2) ./ normal(5,1)",
|
||||
"Ok((:$$block (:dotDivide (:normal 5 2) (:normal 5 1))))",
|
||||
)
|
||||
testParse("normal(5,2) .^ normal(5,1)", "Ok((:$$block (:dotPow (:normal 5 2) (:normal 5 1))))")
|
||||
})
|
||||
describe("equality", () => {
|
||||
testParse("5 == normal(5,2)", "Ok((:equal 5 (:normal 5 2)))")
|
||||
testParse("5 == normal(5,2)", "Ok((:$$block (:equal 5 (:normal 5 2))))")
|
||||
})
|
||||
describe("pointwise adding two normals", () => {
|
||||
testParse(~skip=true, "normal(5,2) .+ normal(5,1)", "Ok((:dotAdd (:normal 5 2) (:normal 5 1)))")
|
||||
|
|
|
@ -3,9 +3,9 @@ open Jest
open Expect

describe("ExpressionValue", () => {
  test("argsToString", () => expect([EvNumber(1.), EvString("a")]->argsToString)->toBe("1, 'a'"))
  test("argsToString", () => expect([EvNumber(1.), EvString("a")]->argsToString)->toBe("1,'a'"))

  test("toStringFunctionCall", () =>
    expect(("fn", [EvNumber(1.), EvString("a")])->toStringFunctionCall)->toBe("fn(1, 'a')")
    expect(("fn", [EvNumber(1.), EvString("a")])->toStringFunctionCall)->toBe("fn(1,'a')")
  )
})
|
||||
|
|
|
@ -1,4 +1,9 @@
import { Distribution, resultMap, defaultBindings } from "../../src/js/index";
import {
  Distribution,
  resultMap,
  defaultBindings,
  mergeBindings,
} from "../../src/js/index";
import { testRun, testRunPartial } from "./TestHelpers";

function Ok<b>(x: b) {
@ -66,6 +71,17 @@ describe("Partials", () => {
      value: 10,
    });
  });
  test("Can merge bindings from three partials", () => {
    let bindings1 = testRunPartial(`x = 1`);
    let bindings2 = testRunPartial(`y = 2`);
    let bindings3 = testRunPartial(`z = 3`);
    expect(
      testRun(`x + y + z`, mergeBindings([bindings1, bindings2, bindings3]))
    ).toEqual({
      tag: "number",
      value: 6,
    });
  });
});

describe("JS Imports", () => {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { Distribution } from "../../src/js/index";
|
||||
import { expectErrorToBeBounded, failDefault } from "./TestHelpers";
|
||||
import { expectErrorToBeBounded, failDefault, testRun } from "./TestHelpers";
|
||||
import * as fc from "fast-check";
|
||||
|
||||
// Beware: float64Array makes it appear in an infinite loop.
|
||||
|
@ -212,3 +212,18 @@ describe("mean is mean", () => {
|
|||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("fromSamples function", () => {
|
||||
test.skip("gives a mean near the mean of the input", () => {
|
||||
fc.assert(
|
||||
fc.property(arrayGen(), (xs_) => {
|
||||
let xs = Array.from(xs_);
|
||||
let xsString = xs.toString();
|
||||
let squiggleString = `x = fromSamples([${xsString}]); mean(x)`;
|
||||
let squiggleResult = testRun(squiggleString);
|
||||
let mean = xs.reduce((a, b) => a + b, 0.0) / xs.length;
|
||||
expect(squiggleResult.value).toBeCloseTo(mean, 4);
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -60,3 +60,13 @@ let cauchyMake = SymbolicDist.Cauchy.make
|
|||
let lognormalMake = SymbolicDist.Lognormal.make
|
||||
let triangularMake = SymbolicDist.Triangular.make
|
||||
let floatMake = SymbolicDist.Float.make
|
||||
|
||||
let fmapGenDist = symbdistres => E.R.fmap(s => DistributionTypes.Symbolic(s), symbdistres)
|
||||
let normalMakeR = (mean, stdev) => fmapGenDist(SymbolicDist.Normal.make(mean, stdev))
|
||||
let betaMakeR = (alpha, beta) => fmapGenDist(SymbolicDist.Beta.make(alpha, beta))
|
||||
let exponentialMakeR = rate => fmapGenDist(SymbolicDist.Exponential.make(rate))
|
||||
let uniformMakeR = (low, high) => fmapGenDist(SymbolicDist.Uniform.make(low, high))
|
||||
let cauchyMakeR = (local, rate) => fmapGenDist(SymbolicDist.Cauchy.make(local, rate))
|
||||
let lognormalMakeR = (mu, sigma) => fmapGenDist(SymbolicDist.Lognormal.make(mu, sigma))
|
||||
let triangularMakeR = (low, mode, high) =>
|
||||
fmapGenDist(SymbolicDist.Triangular.make(low, mode, high))
|
||||
|
|
|
@ -38,19 +38,6 @@ describe("XYShapes", () => {
|
|||
)
|
||||
})
|
||||
|
||||
describe("logScorePoint", () => {
|
||||
makeTest("When identical", XYShape.logScorePoint(30, pointSetDist1, pointSetDist1), Some(0.0))
|
||||
makeTest(
|
||||
"When similar",
|
||||
XYShape.logScorePoint(30, pointSetDist1, pointSetDist2),
|
||||
Some(1.658971191043856),
|
||||
)
|
||||
makeTest(
|
||||
"When very different",
|
||||
XYShape.logScorePoint(30, pointSetDist1, pointSetDist3),
|
||||
Some(210.3721280423322),
|
||||
)
|
||||
})
|
||||
describe("integrateWithTriangles", () =>
|
||||
makeTest(
|
||||
"integrates correctly",
|
||||
|
|
|
@ -20,7 +20,8 @@
|
|||
],
|
||||
"suffix": ".bs.js",
|
||||
"namespace": true,
|
||||
"bs-dependencies": ["@glennsl/rescript-jest", "bisect_ppx"],
|
||||
"bs-dependencies": ["bisect_ppx"],
|
||||
"bs-dev-dependencies": ["@glennsl/rescript-jest", "rescript-fast-check"],
|
||||
"gentypeconfig": {
|
||||
"language": "typescript",
|
||||
"module": "commonjs",
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
{
|
||||
"name": "@quri/squiggle-lang",
|
||||
"version": "0.2.7",
|
||||
"version": "0.2.8",
|
||||
"homepage": "https://squiggle-language.com",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"build": "rescript build -with-deps && tsc",
|
||||
"build": "yarn build:rescript && yarn build:typescript",
|
||||
"build:rescript": "rescript build -with-deps",
|
||||
"build:typescript": "tsc",
|
||||
"bundle": "webpack",
|
||||
"start": "rescript build -w -with-deps",
|
||||
"clean": "rescript clean && rm -r dist",
|
||||
|
@ -24,6 +26,7 @@
|
|||
"format:rescript": "rescript format -all",
|
||||
"format:prettier": "prettier --write .",
|
||||
"format": "yarn format:rescript && yarn format:prettier",
|
||||
"prepack": "yarn build && yarn test && yarn bundle",
|
||||
"all": "yarn build && yarn bundle && yarn test"
|
||||
},
|
||||
"keywords": [
|
||||
|
@ -39,22 +42,22 @@
|
|||
},
|
||||
"devDependencies": {
|
||||
"bisect_ppx": "^2.7.1",
|
||||
"lodash": "4.17.21",
|
||||
"lodash": "^4.17.21",
|
||||
"rescript-fast-check": "^1.1.1",
|
||||
"@glennsl/rescript-jest": "^0.9.0",
|
||||
"@istanbuljs/nyc-config-typescript": "^1.0.2",
|
||||
"@types/jest": "^27.4.0",
|
||||
"@types/jest": "^27.5.0",
|
||||
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
|
||||
"chalk": "^5.0.1",
|
||||
"codecov": "3.8.3",
|
||||
"fast-check": "2.25.0",
|
||||
"codecov": "^3.8.3",
|
||||
"fast-check": "^2.25.0",
|
||||
"gentype": "^4.3.0",
|
||||
"jest": "^27.5.1",
|
||||
"moduleserve": "0.9.1",
|
||||
"moduleserve": "^0.9.1",
|
||||
"nyc": "^15.1.0",
|
||||
"reanalyze": "^2.19.0",
|
||||
"ts-jest": "^27.1.4",
|
||||
"ts-loader": "^9.2.8",
|
||||
"ts-loader": "^9.3.0",
|
||||
"ts-node": "^10.7.0",
|
||||
"typescript": "^4.6.3",
|
||||
"webpack": "^5.72.0",
|
||||
|
|
|
@ -3,7 +3,7 @@ import {
|
|||
genericDist,
|
||||
continuousShape,
|
||||
discreteShape,
|
||||
samplingParams,
|
||||
environment,
|
||||
distributionError,
|
||||
toPointSet,
|
||||
distributionErrorToString,
|
||||
|
@ -51,9 +51,9 @@ export type shape = {
|
|||
|
||||
export class Distribution {
|
||||
t: genericDist;
|
||||
env: samplingParams;
|
||||
env: environment;
|
||||
|
||||
constructor(t: genericDist, env: samplingParams) {
|
||||
constructor(t: genericDist, env: environment) {
|
||||
this.t = t;
|
||||
this.env = env;
|
||||
return this;
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
import * as _ from "lodash";
|
||||
import {
|
||||
samplingParams,
|
||||
evaluateUsingExternalBindings,
|
||||
environment,
|
||||
defaultEnvironment,
|
||||
evaluatePartialUsingExternalBindings,
|
||||
evaluateUsingOptions,
|
||||
externalBindings,
|
||||
expressionValue,
|
||||
errorValue,
|
||||
|
@ -27,9 +29,9 @@ import {
|
|||
convertRawToTypescript,
|
||||
} from "./rescript_interop";
|
||||
import { result, resultMap, tag, tagged } from "./types";
|
||||
import { Distribution } from "./distribution";
|
||||
import { Distribution, shape } from "./distribution";
|
||||
|
||||
export { Distribution, squiggleExpression, result, resultMap };
|
||||
export { Distribution, squiggleExpression, result, resultMap, shape };
|
||||
|
||||
export let defaultSamplingInputs: samplingParams = {
|
||||
sampleCount: 10000,
|
||||
|
@ -39,37 +41,38 @@ export let defaultSamplingInputs: samplingParams = {
|
|||
export function run(
|
||||
squiggleString: string,
|
||||
bindings?: externalBindings,
|
||||
samplingInputs?: samplingParams,
|
||||
environment?: environment,
|
||||
imports?: jsImports
|
||||
): result<squiggleExpression, errorValue> {
|
||||
let b = bindings ? bindings : defaultBindings;
|
||||
let i = imports ? imports : defaultImports;
|
||||
let si: samplingParams = samplingInputs
|
||||
? samplingInputs
|
||||
: defaultSamplingInputs;
|
||||
|
||||
let result: result<expressionValue, errorValue> =
|
||||
evaluateUsingExternalBindings(squiggleString, mergeImports(b, i));
|
||||
return resultMap(result, (x) => createTsExport(x, si));
|
||||
let e = environment ? environment : defaultEnvironment;
|
||||
let res: result<expressionValue, errorValue> = evaluateUsingOptions(
|
||||
{ externalBindings: mergeImportsWithBindings(b, i), environment: e },
|
||||
squiggleString
|
||||
);
|
||||
return resultMap(res, (x) => createTsExport(x, e));
|
||||
}
|
||||
|
||||
// Run Partial. A partial is a block of code that doesn't return a value
|
||||
export function runPartial(
|
||||
squiggleString: string,
|
||||
bindings?: externalBindings,
|
||||
_samplingInputs?: samplingParams,
|
||||
environment?: environment,
|
||||
imports?: jsImports
|
||||
): result<externalBindings, errorValue> {
|
||||
let b = bindings ? bindings : defaultBindings;
|
||||
let i = imports ? imports : defaultImports;
|
||||
let e = environment ? environment : defaultEnvironment;
|
||||
|
||||
return evaluatePartialUsingExternalBindings(
|
||||
squiggleString,
|
||||
mergeImports(b, i)
|
||||
mergeImportsWithBindings(b, i),
|
||||
e
|
||||
);
|
||||
}
|
||||
|
||||
function mergeImports(
|
||||
function mergeImportsWithBindings(
|
||||
bindings: externalBindings,
|
||||
imports: jsImports
|
||||
): externalBindings {
|
||||
|
@ -87,9 +90,15 @@ type jsImports = { [key: string]: jsValue };
export let defaultImports: jsImports = {};
export let defaultBindings: externalBindings = {};

export function mergeBindings(
  allBindings: externalBindings[]
): externalBindings {
  return allBindings.reduce((acc, x) => ({ ...acc, ...x }));
}
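Because the reduce spreads each successive binding set over the accumulator, later entries in the array win when keys collide. A small usage sketch in the style of the partials test earlier in this diff (helper names from the __tests__ TestHelpers shown above; the expected value is an assumption following that test):

```ts
const first = testRunPartial(`x = 1`);
const second = testRunPartial(`x = 2; y = 3`);

// `second` is spread after `first`, so its definition of x (2) wins.
testRun(`x + y`, mergeBindings([first, second])); // => { tag: "number", value: 5 }
```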
|
||||
|
||||
function createTsExport(
|
||||
x: expressionValue,
|
||||
sampEnv: samplingParams
|
||||
environment: environment
|
||||
): squiggleExpression {
|
||||
switch (x.tag) {
|
||||
case "EvArray":
|
||||
|
@ -108,7 +117,10 @@ function createTsExport(
|
|||
return tag(
|
||||
"record",
|
||||
_.mapValues(arrayItem.value, (recordValue: unknown) =>
|
||||
convertRawToTypescript(recordValue as rescriptExport, sampEnv)
|
||||
convertRawToTypescript(
|
||||
recordValue as rescriptExport,
|
||||
environment
|
||||
)
|
||||
)
|
||||
);
|
||||
case "EvArray":
|
||||
|
@ -116,20 +128,24 @@ function createTsExport(
|
|||
return tag(
|
||||
"array",
|
||||
y.map((childArrayItem) =>
|
||||
convertRawToTypescript(childArrayItem, sampEnv)
|
||||
convertRawToTypescript(childArrayItem, environment)
|
||||
)
|
||||
);
|
||||
default:
|
||||
return createTsExport(arrayItem, sampEnv);
|
||||
return createTsExport(arrayItem, environment);
|
||||
}
|
||||
})
|
||||
);
|
||||
case "EvArrayString":
|
||||
return tag("arraystring", x.value);
|
||||
case "EvBool":
|
||||
return tag("boolean", x.value);
|
||||
case "EvCall":
|
||||
return tag("call", x.value);
|
||||
case "EvLambda":
|
||||
return tag("lambda", x.value);
|
||||
case "EvDistribution":
|
||||
return tag("distribution", new Distribution(x.value, sampEnv));
|
||||
return tag("distribution", new Distribution(x.value, environment));
|
||||
case "EvNumber":
|
||||
return tag("number", x.value);
|
||||
case "EvRecord":
|
||||
|
@ -137,7 +153,7 @@ function createTsExport(
|
|||
let result: tagged<"record", { [key: string]: squiggleExpression }> = tag(
|
||||
"record",
|
||||
_.mapValues(x.value, (x: unknown) =>
|
||||
convertRawToTypescript(x as rescriptExport, sampEnv)
|
||||
convertRawToTypescript(x as rescriptExport, environment)
|
||||
)
|
||||
);
|
||||
return result;
|
||||
|
|
|
@ -3,10 +3,11 @@ import {
|
|||
mixedShape,
|
||||
sampleSetDist,
|
||||
genericDist,
|
||||
samplingParams,
|
||||
environment,
|
||||
symbolicDist,
|
||||
discreteShape,
|
||||
continuousShape,
|
||||
lambdaValue,
|
||||
} from "../rescript/TypescriptInterface.gen";
|
||||
import { Distribution } from "./distribution";
|
||||
import { tagged, tag } from "./types";
|
||||
|
@ -19,31 +20,39 @@ export type rescriptExport =
|
|||
_0: rescriptExport[];
|
||||
}
|
||||
| {
|
||||
TAG: 1; // EvBool
|
||||
TAG: 1; // EvString
|
||||
_0: string[];
|
||||
}
|
||||
| {
|
||||
TAG: 2; // EvBool
|
||||
_0: boolean;
|
||||
}
|
||||
| {
|
||||
TAG: 2; // EvCall
|
||||
TAG: 3; // EvCall
|
||||
_0: string;
|
||||
}
|
||||
| {
|
||||
TAG: 3; // EvDistribution
|
||||
TAG: 4; // EvDistribution
|
||||
_0: rescriptDist;
|
||||
}
|
||||
| {
|
||||
TAG: 4; // EvNumber
|
||||
TAG: 5; // EvLambda
|
||||
_0: lambdaValue;
|
||||
}
|
||||
| {
|
||||
TAG: 6; // EvNumber
|
||||
_0: number;
|
||||
}
|
||||
| {
|
||||
TAG: 5; // EvRecord
|
||||
TAG: 7; // EvRecord
|
||||
_0: { [key: string]: rescriptExport };
|
||||
}
|
||||
| {
|
||||
TAG: 6; // EvString
|
||||
TAG: 8; // EvString
|
||||
_0: string;
|
||||
}
|
||||
| {
|
||||
TAG: 7; // EvSymbol
|
||||
TAG: 9; // EvSymbol
|
||||
_0: string;
|
||||
};
|
||||
|
||||
|
@ -70,7 +79,9 @@ export type squiggleExpression =
|
|||
| tagged<"symbol", string>
|
||||
| tagged<"string", string>
|
||||
| tagged<"call", string>
|
||||
| tagged<"lambda", lambdaValue>
|
||||
| tagged<"array", squiggleExpression[]>
|
||||
| tagged<"arraystring", string[]>
|
||||
| tagged<"boolean", boolean>
|
||||
| tagged<"distribution", Distribution>
|
||||
| tagged<"number", number>
|
||||
|
@ -78,36 +89,40 @@ export type squiggleExpression =
|
|||
|
||||
export function convertRawToTypescript(
|
||||
result: rescriptExport,
|
||||
sampEnv: samplingParams
|
||||
environment: environment
|
||||
): squiggleExpression {
|
||||
switch (result.TAG) {
|
||||
case 0: // EvArray
|
||||
return tag(
|
||||
"array",
|
||||
result._0.map((x) => convertRawToTypescript(x, sampEnv))
|
||||
result._0.map((x) => convertRawToTypescript(x, environment))
|
||||
);
|
||||
case 1: // EvBool
|
||||
case 1: // EvArrayString
|
||||
return tag("arraystring", result._0);
|
||||
case 2: // EvBool
|
||||
return tag("boolean", result._0);
|
||||
case 2: // EvCall
|
||||
case 3: // EvCall
|
||||
return tag("call", result._0);
|
||||
case 3: // EvDistribution
|
||||
case 4: // EvDistribution
|
||||
return tag(
|
||||
"distribution",
|
||||
new Distribution(
|
||||
convertRawDistributionToGenericDist(result._0),
|
||||
sampEnv
|
||||
environment
|
||||
)
|
||||
);
|
||||
case 4: // EvNumber
|
||||
case 5: // EvDistribution
|
||||
return tag("lambda", result._0);
|
||||
case 6: // EvNumber
|
||||
return tag("number", result._0);
|
||||
case 5: // EvRecord
|
||||
case 7: // EvRecord
|
||||
return tag(
|
||||
"record",
|
||||
_.mapValues(result._0, (x) => convertRawToTypescript(x, sampEnv))
|
||||
_.mapValues(result._0, (x) => convertRawToTypescript(x, environment))
|
||||
);
|
||||
case 6: // EvString
|
||||
case 8: // EvString
|
||||
return tag("string", result._0);
|
||||
case 7: // EvSymbol
|
||||
case 9: // EvSymbol
|
||||
return tag("symbol", result._0);
|
||||
}
|
||||
}
|
||||
|
@ -141,15 +156,15 @@ export type jsValue =

export function jsValueToBinding(value: jsValue): rescriptExport {
  if (typeof value === "boolean") {
    return { TAG: 1, _0: value as boolean };
    return { TAG: 2, _0: value as boolean };
  } else if (typeof value === "string") {
    return { TAG: 6, _0: value as string };
    return { TAG: 8, _0: value as string };
  } else if (typeof value === "number") {
    return { TAG: 4, _0: value as number };
    return { TAG: 6, _0: value as number };
  } else if (Array.isArray(value)) {
    return { TAG: 0, _0: value.map(jsValueToBinding) };
  } else {
    // Record
    return { TAG: 5, _0: _.mapValues(value, jsValueToBinding) };
    return { TAG: 7, _0: _.mapValues(value, jsValueToBinding) };
  }
}
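The tag numbers move because two variants (EvArrayString and EvLambda) were inserted into the middle of the ReScript variant order, shifting everything after them. A small sketch of what the updated mapping yields for a plain JS record (tags taken from the renumbered union above; the value shapes are shown for illustration only):

```ts
// { a: 1, ok: true, name: "squiggle" } becomes an EvRecord (TAG 7) whose
// fields use the new per-type tags:
//   number  -> { TAG: 6, _0: 1 }           (EvNumber)
//   boolean -> { TAG: 2, _0: true }        (EvBool)
//   string  -> { TAG: 8, _0: "squiggle" }  (EvString)
const binding = jsValueToBinding({ a: 1, ok: true, name: "squiggle" });
```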
|
||||
|
|
|
@ -9,6 +9,11 @@ type env = {
|
|||
xyPointLength: int,
|
||||
}
|
||||
|
||||
let defaultEnv = {
|
||||
sampleCount: MagicNumbers.Environment.defaultSampleCount,
|
||||
xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
|
||||
}
|
||||
|
||||
type outputType =
|
||||
| Dist(genericDist)
|
||||
| Float(float)
|
||||
|
@ -123,7 +128,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
let fromDistFn = (
|
||||
subFnName: DistributionTypes.DistributionOperation.fromDist,
|
||||
dist: genericDist,
|
||||
) => {
|
||||
): outputType => {
|
||||
let response = switch subFnName {
|
||||
| ToFloat(distToFloatOperation) =>
|
||||
GenericDist.toFloatOperation(dist, ~toPointSetFn, ~distToFloatOperation)
|
||||
|
@ -139,6 +144,10 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
Dist(dist)
|
||||
}
|
||||
| ToDist(Normalize) => dist->GenericDist.normalize->Dist
|
||||
| ToScore(KLDivergence(t2)) =>
|
||||
GenericDist.klDivergence(dist, t2, ~toPointSetFn)
|
||||
->E.R2.fmap(r => Float(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
|
||||
| ToDist(Truncate(leftCutoff, rightCutoff)) =>
|
||||
GenericDist.truncate(~toPointSetFn, ~leftCutoff, ~rightCutoff, dist, ())
|
||||
|
@ -154,6 +163,25 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
|
||||
->E.R2.fmap(r => Dist(PointSet(r)))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombinationFloat(
|
||||
~toPointSetFn,
|
||||
~algebraicCombination=#LogarithmWithThreshold(eps),
|
||||
~f,
|
||||
)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(Scale(#Logarithm, f)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Logarithm, ~f)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(Scale(#Power, f)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Power, ~f)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
|
||||
| ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
|
||||
dist
|
||||
|
@ -189,6 +217,12 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
->GenericDist.mixture(~scaleMultiplyFn=scaleMultiply, ~pointwiseAddFn=pointwiseAdd)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| FromSamples(xs) =>
|
||||
xs
|
||||
->SampleSetDist.make
|
||||
->E.R2.errMap(x => DistributionTypes.SampleSetError(x))
|
||||
->E.R2.fmap(x => x->DistributionTypes.SampleSet->Dist)
|
||||
->OutputLocal.fromResult
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -227,8 +261,10 @@ module Constructors = {
|
|||
let pdf = (~env, dist, f) => C.pdf(dist, f)->run(~env)->toFloatR
|
||||
let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
|
||||
let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
|
||||
let klDivergence = (~env, dist1, dist2) => C.klDivergence(dist1, dist2)->run(~env)->toFloatR
|
||||
let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
|
||||
let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
|
||||
let fromSamples = (~env, xs) => C.fromSamples(xs)->run(~env)->toDistR
|
||||
let truncate = (~env, dist, leftCutoff, rightCutoff) =>
|
||||
C.truncate(dist, leftCutoff, rightCutoff)->run(~env)->toDistR
|
||||
let inspect = (~env, dist) => C.inspect(dist)->run(~env)->toDistR
|
||||
|
@ -244,6 +280,8 @@ module Constructors = {
|
|||
let algebraicLogarithm = (~env, dist1, dist2) =>
|
||||
C.algebraicLogarithm(dist1, dist2)->run(~env)->toDistR
|
||||
let algebraicPower = (~env, dist1, dist2) => C.algebraicPower(dist1, dist2)->run(~env)->toDistR
|
||||
let scalePower = (~env, dist, n) => C.scalePower(dist, n)->run(~env)->toDistR
|
||||
let scaleLogarithm = (~env, dist, n) => C.scaleLogarithm(dist, n)->run(~env)->toDistR
|
||||
let pointwiseAdd = (~env, dist1, dist2) => C.pointwiseAdd(dist1, dist2)->run(~env)->toDistR
|
||||
let pointwiseMultiply = (~env, dist1, dist2) =>
|
||||
C.pointwiseMultiply(dist1, dist2)->run(~env)->toDistR
|
||||
|
|
|
@ -4,6 +4,9 @@ type env = {
|
|||
xyPointLength: int,
|
||||
}
|
||||
|
||||
@genType
|
||||
let defaultEnv: env
|
||||
|
||||
open DistributionTypes
|
||||
|
||||
@genType
|
||||
|
@ -57,10 +60,14 @@ module Constructors: {
|
|||
@genType
|
||||
let isNormalized: (~env: env, genericDist) => result<bool, error>
|
||||
@genType
|
||||
let klDivergence: (~env: env, genericDist, genericDist) => result<float, error>
|
||||
@genType
|
||||
let toPointSet: (~env: env, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
|
||||
@genType
|
||||
let fromSamples: (~env: env, SampleSetDist.t) => result<genericDist, error>
|
||||
@genType
|
||||
let truncate: (~env: env, genericDist, option<float>, option<float>) => result<genericDist, error>
|
||||
@genType
|
||||
let inspect: (~env: env, genericDist) => result<genericDist, error>
|
||||
|
@ -81,6 +88,10 @@ module Constructors: {
|
|||
@genType
|
||||
let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let scaleLogarithm: (~env: env, genericDist, float) => result<genericDist, error>
|
||||
@genType
|
||||
let scalePower: (~env: env, genericDist, float) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
|
|
|
@ -11,7 +11,7 @@ type error =
|
|||
| NotYetImplemented
|
||||
| Unreachable
|
||||
| DistributionVerticalShiftIsInvalid
|
||||
| TooFewSamples
|
||||
| SampleSetError(SampleSetDist.sampleSetError)
|
||||
| ArgumentError(string)
|
||||
| OperationError(Operation.Error.t)
|
||||
| PointSetConversionError(SampleSetDist.pointsetConversionError)
|
||||
|
@ -35,7 +35,8 @@ module Error = {
|
|||
| DistributionVerticalShiftIsInvalid => "Distribution Vertical Shift is Invalid"
|
||||
| ArgumentError(s) => `Argument Error ${s}`
|
||||
| LogarithmOfDistributionError(s) => `Logarithm of input error: ${s}`
|
||||
| TooFewSamples => "Too Few Samples"
|
||||
| SampleSetError(TooFewSamples) => "Too Few Samples"
|
||||
| SampleSetError(NonNumericInput(err)) => `Found a non-number in input: ${err}`
|
||||
| OperationError(err) => Operation.Error.toString(err)
|
||||
| PointSetConversionError(err) => SampleSetDist.pointsetConversionErrorToString(err)
|
||||
| SparklineError(err) => PointSetTypes.sparklineErrorToString(err)
|
||||
|
@ -47,10 +48,7 @@ module Error = {
|
|||
let resultStringToResultError: result<'a, string> => result<'a, error> = n =>
|
||||
n->E.R2.errMap(r => r->fromString)
|
||||
|
||||
let sampleErrorToDistErr = (err: SampleSetDist.sampleSetError): error =>
|
||||
switch err {
|
||||
| TooFewSamples => TooFewSamples
|
||||
}
|
||||
let sampleErrorToDistErr = (err: SampleSetDist.sampleSetError): error => SampleSetError(err)
|
||||
}
|
||||
|
||||
@genType
|
||||
|
@ -68,12 +66,20 @@ module DistributionOperation = {
|
|||
| #Pdf(float)
|
||||
| #Mean
|
||||
| #Sample
|
||||
| #IntegralSum
|
||||
]
|
||||
|
||||
type toScaleFn = [
|
||||
| #Power
|
||||
| #Logarithm
|
||||
| #LogarithmWithThreshold(float)
|
||||
]
|
||||
|
||||
type toDist =
|
||||
| Normalize
|
||||
| ToPointSet
|
||||
| ToSampleSet(int)
|
||||
| Scale(toScaleFn, float)
|
||||
| Truncate(option<float>, option<float>)
|
||||
| Inspect
|
||||
|
||||
|
@ -85,9 +91,12 @@ module DistributionOperation = {
|
|||
| ToString
|
||||
| ToSparkline(int)
|
||||
|
||||
type toScore = KLDivergence(genericDist)
|
||||
|
||||
type fromDist =
|
||||
| ToFloat(toFloat)
|
||||
| ToDist(toDist)
|
||||
| ToScore(toScore)
|
||||
| ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
|
||||
| ToString(toString)
|
||||
| ToBool(toBool)
|
||||
|
@ -99,6 +108,7 @@ module DistributionOperation = {
|
|||
type genericFunctionCallInfo =
|
||||
| FromDist(fromDist, genericDist)
|
||||
| FromFloat(fromDist, float)
|
||||
| FromSamples(array<float>)
|
||||
| Mixture(array<(genericDist, float)>)
|
||||
|
||||
let distCallToString = (distFunction: fromDist): string =>
|
||||
|
@ -108,11 +118,17 @@ module DistributionOperation = {
|
|||
| ToFloat(#Mean) => `mean`
|
||||
| ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
|
||||
| ToFloat(#Sample) => `sample`
|
||||
| ToFloat(#IntegralSum) => `integralSum`
|
||||
| ToScore(KLDivergence(_)) => `klDivergence`
|
||||
| ToDist(Normalize) => `normalize`
|
||||
| ToDist(ToPointSet) => `toPointSet`
|
||||
| ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
|
||||
| ToDist(Truncate(_, _)) => `truncate`
|
||||
| ToDist(Inspect) => `inspect`
|
||||
| ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
|
||||
| ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
|
||||
| ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
|
||||
`scaleLogWithThreshold(${E.Float.toFixed(r)}, epsilon=${E.Float.toFixed(eps)})`
|
||||
| ToString(ToString) => `toString`
|
||||
| ToString(ToSparkline(n)) => `toSparkline(${E.I.toString(n)})`
|
||||
| ToBool(IsNormalized) => `isNormalized`
|
||||
|
@ -124,6 +140,7 @@ module DistributionOperation = {
|
|||
switch d {
|
||||
| FromDist(f, _) | FromFloat(f, _) => distCallToString(f)
|
||||
| Mixture(_) => `mixture`
|
||||
| FromSamples(_) => `fromSamples`
|
||||
}
|
||||
}
|
||||
module Constructors = {
|
||||
|
@ -140,8 +157,16 @@ module Constructors = {
|
|||
let isNormalized = (dist): t => FromDist(ToBool(IsNormalized), dist)
|
||||
let toPointSet = (dist): t => FromDist(ToDist(ToPointSet), dist)
|
||||
let toSampleSet = (dist, r): t => FromDist(ToDist(ToSampleSet(r)), dist)
|
||||
let fromSamples = (xs): t => FromSamples(xs)
|
||||
let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
|
||||
let inspect = (dist): t => FromDist(ToDist(Inspect), dist)
|
||||
let klDivergence = (dist1, dist2): t => FromDist(ToScore(KLDivergence(dist2)), dist1)
|
||||
let scalePower = (dist, n): t => FromDist(ToDist(Scale(#Power, n)), dist)
|
||||
let scaleLogarithm = (dist, n): t => FromDist(ToDist(Scale(#Logarithm, n)), dist)
|
||||
let scaleLogarithmWithThreshold = (dist, n, eps): t => FromDist(
|
||||
ToDist(Scale(#LogarithmWithThreshold(eps), n)),
|
||||
dist,
|
||||
)
|
||||
let toString = (dist): t => FromDist(ToString(ToString), dist)
|
||||
let toSparkline = (dist, n): t => FromDist(ToString(ToSparkline(n)), dist)
|
||||
let algebraicAdd = (dist1, dist2: genericDist): t => FromDist(
|
||||
|
|
|
@ -59,29 +59,41 @@ let integralEndY = (t: t): float =>
|
|||
|
||||
let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7
|
||||
|
||||
let klDivergence = (t1, t2, ~toPointSetFn: toPointSetFn): result<float, error> => {
|
||||
let pointSets = E.R.merge(toPointSetFn(t1), toPointSetFn(t2))
|
||||
pointSets |> E.R2.bind(((a, b)) =>
|
||||
PointSetDist.T.klDivergence(a, b)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
||||
)
|
||||
}
|
||||
|
||||
let toFloatOperation = (
|
||||
t,
|
||||
~toPointSetFn: toPointSetFn,
|
||||
~distToFloatOperation: Operation.distToFloatOperation,
|
||||
~distToFloatOperation: DistributionTypes.DistributionOperation.toFloat,
|
||||
) => {
|
||||
let trySymbolicSolution = switch (t: t) {
|
||||
| Symbolic(r) => SymbolicDist.T.operate(distToFloatOperation, r)->E.R.toOption
|
||||
| _ => None
|
||||
}
|
||||
switch distToFloatOperation {
|
||||
| #IntegralSum => Ok(integralEndY(t))
|
||||
| (#Pdf(_) | #Cdf(_) | #Inv(_) | #Mean | #Sample) as op => {
|
||||
let trySymbolicSolution = switch (t: t) {
|
||||
| Symbolic(r) => SymbolicDist.T.operate(op, r)->E.R.toOption
|
||||
| _ => None
|
||||
}
|
||||
|
||||
let trySampleSetSolution = switch ((t: t), distToFloatOperation) {
|
||||
| (SampleSet(sampleSet), #Mean) => SampleSetDist.mean(sampleSet)->Some
|
||||
| (SampleSet(sampleSet), #Sample) => SampleSetDist.sample(sampleSet)->Some
|
||||
| (SampleSet(sampleSet), #Inv(r)) => SampleSetDist.percentile(sampleSet, r)->Some
|
||||
| _ => None
|
||||
}
|
||||
let trySampleSetSolution = switch ((t: t), distToFloatOperation) {
|
||||
| (SampleSet(sampleSet), #Mean) => SampleSetDist.mean(sampleSet)->Some
|
||||
| (SampleSet(sampleSet), #Sample) => SampleSetDist.sample(sampleSet)->Some
|
||||
| (SampleSet(sampleSet), #Inv(r)) => SampleSetDist.percentile(sampleSet, r)->Some
|
||||
| _ => None
|
||||
}
|
||||
|
||||
switch trySymbolicSolution {
|
||||
| Some(r) => Ok(r)
|
||||
| None =>
|
||||
switch trySampleSetSolution {
|
||||
| Some(r) => Ok(r)
|
||||
| None => toPointSetFn(t)->E.R2.fmap(PointSetDist.operate(distToFloatOperation))
|
||||
switch trySymbolicSolution {
|
||||
| Some(r) => Ok(r)
|
||||
| None =>
|
||||
switch trySampleSetSolution {
|
||||
| Some(r) => Ok(r)
|
||||
| None => toPointSetFn(t)->E.R2.fmap(PointSetDist.operate(op))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -379,14 +391,12 @@ let pointwiseCombinationFloat = (
|
|||
~algebraicCombination: Operation.algebraicOperation,
|
||||
~f: float,
|
||||
): result<t, error> => {
|
||||
let m = switch algebraicCombination {
|
||||
| #Add | #Subtract => Error(DistributionTypes.DistributionVerticalShiftIsInvalid)
|
||||
| (#Multiply | #Divide | #Power | #Logarithm) as arithmeticOperation =>
|
||||
let executeCombination = arithOp =>
|
||||
toPointSetFn(t)->E.R.bind(t => {
|
||||
//TODO: Move to PointSet codebase
|
||||
let fn = (secondary, main) => Operation.Scale.toFn(arithmeticOperation, main, secondary)
|
||||
let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(arithmeticOperation)
|
||||
let integralCacheFn = Operation.Scale.toIntegralCacheFn(arithmeticOperation)
|
||||
let fn = (secondary, main) => Operation.Scale.toFn(arithOp, main, secondary)
|
||||
let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(arithOp)
|
||||
let integralCacheFn = Operation.Scale.toIntegralCacheFn(arithOp)
|
||||
PointSetDist.T.mapYResult(
|
||||
~integralSumCacheFn=integralSumCacheFn(f),
|
||||
~integralCacheFn=integralCacheFn(f),
|
||||
|
@ -394,6 +404,11 @@ let pointwiseCombinationFloat = (
|
|||
t,
|
||||
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
||||
})
|
||||
let m = switch algebraicCombination {
|
||||
| #Add | #Subtract => Error(DistributionTypes.DistributionVerticalShiftIsInvalid)
|
||||
| (#Multiply | #Divide | #Power | #Logarithm) as arithmeticOperation =>
|
||||
executeCombination(arithmeticOperation)
|
||||
| #LogarithmWithThreshold(eps) => executeCombination(#LogarithmWithThreshold(eps))
|
||||
}
|
||||
m->E.R2.fmap(r => DistributionTypes.PointSet(r))
|
||||
}
|
||||
|
|
|
@ -20,9 +20,11 @@ let isNormalized: t => bool
|
|||
let toFloatOperation: (
|
||||
t,
|
||||
~toPointSetFn: toPointSetFn,
|
||||
~distToFloatOperation: Operation.distToFloatOperation,
|
||||
~distToFloatOperation: DistributionTypes.DistributionOperation.toFloat,
|
||||
) => result<float, error>
|
||||
|
||||
let klDivergence: (t, t, ~toPointSetFn: toPointSetFn) => result<float, error>
|
||||
|
||||
@genType
|
||||
let toPointSet: (
|
||||
t,
|
||||
|
|
|
@ -86,6 +86,7 @@ let stepwiseToLinear = (t: t): t =>
|
|||
|
||||
// Note: This results in a distribution with as many points as the sum of those in t1 and t2.
|
||||
let combinePointwise = (
|
||||
~combiner=XYShape.PointwiseCombination.combine,
|
||||
~integralSumCachesFn=(_, _) => None,
|
||||
~distributionType: PointSetTypes.distributionType=#PDF,
|
||||
fn: (float, float) => result<float, Operation.Error.t>,
|
||||
|
@ -119,7 +120,7 @@ let combinePointwise = (
|
|||
|
||||
let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation)
|
||||
|
||||
XYShape.PointwiseCombination.combine(fn, interpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
|
||||
combiner(fn, interpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
|
||||
make(~integralSumCache=combinedIntegralSum, x)
|
||||
)
|
||||
}
|
||||
|
@ -156,8 +157,10 @@ let reduce = (
|
|||
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
|
||||
fn: (float, float) => result<float, 'e>,
|
||||
continuousShapes,
|
||||
): result<t, 'e> =>
|
||||
continuousShapes |> E.A.R.foldM(combinePointwise(~integralSumCachesFn, fn), empty)
|
||||
): result<t, 'e> => {
|
||||
let merge = combinePointwise(~integralSumCachesFn, fn)
|
||||
continuousShapes |> E.A.R.foldM(merge, empty)
|
||||
}
|
||||
|
||||
let mapYResult = (
|
||||
~integralSumCacheFn=_ => None,
|
||||
|
@ -267,11 +270,27 @@ module T = Dist({
|
|||
}
|
||||
let variance = (t: t): float =>
|
||||
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
|
||||
|
||||
let klDivergence = (prediction: t, answer: t) => {
|
||||
let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
|
||||
PointSetDist_Scoring.KLDivergence.integrand,
|
||||
prediction.xyShape,
|
||||
answer.xyShape,
|
||||
)
|
||||
let xyShapeToContinuous: XYShape.xyShape => t = xyShape => {
|
||||
xyShape: xyShape,
|
||||
interpolation: #Linear,
|
||||
integralSumCache: None,
|
||||
integralCache: None,
|
||||
}
|
||||
newShape->E.R2.fmap(x => x->xyShapeToContinuous->integralEndY)
|
||||
}
|
||||
})
|
||||
|
||||
let isNormalized = (t: t): bool => {
|
||||
let areaUnderIntegral = t |> updateIntegralCache(Some(T.integral(t))) |> T.integralEndY
|
||||
areaUnderIntegral < 1. +. 1e-7 && areaUnderIntegral > 1. -. 1e-7
|
||||
areaUnderIntegral < 1. +. MagicNumbers.Epsilon.seven &&
|
||||
areaUnderIntegral > 1. -. MagicNumbers.Epsilon.seven
|
||||
}
|
||||
|
||||
let downsampleEquallyOverX = (length, t): t =>
|
||||
|
|
|
@ -33,32 +33,37 @@ let shapeFn = (fn, t: t) => t |> getShape |> fn
|
|||
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
|
||||
|
||||
let combinePointwise = (
|
||||
~combiner=XYShape.PointwiseCombination.combine,
|
||||
~integralSumCachesFn=(_, _) => None,
|
||||
~fn=(a, b) => Ok(a +. b),
|
||||
t1: PointSetTypes.discreteShape,
|
||||
t2: PointSetTypes.discreteShape,
|
||||
): PointSetTypes.discreteShape => {
|
||||
let combinedIntegralSum = Common.combineIntegralSums(
|
||||
integralSumCachesFn,
|
||||
t1.integralSumCache,
|
||||
t2.integralSumCache,
|
||||
)
|
||||
): result<PointSetTypes.discreteShape, 'e> => {
|
||||
// let combinedIntegralSum = Common.combineIntegralSums(
|
||||
// integralSumCachesFn,
|
||||
// t1.integralSumCache,
|
||||
// t2.integralSumCache,
|
||||
// )
|
||||
|
||||
// TODO: does it ever make sense to pointwise combine the integrals here?
|
||||
// It could be done for pointwise additions, but is that ever needed?
|
||||
|
||||
make(
|
||||
~integralSumCache=combinedIntegralSum,
|
||||
XYShape.PointwiseCombination.combine(
|
||||
(a, b) => Ok(a +. b),
|
||||
XYShape.XtoY.discreteInterpolator,
|
||||
t1.xyShape,
|
||||
t2.xyShape,
|
||||
)->E.R.toExn("Addition operation should never fail", _),
|
||||
)
|
||||
combiner(fn, XYShape.XtoY.discreteInterpolator, t1.xyShape, t2.xyShape)->E.R.toExn(
|
||||
"Addition operation should never fail",
|
||||
_,
|
||||
),
|
||||
)->Ok
|
||||
}
|
||||
|
||||
let reduce = (~integralSumCachesFn=(_, _) => None, discreteShapes): PointSetTypes.discreteShape =>
|
||||
discreteShapes |> E.A.fold_left(combinePointwise(~integralSumCachesFn), empty)
|
||||
let reduce = (
|
||||
~integralSumCachesFn=(_, _) => None,
|
||||
fn: (float, float) => result<float, 'e>,
|
||||
discreteShapes: array<PointSetTypes.discreteShape>,
|
||||
): result<t, 'e> => {
|
||||
let merge = combinePointwise(~integralSumCachesFn, ~fn)
|
||||
discreteShapes |> E.A.R.foldM(merge, empty)
|
||||
}
|
||||
|
||||
let updateIntegralSumCache = (integralSumCache, t: t): t => {
|
||||
...t,
|
||||
|
@ -158,6 +163,7 @@ module T = Dist({
|
|||
}
|
||||
|
||||
let integralEndY = (t: t) => t.integralSumCache |> E.O.default(t |> integral |> Continuous.lastY)
|
||||
let integralEndYResult = (t: t) => t->integralEndY->Ok
|
||||
let minX = shapeFn(XYShape.T.minX)
|
||||
let maxX = shapeFn(XYShape.T.maxX)
|
||||
let toDiscreteProbabilityMassFraction = _ => 1.0
|
||||
|
@ -221,4 +227,13 @@ module T = Dist({
|
|||
let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
|
||||
XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
||||
}
|
||||
|
||||
let klDivergence = (prediction: t, answer: t) => {
|
||||
combinePointwise(
|
||||
~combiner=XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument0,
|
||||
~fn=PointSetDist_Scoring.KLDivergence.integrand,
|
||||
prediction,
|
||||
answer,
|
||||
) |> E.R2.bind(integralEndYResult)
|
||||
}
|
||||
})
|
||||
|
|
|
@ -33,6 +33,7 @@ module type dist = {
|
|||
|
||||
let mean: t => float
|
||||
let variance: t => float
|
||||
let klDivergence: (t, t) => result<float, Operation.Error.t>
|
||||
}
|
||||
|
||||
module Dist = (T: dist) => {
|
||||
|
@ -55,6 +56,7 @@ module Dist = (T: dist) => {
|
|||
let mean = T.mean
|
||||
let variance = T.variance
|
||||
let integralEndY = T.integralEndY
|
||||
let klDivergence = T.klDivergence
|
||||
|
||||
let updateIntegralCache = T.updateIntegralCache
|
||||
|
||||
|
|
|
@ -36,6 +36,47 @@ let updateIntegralCache = (integralCache, t: t): t => {
|
|||
integralCache: integralCache,
|
||||
}
|
||||
|
||||
let combinePointwise = (
|
||||
~integralSumCachesFn=(_, _) => None,
|
||||
~integralCachesFn=(_, _) => None,
|
||||
fn: (float, float) => result<float, 'e>,
|
||||
t1: t,
|
||||
t2: t,
|
||||
): result<t, 'e> => {
|
||||
let reducedDiscrete =
|
||||
[t1, t2]
|
||||
|> E.A.fmap(toDiscrete)
|
||||
|> E.A.O.concatSomes
|
||||
|> Discrete.reduce(~integralSumCachesFn, fn)
|
||||
|> E.R.toExn("Theoretically unreachable state")
|
||||
|
||||
let reducedContinuous =
|
||||
[t1, t2]
|
||||
|> E.A.fmap(toContinuous)
|
||||
|> E.A.O.concatSomes
|
||||
|> Continuous.reduce(~integralSumCachesFn, fn)
|
||||
|
||||
let combinedIntegralSum = Common.combineIntegralSums(
|
||||
integralSumCachesFn,
|
||||
t1.integralSumCache,
|
||||
t2.integralSumCache,
|
||||
)
|
||||
|
||||
let combinedIntegral = Common.combineIntegrals(
|
||||
integralCachesFn,
|
||||
t1.integralCache,
|
||||
t2.integralCache,
|
||||
)
|
||||
reducedContinuous->E.R2.fmap(continuous =>
|
||||
make(
|
||||
~integralSumCache=combinedIntegralSum,
|
||||
~integralCache=combinedIntegral,
|
||||
~discrete=reducedDiscrete,
|
||||
~continuous,
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
module T = Dist({
|
||||
type t = PointSetTypes.mixedShape
|
||||
type integral = PointSetTypes.continuousShape
|
||||
|
@ -259,6 +300,12 @@ module T = Dist({
|
|||
| _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
||||
}
|
||||
}
|
||||
|
||||
let klDivergence = (prediction: t, answer: t) => {
|
||||
combinePointwise(PointSetDist_Scoring.KLDivergence.integrand, prediction, answer) |> E.R.fmap(
|
||||
integralEndY,
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
|
||||
|
@ -316,7 +363,10 @@ let combinePointwise = (
|
|||
t2: t,
|
||||
): result<t, 'e> => {
|
||||
let reducedDiscrete =
|
||||
[t1, t2] |> E.A.fmap(toDiscrete) |> E.A.O.concatSomes |> Discrete.reduce(~integralSumCachesFn)
|
||||
[t1, t2]
|
||||
|> E.A.fmap(toDiscrete)
|
||||
|> E.A.O.concatSomes
|
||||
|> Discrete.reduce(~integralSumCachesFn, fn)
|
||||
|
||||
let reducedContinuous =
|
||||
[t1, t2]
|
||||
|
@ -335,11 +385,11 @@ let combinePointwise = (
|
|||
t1.integralCache,
|
||||
t2.integralCache,
|
||||
)
|
||||
reducedContinuous->E.R2.fmap(continuous =>
|
||||
E.R.merge(reducedContinuous, reducedDiscrete)->E.R2.fmap(((continuous, discrete)) =>
|
||||
make(
|
||||
~integralSumCache=combinedIntegralSum,
|
||||
~integralCache=combinedIntegral,
|
||||
~discrete=reducedDiscrete,
|
||||
~discrete,
|
||||
~continuous,
|
||||
)
|
||||
)
|
||||
|
|
|
@ -84,7 +84,12 @@ let combinePointwise = (
|
|||
m2,
|
||||
)->E.R2.fmap(x => PointSetTypes.Continuous(x))
|
||||
| (Discrete(m1), Discrete(m2)) =>
|
||||
Ok(PointSetTypes.Discrete(Discrete.combinePointwise(~integralSumCachesFn, m1, m2)))
|
||||
Discrete.combinePointwise(
|
||||
~integralSumCachesFn,
|
||||
~fn,
|
||||
m1,
|
||||
m2,
|
||||
)->E.R2.fmap(x => PointSetTypes.Discrete(x))
|
||||
| (m1, m2) =>
|
||||
Mixed.combinePointwise(
|
||||
~integralSumCachesFn,
|
||||
|
@ -190,6 +195,12 @@ module T = Dist({
|
|||
| Discrete(m) => Discrete.T.variance(m)
|
||||
| Continuous(m) => Continuous.T.variance(m)
|
||||
}
|
||||
|
||||
let klDivergence = (t1: t, t2: t) =>
|
||||
switch (t1, t2) {
|
||||
| (Continuous(t1), Continuous(t2)) => Continuous.T.klDivergence(t1, t2)
|
||||
| _ => Error(NotYetImplemented)
|
||||
}
|
||||
})
|
||||
|
||||
let pdf = (f: float, t: t) => {
|
||||
|
|
|
@ -0,0 +1,15 @@
module KLDivergence = {
  let logFn = Js.Math.log // base e
  let integrand = (predictionElement: float, answerElement: float): result<
    float,
    Operation.Error.t,
  > =>
    if answerElement == 0.0 {
      Ok(0.0)
    } else if predictionElement == 0.0 {
      Error(Operation.NegativeInfinityError)
    } else {
      let quot = predictionElement /. answerElement
      quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answerElement *. logFn(quot))
    }
}
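Reading `predictionElement` as p(x) and `answerElement` as q(x), the integrand is the pointwise contribution to the KL divergence of the prediction from the answer:

```latex
\mathrm{integrand}\bigl(p(x), q(x)\bigr) = -\,q(x)\,\log\frac{p(x)}{q(x)} = q(x)\,\log\frac{q(x)}{p(x)},
\qquad
D_{\mathrm{KL}}(q \,\|\, p) = \int q(x)\,\log\frac{q(x)}{p(x)}\,dx .
```

The guards match the usual conventions: where q(x) = 0 the contribution is 0, and a point with q(x) > 0 but p(x) = 0 would require log 0, reported here as NegativeInfinityError.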
|
|
@ -1,11 +1,12 @@
|
|||
@genType
|
||||
module Error = {
|
||||
@genType
|
||||
type sampleSetError = TooFewSamples
|
||||
type sampleSetError = TooFewSamples | NonNumericInput(string)
|
||||
|
||||
let sampleSetErrorToString = (err: sampleSetError): string =>
|
||||
switch err {
|
||||
| TooFewSamples => "Too few samples when constructing sample set"
|
||||
| NonNumericInput(err) => `Found a non-number in input: ${err}`
|
||||
}
|
||||
|
||||
@genType
|
||||
|
|
|
@ -1,27 +1,30 @@
// The math here was taken from https://github.com/jasondavies/science.js/blob/master/src/stats/SampleSetDist_Bandwidth.js

let {iqr_percentile, nrd0_lo_denominator, one, nrd0_coef, nrd_coef, nrd_fractionalPower} = module(
  MagicNumbers.SampleSetBandwidth
)
let len = x => E.A.length(x) |> float_of_int

let iqr = x => Jstat.percentile(x, 0.75, true) -. Jstat.percentile(x, 0.25, true)
let iqr = x =>
  Jstat.percentile(x, iqr_percentile, true) -. Jstat.percentile(x, 1.0 -. iqr_percentile, true)

// Silverman, B. W. (1986) Density Estimation. London: Chapman and Hall.
let nrd0 = x => {
  let hi = Js_math.sqrt(Jstat.variance(x))
  let lo = Js_math.minMany_float([hi, iqr(x) /. 1.34])
  let lo = Js_math.minMany_float([hi, iqr(x) /. nrd0_lo_denominator])
  let e = Js_math.abs_float(x[1])
  let lo' = switch (lo, hi, e) {
  | (lo, _, _) if !Js.Float.isNaN(lo) => lo
  | (_, hi, _) if !Js.Float.isNaN(hi) => hi
  | (_, _, e) if !Js.Float.isNaN(e) => e
  | _ => 1.0
  | _ => one
  }
  0.9 *. lo' *. Js.Math.pow_float(~base=len(x), ~exp=-0.2)
  nrd0_coef *. lo' *. Js.Math.pow_float(~base=len(x), ~exp=nrd_fractionalPower)
}

// Scott, D. W. (1992) Multivariate Density Estimation: Theory, Practice, and Visualization. Wiley.
let nrd = x => {
  let h = iqr(x) /. 1.34
  1.06 *.
  let h = iqr(x) /. nrd0_lo_denominator
  nrd_coef *.
  Js.Math.min_float(Js.Math.sqrt(Jstat.variance(x)), h) *.
  Js.Math.pow_float(~base=len(x), ~exp=-1.0 /. 5.0)
  Js.Math.pow_float(~base=len(x), ~exp=nrd_fractionalPower)
}
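This change only moves the literals (the 0.75/0.25 percentiles, 1.34, 0.9, 1.06, the -1/5 exponent) into MagicNumbers.SampleSetBandwidth; the estimator itself is still Silverman's rule of thumb. A self-contained TypeScript sketch of nrd0 using those literal values (the percentile helper below is a simple linear interpolation, not necessarily Jstat's exact method):

```ts
// Silverman's rule-of-thumb KDE bandwidth: 0.9 * min(sd, IQR / 1.34) * n^(-1/5),
// with the same NaN fallback chain as the ReScript version above.
function percentile(sorted: number[], p: number): number {
  const idx = (sorted.length - 1) * p;
  const lo = Math.floor(idx);
  const hi = Math.ceil(idx);
  return sorted[lo] + (sorted[hi] - sorted[lo]) * (idx - lo);
}

function nrd0(xs: number[]): number {
  const n = xs.length;
  const mean = xs.reduce((a, b) => a + b, 0) / n;
  const sd = Math.sqrt(xs.reduce((a, b) => a + (b - mean) ** 2, 0) / (n - 1));
  const sorted = [...xs].sort((a, b) => a - b);
  const iqr = percentile(sorted, 0.75) - percentile(sorted, 0.25);

  let lo = Math.min(sd, iqr / 1.34);
  if (Number.isNaN(lo)) lo = sd;              // fall back to the standard deviation
  if (Number.isNaN(lo)) lo = Math.abs(xs[1]); // then to |x[1]|
  if (Number.isNaN(lo)) lo = 1.0;             // and finally to 1
  return 0.9 * lo * Math.pow(n, -0.2);
}
```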
|
||||
|
|
|
@ -216,15 +216,42 @@
  }
}

module Gamma = {
  type t = gamma
  let make = (shape: float, scale: float) => {
    if shape > 0. {
      if scale > 0. {
        Ok(#Gamma({shape: shape, scale: scale}))
      } else {
        Error("scale must be larger than 0")
      }
    } else {
      Error("shape must be larger than 0")
    }
  }
  let pdf = (x: float, t: t) => Jstat.Gamma.pdf(x, t.shape, t.scale)
  let cdf = (x: float, t: t) => Jstat.Gamma.cdf(x, t.shape, t.scale)
  let inv = (p: float, t: t) => Jstat.Gamma.inv(p, t.shape, t.scale)
  let sample = (t: t) => Jstat.Gamma.sample(t.shape, t.scale)
  let mean = (t: t) => Ok(Jstat.Gamma.mean(t.shape, t.scale))
  let toString = ({shape, scale}: t) => j`($shape, $scale)`
}
|
||||
|
||||
module Float = {
|
||||
type t = float
|
||||
let make = t => #Float(t)
|
||||
let makeSafe = t =>
|
||||
if E.Float.isFinite(t) {
|
||||
Ok(#Float(t))
|
||||
} else {
|
||||
Error("Float must be finite")
|
||||
}
|
||||
let pdf = (x, t: t) => x == t ? 1.0 : 0.0
|
||||
let cdf = (x, t: t) => x >= t ? 1.0 : 0.0
|
||||
let inv = (p, t: t) => p < t ? 0.0 : 1.0
|
||||
let mean = (t: t) => Ok(t)
|
||||
let sample = (t: t) => t
|
||||
let toString = Js.Float.toString
|
||||
let toString = (t: t) => j`Delta($t)`
|
||||
}
|
||||
|
||||
module From90thPercentile = {
|
||||
|
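As a quick sanity check on the Gamma and Float (delta) modules above, assuming Jstat's shape/scale parameterization:

```latex
\mathrm{Gamma}(k, \theta):\quad f(x) = \frac{x^{k-1} e^{-x/\theta}}{\Gamma(k)\,\theta^{k}}, \qquad \mathbb{E}[X] = k\theta
\qquad\qquad
\mathrm{Delta}(t):\quad F(x) = \mathbf{1}[x \ge t]
```
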
@@ -246,6 +273,7 @@ module T = {
| #Triangular(n) => Triangular.pdf(x, n)
| #Exponential(n) => Exponential.pdf(x, n)
| #Cauchy(n) => Cauchy.pdf(x, n)
| #Gamma(n) => Gamma.pdf(x, n)
| #Lognormal(n) => Lognormal.pdf(x, n)
| #Uniform(n) => Uniform.pdf(x, n)
| #Beta(n) => Beta.pdf(x, n)
@@ -258,6 +286,7 @@ module T = {
| #Triangular(n) => Triangular.cdf(x, n)
| #Exponential(n) => Exponential.cdf(x, n)
| #Cauchy(n) => Cauchy.cdf(x, n)
| #Gamma(n) => Gamma.cdf(x, n)
| #Lognormal(n) => Lognormal.cdf(x, n)
| #Uniform(n) => Uniform.cdf(x, n)
| #Beta(n) => Beta.cdf(x, n)
@@ -270,6 +299,7 @@ module T = {
| #Triangular(n) => Triangular.inv(x, n)
| #Exponential(n) => Exponential.inv(x, n)
| #Cauchy(n) => Cauchy.inv(x, n)
| #Gamma(n) => Gamma.inv(x, n)
| #Lognormal(n) => Lognormal.inv(x, n)
| #Uniform(n) => Uniform.inv(x, n)
| #Beta(n) => Beta.inv(x, n)
@@ -282,6 +312,7 @@ module T = {
| #Triangular(n) => Triangular.sample(n)
| #Exponential(n) => Exponential.sample(n)
| #Cauchy(n) => Cauchy.sample(n)
| #Gamma(n) => Gamma.sample(n)
| #Lognormal(n) => Lognormal.sample(n)
| #Uniform(n) => Uniform.sample(n)
| #Beta(n) => Beta.sample(n)
@@ -304,6 +335,7 @@ module T = {
| #Exponential(n) => Exponential.toString(n)
| #Cauchy(n) => Cauchy.toString(n)
| #Normal(n) => Normal.toString(n)
| #Gamma(n) => Gamma.toString(n)
| #Lognormal(n) => Lognormal.toString(n)
| #Uniform(n) => Uniform.toString(n)
| #Beta(n) => Beta.toString(n)
@@ -317,6 +349,7 @@ module T = {
| #Cauchy(n) => Cauchy.inv(minCdfValue, n)
| #Normal(n) => Normal.inv(minCdfValue, n)
| #Lognormal(n) => Lognormal.inv(minCdfValue, n)
| #Gamma(n) => Gamma.inv(minCdfValue, n)
| #Uniform({low}) => low
| #Beta(n) => Beta.inv(minCdfValue, n)
| #Float(n) => n
@@ -328,6 +361,7 @@ module T = {
| #Exponential(n) => Exponential.inv(maxCdfValue, n)
| #Cauchy(n) => Cauchy.inv(maxCdfValue, n)
| #Normal(n) => Normal.inv(maxCdfValue, n)
| #Gamma(n) => Gamma.inv(maxCdfValue, n)
| #Lognormal(n) => Lognormal.inv(maxCdfValue, n)
| #Beta(n) => Beta.inv(maxCdfValue, n)
| #Uniform({high}) => high
@@ -343,6 +377,7 @@ module T = {
| #Lognormal(n) => Lognormal.mean(n)
| #Beta(n) => Beta.mean(n)
| #Uniform(n) => Uniform.mean(n)
| #Gamma(n) => Gamma.mean(n)
| #Float(n) => Float.mean(n)
}

@@ -361,8 +396,9 @@ module T = {
| (#ByWeight, #Uniform(n)) =>
// In `ByWeight mode, uniform distributions get special treatment because we need two x's
// on either side for proper rendering (just left and right of the discontinuities).
let dx = 0.00001 *. (n.high -. n.low)
[n.low -. dx, n.low +. dx, n.high -. dx, n.high +. dx]
let distance = n.high -. n.low
let dx = MagicNumbers.Epsilon.ten *. distance
[n.low -. dx, n.low, n.low +. dx, n.high -. dx, n.high, n.high +. dx]
| (#ByWeight, _) =>
let ys = E.A.Floats.range(minCdfValue, maxCdfValue, n)
ys |> E.A.fmap(y => inv(y, dist))

@@ -31,6 +31,11 @@ type triangular = {
high: float,
}

type gamma = {
shape: float,
scale: float,
}

@genType
type symbolicDist = [
| #Normal(normal)
@@ -40,6 +45,7 @@ type symbolicDist = [
| #Exponential(exponential)
| #Cauchy(cauchy)
| #Triangular(triangular)
| #Gamma(gamma)
| #Float(float)
]

@@ -6,6 +6,7 @@ module Math = {
module Epsilon = {
let ten = 1e-10
let seven = 1e-7
let five = 1e-5
}

module Environment = {
@@ -35,3 +36,16 @@ module ToPointSet = {
*/
let minDiscreteToKeep = samples => max(20, E.A.length(samples) / 50)
}

module SampleSetBandwidth = {
// Silverman, B. W. (1986) Density Estimation. London: Chapman and Hall.
// Scott, D. W. (1992) Multivariate Density Estimation: Theory, Practice, and Visualization. Wiley.
let iqr_percentile = 0.75
let iqr_percentile_complement = 1.0 -. iqr_percentile
let nrd0_lo_denominator = 1.34
let one = 1.0
let nrd0_coef = 0.9

let nrd_coef = 1.06
let nrd_fractionalPower = -0.2
}

@@ -1,15 +1,27 @@
module Dispatch = Reducer_Dispatch
module ErrorValue = Reducer_ErrorValue
module Expression = Reducer_Expression
module Extra = Reducer_Extra
module Js = Reducer_Js
module MathJs = Reducer_MathJs
module ExpressionValue = ReducerInterface_ExpressionValue
module Lambda = Reducer_Expression_Lambda

type expressionValue = Reducer_Expression.expressionValue
type externalBindings = Expression.externalBindings
let evaluate = Expression.eval
let evaluateUsingExternalBindings = Expression.evalUsingExternalBindings
let evaluatePartialUsingExternalBindings = Expression.evalPartialUsingExternalBindings
type environment = ReducerInterface_ExpressionValue.environment
type errorValue = Reducer_ErrorValue.errorValue
type expressionValue = ReducerInterface_ExpressionValue.expressionValue
type externalBindings = ReducerInterface_ExpressionValue.externalBindings
type lambdaValue = ExpressionValue.lambdaValue

let evaluate = Expression.evaluate
let evaluateUsingOptions = Expression.evaluateUsingOptions
let evaluatePartialUsingExternalBindings = Expression.evaluatePartialUsingExternalBindings
let parse = Expression.parse
let parseOuter = Expression.parseOuter
let parsePartial = Expression.parsePartial

let foreignFunctionInterface = (
lambdaValue: lambdaValue,
argArray: array<expressionValue>,
environment: ExpressionValue.environment,
) => {
Lambda.foreignFunctionInterface(lambdaValue, argArray, environment, Expression.reduceExpression)
}

let defaultEnvironment = ExpressionValue.defaultEnvironment

let defaultExternalBindings = ExpressionValue.defaultExternalBindings

@@ -1,26 +1,43 @@
module Dispatch = Reducer_Dispatch
module ErrorValue = Reducer_ErrorValue
module Expression = Reducer_Expression
module Extra = Reducer_Extra
module Js = Reducer_Js
module MathJs = Reducer_MathJs

@genType
type environment = ReducerInterface_ExpressionValue.environment
@genType
type errorValue = Reducer_ErrorValue.errorValue
@genType
type expressionValue = ReducerInterface_ExpressionValue.expressionValue
@genType
type externalBindings = ReducerInterface_ExpressionValue.externalBindings
@genType
let evaluate: string => result<expressionValue, Reducer_ErrorValue.errorValue>
type lambdaValue = ReducerInterface_ExpressionValue.lambdaValue

@genType
let evaluateUsingExternalBindings: (
let evaluateUsingOptions: (
~environment: option<QuriSquiggleLang.ReducerInterface_ExpressionValue.environment>,
~externalBindings: option<QuriSquiggleLang.ReducerInterface_ExpressionValue.externalBindings>,
string,
externalBindings,
) => result<expressionValue, Reducer_ErrorValue.errorValue>
) => result<expressionValue, errorValue>
@genType
let evaluatePartialUsingExternalBindings: (
string,
externalBindings,
) => result<externalBindings, Reducer_ErrorValue.errorValue>
let parse: string => result<Expression.expression, ErrorValue.errorValue>
let parseOuter: string => result<Expression.expression, ErrorValue.errorValue>
let parsePartial: string => result<Expression.expression, ErrorValue.errorValue>
QuriSquiggleLang.ReducerInterface_ExpressionValue.externalBindings,
QuriSquiggleLang.ReducerInterface_ExpressionValue.environment,
) => result<externalBindings, errorValue>
@genType
let evaluate: string => result<expressionValue, errorValue>

let parse: string => result<Expression.expression, errorValue>

@genType
let foreignFunctionInterface: (
QuriSquiggleLang.ReducerInterface_ExpressionValue.lambdaValue,
array<QuriSquiggleLang.ReducerInterface_ExpressionValue.expressionValue>,
QuriSquiggleLang.ReducerInterface_ExpressionValue.environment,
) => result<expressionValue, errorValue>

@genType
let defaultEnvironment: environment

@genType
let defaultExternalBindings: externalBindings

@@ -1,5 +1,9 @@
module Bindings = Reducer_Expression_Bindings
module ExpressionT = Reducer_Expression_T
module ExternalLibrary = ReducerInterface.ExternalLibrary
module Lambda = Reducer_Expression_Lambda
module MathJs = Reducer_MathJs
module Result = Belt.Result
open ReducerInterface.ExpressionValue
open Reducer_ErrorValue

@@ -11,7 +15,10 @@ open Reducer_ErrorValue

exception TestRescriptException

let callInternal = (call: functionCall): result<'b, errorValue> => {
let callInternal = (call: functionCall, environment, reducer: ExpressionT.reducerFn): result<
'b,
errorValue,
> => {
let callMathJs = (call: functionCall): result<'b, errorValue> =>
switch call {
| ("javascriptraise", [msg]) => Js.Exn.raiseError(toString(msg)) // For Tests
@@ -20,12 +27,12 @@ let callInternal = (call: functionCall): result<'b, errorValue> => {
}

let constructRecord = arrayOfPairs => {
Belt.Array.map(arrayOfPairs, pairValue => {
Belt.Array.map(arrayOfPairs, pairValue =>
switch pairValue {
| EvArray([EvString(key), valueValue]) => (key, valueValue)
| _ => ("wrong key type", pairValue->toStringWithType->EvString)
}
})
)
->Js.Dict.fromArray
->EvRecord
->Ok
@@ -43,16 +50,89 @@ let callInternal = (call: functionCall): result<'b, errorValue> => {
| None => RERecordPropertyNotFound("Record property not found", sIndex)->Error
}

let inspect = (value: expressionValue) => {
Js.log(value->toString)
value->Ok
}

let inspectLabel = (value: expressionValue, label: string) => {
Js.log(`${label}: ${value->toString}`)
value->Ok
}

let doSetBindings = (
externalBindings: externalBindings,
symbol: string,
value: expressionValue,
) => {
Bindings.fromExternalBindings(externalBindings)
->Belt.Map.String.set(symbol, value)
->Bindings.toExternalBindings
->EvRecord
->Ok
}

let doExportBindings = (externalBindings: externalBindings) => EvRecord(externalBindings)->Ok

let doKeepArray = (aValueArray, aLambdaValue) => {
let rMappedList = aValueArray->Belt.Array.reduceReverse(Ok(list{}), (rAcc, elem) =>
rAcc->Result.flatMap(acc => {
let rNewElem = Lambda.doLambdaCall(aLambdaValue, list{elem}, environment, reducer)
rNewElem->Result.map(newElem =>
switch newElem {
| EvBool(true) => list{elem, ...acc}
| _ => acc
}
)
})
)
rMappedList->Result.map(mappedList => mappedList->Belt.List.toArray->EvArray)
}

let doMapArray = (aValueArray, aLambdaValue) => {
let rMappedList = aValueArray->Belt.Array.reduceReverse(Ok(list{}), (rAcc, elem) =>
rAcc->Result.flatMap(acc => {
let rNewElem = Lambda.doLambdaCall(aLambdaValue, list{elem}, environment, reducer)
rNewElem->Result.map(newElem => list{newElem, ...acc})
})
)
rMappedList->Result.map(mappedList => mappedList->Belt.List.toArray->EvArray)
}

let doReduceArray = (aValueArray, initialValue, aLambdaValue) => {
aValueArray->Belt.Array.reduce(Ok(initialValue), (rAcc, elem) =>
rAcc->Result.flatMap(acc =>
Lambda.doLambdaCall(aLambdaValue, list{acc, elem}, environment, reducer)
)
)
}

let doReduceReverseArray = (aValueArray, initialValue, aLambdaValue) => {
aValueArray->Belt.Array.reduceReverse(Ok(initialValue), (rAcc, elem) =>
rAcc->Result.flatMap(acc =>
Lambda.doLambdaCall(aLambdaValue, list{acc, elem}, environment, reducer)
)
)
}

switch call {
// | ("$constructRecord", pairArray)
// | ("$atIndex", [EvArray(anArray), EvNumber(fIndex)]) => arrayAtIndex(anArray, fIndex)
// | ("$atIndex", [EvRecord(aRecord), EvString(sIndex)]) => recordAtIndex(aRecord, sIndex)
| ("$constructRecord", [EvArray(arrayOfPairs)]) => constructRecord(arrayOfPairs)
| ("$atIndex", [EvArray(aValueArray), EvArray([EvNumber(fIndex)])]) =>
arrayAtIndex(aValueArray, fIndex)
| ("$atIndex", [EvRecord(dict), EvArray([EvString(sIndex)])]) => recordAtIndex(dict, sIndex)
| ("$atIndex", [obj, index]) =>
(toStringWithType(obj) ++ "??~~~~" ++ toStringWithType(index))->EvString->Ok
| ("$constructRecord", [EvArray(arrayOfPairs)]) => constructRecord(arrayOfPairs)
| ("$exportBindings", [EvRecord(externalBindings)]) => doExportBindings(externalBindings)
| ("$setBindings", [EvRecord(externalBindings), EvSymbol(symbol), value]) =>
doSetBindings(externalBindings, symbol, value)
| ("inspect", [value, EvString(label)]) => inspectLabel(value, label)
| ("inspect", [value]) => inspect(value)
| ("keep", [EvArray(aValueArray), EvLambda(aLambdaValue)]) =>
doKeepArray(aValueArray, aLambdaValue)
| ("map", [EvArray(aValueArray), EvLambda(aLambdaValue)]) => doMapArray(aValueArray, aLambdaValue)
| ("reduce", [EvArray(aValueArray), initialValue, EvLambda(aLambdaValue)]) =>
doReduceArray(aValueArray, initialValue, aLambdaValue)
| ("reduceReverse", [EvArray(aValueArray), initialValue, EvLambda(aLambdaValue)]) =>
doReduceReverseArray(aValueArray, initialValue, aLambdaValue)
| ("reverse", [EvArray(aValueArray)]) => aValueArray->Belt.Array.reverse->EvArray->Ok
| call => callMathJs(call)
}
}
@@ -60,12 +140,16 @@ let callInternal = (call: functionCall): result<'b, errorValue> => {
/*
Reducer uses Result monad while reducing expressions
*/
let dispatch = (call: functionCall): result<expressionValue, errorValue> =>
let dispatch = (call: functionCall, environment, reducer: ExpressionT.reducerFn): result<
expressionValue,
errorValue,
> =>
try {
let callInternalWithReducer = (call, environment) => callInternal(call, environment, reducer)
let (fn, args) = call
// There is a bug that prevents string match in patterns
// So we have to recreate a copy of the string
ExternalLibrary.dispatch((Js.String.make(fn), args), callInternal)
ExternalLibrary.dispatch((Js.String.make(fn), args), environment, callInternalWithReducer)
} catch {
| Js.Exn.Error(obj) => REJavaScriptExn(Js.Exn.message(obj), Js.Exn.name(obj))->Error
| _ => RETodo("unhandled rescript exception")->Error

@@ -3,120 +3,189 @@
they take expressions as parameters and return a new expression.
Macros are used to define language building blocks. They are like Lisp macros.
*/
module Bindings = Reducer_Expression_Bindings
module ExpressionT = Reducer_Expression_T
module ExpressionValue = ReducerInterface.ExpressionValue
module ExpressionWithContext = Reducer_ExpressionWithContext
module Result = Belt.Result
open Reducer_Expression_ExpressionBuilder

open Reducer_ErrorValue

type environment = ExpressionValue.environment
type errorValue = Reducer_ErrorValue.errorValue
type expression = ExpressionT.expression

type reducerFn = (
expression,
ExpressionT.bindings,
) => result<ExpressionValue.expressionValue, errorValue>
type expressionValue = ExpressionValue.expressionValue
type expressionWithContext = ExpressionWithContext.expressionWithContext

let dispatchMacroCall = (
list: list<expression>,
macroExpression: expression,
bindings: ExpressionT.bindings,
reduceExpression: reducerFn,
): result<expression, 'e> => {
let rec replaceSymbols = (expression: expression, bindings: ExpressionT.bindings): result<
expression,
errorValue,
> =>
switch expression {
| ExpressionT.EValue(EvSymbol(aSymbol)) =>
switch bindings->Belt.Map.String.get(aSymbol) {
| Some(boundExpression) => boundExpression->Ok
| None => RESymbolNotFound(aSymbol)->Error
}
| ExpressionT.EValue(_) => expression->Ok
| ExpressionT.EBindings(_) => expression->Ok
| ExpressionT.EList(list) => {
let racc = list->Belt.List.reduceReverse(Ok(list{}), (racc, each: expression) =>
racc->Result.flatMap(acc => {
each
->replaceSymbols(bindings)
->Result.flatMap(newNode => {
acc->Belt.List.add(newNode)->Ok
})
})
)
racc->Result.map(acc => acc->ExpressionT.EList)
}
}

let doBindStatement = (statement: expression, bindings: ExpressionT.bindings) => {
environment,
reduceExpression: ExpressionT.reducerFn,
): result<expressionWithContext, errorValue> => {
let doBindStatement = (bindingExpr: expression, statement: expression, environment) =>
switch statement {
| ExpressionT.EList(list{
ExpressionT.EValue(EvCall("$let")),
ExpressionT.EValue(EvSymbol(aSymbol)),
expressionToReduce,
}) => {
let rNewExpressionToReduce = replaceSymbols(expressionToReduce, bindings)
| ExpressionT.EList(list{ExpressionT.EValue(EvCall("$let")), symbolExpr, statement}) => {
let rExternalBindingsValue = reduceExpression(bindingExpr, bindings, environment)

let rNewValue =
rNewExpressionToReduce->Result.flatMap(newExpressionToReduce =>
reduceExpression(newExpressionToReduce, bindings)
rExternalBindingsValue->Result.flatMap(externalBindingsValue => {
let newBindings = Bindings.fromValue(externalBindingsValue)

// Js.log(
// `bindStatement ${Bindings.toString(newBindings)}<==${ExpressionT.toString(
// bindingExpr,
// )} statement: $let ${ExpressionT.toString(symbolExpr)}=${ExpressionT.toString(
// statement,
// )}`,
// )

let rNewStatement = Bindings.replaceSymbols(newBindings, statement)
rNewStatement->Result.map(newStatement =>
ExpressionWithContext.withContext(
eFunction(
"$setBindings",
list{newBindings->Bindings.toExternalBindings->eRecord, symbolExpr, newStatement},
),
newBindings,
)
)

let rNewExpression = rNewValue->Result.map(newValue => ExpressionT.EValue(newValue))
rNewExpression->Result.map(newExpression =>
Belt.Map.String.set(bindings, aSymbol, newExpression)->ExpressionT.EBindings
)
})
}
| _ => REAssignmentExpected->Error
}
}

let doExportVariableExpression = (bindings: ExpressionT.bindings) => {
let emptyDictionary: Js.Dict.t<ExpressionValue.expressionValue> = Js.Dict.empty()
let reducedBindings = bindings->Belt.Map.String.keep((_key, value) =>
switch value {
| ExpressionT.EValue(_) => true
| _ => false
}
)
let externalBindings = reducedBindings->Belt.Map.String.reduce(emptyDictionary, (
acc,
key,
expressionValue,
) => {
let value = switch expressionValue {
| EValue(aValue) => aValue
| _ => EvSymbol("internal")
}
Js.Dict.set(acc, key, value)
acc
})
externalBindings->ExpressionValue.EvRecord->ExpressionT.EValue->Ok
}
let doBindExpression = (bindingExpr: expression, statement: expression, environment): result<
expressionWithContext,
errorValue,
> =>
switch statement {
| ExpressionT.EList(list{ExpressionT.EValue(EvCall("$let")), symbolExpr, statement}) => {
let rExternalBindingsValue = reduceExpression(bindingExpr, bindings, environment)

let doBindExpression = (expression: expression, bindings: ExpressionT.bindings) =>
switch expression {
| ExpressionT.EList(list{ExpressionT.EValue(EvCall("$let")), ..._}) =>
REExpressionExpected->Error
| ExpressionT.EList(list{ExpressionT.EValue(EvCall("$exportVariablesExpression"))}) =>
doExportVariableExpression(bindings)
| _ => replaceSymbols(expression, bindings)
rExternalBindingsValue->Result.flatMap(externalBindingsValue => {
let newBindings = Bindings.fromValue(externalBindingsValue)
let rNewStatement = Bindings.replaceSymbols(newBindings, statement)
rNewStatement->Result.map(newStatement =>
ExpressionWithContext.withContext(
eFunction(
"$exportBindings",
list{
eFunction(
"$setBindings",
list{
newBindings->Bindings.toExternalBindings->eRecord,
symbolExpr,
newStatement,
},
),
},
),
newBindings,
)
)
})
}
| _ => {
let rExternalBindingsValue: result<expressionValue, errorValue> = reduceExpression(
bindingExpr,
bindings,
environment,
)

rExternalBindingsValue->Result.flatMap(externalBindingsValue => {
let newBindings = Bindings.fromValue(externalBindingsValue)
let rNewStatement = Bindings.replaceSymbols(newBindings, statement)
rNewStatement->Result.map(newStatement =>
ExpressionWithContext.withContext(newStatement, newBindings)
)
})
}
}

switch list {
| list{ExpressionT.EValue(EvCall("$$bindings"))} => bindings->ExpressionT.EBindings->Ok
let doBlock = (exprs: list<expression>, _bindings: ExpressionT.bindings, _environment): result<
expressionWithContext,
errorValue,
> => {
let exprsArray = Belt.List.toArray(exprs)
let maxIndex = Js.Array2.length(exprsArray) - 1
let newStatement = exprsArray->Js.Array2.reducei((acc, statement, index) =>
if index == 0 {
if index == maxIndex {
eBindExpressionDefault(statement)
} else {
eBindStatementDefault(statement)
}
} else if index == maxIndex {
eBindExpression(acc, statement)
} else {
eBindStatement(acc, statement)
}
, eSymbol("undefined block"))
ExpressionWithContext.noContext(newStatement)->Ok
}

| list{
ExpressionT.EValue(EvCall("$$bindStatement")),
ExpressionT.EBindings(bindings),
statement,
} =>
doBindStatement(statement, bindings)
| list{
ExpressionT.EValue(EvCall("$$bindExpression")),
ExpressionT.EBindings(bindings),
expression,
} =>
doBindExpression(expression, bindings)
| _ => list->ExpressionT.EList->Ok
let doLambdaDefinition = (
bindings: ExpressionT.bindings,
parameters: array<string>,
lambdaDefinition: ExpressionT.expression,
) =>
ExpressionWithContext.noContext(
eLambda(parameters, bindings->Bindings.toExternalBindings, lambdaDefinition),
)->Ok

let doTernary = (
condition: expression,
ifTrue: expression,
ifFalse: expression,
bindings: ExpressionT.bindings,
environment,
): result<expressionWithContext, errorValue> => {
let rCondition = reduceExpression(condition, bindings, environment)
rCondition->Result.flatMap(conditionValue =>
switch conditionValue {
| ExpressionValue.EvBool(false) => ExpressionWithContext.noContext(ifFalse)->Ok
| ExpressionValue.EvBool(true) => ExpressionWithContext.noContext(ifTrue)->Ok
| _ => REExpectedType("Boolean")->Error
}
)
}

let expandExpressionList = (aList, bindings: ExpressionT.bindings, environment): result<
expressionWithContext,
errorValue,
> =>
switch aList {
| list{
ExpressionT.EValue(EvCall("$$bindStatement")),
bindingExpr: ExpressionT.expression,
statement,
} =>
doBindStatement(bindingExpr, statement, environment)
| list{ExpressionT.EValue(EvCall("$$bindStatement")), statement} =>
// bindings of the context are used when there is no binding expression
doBindStatement(eRecord(Bindings.toExternalBindings(bindings)), statement, environment)
| list{
ExpressionT.EValue(EvCall("$$bindExpression")),
bindingExpr: ExpressionT.expression,
expression,
} =>
doBindExpression(bindingExpr, expression, environment)
| list{ExpressionT.EValue(EvCall("$$bindExpression")), expression} =>
// bindings of the context are used when there is no binding expression
doBindExpression(eRecord(Bindings.toExternalBindings(bindings)), expression, environment)
| list{ExpressionT.EValue(EvCall("$$block")), ...exprs} => doBlock(exprs, bindings, environment)
| list{
ExpressionT.EValue(EvCall("$$lambda")),
ExpressionT.EValue(EvArrayString(parameters)),
lambdaDefinition,
} =>
doLambdaDefinition(bindings, parameters, lambdaDefinition)
| list{ExpressionT.EValue(EvCall("$$ternary")), condition, ifTrue, ifFalse} =>
doTernary(condition, ifTrue, ifFalse, bindings, environment)
| _ => ExpressionWithContext.noContext(ExpressionT.EList(aList))->Ok
}

switch macroExpression {
| EList(aList) => expandExpressionList(aList, bindings, environment)
| _ => ExpressionWithContext.noContext(macroExpression)->Ok
}
}

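To make the doBlock fold above concrete, here is a hypothetical sketch (not part of the diff) of how a three-statement block would be chained, using the eBind* builders that this commit introduces elsewhere:

```rescript
// Hypothetical illustration of doBlock's reducei fold for exprs = list{s1, s2, s3}:
// the first statement binds against the default (context) bindings, intermediate
// statements thread the accumulated bindings forward, and the last statement is
// bound as the block's resulting expression.
let expandThreeStatementBlock = (s1, s2, s3) =>
  eBindExpression(eBindStatement(eBindStatementDefault(s1), s2), s3)
```
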
@@ -1,22 +1,29 @@
@genType
type errorValue =
| REArityError(option<string>, int, int) //TODO: Binding a lambda to a variable should record the variable name in lambda for error reporting
| REArrayIndexNotFound(string, int)
| REAssignmentExpected
| REDistributionError(DistributionTypes.error)
| REExpressionExpected
| REFunctionExpected(string)
| REJavaScriptExn(option<string>, option<string>) // Javascript Exception
| REMacroNotFound(string)
| RENotAFunction(string)
| RERecordPropertyNotFound(string, string)
| RESymbolNotFound(string)
| RESyntaxError(string)
| REDistributionError(DistributionTypes.error)
| RETodo(string) // To do
| REExpectedType(string)

type t = errorValue

@genType
let errorToString = err =>
switch err {
| REArityError(_oFnName, arity, usedArity) =>
`${Js.String.make(arity)} arguments expected. Instead ${Js.String.make(
usedArity,
)} argument(s) were passed.`
| REArrayIndexNotFound(msg, index) => `${msg}: ${Js.String.make(index)}`
| REAssignmentExpected => "Assignment expected"
| REExpressionExpected => "Expression expected"
@@ -35,8 +42,10 @@ let errorToString = err =>
answer
}
| REMacroNotFound(macro) => `Macro not found: ${macro}`
| RENotAFunction(valueString) => `${valueString} is not a function`
| RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
| RESymbolNotFound(symbolName) => `${symbolName} is not defined`
| RESyntaxError(desc) => `Syntax Error: ${desc}`
| RETodo(msg) => `TODO: ${msg}`
| REExpectedType(typeName) => `Expected type: ${typeName}`
}

@@ -1,35 +1,22 @@
module Bindings = Reducer_Expression_Bindings
module BuiltIn = Reducer_Dispatch_BuiltIn
module ExpressionBuilder = Reducer_Expression_ExpressionBuilder
module ExpressionValue = ReducerInterface.ExpressionValue
module Extra = Reducer_Extra
module Lambda = Reducer_Expression_Lambda
module Macro = Reducer_Expression_Macro
module MathJs = Reducer_MathJs
module Result = Belt.Result
module T = Reducer_Expression_T
open Reducer_ErrorValue

type environment = ReducerInterface_ExpressionValue.environment
type errorValue = Reducer_ErrorValue.errorValue
type expression = T.expression
type expressionValue = ExpressionValue.expressionValue
type expressionValue = ReducerInterface_ExpressionValue.expressionValue
type externalBindings = ReducerInterface_ExpressionValue.externalBindings
type internalCode = ReducerInterface_ExpressionValue.internalCode
type t = expression

/*
Shows the expression as text of expression
*/
let rec toString = expression =>
switch expression {
| T.EBindings(_) => "$$bound"
| T.EList(aList) =>
`(${Belt.List.map(aList, aValue => toString(aValue))
->Extra.List.interperse(" ")
->Belt.List.toArray
->Js.String.concatMany("")})`
| EValue(aValue) => ExpressionValue.toString(aValue)
}

let toStringResult = codeResult =>
switch codeResult {
| Ok(a) => `Ok(${toString(a)})`
| Error(m) => `Error(${Js.String.make(m)})`
}

/*
Converts a MathJs code to expression
*/
@@ -39,148 +26,116 @@ let parse_ = (expr: string, parser, converter): result<t, errorValue> =>
let parse = (mathJsCode: string): result<t, errorValue> =>
mathJsCode->parse_(MathJs.Parse.parse, MathJs.ToExpression.fromNode)

let parsePartial = (mathJsCode: string): result<t, errorValue> =>
mathJsCode->parse_(MathJs.Parse.parse, MathJs.ToExpression.fromPartialNode)

let parseOuter = (mathJsCode: string): result<t, errorValue> =>
mathJsCode->parse_(MathJs.Parse.parse, MathJs.ToExpression.fromOuterNode)

let defaultBindings: T.bindings = Belt.Map.String.empty

/*
Recursively evaluate/reduce the expression (Lisp AST)
*/
let rec reduceExpression = (expression: t, bindings: T.bindings): result<expressionValue, 'e> => {
/*
Macros are like functions but instead of taking values as parameters,
they take expressions as parameters and return a new expression.
Macros are used to define language building blocks. They are like Lisp macros.
*/
let doMacroCall = (list: list<t>, bindings: T.bindings): result<t, 'e> =>
Reducer_Dispatch_BuiltInMacros.dispatchMacroCall(list, bindings, reduceExpression)
let rec reduceExpression = (expression: t, bindings: T.bindings, environment: environment): result<
expressionValue,
'e,
> => {
// Js.log(`reduce: ${T.toString(expression)} bindings: ${bindings->Bindings.toString}`)
switch expression {
| T.EValue(value) => value->Ok
| T.EList(list) =>
switch list {
| list{EValue(EvCall(fName)), ..._args} =>
switch Macro.isMacroName(fName) {
// A macro expands then reduces itself
| true => Macro.doMacroCall(expression, bindings, environment, reduceExpression)
| false => reduceExpressionList(list, bindings, environment)
}
| _ => reduceExpressionList(list, bindings, environment)
}
}
}

/*
and reduceExpressionList = (
expressions: list<t>,
bindings: T.bindings,
environment: environment,
): result<expressionValue, 'e> => {
let racc: result<list<expressionValue>, 'e> = expressions->Belt.List.reduceReverse(Ok(list{}), (
racc,
each: expression,
) =>
racc->Result.flatMap(acc => {
each
->reduceExpression(bindings, environment)
->Result.map(newNode => {
acc->Belt.List.add(newNode)
})
})
)
racc->Result.flatMap(acc => acc->reduceValueList(environment))
}

/*
After reducing each level of expression(Lisp AST), we have a value list to evaluate
*/
let reduceValueList = (valueList: list<expressionValue>): result<expressionValue, 'e> =>
switch valueList {
| list{EvCall(fName), ...args} => (fName, args->Belt.List.toArray)->BuiltIn.dispatch
| _ => valueList->Belt.List.toArray->ExpressionValue.EvArray->Ok
}
and reduceValueList = (valueList: list<expressionValue>, environment): result<
expressionValue,
'e,
> =>
switch valueList {
| list{EvCall(fName), ...args} =>
(fName, args->Belt.List.toArray)->BuiltIn.dispatch(environment, reduceExpression)

let rec seekMacros = (expression: t, bindings: T.bindings): result<t, 'e> =>
switch expression {
| T.EValue(_value) => expression->Ok
| T.EBindings(_value) => expression->Ok
| T.EList(list) => {
let racc: result<list<t>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
racc,
each: expression,
) =>
racc->Result.flatMap(acc => {
each
->seekMacros(bindings)
->Result.flatMap(newNode => {
acc->Belt.List.add(newNode)->Ok
})
})
)
racc->Result.flatMap(acc => acc->doMacroCall(bindings))
}
}
| list{EvLambda(lamdaCall), ...args} =>
Lambda.doLambdaCall(lamdaCall, args, environment, reduceExpression)
| _ =>
valueList
->Lambda.checkIfReduced
->Result.flatMap(reducedValueList =>
reducedValueList->Belt.List.toArray->ExpressionValue.EvArray->Ok
)
}

let rec reduceExpandedExpression = (expression: t): result<expressionValue, 'e> =>
switch expression {
| T.EValue(value) => value->Ok
| T.EList(list) => {
let racc: result<list<expressionValue>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
racc,
each: expression,
) =>
racc->Result.flatMap(acc => {
each
->reduceExpandedExpression
->Result.flatMap(newNode => {
acc->Belt.List.add(newNode)->Ok
})
})
)
racc->Result.flatMap(acc => acc->reduceValueList)
}
| EBindings(_bindings) => RETodo("Error: Bindings cannot be reduced to values")->Error
}
let evalUsingBindingsExpression_ = (aExpression, bindings, environment): result<
expressionValue,
'e,
> => reduceExpression(aExpression, bindings, environment)

let rExpandedExpression: result<t, 'e> = expression->seekMacros(bindings)
rExpandedExpression->Result.flatMap(expandedExpression =>
expandedExpression->reduceExpandedExpression
)
}
let evaluateUsingOptions = (
~environment: option<ReducerInterface_ExpressionValue.environment>,
~externalBindings: option<ReducerInterface_ExpressionValue.externalBindings>,
code: string,
): result<expressionValue, errorValue> => {
let anEnvironment = switch environment {
| Some(env) => env
| None => ReducerInterface_ExpressionValue.defaultEnvironment
}

let evalUsingExternalBindingsExpression_ = (aExpression, bindings): result<expressionValue, 'e> =>
reduceExpression(aExpression, bindings)
let anExternalBindings = switch externalBindings {
| Some(bindings) => bindings
| None => ReducerInterface_ExpressionValue.defaultExternalBindings
}

/*
Evaluates MathJs code via Reducer using bindings and answers the result.
When bindings are used, the code is a partial code as if it is cut from a larger code.
Therefore all statements are assignments.
*/
let evalPartialUsingExternalBindings_ = (codeText: string, bindings: T.bindings) => {
parsePartial(codeText)->Result.flatMap(expression =>
expression->evalUsingExternalBindingsExpression_(bindings)
)
}
let bindings = anExternalBindings->Bindings.fromExternalBindings

/*
Evaluates MathJs code via Reducer using bindings and answers the result.
When bindings are used, the code is a partial code as if it is cut from a larger code.
Therefore all statments are assignments.
*/
let evalOuterWBindings_ = (codeText: string, bindings: T.bindings) => {
parseOuter(codeText)->Result.flatMap(expression =>
expression->evalUsingExternalBindingsExpression_(bindings)
)
parse(code)->Result.flatMap(expr => evalUsingBindingsExpression_(expr, bindings, anEnvironment))
}

/*
Evaluates MathJs code and bindings via Reducer and answers the result
*/
let eval = (codeText: string) => {
parse(codeText)->Result.flatMap(expression =>
expression->evalUsingExternalBindingsExpression_(defaultBindings)
)
}

type externalBindings = ReducerInterface.ExpressionValue.externalBindings //Js.Dict.t<expressionValue>

let externalBindingsToBindings = (externalBindings: externalBindings): T.bindings => {
let keys = Js.Dict.keys(externalBindings)
keys->Belt.Array.reduce(defaultBindings, (acc, key) => {
let value = Js.Dict.unsafeGet(externalBindings, key)
acc->Belt.Map.String.set(key, T.EValue(value))
})
}
/*
Evaluates code with external bindings. External bindings are a record of expression values.
*/
let evalUsingExternalBindings = (code: string, externalBindings: externalBindings) => {
let bindings = externalBindings->externalBindingsToBindings
evalOuterWBindings_(code, bindings)
}

/*
Evaluates code with external bindings. External bindings are a record of expression values.
The code is a partial code as if it is cut from a larger code. Therefore all statments are assignments.
*/
let evalPartialUsingExternalBindings = (code: string, externalBindings: externalBindings): result<
externalBindings,
'e,
> => {
let bindings = externalBindings->externalBindingsToBindings
let answer = evalPartialUsingExternalBindings_(code, bindings)
answer->Result.flatMap(answer =>
switch answer {
| EvRecord(aRecord) => Ok(aRecord)
| _ => RETodo("TODO: External bindings must be returned")->Error
}
let evaluate = (code: string): result<expressionValue, errorValue> => {
evaluateUsingOptions(~environment=None, ~externalBindings=None, code)
}
let eval = evaluate
let evaluatePartialUsingExternalBindings = (
code: string,
externalBindings: ReducerInterface_ExpressionValue.externalBindings,
environment: ReducerInterface_ExpressionValue.environment,
): result<externalBindings, errorValue> => {
let rAnswer = evaluateUsingOptions(
~environment=Some(environment),
~externalBindings=Some(externalBindings),
code,
)
switch rAnswer {
| Ok(EvRecord(externalBindings)) => Ok(externalBindings)
| Ok(_) =>
Error(Reducer_ErrorValue.RESyntaxError(`Partials must end with an assignment or record`))
| Error(err) => err->Error
}
}

@@ -0,0 +1,45 @@
module Bindings = Reducer_Expression_Bindings
module ErrorValue = Reducer_ErrorValue
module ExpressionT = Reducer_Expression_T
module ExpressionValue = ReducerInterface.ExpressionValue
module Result = Belt.Result

type bindings = ExpressionT.bindings
type context = bindings
type environment = ExpressionValue.environment
type errorValue = Reducer_ErrorValue.errorValue
type expression = ExpressionT.expression
type expressionValue = ExpressionValue.expressionValue
type externalBindings = ReducerInterface_ExpressionValue.externalBindings
type reducerFn = ExpressionT.reducerFn

type expressionWithContext =
| ExpressionWithContext(expression, context)
| ExpressionNoContext(expression)

let callReducer = (
expressionWithContext: expressionWithContext,
bindings: bindings,
environment: environment,
reducer: reducerFn,
): result<expressionValue, errorValue> =>
switch expressionWithContext {
| ExpressionNoContext(expr) => reducer(expr, bindings, environment)
| ExpressionWithContext(expr, context) => reducer(expr, context, environment)
}

let withContext = (expression, context) => ExpressionWithContext(expression, context)
let noContext = expression => ExpressionNoContext(expression)

let toString = expressionWithContext =>
switch expressionWithContext {
| ExpressionNoContext(expr) => ExpressionT.toString(expr)
| ExpressionWithContext(expr, context) =>
`${ExpressionT.toString(expr)} context: ${Bindings.toString(context)}`
}

let toStringResult = rExpressionWithContext =>
switch rExpressionWithContext {
| Ok(expressionWithContext) => `Ok(${toString(expressionWithContext)})`
| Error(errorValue) => ErrorValue.errorToString(errorValue)
}

@@ -0,0 +1,85 @@
module ErrorValue = Reducer_ErrorValue
module ExpressionT = Reducer_Expression_T
module ExpressionValue = ReducerInterface.ExpressionValue
module Result = Belt.Result

type errorValue = Reducer_ErrorValue.errorValue
type expression = ExpressionT.expression
type expressionValue = ExpressionValue.expressionValue
type externalBindings = ReducerInterface_ExpressionValue.externalBindings

let defaultBindings: ExpressionT.bindings = Belt.Map.String.empty

let fromExternalBindings = (externalBindings: externalBindings): ExpressionT.bindings => {
let keys = Js.Dict.keys(externalBindings)
keys->Belt.Array.reduce(defaultBindings, (acc, key) => {
let value = Js.Dict.unsafeGet(externalBindings, key)
acc->Belt.Map.String.set(key, value)
})
}

let toExternalBindings = (bindings: ExpressionT.bindings): externalBindings => {
let keys = Belt.Map.String.keysToArray(bindings)
keys->Belt.Array.reduce(Js.Dict.empty(), (acc, key) => {
let value = bindings->Belt.Map.String.getExn(key)
Js.Dict.set(acc, key, value)
acc
})
}

let fromValue = (aValue: expressionValue) =>
switch aValue {
| EvRecord(externalBindings) => fromExternalBindings(externalBindings)
| _ => defaultBindings
}

let externalFromArray = anArray => Js.Dict.fromArray(anArray)

let isMacroName = (fName: string): bool => fName->Js.String2.startsWith("$$")

let rec replaceSymbols = (bindings: ExpressionT.bindings, expression: expression): result<
expression,
errorValue,
> =>
switch expression {
| ExpressionT.EValue(value) =>
replaceSymbolOnValue(bindings, value)->Result.map(evValue => evValue->ExpressionT.EValue)
| ExpressionT.EList(list) =>
switch list {
| list{EValue(EvCall(fName)), ..._args} =>
switch isMacroName(fName) {
// A macro reduces itself so we dont dive in it
| true => expression->Ok
| false => replaceSymbolsOnExpressionList(bindings, list)
}
| _ => replaceSymbolsOnExpressionList(bindings, list)
}
}

and replaceSymbolsOnExpressionList = (bindings, list) => {
let racc = list->Belt.List.reduceReverse(Ok(list{}), (racc, each: expression) =>
racc->Result.flatMap(acc => {
replaceSymbols(bindings, each)->Result.flatMap(newNode => {
acc->Belt.List.add(newNode)->Ok
})
})
)
racc->Result.map(acc => acc->ExpressionT.EList)
}
and replaceSymbolOnValue = (bindings, evValue: expressionValue) =>
switch evValue {
| EvSymbol(symbol) => Belt.Map.String.getWithDefault(bindings, symbol, evValue)->Ok
| EvCall(symbol) => Belt.Map.String.getWithDefault(bindings, symbol, evValue)->checkIfCallable
| _ => evValue->Ok
}
and checkIfCallable = (evValue: expressionValue) =>
switch evValue {
| EvCall(_) | EvLambda(_) => evValue->Ok
| _ => ErrorValue.RENotAFunction(ExpressionValue.toString(evValue))->Error
}

let toString = (bindings: ExpressionT.bindings) =>
bindings->toExternalBindings->ExpressionValue.EvRecord->ExpressionValue.toString

let externalBindingsToString = (externalBindings: externalBindings) =>
externalBindings->ExpressionValue.EvRecord->ExpressionValue.toString

@@ -0,0 +1,66 @@
module BBindings = Reducer_Expression_Bindings
module BErrorValue = Reducer_ErrorValue
module BExpressionT = Reducer_Expression_T
module BExpressionValue = ReducerInterface.ExpressionValue

type errorValue = BErrorValue.errorValue
type expression = BExpressionT.expression
type internalCode = ReducerInterface_ExpressionValue.internalCode

external castExpressionToInternalCode: expression => internalCode = "%identity"

let eArray = anArray => anArray->BExpressionValue.EvArray->BExpressionT.EValue

let eArrayString = anArray => anArray->BExpressionValue.EvArrayString->BExpressionT.EValue

let eBindings = (anArray: array<(string, BExpressionValue.expressionValue)>) =>
anArray->Js.Dict.fromArray->BExpressionValue.EvRecord->BExpressionT.EValue

let eBool = aBool => aBool->BExpressionValue.EvBool->BExpressionT.EValue

let eCall = (name: string): expression => name->BExpressionValue.EvCall->BExpressionT.EValue

let eFunction = (fName: string, lispArgs: list<expression>): expression => {
let fn = fName->eCall
list{fn, ...lispArgs}->BExpressionT.EList
}

let eLambda = (
parameters: array<string>,
context: BExpressionValue.externalBindings,
expr: expression,
) => {
// Js.log(`eLambda context ${BBindings.externalBindingsToString(context)}`)
BExpressionValue.EvLambda({
parameters: parameters,
context: context,
body: expr->castExpressionToInternalCode,
})->BExpressionT.EValue
}

let eNumber = aNumber => aNumber->BExpressionValue.EvNumber->BExpressionT.EValue

let eRecord = aRecord => aRecord->BExpressionValue.EvRecord->BExpressionT.EValue

let eString = aString => aString->BExpressionValue.EvString->BExpressionT.EValue

let eSymbol = (name: string): expression => name->BExpressionValue.EvSymbol->BExpressionT.EValue

let eList = (list: list<expression>): expression => list->BExpressionT.EList

let eBlock = (exprs: list<expression>): expression => eFunction("$$block", exprs)

let eLetStatement = (symbol: string, valueExpression: expression): expression =>
eFunction("$let", list{eSymbol(symbol), valueExpression})

let eBindStatement = (bindingExpr: expression, letStatement: expression): expression =>
eFunction("$$bindStatement", list{bindingExpr, letStatement})

let eBindStatementDefault = (letStatement: expression): expression =>
eFunction("$$bindStatement", list{letStatement})

let eBindExpression = (bindingExpr: expression, expression: expression): expression =>
eFunction("$$bindExpression", list{bindingExpr, expression})

let eBindExpressionDefault = (expression: expression): expression =>
eFunction("$$bindExpression", list{expression})

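As a rough, hypothetical usage sketch (not from the diff), the builders above can assemble the Lisp AST for a tiny program such as `x = 5; x + 1`; the call name "add" is an assumption about MathJs operator naming:

```rescript
// Hypothetical example: "x = 5; x + 1" as a $$block of a $let statement followed by an expression.
let example: expression = eBlock(list{
  eLetStatement("x", eNumber(5.0)),
  eFunction("add", list{eSymbol("x"), eNumber(1.0)}),
})
```
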
@@ -0,0 +1,69 @@
module Bindings = Reducer_Expression_Bindings
module ErrorValue = Reducer_ErrorValue
module ExpressionBuilder = Reducer_Expression_ExpressionBuilder
module ExpressionT = Reducer_Expression_T
module ExpressionValue = ReducerInterface.ExpressionValue
module Result = Belt.Result

type environment = ReducerInterface_ExpressionValue.environment
type expression = ExpressionT.expression
type expressionValue = ReducerInterface_ExpressionValue.expressionValue
type externalBindings = ReducerInterface_ExpressionValue.externalBindings
type internalCode = ReducerInterface_ExpressionValue.internalCode

external castInternalCodeToExpression: internalCode => expression = "%identity"

let checkArity = (lambdaValue: ExpressionValue.lambdaValue, args: list<expressionValue>) => {
let argsLength = Belt.List.length(args)
let parametersLength = Js.Array2.length(lambdaValue.parameters)
if argsLength !== parametersLength {
ErrorValue.REArityError(None, parametersLength, argsLength)->Error
} else {
args->Ok
}
}

let checkIfReduced = (args: list<expressionValue>) =>
args->Belt.List.reduceReverse(Ok(list{}), (rAcc, arg) =>
rAcc->Result.flatMap(acc =>
switch arg {
| EvSymbol(symbol) => ErrorValue.RESymbolNotFound(symbol)->Error
| _ => list{arg, ...acc}->Ok
}
)
)

let applyParametersToLambda = (
lambdaValue: ExpressionValue.lambdaValue,
args,
environment,
reducer: ExpressionT.reducerFn,
): result<expressionValue, 'e> => {
checkArity(lambdaValue, args)->Result.flatMap(args =>
checkIfReduced(args)->Result.flatMap(args => {
let expr = castInternalCodeToExpression(lambdaValue.body)
let parameterList = lambdaValue.parameters->Belt.List.fromArray
let zippedParameterList = parameterList->Belt.List.zip(args)
let bindings = Belt.List.reduce(
zippedParameterList,
lambdaValue.context->Bindings.fromExternalBindings,
(acc, (variable, variableValue)) => acc->Belt.Map.String.set(variable, variableValue),
)
let newExpression = ExpressionBuilder.eBlock(list{expr})
reducer(newExpression, bindings, environment)
})
)
}

let doLambdaCall = (lambdaValue: ExpressionValue.lambdaValue, args, environment, reducer) =>
applyParametersToLambda(lambdaValue, args, environment, reducer)

let foreignFunctionInterface = (
lambdaValue: ExpressionValue.lambdaValue,
argArray: array<expressionValue>,
environment: ExpressionValue.environment,
reducer: ExpressionT.reducerFn,
): result<expressionValue, 'e> => {
let args = argArray->Belt.List.fromArray
applyParametersToLambda(lambdaValue, args, environment, reducer)
}

@@ -0,0 +1,44 @@
module ExpressionT = Reducer_Expression_T
module ExpressionValue = ReducerInterface.ExpressionValue
module ExpressionWithContext = Reducer_ExpressionWithContext
module Result = Belt.Result

type environment = ExpressionValue.environment
type expression = ExpressionT.expression
type expressionValue = ExpressionValue.expressionValue
type expressionWithContext = ExpressionWithContext.expressionWithContext

let expandMacroCall = (
macroExpression: expression,
bindings: ExpressionT.bindings,
environment: environment,
reduceExpression: ExpressionT.reducerFn,
): result<expressionWithContext, 'e> =>
Reducer_Dispatch_BuiltInMacros.dispatchMacroCall(
macroExpression,
bindings,
environment,
reduceExpression,
)

let doMacroCall = (
macroExpression: expression,
bindings: ExpressionT.bindings,
environment: environment,
reduceExpression: ExpressionT.reducerFn,
): result<expressionValue, 'e> =>
expandMacroCall(
macroExpression,
bindings,
environment,
reduceExpression,
)->Result.flatMap(expressionWithContext =>
ExpressionWithContext.callReducer(
expressionWithContext,
bindings,
environment,
reduceExpression,
)
)

let isMacroName = (fName: string): bool => fName->Js.String2.startsWith("$$")

@@ -1,5 +1,3 @@
open ReducerInterface.ExpressionValue

/*
An expression is a Lisp AST. An expression is either a primitive value or a list of expressions.
In the case of a list of expressions (e1, e2, e3, ...eN), the semantic is
@@ -8,8 +6,51 @@ open ReducerInterface.ExpressionValue
A Lisp AST contains only expressions/primitive values to apply to their left.
The act of defining the semantics of a functional language is to write it in terms of Lisp AST.
*/
module Extra = Reducer_Extra
module ExpressionValue = ReducerInterface.ExpressionValue

type expressionValue = ExpressionValue.expressionValue
type environment = ExpressionValue.environment

type rec expression =
| EList(list<expression>) // A list to map-reduce
| EValue(expressionValue) // Irreducible built-in value. Reducer should not know the internals. External libraries are responsible
| EBindings(bindings) // let/def kind of statements return bindings
and bindings = Belt.Map.String.t<expression>
and bindings = Belt.Map.String.t<expressionValue>

type reducerFn = (
expression,
bindings,
environment,
) => result<expressionValue, Reducer_ErrorValue.errorValue>

/*
Converts the expression to String
*/
let rec toString = expression =>
switch expression {
| EList(aList) =>
`(${Belt.List.map(aList, aValue => toString(aValue))
->Extra.List.interperse(" ")
->Belt.List.toArray
->Js.String.concatMany("")})`
| EValue(aValue) => ExpressionValue.toString(aValue)
}

let toStringResult = codeResult =>
switch codeResult {
| Ok(a) => `Ok(${toString(a)})`
| Error(m) => `Error(${Reducer_ErrorValue.errorToString(m)})`
}

let inspect = (expr: expression): expression => {
Js.log(toString(expr))
expr
}

let inspectResult = (r: result<expression, Reducer_ErrorValue.errorValue>): result<
expression,
Reducer_ErrorValue.errorValue,
> => {
Js.log(toStringResult(r))
r
}

@@ -8,11 +8,10 @@ external castString: unit => string = "%identity"
/*
As JavaScript returns us any type, we need to type check and cast type propertype before using it
*/
let jsToEv = (jsValue): result<expressionValue, errorValue> => {
let jsToEv = (jsValue): result<expressionValue, errorValue> =>
switch Js.typeof(jsValue) {
| "boolean" => jsValue->castBool->EvBool->Ok
| "number" => jsValue->castNumber->EvNumber->Ok
| "string" => jsValue->castString->EvString->Ok
| other => RETodo(`Unhandled MathJs literal type: ${Js.String.make(other)}`)->Error
}
}

@ -9,13 +9,12 @@ type node = {"type": string, "isNode": bool, "comment": string}
|
|||
type arrayNode = {...node, "items": array<node>}
|
||||
type block = {"node": node}
|
||||
type blockNode = {...node, "blocks": array<block>}
|
||||
//conditionalNode
|
||||
type conditionalNode = {...node, "condition": node, "trueExpr": node, "falseExpr": node}
|
||||
type constantNode = {...node, "value": unit}
|
||||
//functionAssignmentNode
|
||||
type functionAssignmentNode = {...node, "name": string, "params": array<string>, "expr": node}
|
||||
type indexNode = {...node, "dimensions": array<node>}
|
||||
type objectNode = {...node, "properties": Js.Dict.t<node>}
|
||||
type accessorNode = {...node, "object": node, "index": indexNode, "name": string}
|
||||
|
||||
type parenthesisNode = {...node, "content": node}
|
||||
//rangeNode
|
||||
//relationalNode
|
||||
|
@ -32,7 +31,9 @@ external castAssignmentNode: node => assignmentNode = "%identity"
|
|||
external castAssignmentNodeWAccessor: node => assignmentNodeWAccessor = "%identity"
|
||||
external castAssignmentNodeWIndex: node => assignmentNodeWIndex = "%identity"
|
||||
external castBlockNode: node => blockNode = "%identity"
|
||||
external castConditionalNode: node => conditionalNode = "%identity"
|
||||
external castConstantNode: node => constantNode = "%identity"
|
||||
external castFunctionAssignmentNode: node => functionAssignmentNode = "%identity"
|
||||
external castFunctionNode: node => functionNode = "%identity"
|
||||
external castIndexNode: node => indexNode = "%identity"
|
||||
external castObjectNode: node => objectNode = "%identity"
|
||||
|
@ -58,7 +59,9 @@ type mathJsNode =
|
|||
| MjArrayNode(arrayNode)
|
||||
| MjAssignmentNode(assignmentNode)
|
||||
| MjBlockNode(blockNode)
|
||||
| MjConditionalNode(conditionalNode)
|
||||
| MjConstantNode(constantNode)
|
||||
| MjFunctionAssignmentNode(functionAssignmentNode)
|
||||
| MjFunctionNode(functionNode)
|
||||
| MjIndexNode(indexNode)
|
||||
| MjObjectNode(objectNode)
|
||||
|
@ -81,7 +84,9 @@ let castNodeType = (node: node) => {
|
|||
| "ArrayNode" => node->castArrayNode->MjArrayNode->Ok
|
||||
| "AssignmentNode" => node->decideAssignmentNode
|
||||
| "BlockNode" => node->castBlockNode->MjBlockNode->Ok
|
||||
| "ConditionalNode" => node->castConditionalNode->MjConditionalNode->Ok
|
||||
| "ConstantNode" => node->castConstantNode->MjConstantNode->Ok
|
||||
| "FunctionAssignmentNode" => node->castFunctionAssignmentNode->MjFunctionAssignmentNode->Ok
|
||||
| "FunctionNode" => node->castFunctionNode->MjFunctionNode->Ok
|
||||
| "IndexNode" => node->castIndexNode->MjIndexNode->Ok
|
||||
| "ObjectNode" => node->castObjectNode->MjObjectNode->Ok
|
||||
|
@ -118,6 +123,10 @@ let rec toString = (mathJsNode: mathJsNode): string => {
|
|||
->Extra.Array.interperse(", ")
|
||||
->Js.String.concatMany("")
|
||||
|
||||
let toStringFunctionAssignmentNode = (faNode: functionAssignmentNode): string => {
|
||||
let paramNames = Js.Array2.toString(faNode["params"])
|
||||
`${faNode["name"]} = (${paramNames}) => ${toStringMathJsNode(faNode["expr"])}`
|
||||
}
|
||||
let toStringFunctionNode = (fnode: functionNode): string =>
|
||||
`${fnode->nameOfFunctionNode}(${fnode["args"]->toStringNodeArray})`
|
||||
|
||||
|
@ -151,7 +160,12 @@ let rec toString = (mathJsNode: mathJsNode): string => {
|
|||
| MjAssignmentNode(aNode) =>
|
||||
`${aNode["object"]->toStringSymbolNode} = ${aNode["value"]->toStringMathJsNode}`
|
||||
| MjBlockNode(bNode) => `{${bNode["blocks"]->toStringBlocks}}`
|
||||
| MjConditionalNode(cNode) =>
|
||||
`ternary(${toStringMathJsNode(cNode["condition"])}, ${toStringMathJsNode(
|
||||
cNode["trueExpr"],
|
||||
)}, ${toStringMathJsNode(cNode["falseExpr"])})`
|
||||
| MjConstantNode(cNode) => cNode["value"]->toStringValue
|
||||
| MjFunctionAssignmentNode(faNode) => faNode->toStringFunctionAssignmentNode
|
||||
| MjFunctionNode(fNode) => fNode->toStringFunctionNode
|
||||
| MjIndexNode(iNode) => iNode->toStringIndexNode
|
||||
| MjObjectNode(oNode) => oNode->toStringObjectNode
|
||||
|
|
|
@ -1,45 +1,35 @@
|
|||
/* * WARNING. DO NOT EDIT, BEAUTIFY, COMMENT ON OR REFACTOR THIS CODE.
|
||||
We will stop using MathJs parser and
|
||||
this whole file will go to trash
|
||||
**/
|
||||
module ErrorValue = Reducer_ErrorValue
|
||||
module ExpressionValue = ReducerInterface.ExpressionValue
|
||||
module ExpressionBuilder = Reducer_Expression_ExpressionBuilder
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
module ExpressionValue = ReducerInterface.ExpressionValue
|
||||
module JavaScript = Reducer_Js
|
||||
module Parse = Reducer_MathJs_Parse
|
||||
module Result = Belt.Result
|
||||
|
||||
type errorValue = ErrorValue.errorValue
|
||||
type expression = ExpressionT.expression
|
||||
type expressionValue = ExpressionValue.expressionValue
|
||||
type errorValue = ErrorValue.errorValue
|
||||
|
||||
let passToFunction = (fName: string, rLispArgs): result<expression, errorValue> => {
|
||||
let toEvCallValue = (name: string): expression => name->ExpressionValue.EvCall->ExpressionT.EValue
|
||||
let blockToNode = block => block["node"]
|
||||
|
||||
let fn = fName->toEvCallValue
|
||||
rLispArgs->Result.flatMap(lispArgs => list{fn, ...lispArgs}->ExpressionT.EList->Ok)
|
||||
}
|
||||
|
||||
type blockTag =
|
||||
| ImportVariablesStatement
|
||||
| ExportVariablesExpression
|
||||
type tagOrNode =
|
||||
| BlockTag(blockTag)
|
||||
| BlockNode(Parse.node)
|
||||
|
||||
let toTagOrNode = block => BlockNode(block["node"])
|
||||
|
||||
let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
|
||||
let rec fromInnerNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
|
||||
Parse.castNodeType(mathJsNode)->Result.flatMap(typedMathJsNode => {
|
||||
let fromNodeList = (nodeList: list<Parse.node>): result<list<expression>, 'e> =>
|
||||
Belt.List.reduceReverse(nodeList, Ok(list{}), (racc, currNode) =>
|
||||
racc->Result.flatMap(acc =>
|
||||
fromNode(currNode)->Result.map(currCode => list{currCode, ...acc})
|
||||
fromInnerNode(currNode)->Result.map(currCode => list{currCode, ...acc})
|
||||
)
|
||||
)
|
||||
|
||||
let toEvSymbolValue = (name: string): expression =>
|
||||
name->ExpressionValue.EvSymbol->ExpressionT.EValue
|
||||
|
||||
let caseFunctionNode = fNode => {
|
||||
let lispArgs = fNode["args"]->Belt.List.fromArray->fromNodeList
|
||||
passToFunction(fNode->Parse.nameOfFunctionNode, lispArgs)
|
||||
let rLispArgs = fNode["args"]->Belt.List.fromArray->fromNodeList
|
||||
rLispArgs->Result.map(lispArgs =>
|
||||
ExpressionBuilder.eFunction(fNode->Parse.nameOfFunctionNode, lispArgs)
|
||||
)
|
||||
}
|
||||
|
||||
let caseObjectNode = oNode => {
|
||||
|
@ -49,19 +39,16 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
|
|||
(key: string, value: Parse.node),
|
||||
) =>
|
||||
racc->Result.flatMap(acc =>
|
||||
fromNode(value)->Result.map(valueExpression => {
|
||||
fromInnerNode(value)->Result.map(valueExpression => {
|
||||
let entryCode =
|
||||
list{
|
||||
key->ExpressionValue.EvString->ExpressionT.EValue,
|
||||
valueExpression,
|
||||
}->ExpressionT.EList
|
||||
list{ExpressionBuilder.eString(key), valueExpression}->ExpressionT.EList
|
||||
list{entryCode, ...acc}
|
||||
})
|
||||
)
|
||||
)
|
||||
rargs->Result.flatMap(args =>
|
||||
passToFunction("$constructRecord", list{ExpressionT.EList(args)}->Ok)
|
||||
) // $consturctRecord gets a single argument: List of key-value paiers
|
||||
ExpressionBuilder.eFunction("$constructRecord", list{ExpressionT.EList(args)})->Ok
|
||||
) // $constructRecord gets a single argument: a list of key-value pairs
|
||||
}
|
||||
|
||||
oNode["properties"]->Js.Dict.entries->Belt.List.fromArray->fromObjectEntries
|
||||
|
@ -73,7 +60,7 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
|
|||
Ok(list{}),
|
||||
(racc, currentPropertyMathJsNode) =>
|
||||
racc->Result.flatMap(acc =>
|
||||
fromNode(currentPropertyMathJsNode)->Result.map(propertyCode => list{
|
||||
fromInnerNode(currentPropertyMathJsNode)->Result.map(propertyCode => list{
|
||||
propertyCode,
|
||||
...acc,
|
||||
})
|
||||
|
@ -84,18 +71,41 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
|
|||
|
||||
let caseAccessorNode = (objectNode, indexNode) => {
|
||||
caseIndexNode(indexNode)->Result.flatMap(indexCode => {
|
||||
fromNode(objectNode)->Result.flatMap(objectCode =>
|
||||
passToFunction("$atIndex", list{objectCode, indexCode}->Ok)
|
||||
fromInnerNode(objectNode)->Result.flatMap(objectCode =>
|
||||
ExpressionBuilder.eFunction("$atIndex", list{objectCode, indexCode})->Ok
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
let caseBlock = (nodesArray: array<Parse.node>): result<expression, errorValue> => {
|
||||
let rStatements: result<list<expression>, 'a> =
|
||||
nodesArray
|
||||
->Belt.List.fromArray
|
||||
->Belt.List.reduceReverse(Ok(list{}), (racc, currNode) =>
|
||||
racc->Result.flatMap(acc =>
|
||||
fromInnerNode(currNode)->Result.map(currCode => list{currCode, ...acc})
|
||||
)
|
||||
)
|
||||
rStatements->Result.map(statements => ExpressionBuilder.eBlock(statements))
|
||||
}
|
||||
|
||||
let caseAssignmentNode = aNode => {
|
||||
let symbol = aNode["object"]["name"]->toEvSymbolValue
|
||||
let rValueExpression = fromNode(aNode["value"])
|
||||
let symbolName = aNode["object"]["name"]
|
||||
let rValueExpression = fromInnerNode(aNode["value"])
|
||||
rValueExpression->Result.map(valueExpression =>
|
||||
ExpressionBuilder.eLetStatement(symbolName, valueExpression)
|
||||
)
|
||||
}
|
||||
|
||||
let caseFunctionAssignmentNode = faNode => {
|
||||
let symbol = faNode["name"]->ExpressionBuilder.eSymbol
|
||||
let rValueExpression = fromInnerNode(faNode["expr"])
|
||||
|
||||
rValueExpression->Result.flatMap(valueExpression => {
|
||||
let lispArgs = list{symbol, valueExpression}->Ok
|
||||
passToFunction("$let", lispArgs)
|
||||
let lispParams = ExpressionBuilder.eArrayString(faNode["params"])
|
||||
let valueBlock = ExpressionBuilder.eBlock(list{valueExpression})
|
||||
let lambda = ExpressionBuilder.eFunction("$$lambda", list{lispParams, valueBlock})
|
||||
ExpressionBuilder.eFunction("$let", list{symbol, lambda})->Ok
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -103,93 +113,42 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
|
|||
aNode["items"]->Belt.List.fromArray->fromNodeList->Result.map(list => ExpressionT.EList(list))
|
||||
}
|
||||
|
||||
let caseConditionalNode = cndNode => {
|
||||
let rCondition = fromInnerNode(cndNode["condition"])
|
||||
let rTrueExpr = fromInnerNode(cndNode["trueExpr"])
|
||||
let rFalse = fromInnerNode(cndNode["falseExpr"])
|
||||
|
||||
rCondition->Result.flatMap(condition =>
|
||||
rTrueExpr->Result.flatMap(trueExpr =>
|
||||
rFalse->Result.flatMap(falseExpr =>
|
||||
ExpressionBuilder.eFunction("$$ternary", list{condition, trueExpr, falseExpr})->Ok
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
let rFinalExpression: result<expression, errorValue> = switch typedMathJsNode {
|
||||
| MjAccessorNode(aNode) => caseAccessorNode(aNode["object"], aNode["index"])
|
||||
| MjArrayNode(aNode) => caseArrayNode(aNode)
|
||||
| MjAssignmentNode(aNode) => caseAssignmentNode(aNode)
|
||||
| MjSymbolNode(sNode) => {
|
||||
let expr: expression = toEvSymbolValue(sNode["name"])
|
||||
let expr: expression = ExpressionBuilder.eSymbol(sNode["name"])
|
||||
let rExpr: result<expression, errorValue> = expr->Ok
|
||||
rExpr
|
||||
}
|
||||
| MjBlockNode(bNode) => bNode["blocks"]->Belt.Array.map(toTagOrNode)->caseTagOrNodes
|
||||
| MjBlockNode(bNode) => bNode["blocks"]->Js.Array2.map(blockToNode)->caseBlock
|
||||
| MjConditionalNode(cndNode) => caseConditionalNode(cndNode)
|
||||
| MjConstantNode(cNode) =>
|
||||
cNode["value"]->JavaScript.Gate.jsToEv->Result.flatMap(v => v->ExpressionT.EValue->Ok)
|
||||
| MjFunctionAssignmentNode(faNode) => caseFunctionAssignmentNode(faNode)
|
||||
| MjFunctionNode(fNode) => fNode->caseFunctionNode
|
||||
| MjIndexNode(iNode) => caseIndexNode(iNode)
|
||||
| MjObjectNode(oNode) => caseObjectNode(oNode)
|
||||
| MjOperatorNode(opNode) => opNode->Parse.castOperatorNodeToFunctionNode->caseFunctionNode
|
||||
| MjParenthesisNode(pNode) => pNode["content"]->fromNode
|
||||
| MjParenthesisNode(pNode) => pNode["content"]->fromInnerNode
|
||||
}
|
||||
rFinalExpression
|
||||
})
|
||||
and caseTagOrNodes = (tagOrNodes): result<expression, errorValue> => {
|
||||
let initialBindings = passToFunction("$$bindings", list{}->Ok)
|
||||
let lastIndex = Belt.Array.length(tagOrNodes) - 1
|
||||
tagOrNodes->Belt.Array.reduceWithIndex(initialBindings, (rPreviousBindings, tagOrNode, i) => {
|
||||
rPreviousBindings->Result.flatMap(previousBindings => {
|
||||
let rStatement: result<expression, errorValue> = switch tagOrNode {
|
||||
| BlockNode(node) => fromNode(node)
|
||||
| BlockTag(tag) =>
|
||||
switch tag {
|
||||
| ImportVariablesStatement => passToFunction("$importVariablesStatement", list{}->Ok)
|
||||
| ExportVariablesExpression => passToFunction("$exportVariablesExpression", list{}->Ok)
|
||||
}
|
||||
}
|
||||
|
||||
let bindName = if i == lastIndex {
|
||||
"$$bindExpression"
|
||||
} else {
|
||||
"$$bindStatement"
|
||||
}
|
||||
|
||||
rStatement->Result.flatMap((statement: expression) => {
|
||||
let lispArgs = list{previousBindings, statement}->Ok
|
||||
passToFunction(bindName, lispArgs)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
let fromPartialNode = (mathJsNode: Parse.node): result<expression, errorValue> => {
|
||||
Parse.castNodeType(mathJsNode)->Result.flatMap(typedMathJsNode => {
|
||||
let casePartialBlockNode = (bNode: Parse.blockNode) => {
|
||||
let blocksOrTags = bNode["blocks"]->Belt.Array.map(toTagOrNode)
|
||||
let completed = Js.Array2.concat(blocksOrTags, [BlockTag(ExportVariablesExpression)])
|
||||
completed->caseTagOrNodes
|
||||
}
|
||||
|
||||
let casePartialExpression = (node: Parse.node) => {
|
||||
let completed = [BlockNode(node), BlockTag(ExportVariablesExpression)]
|
||||
|
||||
completed->caseTagOrNodes
|
||||
}
|
||||
|
||||
let rFinalExpression: result<expression, errorValue> = switch typedMathJsNode {
|
||||
| MjBlockNode(bNode) => casePartialBlockNode(bNode)
|
||||
| _ => casePartialExpression(mathJsNode)
|
||||
}
|
||||
rFinalExpression
|
||||
})
|
||||
}
|
||||
|
||||
let fromOuterNode = (mathJsNode: Parse.node): result<expression, errorValue> => {
|
||||
Parse.castNodeType(mathJsNode)->Result.flatMap(typedMathJsNode => {
|
||||
let casePartialBlockNode = (bNode: Parse.blockNode) => {
|
||||
let blocksOrTags = bNode["blocks"]->Belt.Array.map(toTagOrNode)
|
||||
let completed = blocksOrTags
|
||||
completed->caseTagOrNodes
|
||||
}
|
||||
|
||||
let casePartialExpression = (node: Parse.node) => {
|
||||
let completed = [BlockNode(node)]
|
||||
completed->caseTagOrNodes
|
||||
}
|
||||
|
||||
let rFinalExpression: result<expression, errorValue> = switch typedMathJsNode {
|
||||
| MjBlockNode(bNode) => casePartialBlockNode(bNode)
|
||||
| _ => casePartialExpression(mathJsNode)
|
||||
}
|
||||
rFinalExpression
|
||||
})
|
||||
}
|
||||
let fromNode = (node: Parse.node): result<expression, errorValue> =>
|
||||
fromInnerNode(node)->Result.map(expr => ExpressionBuilder.eBlock(list{expr}))
|
||||
|
|
|
@ -5,37 +5,50 @@
|
|||
module Extra_Array = Reducer_Extra_Array
|
||||
module ErrorValue = Reducer_ErrorValue
|
||||
|
||||
@genType.opaque
|
||||
type internalCode = Object
|
||||
|
||||
@genType
|
||||
type rec expressionValue =
|
||||
| EvArray(array<expressionValue>)
|
||||
| EvArrayString(array<string>)
|
||||
| EvBool(bool)
|
||||
| EvCall(string) // External function call
|
||||
| EvDistribution(DistributionTypes.genericDist)
|
||||
| EvLambda(lambdaValue)
|
||||
| EvNumber(float)
|
||||
| EvRecord(Js.Dict.t<expressionValue>)
|
||||
| EvRecord(record)
|
||||
| EvString(string)
|
||||
| EvSymbol(string)
|
||||
and record = Js.Dict.t<expressionValue>
|
||||
and externalBindings = record
|
||||
and lambdaValue = {
|
||||
parameters: array<string>,
|
||||
context: externalBindings,
|
||||
body: internalCode,
|
||||
}
|
||||
|
||||
@genType
|
||||
type externalBindings = Js.Dict.t<expressionValue>
|
||||
let defaultExternalBindings: externalBindings = Js.Dict.empty()
|
||||
|
||||
type functionCall = (string, array<expressionValue>)
|
||||
|
||||
let rec toString = aValue =>
|
||||
switch aValue {
|
||||
| EvArray(anArray) => {
|
||||
let args = anArray->Js.Array2.map(each => toString(each))->Js.Array2.toString
|
||||
`[${args}]`
|
||||
}
|
||||
| EvArrayString(anArray) => {
|
||||
let args = anArray->Js.Array2.toString
|
||||
`[${args}]`
|
||||
}
|
||||
| EvBool(aBool) => Js.String.make(aBool)
|
||||
| EvCall(fName) => `:${fName}`
|
||||
| EvLambda(lambdaValue) => `lambda(${Js.Array2.toString(lambdaValue.parameters)}=>internal code)`
|
||||
| EvNumber(aNumber) => Js.String.make(aNumber)
|
||||
| EvString(aString) => `'${aString}'`
|
||||
| EvSymbol(aString) => `:${aString}`
|
||||
| EvArray(anArray) => {
|
||||
let args =
|
||||
anArray
|
||||
->Belt.Array.map(each => toString(each))
|
||||
->Extra_Array.interperse(", ")
|
||||
->Js.String.concatMany("")
|
||||
`[${args}]`
|
||||
}
|
||||
| EvRecord(aRecord) => aRecord->toStringRecord
|
||||
| EvDistribution(dist) => GenericDist.toString(dist)
|
||||
}
|
||||
|
@ -43,26 +56,27 @@ and toStringRecord = aRecord => {
|
|||
let pairs =
|
||||
aRecord
|
||||
->Js.Dict.entries
|
||||
->Belt.Array.map(((eachKey, eachValue)) => `${eachKey}: ${toString(eachValue)}`)
|
||||
->Extra_Array.interperse(", ")
|
||||
->Js.String.concatMany("")
|
||||
->Js.Array2.map(((eachKey, eachValue)) => `${eachKey}: ${toString(eachValue)}`)
|
||||
->Js.Array2.toString
|
||||
`{${pairs}}`
|
||||
}
|
||||
|
||||
let toStringWithType = aValue =>
|
||||
switch aValue {
|
||||
| EvArray(_) => `Array::${toString(aValue)}`
|
||||
| EvArrayString(_) => `ArrayString::${toString(aValue)}`
|
||||
| EvBool(_) => `Bool::${toString(aValue)}`
|
||||
| EvCall(_) => `Call::${toString(aValue)}`
|
||||
| EvDistribution(_) => `Distribution::${toString(aValue)}`
|
||||
| EvLambda(_) => `Lambda::${toString(aValue)}`
|
||||
| EvNumber(_) => `Number::${toString(aValue)}`
|
||||
| EvRecord(_) => `Record::${toString(aValue)}`
|
||||
| EvString(_) => `String::${toString(aValue)}`
|
||||
| EvSymbol(_) => `Symbol::${toString(aValue)}`
|
||||
| EvArray(_) => `Array::${toString(aValue)}`
|
||||
| EvRecord(_) => `Record::${toString(aValue)}`
|
||||
| EvDistribution(_) => `Distribution::${toString(aValue)}`
|
||||
}
|
||||
|
||||
let argsToString = (args: array<expressionValue>): string => {
|
||||
args->Belt.Array.map(arg => arg->toString)->Extra_Array.interperse(", ")->Js.String.concatMany("")
|
||||
args->Js.Array2.map(arg => arg->toString)->Js.Array2.toString
|
||||
}
|
||||
|
||||
let toStringFunctionCall = ((fn, args)): string => `${fn}(${argsToString(args)})`
|
||||
|
@ -78,3 +92,9 @@ let toStringResultRecord = x =>
|
|||
| Ok(a) => `Ok(${toStringRecord(a)})`
|
||||
| Error(m) => `Error(${ErrorValue.errorToString(m)})`
|
||||
}
|
||||
|
||||
@genType
|
||||
type environment = DistributionOperation.env
|
||||
|
||||
@genType
|
||||
let defaultEnvironment: environment = DistributionOperation.defaultEnv
|
||||
|
|
|
@ -14,8 +14,13 @@ type expressionValue = ExpressionValue.expressionValue
|
|||
Map external calls of Reducer
|
||||
*/
|
||||
|
||||
let dispatch = (call: ExpressionValue.functionCall, chain): result<expressionValue, 'e> =>
|
||||
ReducerInterface_GenericDistribution.dispatch(call) |> E.O.default(chain(call))
|
||||
let dispatch = (call: ExpressionValue.functionCall, environment, chain): result<
|
||||
expressionValue,
|
||||
'e,
|
||||
> =>
|
||||
ReducerInterface_GenericDistribution.dispatch(call, environment) |> E.O.default(
|
||||
chain(call, environment),
|
||||
)
|
||||
/*
|
||||
If your dispatch grows too big, you can divide it into smaller dispatches and pass the call along so that it eventually gets handled.
|
||||
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
module ExpressionValue = ReducerInterface_ExpressionValue
|
||||
type expressionValue = ReducerInterface_ExpressionValue.expressionValue
|
||||
|
||||
let runGenericOperation = DistributionOperation.run(
|
||||
~env={
|
||||
sampleCount: MagicNumbers.Environment.defaultSampleCount,
|
||||
xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
|
||||
},
|
||||
)
|
||||
let defaultEnv: DistributionOperation.env = {
|
||||
sampleCount: MagicNumbers.Environment.defaultSampleCount,
|
||||
xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
|
||||
}
|
||||
|
||||
let runGenericOperation = DistributionOperation.run(~env=defaultEnv)
|
||||
|
||||
module Helpers = {
|
||||
let arithmeticMap = r =>
|
||||
|
@ -28,14 +28,13 @@ module Helpers = {
|
|||
let catchAndConvertTwoArgsToDists = (args: array<expressionValue>): option<(
|
||||
DistributionTypes.genericDist,
|
||||
DistributionTypes.genericDist,
|
||||
)> => {
|
||||
)> =>
|
||||
switch args {
|
||||
| [EvDistribution(a), EvDistribution(b)] => Some((a, b))
|
||||
| [EvNumber(a), EvDistribution(b)] => Some((GenericDist.fromFloat(a), b))
|
||||
| [EvDistribution(a), EvNumber(b)] => Some((a, GenericDist.fromFloat(b)))
|
||||
| _ => None
|
||||
}
|
||||
}
|
||||
|
||||
let toFloatFn = (
|
||||
fnCall: DistributionTypes.DistributionOperation.toFloat,
|
||||
|
@ -80,6 +79,7 @@ module Helpers = {
|
|||
dist1,
|
||||
)->runGenericOperation
|
||||
}
|
||||
|
||||
let parseNumber = (args: expressionValue): Belt.Result.t<float, string> =>
|
||||
switch args {
|
||||
| EvNumber(x) => Ok(x)
|
||||
|
@ -119,7 +119,7 @@ module Helpers = {
|
|||
mixtureWithGivenWeights(distributions, weights)
|
||||
}
|
||||
|
||||
let mixture = (args: array<expressionValue>): DistributionOperation.outputType => {
|
||||
let mixture = (args: array<expressionValue>): DistributionOperation.outputType =>
|
||||
switch E.A.last(args) {
|
||||
| Some(EvArray(b)) => {
|
||||
let weights = parseNumberArray(b)
|
||||
|
@ -131,6 +131,7 @@ module Helpers = {
|
|||
| Error(err) => GenDistError(ArgumentError(err))
|
||||
}
|
||||
}
|
||||
| Some(EvNumber(_))
|
||||
| Some(EvDistribution(_)) =>
|
||||
switch parseDistributionArray(args) {
|
||||
| Ok(distributions) => mixtureWithDefaultWeights(distributions)
|
||||
|
@ -138,7 +139,6 @@ module Helpers = {
|
|||
}
|
||||
| _ => GenDistError(ArgumentError("Last argument of mx must be array or distribution"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module SymbolicConstructors = {
|
||||
|
@ -155,6 +155,7 @@ module SymbolicConstructors = {
|
|||
| "beta" => Ok(SymbolicDist.Beta.make)
|
||||
| "lognormal" => Ok(SymbolicDist.Lognormal.make)
|
||||
| "cauchy" => Ok(SymbolicDist.Cauchy.make)
|
||||
| "gamma" => Ok(SymbolicDist.Gamma.make)
|
||||
| "to" => Ok(SymbolicDist.From90thPercentile.make)
|
||||
| _ => Error("Unreachable state")
|
||||
}
|
||||
|
@ -174,17 +175,19 @@ module SymbolicConstructors = {
|
|||
}
|
||||
}
|
||||
|
||||
let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
|
||||
let dispatchToGenericOutput = (call: ExpressionValue.functionCall, _environment): option<
|
||||
DistributionOperation.outputType,
|
||||
> => {
|
||||
let (fnName, args) = call
|
||||
switch (fnName, args) {
|
||||
| ("exponential" as fnName, [EvNumber(f1)]) =>
|
||||
| ("exponential" as fnName, [EvNumber(f)]) =>
|
||||
SymbolicConstructors.oneFloat(fnName)
|
||||
->E.R.bind(r => r(f1))
|
||||
->E.R.bind(r => r(f))
|
||||
->SymbolicConstructors.symbolicResultToOutput
|
||||
| ("delta", [EvNumber(f)]) =>
|
||||
SymbolicDist.Float.makeSafe(f)->SymbolicConstructors.symbolicResultToOutput
|
||||
| (
|
||||
("normal" | "uniform" | "beta" | "lognormal" | "cauchy" | "to") as fnName,
|
||||
("normal" | "uniform" | "beta" | "lognormal" | "cauchy" | "gamma" | "to") as fnName,
|
||||
[EvNumber(f1), EvNumber(f2)],
|
||||
) =>
|
||||
SymbolicConstructors.twoFloat(fnName)
|
||||
|
@ -196,6 +199,7 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
|
|||
->SymbolicConstructors.symbolicResultToOutput
|
||||
| ("sample", [EvDistribution(dist)]) => Helpers.toFloatFn(#Sample, dist)
|
||||
| ("mean", [EvDistribution(dist)]) => Helpers.toFloatFn(#Mean, dist)
|
||||
| ("integralSum", [EvDistribution(dist)]) => Helpers.toFloatFn(#IntegralSum, dist)
|
||||
| ("toString", [EvDistribution(dist)]) => Helpers.toStringFn(ToString, dist)
|
||||
| ("toSparkline", [EvDistribution(dist)]) => Helpers.toStringFn(ToSparkline(20), dist)
|
||||
| ("toSparkline", [EvDistribution(dist), EvNumber(n)]) =>
|
||||
|
@ -209,8 +213,21 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
|
|||
a,
|
||||
)->Some
|
||||
| ("normalize", [EvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist)
|
||||
| ("klDivergence", [EvDistribution(a), EvDistribution(b)]) =>
|
||||
Some(runGenericOperation(FromDist(ToScore(KLDivergence(b)), a)))
|
||||
| ("isNormalized", [EvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist)
|
||||
| ("toPointSet", [EvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist)
|
||||
| ("scaleLog", [EvDistribution(dist)]) =>
|
||||
Helpers.toDistFn(Scale(#Logarithm, MagicNumbers.Math.e), dist)
|
||||
| ("scaleLog10", [EvDistribution(dist)]) => Helpers.toDistFn(Scale(#Logarithm, 10.0), dist)
|
||||
| ("scaleLog", [EvDistribution(dist), EvNumber(float)]) =>
|
||||
Helpers.toDistFn(Scale(#Logarithm, float), dist)
|
||||
| ("scaleLogWithThreshold", [EvDistribution(dist), EvNumber(base), EvNumber(eps)]) =>
|
||||
Helpers.toDistFn(Scale(#LogarithmWithThreshold(eps), base), dist)
|
||||
| ("scalePow", [EvDistribution(dist), EvNumber(float)]) =>
|
||||
Helpers.toDistFn(Scale(#Power, float), dist)
|
||||
| ("scaleExp", [EvDistribution(dist)]) =>
|
||||
Helpers.toDistFn(Scale(#Power, MagicNumbers.Math.e), dist)
|
||||
| ("cdf", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Cdf(float), dist)
|
||||
| ("pdf", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Pdf(float), dist)
|
||||
| ("inv", [EvDistribution(dist), EvNumber(float)]) => Helpers.toFloatFn(#Inv(float), dist)
|
||||
|
@ -218,6 +235,14 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
|
|||
Helpers.toDistFn(ToSampleSet(Belt.Int.fromFloat(float)), dist)
|
||||
| ("toSampleSet", [EvDistribution(dist)]) =>
|
||||
Helpers.toDistFn(ToSampleSet(MagicNumbers.Environment.defaultSampleCount), dist)
|
||||
| ("fromSamples", [EvArray(inputArray)]) => {
|
||||
let _wrapInputErrors = x => SampleSetDist.NonNumericInput(x)
|
||||
let parsedArray = Helpers.parseNumberArray(inputArray)->E.R2.errMap(_wrapInputErrors)
|
||||
switch parsedArray {
|
||||
| Ok(array) => runGenericOperation(FromSamples(array))
|
||||
| Error(e) => GenDistError(SampleSetError(e))
|
||||
}->Some
|
||||
}
|
||||
| ("inspect", [EvDistribution(dist)]) => Helpers.toDistFn(Inspect, dist)
|
||||
| ("truncateLeft", [EvDistribution(dist), EvNumber(float)]) =>
|
||||
Helpers.toDistFn(Truncate(Some(float), None), dist)
|
||||
|
@ -275,6 +300,6 @@ let genericOutputToReducerValue = (o: DistributionOperation.outputType): result<
|
|||
| GenDistError(err) => Error(REDistributionError(err))
|
||||
}
|
||||
|
||||
let dispatch = call => {
|
||||
dispatchToGenericOutput(call)->E.O2.fmap(genericOutputToReducerValue)
|
||||
let dispatch = (call, environment) => {
|
||||
dispatchToGenericOutput(call, environment)->E.O2.fmap(genericOutputToReducerValue)
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
let dispatch: ReducerInterface_ExpressionValue.functionCall => option<
|
||||
result<ReducerInterface_ExpressionValue.expressionValue, Reducer_ErrorValue.errorValue>,
|
||||
>
|
||||
let defaultEnv: DistributionOperation.env
|
||||
let dispatch: (
|
||||
ReducerInterface_ExpressionValue.functionCall,
|
||||
ReducerInterface_ExpressionValue.environment,
|
||||
) => option<result<ReducerInterface_ExpressionValue.expressionValue, Reducer_ErrorValue.errorValue>>
|
||||
|
|
|
@ -38,7 +38,7 @@ let makeSampleSetDist = SampleSetDist.make
|
|||
let evaluate = Reducer.evaluate
|
||||
|
||||
@genType
|
||||
let evaluateUsingExternalBindings = Reducer.evaluateUsingExternalBindings
|
||||
let evaluateUsingOptions = Reducer.evaluateUsingOptions
|
||||
|
||||
@genType
|
||||
let evaluatePartialUsingExternalBindings = Reducer.evaluatePartialUsingExternalBindings
|
||||
|
@ -49,6 +49,9 @@ type externalBindings = Reducer.externalBindings
|
|||
@genType
|
||||
type expressionValue = ReducerInterface_ExpressionValue.expressionValue
|
||||
|
||||
@genType
|
||||
type recordEV = ReducerInterface_ExpressionValue.record
|
||||
|
||||
@genType
|
||||
type errorValue = Reducer_ErrorValue.errorValue
|
||||
|
||||
|
@ -69,3 +72,15 @@ let errorValueToString = Reducer_ErrorValue.errorToString
|
|||
|
||||
@genType
|
||||
let distributionErrorToString = DistributionTypes.Error.toString
|
||||
|
||||
@genType
|
||||
type lambdaValue = ReducerInterface_ExpressionValue.lambdaValue
|
||||
|
||||
@genType
|
||||
let defaultSamplingEnv = ReducerInterface_GenericDistribution.defaultEnv
|
||||
|
||||
@genType
|
||||
type environment = ReducerInterface_ExpressionValue.environment
|
||||
|
||||
@genType
|
||||
let defaultEnvironment = ReducerInterface_ExpressionValue.defaultEnvironment
|
||||
|
|
|
@ -198,6 +198,7 @@ module Float = {
|
|||
let with3DigitsPrecision = Js.Float.toPrecisionWithPrecision(_, ~digits=3)
|
||||
let toFixed = Js.Float.toFixed
|
||||
let toString = Js.Float.toString
|
||||
let isFinite = Js.Float.isFinite
|
||||
}
|
||||
|
||||
module I = {
|
||||
|
@ -289,6 +290,13 @@ module R = {
|
|||
| Ok(r) => r->Ok
|
||||
| Error(x) => x->f->Error
|
||||
}
|
||||
|
||||
//I'm not sure what to call this.
|
||||
let unify = (a: result<'a, 'b>, c: 'b => 'a): 'a =>
|
||||
switch a {
|
||||
| Ok(x) => x
|
||||
| Error(x) => c(x)
|
||||
}
|
||||
}
|
||||
|
||||
module R2 = {
|
||||
|
@ -307,6 +315,8 @@ module R2 = {
|
|||
| Ok(x) => x->Ok
|
||||
| Error(x) => x->f->Error
|
||||
}
|
||||
|
||||
let toExn = (a, b) => R.toExn(b, a)
|
||||
}
|
||||
|
||||
let safe_fn_of_string = (fn, s: string): option<'a> =>
|
||||
|
@ -597,6 +607,9 @@ module A = {
|
|||
let filter = Js.Array.filter
|
||||
let joinWith = Js.Array.joinWith
|
||||
|
||||
let all = (p: 'a => bool, xs: array<'a>): bool => length(filter(p, xs)) == length(xs)
|
||||
let any = (p: 'a => bool, xs: array<'a>): bool => length(filter(p, xs)) > 0
|
||||
|
||||
module O = {
|
||||
let concatSomes = (optionals: array<option<'a>>): array<'a> =>
|
||||
optionals
|
||||
|
|
|
@ -81,6 +81,14 @@ module Binomial = {
|
|||
@module("jstat") @scope("binomial") external cdf: (float, float, float) => float = "cdf"
|
||||
}
|
||||
|
||||
module Gamma = {
|
||||
@module("jstat") @scope("gamma") external pdf: (float, float, float) => float = "pdf"
|
||||
@module("jstat") @scope("gamma") external cdf: (float, float, float) => float = "cdf"
|
||||
@module("jstat") @scope("gamma") external inv: (float, float, float) => float = "inv"
|
||||
@module("jstat") @scope("gamma") external mean: (float, float) => float = "mean"
|
||||
@module("jstat") @scope("gamma") external sample: (float, float) => float = "sample"
|
||||
}
|
||||
|
||||
@module("jstat") external sum: array<float> => float = "sum"
|
||||
@module("jstat") external product: array<float> => float = "product"
|
||||
@module("jstat") external min: array<float> => float = "min"
|
||||
|
|
|
@ -8,6 +8,7 @@ type algebraicOperation = [
|
|||
| #Divide
|
||||
| #Power
|
||||
| #Logarithm
|
||||
| #LogarithmWithThreshold(float)
|
||||
]
|
||||
|
||||
type convolutionOperation = [
|
||||
|
@ -18,7 +19,7 @@ type convolutionOperation = [
|
|||
|
||||
@genType
|
||||
type pointwiseOperation = [#Add | #Multiply | #Power]
|
||||
type scaleOperation = [#Multiply | #Power | #Logarithm | #Divide]
|
||||
type scaleOperation = [#Multiply | #Power | #Logarithm | #LogarithmWithThreshold(float) | #Divide]
|
||||
type distToFloatOperation = [
|
||||
| #Pdf(float)
|
||||
| #Cdf(float)
|
||||
|
@ -35,7 +36,7 @@ module Convolution = {
|
|||
| #Add => Some(#Add)
|
||||
| #Subtract => Some(#Subtract)
|
||||
| #Multiply => Some(#Multiply)
|
||||
| #Divide | #Power | #Logarithm => None
|
||||
| #Divide | #Power | #Logarithm | #LogarithmWithThreshold(_) => None
|
||||
}
|
||||
|
||||
let canDoAlgebraicOperation = (op: algebraicOperation): bool =>
|
||||
|
@ -52,6 +53,10 @@ module Convolution = {
|
|||
type operationError =
|
||||
| DivisionByZeroError
|
||||
| ComplexNumberError
|
||||
| InfinityError
|
||||
| NegativeInfinityError
|
||||
| LogicallyInconsistentPathwayError
|
||||
| NotYetImplemented // should be removed when `klDivergence` for mixed and discrete is implemented.
|
||||
|
||||
@genType
|
||||
module Error = {
|
||||
|
@ -62,6 +67,10 @@ module Error = {
|
|||
switch err {
|
||||
| DivisionByZeroError => "Cannot divide by zero"
|
||||
| ComplexNumberError => "Operation returned complex result"
|
||||
| InfinityError => "Operation returned positive infinity"
|
||||
| NegativeInfinityError => "Operation returned negative infinity"
|
||||
| LogicallyInconsistentPathwayError => "This pathway should have been logically unreachable"
|
||||
| NotYetImplemented => "This pathway is not yet implemented"
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -86,6 +95,8 @@ let logarithm = (a: float, b: float): result<float, Error.t> =>
|
|||
Ok(0.)
|
||||
} else if a > 0.0 && b > 0.0 {
|
||||
Ok(log(a) /. log(b))
|
||||
} else if a == 0.0 {
|
||||
Error(NegativeInfinityError)
|
||||
} else {
|
||||
Error(ComplexNumberError)
|
||||
}
|
||||
|
@ -102,6 +113,12 @@ module Algebraic = {
|
|||
| #Power => power(a, b)
|
||||
| #Divide => divide(a, b)
|
||||
| #Logarithm => logarithm(a, b)
|
||||
| #LogarithmWithThreshold(eps) =>
|
||||
if a < eps {
|
||||
Ok(0.0)
|
||||
} else {
|
||||
logarithm(a, b)
|
||||
}
|
||||
}
|
||||
|
||||
let toString = x =>
|
||||
|
@ -112,6 +129,7 @@ module Algebraic = {
|
|||
| #Power => "**"
|
||||
| #Divide => "/"
|
||||
| #Logarithm => "log"
|
||||
| #LogarithmWithThreshold(_) => "log"
|
||||
}
|
||||
|
||||
let format = (a, b, c) => b ++ (" " ++ (toString(a) ++ (" " ++ c)))
|
||||
|
@ -151,6 +169,12 @@ module Scale = {
|
|||
| #Divide => divide(a, b)
|
||||
| #Power => power(a, b)
|
||||
| #Logarithm => logarithm(a, b)
|
||||
| #LogarithmWithThreshold(eps) =>
|
||||
if a < eps {
|
||||
Ok(0.0)
|
||||
} else {
|
||||
logarithm(a, b)
|
||||
}
|
||||
}
|
||||
|
||||
let format = (operation: t, value, scaleBy) =>
|
||||
|
@ -159,14 +183,14 @@ module Scale = {
|
|||
| #Divide => j`verticalDivide($value, $scaleBy) `
|
||||
| #Power => j`verticalPower($value, $scaleBy) `
|
||||
| #Logarithm => j`verticalLog($value, $scaleBy) `
|
||||
| #LogarithmWithThreshold(eps) => j`verticalLog($value, $scaleBy, epsilon=$eps) `
|
||||
}
|
||||
|
||||
let toIntegralSumCacheFn = x =>
|
||||
switch x {
|
||||
| #Multiply => (a, b) => Some(a *. b)
|
||||
| #Divide => (a, b) => Some(a /. b)
|
||||
| #Power => (_, _) => None
|
||||
| #Logarithm => (_, _) => None
|
||||
| #Power | #Logarithm | #LogarithmWithThreshold(_) => (_, _) => None
|
||||
}
|
||||
|
||||
let toIntegralCacheFn = x =>
|
||||
|
@ -175,6 +199,7 @@ module Scale = {
|
|||
| #Divide => (_, _) => None
|
||||
| #Power => (_, _) => None
|
||||
| #Logarithm => (_, _) => None
|
||||
| #LogarithmWithThreshold(_) => (_, _) => None
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -96,7 +96,21 @@ module T = {
|
|||
let fromZippedArray = (pairs: array<(float, float)>): t => pairs |> Belt.Array.unzip |> fromArray
|
||||
let equallyDividedXs = (t: t, newLength) => E.A.Floats.range(minX(t), maxX(t), newLength)
|
||||
let toJs = (t: t) => {"xs": t.xs, "ys": t.ys}
|
||||
|
||||
let filterYValues = (fn, t: t): t => t |> zip |> E.A.filter(((_, y)) => fn(y)) |> fromZippedArray
|
||||
let filterOkYs = (xs: array<float>, ys: array<result<float, 'b>>): t => {
|
||||
let n = E.A.length(xs) // Assume length(xs) == length(ys)
|
||||
let newXs = []
|
||||
let newYs = []
|
||||
for i in 0 to n - 1 {
|
||||
switch ys[i] {
|
||||
| Ok(y) =>
|
||||
let _ = Js.Array.push(xs[i], newXs)
|
||||
let _ = Js.Array.push(y, newYs)
|
||||
| Error(_) => ()
|
||||
}
|
||||
}
|
||||
{xs: newXs, ys: newYs}
|
||||
}
|
||||
module Validator = {
|
||||
let fnName = "XYShape validate"
|
||||
let notSortedError = (p: string): error => NotSorted(p)
|
||||
|
@ -376,6 +390,90 @@ module PointwiseCombination = {
|
|||
}
|
||||
`)
|
||||
|
||||
/*
|
||||
This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
|
||||
*/
|
||||
let combineAlongSupportOfSecondArgument0: (
|
||||
(float, float) => result<float, Operation.Error.t>,
|
||||
interpolator,
|
||||
T.t,
|
||||
T.t,
|
||||
) => result<T.t, Operation.Error.t> = (fn, interpolator, t1, t2) => {
|
||||
let newYs = []
|
||||
let newXs = []
|
||||
let (l1, l2) = (E.A.length(t1.xs), E.A.length(t2.xs))
|
||||
let (i, j) = (ref(0), ref(0))
|
||||
let minX = t2.xs[0]
|
||||
let maxX = t2.xs[l2 - 1]
|
||||
while j.contents < l2 - 1 && i.contents < l1 - 1 {
|
||||
let someTuple = {
|
||||
let x1 = t1.xs[i.contents + 1]
|
||||
let x2 = t2.xs[j.contents + 1]
|
||||
if (
|
||||
/* if t1 has to catch up to t2 */
|
||||
i.contents < l1 - 1 && j.contents < l2 && x1 < x2 && minX <= x1 && x2 <= maxX
|
||||
) {
|
||||
i := i.contents + 1
|
||||
let x = x1
|
||||
let y1 = t1.ys[i.contents]
|
||||
let y2 = interpolator(t2, j.contents, x)
|
||||
Some((x, y1, y2))
|
||||
} else if (
|
||||
/* if t2 has to catch up to t1 */
|
||||
i.contents < l1 && j.contents < l2 - 1 && x1 > x2 && x2 >= minX && maxX >= x1
|
||||
) {
|
||||
j := j.contents + 1
|
||||
let x = x2
|
||||
let y1 = interpolator(t1, i.contents, x)
|
||||
let y2 = t2.ys[j.contents]
|
||||
Some((x, y1, y2))
|
||||
} else if (
|
||||
/* move both ahead if they are equal */
|
||||
i.contents < l1 - 1 && j.contents < l2 - 1 && x1 == x2 && x1 >= minX && maxX >= x2
|
||||
) {
|
||||
i := i.contents + 1
|
||||
j := j.contents + 1
|
||||
let x = x1
|
||||
let y1 = t1.ys[i.contents]
|
||||
let y2 = t2.ys[j.contents]
|
||||
Some((x, y1, y2))
|
||||
} else {
|
||||
i := i.contents + 1
|
||||
None
|
||||
}
|
||||
}
|
||||
switch someTuple {
|
||||
| Some((x, y1, y2)) => {
|
||||
let _ = Js.Array.push(fn(y1, y2), newYs)
|
||||
let _ = Js.Array.push(x, newXs)
|
||||
}
|
||||
| None => ()
|
||||
}
|
||||
}
|
||||
T.filterOkYs(newXs, newYs)->Ok
|
||||
}
|
||||
|
||||
// This function is used for klDivergence
|
||||
let combineAlongSupportOfSecondArgument: (
|
||||
(float, float) => result<float, Operation.Error.t>,
|
||||
T.t,
|
||||
T.t,
|
||||
) => result<T.t, Operation.Error.t> = (fn, prediction, answer) => {
|
||||
let combineWithFn = (answerX: float, i: int) => {
|
||||
let answerY = answer.ys[i]
|
||||
let predictionY = XtoY.linear(answerX, prediction)
|
||||
fn(predictionY, answerY)
|
||||
}
|
||||
let newYsWithError = Js.Array.mapi((x, i) => combineWithFn(x, i), answer.xs)
|
||||
let newYsOrError = E.A.R.firstErrorOrOpen(newYsWithError)
|
||||
let result = switch newYsOrError {
|
||||
| Ok(a) => Ok({xs: answer.xs, ys: a})
|
||||
| Error(b) => Error(b)
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
let addCombine = (interpolator: interpolator, t1: T.t, t2: T.t): T.t =>
|
||||
combine((a, b) => Ok(a +. b), interpolator, t1, t2)->E.R.toExn(
|
||||
"Add operation should never fail",
|
||||
|
@ -467,7 +565,7 @@ module Range = {
|
|||
// TODO: I think this isn't needed by any functions anymore.
|
||||
let stepsToContinuous = t => {
|
||||
// TODO: It would be nicer if this the diff didn't change the first element, and also maybe if there were a more elegant way of doing this.
|
||||
let diff = T.xTotalRange(t) |> (r => r *. 0.00001)
|
||||
let diff = T.xTotalRange(t) |> (r => r *. MagicNumbers.Epsilon.five)
|
||||
let items = switch E.A.toRanges(Belt.Array.zip(t.xs, t.ys)) {
|
||||
| Ok(items) =>
|
||||
Some(
|
||||
|
@ -489,25 +587,6 @@ module Range = {
|
|||
}
|
||||
}
|
||||
|
||||
let pointLogScore = (prediction, answer) =>
|
||||
switch answer {
|
||||
| 0. => 0.0
|
||||
| answer => answer *. Js.Math.log2(Js.Math.abs_float(prediction /. answer))
|
||||
}
|
||||
|
||||
let logScorePoint = (sampleCount, t1, t2) =>
|
||||
PointwiseCombination.combineEvenXs(
|
||||
~fn=pointLogScore,
|
||||
~xToYSelection=XtoY.linear,
|
||||
sampleCount,
|
||||
t1,
|
||||
t2,
|
||||
)
|
||||
|> Range.integrateWithTriangles
|
||||
|> E.O.fmap(T.accumulateYs(\"+."))
|
||||
|> E.O.fmap(Pairs.last)
|
||||
|> E.O.fmap(Pairs.y)
|
||||
|
||||
module Analysis = {
|
||||
let getVarianceDangerously = (t: 't, mean: 't => float, getMeanOfSquares: 't => float): float => {
|
||||
let meanSquared = mean(t) ** 2.0
|
||||
|
|
|
@ -11,7 +11,7 @@ _Symbolic_ formats are just the math equations. `normal(5,3)` is the symbolic re
|
|||
|
||||
When you sample distributions (usually starting with symbolic formats), you get lists of samples. Monte Carlo techniques return lists of samples. Let’s call this the “_Sample Set_” format.
|
||||
|
||||
Lastly is what I’ll refer to as the _Graph_ format. It describes the coordinates, or the shape, of the distribution. You can save these formats in JSON, for instance, like, `{xs: [1, 2, 3, 4…], ys: [.0001, .0003, .002, …]}`.
|
||||
Lastly is what I’ll refer to as the _Graph_ format. It describes the coordinates, or the shape, of the distribution. You can save these formats in JSON, for instance, like, `{xs: [1, 2, 3, 4, …], ys: [.0001, .0003, .002, …]}`.
|
||||
|
||||
Symbolic, Sample Set, and Graph formats all have very different advantages and disadvantages.
|
||||
|
||||
|
@ -19,7 +19,7 @@ Note that the name "Symbolic" is fairly standard, but I haven't found common nam
|
|||
|
||||
## Symbolic Formats
|
||||
|
||||
**TLDR**
|
||||
**TL;DR**
|
||||
Mathematical representations. Require analytic solutions. These are often ideal where they can be applied, but apply to very few actual functions. Typically used sparsely, except for the starting distributions (before any computation is performed).
|
||||
|
||||
**Examples**
|
||||
|
@ -29,9 +29,6 @@ Mathematical representations. Require analytic solutions. These are often ideal
|
|||
**How to Do Computation**
|
||||
To perform calculations of symbolic systems, you need to find analytical solutions. For example, there are equations to find the pdf or cdf of most distribution shapes at any point. There are also lots of simplifications that could be done in particular situations. For example, there’s an analytical solution for combining normal distributions.
|
||||
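For instance, summing two independent normal distributions has a well-known closed form (a standard identity, shown here only to illustrate what an "analytical solution" looks like):

```latex
X \sim \mathcal{N}(\mu_1, \sigma_1^2),\quad Y \sim \mathcal{N}(\mu_2, \sigma_2^2)
\;\Longrightarrow\;
X + Y \sim \mathcal{N}(\mu_1 + \mu_2,\ \sigma_1^2 + \sigma_2^2)
```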
|
||||
**Special: The Metalog Distribution**
|
||||
The Metalog distribution seems like it can represent almost any reasonable distribution. It’s symbolic. This is great for storage, but it’s not clear if it helps with calculation. My impression is that we don’t have symbolic ways of doing most functions (addition, multiplication, etc) on metalog distributions. Also, note that it can take a fair bit of computation to fit a shape to the Metalog distribution.
|
||||
|
||||
**Advantages**
|
||||
|
||||
- Maximally compressed; i.e. very easy to store.
|
||||
|
@ -54,10 +51,14 @@ The Metalog distribution seems like it can represent almost any reasonable distr
|
|||
**How to Visualize**
|
||||
Convert to graph, then display that. (Optionally, you can also convert to samples and then display those using a histogram, but this is often worse when you have both options.)
|
||||
|
||||
**Bonus: The Metalog Distribution**
|
||||
|
||||
The Metalog distribution seems like it can represent almost any reasonable distribution. It’s symbolic. This is great for storage, but it’s not clear if it helps with calculation. My impression is that we don’t have symbolic ways of doing most functions (addition, multiplication, etc) on metalog distributions. Also, note that it can take a fair bit of computation to fit a shape to the Metalog distribution.
|
||||
|
||||
## Graph Formats
|
||||
|
||||
**TLDR**
|
||||
Lists of the x-y coordinates of the shape of a distribution. (Usually the pdf, which is more compressed than the cdf). Some key functions (like pdf, cdf) and manipulations can work on almost any graphally-described distribution.
|
||||
**TL;DR**
|
||||
Lists of the x-y coordinates of the shape of a distribution. (Usually the pdf, which is more compressed than the cdf). Some key functions (like pdf, cdf) and manipulations can work on almost any graphically-described distribution.
|
||||
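As a rough illustration of why this format is workable, here is a minimal TypeScript sketch (not Squiggle's actual implementation; the function names are made up) that reads a pdf value and an approximate cdf value straight off the `{xs, ys}` coordinates, assuming `xs` is sorted:

```typescript
type Graph = { xs: number[]; ys: number[] }; // ys[i] is the pdf evaluated at xs[i]

// Linear interpolation of the pdf at x; 0 outside the stored range (the tails are cut off).
function pdfAt({ xs, ys }: Graph, x: number): number {
  if (x < xs[0] || x > xs[xs.length - 1]) return 0;
  const i = xs.findIndex((xi) => xi >= x);
  if (i <= 0) return ys[0];
  const t = (x - xs[i - 1]) / (xs[i] - xs[i - 1]);
  return ys[i - 1] + t * (ys[i] - ys[i - 1]);
}

// Trapezoid-rule integral of the pdf up to x, approximating the cdf.
function cdfAt({ xs, ys }: Graph, x: number): number {
  let total = 0;
  for (let i = 1; i < xs.length && xs[i] <= x; i++) {
    total += ((ys[i - 1] + ys[i]) / 2) * (xs[i] - xs[i - 1]);
  }
  return total;
}
```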
|
||||
**Alternative Names:**
|
||||
Grid, Mesh, Graph, Vector, Pdf, PdfCoords/PdfPoints, Discretised, Bezier, Curve
|
||||
|
@ -77,7 +78,7 @@ Use graph techniques. These can be fairly computationally-intensive (particularl
|
|||
|
||||
**Disadvantages**
|
||||
|
||||
- Most calculations are infeasible/impossible to perform graphally. In these cases, you need to use sampling.
|
||||
- Most calculations are infeasible/impossible to perform graphically. In these cases, you need to use sampling.
|
||||
- Not as accurate or fast as symbolic methods, where the symbolic methods are applicable.
|
||||
- The tails get cut off, which is subideal. It’s assumed that the value of the pdf outside of the bounded range is exactly 0, which is not correct. (Note: If you have ideas on how to store graph formats that don’t cut off tails, let me know)
|
||||
|
||||
|
@ -108,7 +109,7 @@ Use graph techniques. These can be fairly computationally-intensive (particularl
|
|||
|
||||
## Sample Set Formats
|
||||
|
||||
**TLDR**
|
||||
**TL;DR**
|
||||
Random samples. Use Monte Carlo simulation to perform calculations. This is the predominant technique; tools that use Monte Carlo methods essentially represent most nodes as sample sets. [Guesstimate](https://www.getguesstimate.com/) works this way.
|
||||
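As a minimal sketch of the idea (illustrative TypeScript, not how Guesstimate or Squiggle is actually implemented): to "add" two uncertain quantities, draw many samples from each and add them pairwise; the resulting array is itself a Sample Set.

```typescript
// Draw one sample from a normal distribution using the Box–Muller transform.
function sampleNormal(mean: number, stdev: number): number {
  const u1 = Math.random() || Number.MIN_VALUE; // avoid log(0)
  const u2 = Math.random();
  const z = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);
  return mean + stdev * z;
}

const n = 10000;
const a = Array.from({ length: n }, () => sampleNormal(5, 1));
const b = Array.from({ length: n }, () => sampleNormal(10, 2));
const sum = a.map((x, i) => x + b[i]); // a Sample Set representation of a + b
```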
|
||||
**How to Do Computation**
|
||||
|
|
360
packages/website/docs/Features/Distributions.mdx
Normal file
|
@ -0,0 +1,360 @@
|
|||
---
|
||||
title: "Distribution Creation"
|
||||
sidebar_position: 8
|
||||
---
|
||||
|
||||
import TOCInline from "@theme/TOCInline";
|
||||
import { SquiggleEditor } from "../../src/components/SquiggleEditor";
|
||||
import Admonition from "@theme/Admonition";
|
||||
import Tabs from "@theme/Tabs";
|
||||
import TabItem from "@theme/TabItem";
|
||||
|
||||
<TOCInline toc={toc} maxHeadingLevel={2} />
|
||||
|
||||
## To
|
||||
|
||||
`(5thPercentile: number) to (95thPercentile: number)`
|
||||
`to(5thPercentile: number, 95thPercentile: number)`
|
||||
|
||||
The `to` function is an easy way to generate simple distributions using predicted _5th_ and _95th_ percentiles.
|
||||
|
||||
If both values are above zero, a `lognormal` distribution is used. If not, a `normal` distribution is used.
|
||||
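Roughly speaking (the exact implementation may differ in its details), in the lognormal case the two percentiles pin down the parameters through the standard relationship

```latex
\mu = \frac{\ln p_{5} + \ln p_{95}}{2},
\qquad
\sigma = \frac{\ln p_{95} - \ln p_{5}}{2 \times 1.645}
```

where 1.645 is the z-score of the 95th percentile of a standard normal; the normal case is analogous, using the percentiles directly instead of their logarithms.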
|
||||
<Tabs>
|
||||
<TabItem value="ex1" label="5 to 10" default>
|
||||
When <code>5 to 10</code> is entered, both numbers are positive, so it
|
||||
generates a lognormal distribution with 5th and 95th percentiles at 5 and
|
||||
10.
|
||||
<SquiggleEditor initialSquiggleString="5 to 10" />
|
||||
</TabItem>
|
||||
<TabItem value="ex3" label="to(5,10)">
|
||||
<code>5 to 10</code> does the same thing as <code>to(5,10)</code>.
|
||||
<SquiggleEditor initialSquiggleString="to(5,10)" />
|
||||
</TabItem>
|
||||
<TabItem value="ex2" label="-5 to 5">
|
||||
When <code>-5 to 5</code> is entered, there is a negative value, so it
|
||||
generates a normal distribution. This has 5th and 95th percentiles at -5
|
||||
and 5.
|
||||
<SquiggleEditor initialSquiggleString="-5 to -3" />
|
||||
</TabItem>
|
||||
<TabItem value="ex4" label="1 to 10000">
|
||||
It's very easy to generate distributions with very long tails. If this
|
||||
happens, you can click the "log x scale" box to view this using a log scale.
|
||||
<SquiggleEditor initialSquiggleString="1 to 10000" />
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
### Arguments
|
||||
|
||||
- `5thPercentile`: number
|
||||
- `95thPercentile`: number, greater than `5thPercentile`
|
||||
|
||||
<Admonition type="tip" title="Tip">
|
||||
<p>
|
||||
"<bold>To</bold>" is a great way to generate probability distributions very
|
||||
quickly from your intuitions. It's easy to write and easy to read. It's
|
||||
often a good place to begin an estimate.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
<Admonition type="caution" title="Caution">
|
||||
<p>
|
||||
If you haven't tried{" "}
|
||||
<a href="https://www.lesswrong.com/posts/LdFbx9oqtKAAwtKF3/list-of-probability-calibration-exercises">
|
||||
calibration training
|
||||
</a>
|
||||
, you're likely to be overconfident. We recommend doing calibration training
|
||||
to get a feel for what a 90 percent confidence interval feels like.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
## Mixture
|
||||
|
||||
`mixture(...distributions: Distribution[], weights?: number[])`
|
||||
`mx(...distributions: Distribution[], weights?: number[])`
|
||||
|
||||
The `mixture` function combines multiple distributions to create a mixture. You can optionally pass in a list of proportional weights.
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="ex1" label="Simple" default>
|
||||
<SquiggleEditor initialSquiggleString="mixture(1 to 2, 5 to 8, 9 to 10)" />
|
||||
</TabItem>
|
||||
<TabItem value="ex2" label="With Weights">
|
||||
<SquiggleEditor initialSquiggleString="mixture(1 to 2, 5 to 8, 9 to 10, [0.1, 0.1, 0.8])" />
|
||||
</TabItem>
|
||||
<TabItem value="ex3" label="With Continuous and Discrete Inputs">
|
||||
<SquiggleEditor initialSquiggleString="mixture(1 to 5, 8 to 10, 1, 3, 20)" />
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
### Arguments
|
||||
|
||||
- `distributions`: A set of distributions or numbers, each passed as a parameter. Numbers will be converted into Delta distributions.
|
||||
- `weights`: An optional array of numbers, each representing the weight of its corresponding distribution. The weights will be re-scaled to add to `1.0`. If a weights array is provided, it must be the same length as the distribution parameters (see the example below).
|
||||
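For example, a hypothetical weights array of `[1, 1, 2]` would be re-scaled to `[0.25, 0.25, 0.5]`.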
|
||||
### Aliases
|
||||
|
||||
- `mx`
|
||||
|
||||
### Special Use Cases of Mixtures
|
||||
|
||||
<details>
|
||||
<summary>🕐 Zero or Continuous</summary>
|
||||
<p>
|
||||
One common reason to have mixtures of continuous and discrete distributions is to handle the special case of 0.
|
||||
Say I want to model the time I will spend on some upcoming project. I think I have an 80% chance of doing it.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
In this case, I have a 20% chance of spending 0 time on it. I might estimate my hours with,
|
||||
</p>
|
||||
<SquiggleEditor
|
||||
initialSquiggleString={`hours_the_project_will_take = 5 to 20
|
||||
chance_of_doing_anything = 0.8
|
||||
mx(hours_the_project_will_take, 0, [chance_of_doing_anything, 1 - chance_of_doing_anything])`}
|
||||
/>
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>🔒 Model Uncertainty Safeguarding</summary>
|
||||
<p>
|
||||
One technique several <a href="https://www.foretold.io/">Foretold.io</a> users used is to combine their main guess with a
|
||||
"just-in-case distribution". This latter distribution would have very low weight, but would be
|
||||
very wide, just in case they were dramatically off for some weird reason.
|
||||
</p>
|
||||
<SquiggleEditor
|
||||
initialSquiggleString={`forecast = 3 to 30
|
||||
chance_completely_wrong = 0.05
|
||||
forecast_if_completely_wrong = -100 to 200
|
||||
mx(forecast, forecast_if_completely_wrong, [1-chance_completely_wrong, chance_completely_wrong])`}
|
||||
/>
|
||||
|
||||
</details>
|
||||
|
||||
## Normal
|
||||
|
||||
`normal(mean:number, standardDeviation:number)`
|
||||
|
||||
Creates a [normal distribution](https://en.wikipedia.org/wiki/Normal_distribution) with the given mean and standard deviation.
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="ex1" label="normal(5,1)" default>
|
||||
<SquiggleEditor initialSquiggleString="normal(5, 1)" />
|
||||
</TabItem>
|
||||
<TabItem value="ex2" label="normal(100000000000, 100000000000)">
|
||||
<SquiggleEditor initialSquiggleString="normal(100000000000, 100000000000)" />
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
### Arguments
|
||||
|
||||
- `mean`: Number
|
||||
- `standard deviation`: Number greater than zero
|
||||
|
||||
[Wikipedia](https://en.wikipedia.org/wiki/Normal_distribution)
|
||||
|
||||
## Log-normal
|
||||
|
||||
`lognormal(mu: number, sigma: number)`
|
||||
|
||||
Creates a [log-normal distribution](https://en.wikipedia.org/wiki/Log-normal_distribution) with the given mu and sigma.
|
||||
|
||||
`Mu` and `sigma` represent the mean and standard deviation of the normal which results when
|
||||
you take the log of our lognormal distribution. They can be difficult to directly reason about.
|
||||
Because of this complexity, we recommend typically using the <a href="#to">to</a> syntax instead of estimating `mu` and `sigma` directly.
|
||||
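Two standard identities (general properties of the lognormal, not anything Squiggle-specific) can make `mu` and `sigma` a little more concrete:

```latex
\operatorname{median} = e^{\mu},
\qquad
\operatorname{mean} = e^{\mu + \sigma^{2}/2}
```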
|
||||
<SquiggleEditor initialSquiggleString="lognormal(0, 0.7)" />
|
||||
|
||||
### Arguments
|
||||
|
||||
- `mu`: Number
|
||||
- `sigma`: Number greater than zero
|
||||
|
||||
[Wikipedia](https://en.wikipedia.org/wiki/Log-normal_distribution)
|
||||
|
||||
<details>
|
||||
<summary>
|
||||
❓ Understanding <bold>mu</bold> and <bold>sigma</bold>
|
||||
</summary>
|
||||
<p>
|
||||
The log of <code>lognormal(mu, sigma)</code> is a normal distribution with
|
||||
mean <code>mu</code>
|
||||
and standard deviation <code>sigma</code>. For example, these two distributions
|
||||
are identical:
|
||||
</p>
|
||||
<SquiggleEditor
|
||||
initialSquiggleString={`normalMean = 10
|
||||
normalStdDev = 2
|
||||
logOfLognormal = log(lognormal(normalMean, normalStdDev))
|
||||
[logOfLognormal, normal(normalMean, normalStdDev)]`}
|
||||
/>
|
||||
</details>
|
||||
|
||||
## Uniform
|
||||
|
||||
`uniform(low:number, high:number)`
|
||||
|
||||
Creates a [uniform distribution](<https://en.wikipedia.org/wiki/Uniform_distribution_(continuous)>) with the given low and high values.
|
||||
|
||||
<SquiggleEditor initialSquiggleString="uniform(3,7)" />
|
||||
|
||||
### Arguments
|
||||
|
||||
- `low`: Number
|
||||
- `high`: Number greater than `low`
|
||||
|
||||
<Admonition type="caution" title="Caution">
|
||||
<p>
|
||||
While uniform distributions are very simple to understand, we rarely find
|
||||
uncertainties that actually look like this. Before using a uniform
|
||||
distribution, think hard about whether you are really 100% confident that the
|
||||
parameter will not wind up being just outside the stated boundaries.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
One good example of a uniform distribution uncertainty would be clear
|
||||
physical limitations. You might have complete complete uncertainty on what
|
||||
time of day an event will occur, but can say with 100% confidence it will
|
||||
happen between the hours of 0:00 and 24:00.
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
## Delta
|
||||
|
||||
`delta(value:number)`
|
||||
|
||||
Creates a discrete distribution with all of its probability mass at point `value`.
|
||||
|
||||
Few Squiggle users call the function `delta()` directly. Numbers are converted into delta distributions automatically when it is appropriate.
|
||||
|
||||
For example, in the function `mixture(1,2,normal(5,2))`, the first two arguments will get converted into delta distributions
|
||||
with values at 1 and 2. Therefore, this is the same as `mixture(delta(1),delta(2),normal(5,2))`.
|
||||
|
||||
`Delta()` distributions are currently the only discrete distributions accessible in Squiggle.
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="ex1" label="delta(3)" default>
|
||||
<SquiggleEditor initialSquiggleString="delta(3)" />
|
||||
</TabItem>
|
||||
<TabItem value="ex3" label="mixture(1,3,5)">
|
||||
<SquiggleEditor initialSquiggleString="mixture(1,3,5)" />
|
||||
</TabItem>
|
||||
<TabItem value="ex2" label="normal(5,2) * 6">
|
||||
<SquiggleEditor initialSquiggleString="normal(5,2) * 6" />
|
||||
</TabItem>
|
||||
<TabItem value="ex4" label="dotAdd(normal(5,2), 6)">
|
||||
<SquiggleEditor initialSquiggleString="dotAdd(normal(5,2), 6)" />
|
||||
</TabItem>
|
||||
<TabItem value="ex5" label="dotMultiply(normal(5,2), 6)">
|
||||
<SquiggleEditor initialSquiggleString="dotMultiply(normal(5,2), 6)" />
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
### Arguments
|
||||
|
||||
- `value`: Number
|
||||
|
||||
## Beta

`beta(alpha:number, beta:number)`

Creates a [beta distribution](https://en.wikipedia.org/wiki/Beta_distribution) with the given `alpha` and `beta` values. For a good summary of the beta distribution, see [this explanation](https://stats.stackexchange.com/a/47782) on Stack Exchange.
<Tabs>
  <TabItem value="ex1" label="beta(10, 20)" default>
    <SquiggleEditor initialSquiggleString="beta(10, 20)" />
  </TabItem>
  <TabItem value="ex2" label="beta(1000, 2000)">
    <SquiggleEditor initialSquiggleString="beta(1000, 2000)" />
  </TabItem>
  <TabItem value="ex3" label="beta(1, 10)">
    <SquiggleEditor initialSquiggleString="beta(1, 10)" />
  </TabItem>
  <TabItem value="ex4" label="beta(10, 1)">
    <SquiggleEditor initialSquiggleString="beta(10, 1)" />
  </TabItem>
  <TabItem value="ex5" label="beta(0.8, 0.8)">
    <SquiggleEditor initialSquiggleString="beta(0.8, 0.8)" />
  </TabItem>
</Tabs>
### Arguments

- `alpha`: Number greater than zero
- `beta`: Number greater than zero
<Admonition type="caution" title="Caution with small numbers">
  <p>
    Squiggle struggles to show beta distributions when either alpha or beta is
    below 1.0. This is because the tails at ~0.0 and ~1.0 are very high. Using
    a log scale for the y-axis helps here.
  </p>
  <details>
    <summary>Examples</summary>
    <Tabs>
      <TabItem value="ex1" label="beta(0.3, 0.3)" default>
        <SquiggleEditor initialSquiggleString="beta(0.3, 0.3)" />
      </TabItem>
      <TabItem value="ex2" label="beta(0.5, 0.5)">
        <SquiggleEditor initialSquiggleString="beta(0.5, 0.5)" />
      </TabItem>
      <TabItem value="ex3" label="beta(0.8, 0.8)">
        <SquiggleEditor initialSquiggleString="beta(0.8, 0.8)" />
      </TabItem>
      <TabItem value="ex4" label="beta(0.9, 0.9)">
        <SquiggleEditor initialSquiggleString="beta(0.9, 0.9)" />
      </TabItem>
    </Tabs>
  </details>
</Admonition>
## Exponential

`exponential(rate:number)`

Creates an [exponential distribution](https://en.wikipedia.org/wiki/Exponential_distribution) with the given rate.

<SquiggleEditor initialSquiggleString="exponential(4)" />

### Arguments

- `rate`: Number greater than zero
## Triangular distribution

`triangular(low:number, mode:number, high:number)`

Creates a [triangular distribution](https://en.wikipedia.org/wiki/Triangular_distribution) with the given low, mode, and high values.

### Arguments

- `low`: Number
- `mode`: Number greater than `low`
- `high`: Number greater than `mode`

<SquiggleEditor initialSquiggleString="triangular(1, 2, 4)" />
## FromSamples

`fromSamples(samples:number[])`

Creates a sample set distribution using an array of samples.

<SquiggleEditor initialSquiggleString="fromSamples([1,2,3,4,6,5,5,5])" />

### Arguments

- `samples`: An array of at least 5 numbers.
<Admonition type="caution" title="Caution!">
  <p>
    Samples are converted into{" "}
    <a href="https://en.wikipedia.org/wiki/Probability_density_function">PDF</a>{" "}
    shapes automatically using{" "}
    <a href="https://en.wikipedia.org/wiki/Kernel_density_estimation">
      kernel density estimation
    </a>{" "}
    and an approximated bandwidth. Eventually Squiggle will allow for more
    specificity.
  </p>
</Admonition>
@ -5,131 +5,15 @@ sidebar_position: 7
import { SquiggleEditor } from "../../src/components/SquiggleEditor";

_The source of truth for this document is [this file of code](https://github.com/quantified-uncertainty/squiggle/blob/develop/packages/squiggle-lang/src/rescript/ReducerInterface/ReducerInterface_GenericDistribution.res)_

## Inventory distributions

We provide starter distributions, computed symbolically.
### Normal distribution

The `normal(mean, sd)` function creates a normal distribution with the given mean
and standard deviation.

<SquiggleEditor initialSquiggleString="normal(5, 1)" />

#### Validity

- `sd > 0`
### Uniform distribution

The `uniform(low, high)` function creates a uniform distribution between the
two given numbers.

<SquiggleEditor initialSquiggleString="uniform(3, 7)" />

#### Validity

- `low < high`
### Lognormal distribution

The `lognormal(mu, sigma)` function creates a lognormal distribution with parameters
`mu` and `sigma`. The log of `lognormal(mu, sigma)` is a normal distribution with mean `mu` and standard deviation `sigma`.

<SquiggleEditor initialSquiggleString="lognormal(0, 0.7)" />

An alternative format is also available. The `to` notation creates a lognormal
distribution with a 90% confidence interval between the two numbers. We add
this convenience because lognormal distributions are commonly used in practice.

<SquiggleEditor initialSquiggleString="2 to 10" />
#### Future feature

Furthermore, it is also possible to create a lognormal from its actual mean
and standard deviation, using `lognormalFromMeanAndStdDev`.

TODO: interpreter/parser doesn't provide this in current `develop` branch

<SquiggleEditor initialSquiggleString="lognormalFromMeanAndStdDev(20, 10)" />

#### Validity

- `sigma > 0`
- In `x to y` notation, `x < y`
### Beta distribution

The `beta(a, b)` function creates a beta distribution with parameters `a` and `b`:

<SquiggleEditor initialSquiggleString="beta(10, 20)" />

#### Validity

- `a > 0`
- `b > 0`
- Empirically, we have noticed that numerical instability arises when `a < 1` or `b < 1`
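For instance, the following example falls in that unstable regime, so its rendering may look off:

<SquiggleEditor initialSquiggleString="beta(0.5, 0.5)" />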
### Exponential distribution

The `exponential(rate)` function creates an exponential distribution with the given
rate.

<SquiggleEditor initialSquiggleString="exponential(1.11)" />

#### Validity

- `rate > 0`
### Triangular distribution

The `triangular(a, b, c)` function creates a triangular distribution with lower
bound `a`, mode `b`, and upper bound `c`.

#### Validity

- `a < b < c`

<SquiggleEditor initialSquiggleString="triangular(1, 2, 4)" />
### Scalar (constant dist)

When the context calls for a distribution, Squiggle automatically casts a float to a constant distribution.
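For example, in the expression below the scalar `6` is automatically treated as a constant distribution when multiplied by a normal distribution:

<SquiggleEditor initialSquiggleString="normal(5, 2) * 6" />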
## Operating on distributions

Here are the ways we combine distributions.

### Mixture of distributions

The `mixture` function combines 2 or more distributions into a weighted
combination. The first positional arguments are the distributions to be
combined, and the last argument is how much to weigh each distribution in the
combination.
<SquiggleEditor initialSquiggleString="mixture(uniform(0,1), normal(1,1), [0.5, 0.5])" />

It's possible to create discrete distributions using this method.

<SquiggleEditor initialSquiggleString="mixture(0, 1, [0.2,0.8])" />

As well as mixed distributions:

<SquiggleEditor initialSquiggleString="mixture(3, 8, 1 to 10, [0.2, 0.3, 0.5])" />

An alias of `mixture` is `mx`.
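For example, the discrete mixture above can be written using the alias:

<SquiggleEditor initialSquiggleString="mx(0, 1, [0.2, 0.8])" />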
#### Validity

Using JavaScript's variable-arguments notation, consider `mx(...dists, weights)`:

- `dists.length == weights.length`
### Addition

A horizontal right shift. The addition operation represents the distribution of the sum of
the value of one random sample chosen from the first distribution and the value of one random sample
chosen from the second distribution.

<SquiggleEditor
  initialSquiggleString={`dist1 = 1 to 10
@ -139,7 +23,9 @@ dist1 + dist2`}
### Subtraction

A horizontal left shift. The subtraction operation represents the distribution of
the value of one random sample chosen from the first distribution minus
the value of one random sample chosen from the second distribution.

<SquiggleEditor
  initialSquiggleString={`dist1 = 1 to 10
@ -149,7 +35,9 @@ dist1 - dist2`}
### Multiplication

A proportional scaling. The multiplication operation represents the distribution of the product of
the value of one random sample chosen from the first distribution and the value of one random sample
chosen from the second distribution.

<SquiggleEditor
  initialSquiggleString={`dist1 = 1 to 10
@ -163,7 +51,11 @@ We also provide concatenation of two distributions as a syntax sugar for `*`
### Division

A proportional scaling (normally a shrinking if the second distribution has values higher than 1).
The division operation represents the distribution of the value of one random sample
chosen from the first distribution divided by the value of one random sample
chosen from the second distribution. If the second distribution has some values near zero, the result
tends to be particularly unstable.

<SquiggleEditor
  initialSquiggleString={`dist1 = 1 to 10
@ -173,7 +65,9 @@ dist1 / dist2`}
### Exponentiation

A projection over a contracted x-axis. The exponentiation operation represents the distribution of
the value of one random sample chosen from the first distribution raised to the power of
the value of one random sample chosen from the second distribution.

<SquiggleEditor initialSquiggleString={`(0.1 to 1) ^ beta(2, 3)`} />
@ -186,6 +80,8 @@ exp(dist)`}
### Taking logarithms

A projection over a stretched x-axis.

<SquiggleEditor
  initialSquiggleString={`dist = triangular(1,2,3)
log(dist)`}
@ -211,6 +107,8 @@ log(dist, x)`}
### Pointwise addition

For every point on the x-axis, operate on the corresponding points on the y-axis of the pdfs.

**Pointwise operations are done with `PointSetDist` internals rather than `SampleSetDist` internals**.

TODO: this isn't in the new interpreter/parser yet.
@ -242,8 +140,8 @@ dist1 .* dist2`}
### Pointwise division

<SquiggleEditor
  initialSquiggleString={`dist1 = uniform(0,20)
dist2 = normal(10,8)
dist1 ./ dist2`}
/>
@ -284,7 +182,8 @@ or all values lower than x. It is the inverse of `inv`.
### Inverse CDF

The `inv(dist, prob)` function gives the value x for which the probability for all values
lower than x is equal to prob. It is the inverse of `cdf`. In the literature, it
is also known as the quantile function.

<SquiggleEditor initialSquiggleString="inv(normal(0,1),0.5)" />
@ -315,6 +214,16 @@ Or `PointSet` format
<SquiggleEditor initialSquiggleString="toPointSet(normal(5, 10))" />

### `toSampleSet` has two signatures

Above, we saw the unary `toSampleSet`, which uses an internal hardcoded number of samples. If you'd like to provide the number of samples, it has a binary signature as well (the sample count is floored).

<SquiggleEditor initialSquiggleString="[toSampleSet(0.1 to 1, 100.1), toSampleSet(0.1 to 1, 5000), toSampleSet(0.1 to 1, 20000)]" />

#### Validity

- Second argument to `toSampleSet` must be a number.
## Normalization

Some distribution operations (like horizontal shift) return an unnormalized distribution.
@ -333,18 +242,6 @@ We provide a predicate `isNormalized`, for when we have simple control flow
- Input to `isNormalized` must be a dist
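For example, checking a sample set distribution:

<SquiggleEditor initialSquiggleString="isNormalized(toSampleSet(1 to 2))" />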
## Convert any distribution to a sample set distribution

`toSampleSet` has two signatures.

It is unary when you use an internal hardcoded number of samples:

<SquiggleEditor initialSquiggleString="toSampleSet(0.1 to 1)" />

And binary when you provide a number of samples (floored):

<SquiggleEditor initialSquiggleString="toSampleSet(0.1 to 1, 100)" />
## `inspect`

You may like to debug by right-clicking the webpage in your browser, using the _inspect_ functionality, and viewing the _console_ tab. Then, wrap your Squiggle output with `inspect` to log an internal representation.
|
You can cut off from the right

<SquiggleEditor initialSquiggleString="truncateRight(0.1 to 1, 0.5)" />

You can cut off from both sides
@ -7,21 +7,21 @@ import { SquiggleEditor } from "../../src/components/SquiggleEditor";
## Expressions

### Distributions

<SquiggleEditor initialSquiggleString={`mixture(1 to 2, 3, [0.3, 0.7])`} />

### Numbers

<SquiggleEditor initialSquiggleString="4.32" />

### Arrays

<SquiggleEditor
  initialSquiggleString={`[beta(1,10), 4, isNormalized(toSampleSet(1 to 2))]`}
/>

### Records

<SquiggleEditor
  initialSquiggleString={`d = {dist: triangular(0, 1, 2), weight: 0.25}
@ -42,9 +42,9 @@ A statement assigns expressions to names. It looks like `<symbol> = <expression>
We can define functions

<SquiggleEditor
  initialSquiggleString={`ozzie_estimate(t) = lognormal(t^(1.1), 0.5)
nuno_estimate(t, m) = mixture(normal(-5, 1), lognormal(m, t / 1.25))
ozzie_estimate(1) * nuno_estimate(1, 1)`}
/>
## See more
@ -30,7 +30,7 @@ this library to help navigate the return type.
The `@quri/squiggle-components` package offers several components and utilities
for people who want to embed Squiggle components into websites. This documentation
uses `@quri/squiggle-components` frequently.

We host [a storybook](https://squiggle-components.netlify.app/) with details
and usage of each of the components made available.
@ -12,8 +12,8 @@
    "format": "prettier --write ."
  },
  "dependencies": {
    "@docusaurus/core": "2.0.0-beta.20",
    "@docusaurus/preset-classic": "2.0.0-beta.20",
    "@quri/squiggle-components": "0.2.9",
    "clsx": "^1.1.1",
    "prism-react-renderer": "^1.2.1",
@ -10,7 +10,11 @@ export default function PlaygroundPage() {
        maxWidth: 2000,
      }}
    >
      <SquigglePlayground
        initialSquiggleString="normal(0,1)"
        height={700}
        showTypes={true}
      />
    </div>
  </Layout>
);