commit 067379dd12
.github/workflows/ci.yaml (vendored, 6 changes)
@@ -21,17 +21,17 @@ jobs:
         name: Check if the changes are about squiggle-lang src files
         uses: fkirc/skip-duplicate-actions@master
         with:
-          paths: '["packages/squiggle-lang/*"]'
+          paths: '["packages/squiggle-lang/**"]'
       - id: skip_components_check
         name: Check if the changes are about components src files
         uses: fkirc/skip-duplicate-actions@master
         with:
-          paths: '["packages/components/*"]'
+          paths: '["packages/components/**"]'
       - id: skip_website_check
         name: Check if the changes are about website src files
         uses: fkirc/skip-duplicate-actions@master
         with:
-          paths: '["packages/website/*"]'
+          paths: '["packages/website/**"]'
 
   lang-build-test:
     name: Language build and test
.gitignore (vendored, 2 changes)
@@ -4,3 +4,5 @@ yarn-error.log
 .merlin
 .parcel-cache
 .DS_Store
+**/.sync.ffs_db
+.direnv
@@ -38,6 +38,15 @@ Being a monorepo, where packages are connected by dependency, it's important to
 
 We aspire for `ci.yaml` and `README.md`s to be in one-to-one correspondence.
 
+## If you're on NixOS
+
+You'll need to run a command like this in order to get `yarn build` to run, especially in `packages/squiggle-lang`.
+```sh
+patchelf --set-interpreter $(patchelf --print-interpreter $(which mkdir)) ./node_modules/gentype/gentype.exe
+```
+
+See [here](https://github.com/NixOS/nixpkgs/issues/107375)
+
 # Pull request protocol
 
 Please work against `staging` branch. **Do not** work against `master`. Please do not merge without approval from some subset of Quinn, Sam, and Ozzie; they will be auto-pinged.
@@ -1,4 +1,5 @@
 # Squiggle
+![Packages check](https://github.com/QURIresearch/squiggle/actions/workflows/ci.yaml/badge.svg)
 
 This is an experiment DSL/language for making probabilistic estimates. The full story can be found [here](https://www.lesswrong.com/s/rDe8QE5NvXcZYzgZ3).
 
@@ -44,6 +44,7 @@
     yarn2nix
     nodePackages.npm
     nodejs
+    patchelf
     (pkgs.vscode-with-extensions.override {
       vscode = pkgs.vscodium;
       vscodeExtensions = pkgs.vscode-utils.extensionsFromVscodeMarketplace [
@@ -8,7 +8,7 @@
     "packages/*"
   ],
   "resolutions": {
-    "@types/react": "17.0.39"
+    "@types/react": "^17.0.43"
   },
   "packageManager": "yarn@1.22.17"
 }
@@ -4,7 +4,7 @@ const custom = require('../webpack.config.js');
 module.exports = {
   webpackFinal: async (config) => {
     config.resolve.alias = custom.resolve.alias;
-    return { ...config, module: { ...config.module, rules: config.module.rules.concat(custom.module.rules) } };
+    return { ...config, module: { ...config.module, rules: config.module.rules.concat(custom.module.rules.filter(x => x.loader === "ts-loader")) } };
   },
   "stories": [
     "../src/**/*.stories.mdx",
@@ -6,4 +6,4 @@ export const parameters = {
       date: /Date$/,
     },
   },
 }
@@ -1,25 +1,29 @@
 # Squiggle Components
 
 This package contains all the components for squiggle. These can be used either as a library or hosted as a [storybook](https://storybook.js.org/).
 
 # Build for development
-We assume that you had run `yarn` at monorepo level, installing dependencies.
+
+We assume that you had run `yarn` at monorepo level, installing dependencies.
 
 You need to _prepare_ by building and bundling `squiggle-lang`
-``` sh
+
+```sh
 cd ../squiggle-lang
 yarn build
 ```
 
 If you've otherwise done this recently you can skip those.
 
 Run a development server
 
-``` sh
+```sh
 yarn start
 ```
 
 And build artefacts for production,
 
-``` sh
+```sh
 yarn bundle # builds components library
 yarn build # builds storybook app
 ```
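To make the "use as a library" path from this README concrete, here is a minimal consumer sketch. The `App` wrapper and the choice of expression are illustrative assumptions, not part of this commit; the import path is the package name from `package.json`.

```tsx
import * as React from "react";
import { SquiggleChart } from "@quri/squiggle-components";

// Hypothetical consumer component: renders one squiggle expression as a chart.
export const App: React.FC = () => (
  <SquiggleChart squiggleString="mm(0, 5 to 10, [0.5, 0.5])" sampleCount={1000} />
);
```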
@@ -1,6 +1,6 @@
 {
   "name": "@quri/squiggle-components",
-  "version": "0.1.6",
+  "version": "0.1.8",
   "dependencies": {
     "@quri/squiggle-lang": "0.2.2",
     "@testing-library/jest-dom": "^5.16.4",
@@ -11,12 +11,15 @@
     "@types/node": "^17.0.23",
     "@types/react": "^18.0.0",
     "@types/react-dom": "^18.0.0",
+    "antd": "^4.19.3",
     "cross-env": "^7.0.3",
     "lodash": "^4.17.21",
     "react": "^18.0.0",
     "react-dom": "^18.0.0",
     "react-scripts": "5.0.0",
     "react-vega": "^7.5.0",
+    "react-ace": "9.5.0",
+    "styled-components": "^5.3.5",
     "tsconfig-paths-webpack-plugin": "^3.5.2",
     "typescript": "^4.6.3",
     "vega": "^5.22.1",
@@ -27,7 +30,7 @@
   },
   "scripts": {
     "start": "cross-env REACT_APP_FAST_REFRESH=false && start-storybook -p 6006 -s public",
-    "build": "build-storybook -s public",
+    "build": "tsc -b && build-storybook -s public",
     "bundle": "webpack",
     "all": "yarn bundle && yarn build"
   },
@@ -60,6 +63,9 @@
     ]
   },
   "devDependencies": {
+    "@types/styled-components": "^5.1.24",
+    "css-loader": "^6.7.1",
+    "style-loader": "^3.3.1",
     "@babel/plugin-proposal-private-property-in-object": "^7.16.7",
     "@storybook/addon-actions": "^6.4.20",
     "@storybook/addon-essentials": "^6.4.20",
@@ -69,7 +75,7 @@
     "@storybook/node-logger": "^6.4.20",
     "@storybook/preset-create-react-app": "^4.1.0",
     "@storybook/react": "^6.4.20",
-    "@types/webpack": "^5.28.0",
+    "@types/webpack": "^4.41.32",
     "prettier": "^2.6.2",
     "react-codejar": "^1.1.2",
     "ts-loader": "^9.2.8",
packages/components/src/CodeEditor.tsx (new file, +45)
@@ -0,0 +1,45 @@
import _ from "lodash";
import React, { FC } from "react";
import AceEditor from "react-ace";

import "ace-builds/src-noconflict/mode-golang";
import "ace-builds/src-noconflict/theme-github";

interface CodeEditorProps {
  value: string;
  onChange: (value: string) => void;
  oneLine?: boolean;
  width?: number;
}

export let CodeEditor: FC<CodeEditorProps> = ({
  value,
  onChange,
  oneLine = false,
}: CodeEditorProps) => {
  let lineCount = value.split("\n").length;
  let id = _.uniqueId();
  return (
    <AceEditor
      value={value}
      mode="golang"
      theme="github"
      width={"100%"}
      minLines={oneLine ? lineCount : 15}
      maxLines={oneLine ? lineCount : 15}
      showGutter={false}
      highlightActiveLine={false}
      showPrintMargin={false}
      onChange={onChange}
      name={id}
      editorProps={{
        $blockScrolling: true,
      }}
      setOptions={{
        enableBasicAutocompletion: false,
        enableLiveAutocompletion: false,
      }}
    />
  );
};
export default CodeEditor;
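A rough sketch of how this editor can be driven from a parent component. The `ExpressionBox` wrapper and its initial expression are illustrative assumptions; the wiring mirrors how `SquiggleEditor` later in this commit feeds `value`/`onChange` from React state.

```tsx
import React, { useState } from "react";
import CodeEditor from "./CodeEditor";

// Illustrative parent: keeps the squiggle expression in state and feeds it back in.
export const ExpressionBox: React.FC = () => {
  const [code, setCode] = useState("normal(5, 2)"); // example expression, any squiggle string works
  return <CodeEditor value={code} onChange={setCode} oneLine={true} />;
};
```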
packages/components/src/NumberShower.tsx (new file, +98)
@@ -0,0 +1,98 @@
import * as React from "react";
import _ from "lodash";

const orderOfMagnitudeNum = (n: number) => {
  return Math.pow(10, n);
};

// 105 -> 3
const orderOfMagnitude = (n: number) => {
  return Math.floor(Math.log(n) / Math.LN10 + 0.000000001);
};

function withXSigFigs(number: number, sigFigs: number) {
  const withPrecision = number.toPrecision(sigFigs);
  const formatted = Number(withPrecision);
  return `${formatted}`;
}

class NumberShowerBuilder {
  number: number;
  precision: number;

  constructor(number: number, precision = 2) {
    this.number = number;
    this.precision = precision;
  }

  convert() {
    const number = Math.abs(this.number);
    const response = this.evaluate(number);
    if (this.number < 0) {
      response.value = "-" + response.value;
    }
    return response;
  }

  metricSystem(number: number, order: number) {
    const newNumber = number / orderOfMagnitudeNum(order);
    const precision = this.precision;
    return `${withXSigFigs(newNumber, precision)}`;
  }

  evaluate(number: number) {
    if (number === 0) {
      return { value: this.metricSystem(0, 0) };
    }

    const order = orderOfMagnitude(number);
    if (order < -2) {
      return { value: this.metricSystem(number, order), power: order };
    } else if (order < 4) {
      return { value: this.metricSystem(number, 0) };
    } else if (order < 6) {
      return { value: this.metricSystem(number, 3), symbol: "K" };
    } else if (order < 9) {
      return { value: this.metricSystem(number, 6), symbol: "M" };
    } else if (order < 12) {
      return { value: this.metricSystem(number, 9), symbol: "B" };
    } else if (order < 15) {
      return { value: this.metricSystem(number, 12), symbol: "T" };
    } else {
      return { value: this.metricSystem(number, order), power: order };
    }
  }
}

export function numberShow(number: number, precision = 2) {
  const ns = new NumberShowerBuilder(number, precision);
  return ns.convert();
}

export interface NumberShowerProps {
  number: number;
  precision?: number
}

export let NumberShower: React.FC<NumberShowerProps> = ({
  number,
  precision = 2
}: NumberShowerProps) => {
  let numberWithPresentation = numberShow(number, precision);
  return (
    <span>
      {numberWithPresentation.value}
      {numberWithPresentation.symbol}
      {numberWithPresentation.power ? (
        <span>
          {"\u00b710"}
          <span style={{ fontSize: "0.6em", verticalAlign: "super" }}>
            {numberWithPresentation.power}
          </span>
        </span>
      ) : (
        <></>
      )}
    </span>
  );
}
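To make the `evaluate` thresholds concrete, a few hand-worked calls to `numberShow` (a sketch, not a test file from this commit; the rendered strings are approximate, since the power is actually shown as a superscript):

```ts
import { numberShow } from "./NumberShower";

numberShow(12345);    // order 4  -> { value: "12", symbol: "K" },   shown as "12K"
numberShow(-2500000); // order 6  -> { value: "-2.5", symbol: "M" }, shown as "-2.5M"
numberShow(0.0001);   // order -4 -> { value: "1", power: -4 },      shown as "1·10^-4"
```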
@@ -11,6 +11,8 @@ import type {
 import { createClassFromSpec } from "react-vega";
 import * as chartSpecification from "./spec-distributions.json";
 import * as percentilesSpec from "./spec-percentiles.json";
+import { NumberShower } from "./NumberShower";
+import styled from "styled-components";
 
 let SquiggleVegaChart = createClassFromSpec({
   spec: chartSpecification as Spec,
@@ -22,7 +24,7 @@ let SquigglePercentilesChart = createClassFromSpec({
 
 export interface SquiggleChartProps {
   /** The input string for squiggle */
-  squiggleString: string;
+  squiggleString?: string;
 
   /** If the output requires monte carlo sampling, the amount of samples */
   sampleCount?: number;
@@ -40,24 +42,58 @@ export interface SquiggleChartProps {
   environment?: exportEnv;
   /** When the environment changes */
   onEnvChange?(env: exportEnv): void;
+  /** CSS width of the element */
+  width?: number;
+  height?: number;
 }
 
-export const SquiggleChart: React.FC<SquiggleChartProps> = (props) => {
+const Error = styled.div`
+  border: 1px solid #792e2e;
+  background: #eee2e2;
+  padding: 0.4em 0.8em;
+`;
+
+const ShowError: React.FC<{ heading: string; children: React.ReactNode }> = ({
+  heading = "Error",
+  children,
+}) => {
+  return (
+    <Error>
+      <h3>{heading}</h3>
+      {children}
+    </Error>
+  );
+};
+
+export const SquiggleChart: React.FC<SquiggleChartProps> = ({
+  squiggleString = "",
+  sampleCount = 1000,
+  outputXYPoints = 1000,
+  kernelWidth,
+  pointDistLength = 1000,
+  diagramStart = 0,
+  diagramStop = 10,
+  diagramCount = 20,
+  environment = [],
+  onEnvChange = () => {},
+  width = 500,
+  height = 60,
+}: SquiggleChartProps) => {
   let samplingInputs: SamplingInputs = {
-    sampleCount: props.sampleCount,
-    outputXYPoints: props.outputXYPoints,
-    kernelWidth: props.kernelWidth,
-    pointDistLength: props.pointDistLength,
+    sampleCount: sampleCount,
+    outputXYPoints: outputXYPoints,
+    kernelWidth: kernelWidth,
+    pointDistLength: pointDistLength,
   };
 
-  let result = run(props.squiggleString, samplingInputs, props.environment);
+  let result = run(squiggleString, samplingInputs, environment);
   if (result.tag === "Ok") {
     let environment = result.value.environment;
     let exports = result.value.exports;
-    if (props.onEnvChange) props.onEnvChange(environment);
+    onEnvChange(environment);
     let chartResults = exports.map((chartResult: exportDistribution) => {
       if (chartResult["NAME"] === "Float") {
-        return <MakeNumberShower precision={3} number={chartResult["VAL"]} />;
+        return <NumberShower precision={3} number={chartResult["VAL"]} />;
       } else if (chartResult["NAME"] === "DistPlus") {
         let shape = chartResult.VAL.pointSetDist;
         if (shape.tag === "Continuous") {
@@ -74,7 +110,14 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = (props) => {
             y: y,
           }));
 
-          return <SquiggleVegaChart data={{ con: values }} />;
+          return (
+            <SquiggleVegaChart
+              width={width}
+              height={height}
+              data={{ con: values }}
+              actions={false}
+            />
+          );
         } else if (shape.tag === "Discrete") {
           let xyShape = shape.value.xyShape;
           let totalY = xyShape.ys.reduce((a, b) => a + b);
@@ -89,7 +132,7 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = (props) => {
             y: y,
           }));
 
-          return <SquiggleVegaChart data={{ dis: values }} />;
+          return <SquiggleVegaChart data={{ dis: values }} actions={false} />;
         } else if (shape.tag === "Mixed") {
           let discreteShape = shape.value.discrete.xyShape;
           let totalDiscrete = discreteShape.ys.reduce((a, b) => a + b);
@@ -123,10 +166,10 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = (props) => {
 
           let total = 0;
           let cdf = sortedPoints.map((point: labeledPoint) => {
-            if (point.type == "discrete") {
+            if (point.type === "discrete") {
               total += point.y;
               return total;
-            } else if (point.type == "continuous") {
+            } else if (point.type === "continuous") {
               total += (point.y / totalY) * totalContinuous;
               return total;
             }
@@ -147,28 +190,29 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = (props) => {
             })
           );
           let continuousValues = cdfLabeledPoint.filter(
-            (x) => x.type == "continuous"
+            (x) => x.type === "continuous"
           );
           let discreteValues = cdfLabeledPoint.filter(
-            (x) => x.type == "discrete"
+            (x) => x.type === "discrete"
           );
 
           return (
             <SquiggleVegaChart
               data={{ con: continuousValues, dis: discreteValues }}
+              actions={false}
             />
           );
         }
       } else if (chartResult.NAME === "Function") {
         // We are looking at a function. In this case, we draw a Percentiles chart
-        let start = props.diagramStart ? props.diagramStart : 0;
-        let stop = props.diagramStop ? props.diagramStop : 10;
-        let count = props.diagramCount ? props.diagramCount : 100;
+        let start = diagramStart;
+        let stop = diagramStop;
+        let count = diagramCount;
         let step = (stop - start) / count;
         let data = _.range(start, stop, step).map((x) => {
-          if (chartResult.NAME == "Function") {
+          if (chartResult.NAME === "Function") {
             let result = chartResult.VAL(x);
-            if (result.tag == "Ok") {
+            if (result.tag === "Ok") {
               let percentileArray = [
                 0.01, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95,
                 0.99,
@@ -195,19 +239,28 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = (props) => {
             return null;
           }
         });
-        return <SquigglePercentilesChart data={{ facet: data.filter(x => x !== null) }} />;
+        return (
+          <SquigglePercentilesChart
+            data={{ facet: data.filter((x) => x !== null) }}
+            actions={false}
+          />
+        );
       }
     });
     return <>{chartResults}</>;
-  } else if (result.tag == "Error") {
+  } else if (result.tag === "Error") {
     // At this point, we came across an error. What was our error?
-    return <p>{"Error parsing Squiggle: " + result.value}</p>;
+    return (
+      <ShowError heading={"Parse Error"}>
+        {result.value}
+      </ShowError>
+    );
   }
   return <p>{"Invalid Response"}</p>;
 };
 
 function getPercentiles(percentiles: number[], t: DistPlus) {
-  if (t.pointSetDist.tag == "Discrete") {
+  if (t.pointSetDist.tag === "Discrete") {
     let total = 0;
     let maxX = _.max(t.pointSetDist.value.xyShape.xs);
     let bounds = percentiles.map((_) => maxX);
@@ -217,14 +270,14 @@ function getPercentiles(percentiles: number[], t: DistPlus) {
       (x, y) => {
         total += y;
         percentiles.forEach((v, i) => {
-          if (total > v && bounds[i] == maxX) {
+          if (total > v && bounds[i] === maxX) {
            bounds[i] = x;
          }
        });
      }
    );
    return bounds;
-  } else if (t.pointSetDist.tag == "Continuous") {
+  } else if (t.pointSetDist.tag === "Continuous") {
    let total = 0;
    let maxX = _.max(t.pointSetDist.value.xyShape.xs);
    let totalY = _.sum(t.pointSetDist.value.xyShape.ys);
@@ -235,14 +288,14 @@ function getPercentiles(percentiles: number[], t: DistPlus) {
      (x, y) => {
        total += y / totalY;
        percentiles.forEach((v, i) => {
-          if (total > v && bounds[i] == maxX) {
+          if (total > v && bounds[i] === maxX) {
            bounds[i] = x;
          }
        });
      }
    );
    return bounds;
-  } else if (t.pointSetDist.tag == "Mixed") {
+  } else if (t.pointSetDist.tag === "Mixed") {
    let discreteShape = t.pointSetDist.value.discrete.xyShape;
    let totalDiscrete = discreteShape.ys.reduce((a, b) => a + b);
 
@@ -276,13 +329,13 @@ function getPercentiles(percentiles: number[], t: DistPlus) {
    let maxX = _.max(sortedPoints.map((x) => x.x));
    let bounds = percentiles.map((_) => maxX);
    sortedPoints.map((point: labeledPoint) => {
-      if (point.type == "discrete") {
+      if (point.type === "discrete") {
        total += point.y;
-      } else if (point.type == "continuous") {
+      } else if (point.type === "continuous") {
        total += (point.y / totalY) * totalContinuous;
      }
      percentiles.forEach((v, i) => {
-        if (total > v && bounds[i] == maxX) {
+        if (total > v && bounds[i] === maxX) {
          bounds[i] = total;
        }
      });
@@ -291,91 +344,3 @@ function getPercentiles(percentiles: number[], t: DistPlus) {
     return bounds;
   }
 }
-
-function MakeNumberShower(props: { number: number; precision: number }) {
-  let numberWithPresentation = numberShow(props.number, props.precision);
-  return (
-    <span>
-      {numberWithPresentation.value}
-      {numberWithPresentation.symbol}
-      {numberWithPresentation.power ? (
-        <span>
-          {"\u00b710"}
-          <span style={{ fontSize: "0.6em", verticalAlign: "super" }}>
-            {numberWithPresentation.power}
-          </span>
-        </span>
-      ) : (
-        <></>
-      )}
-    </span>
-  );
-}
-
-const orderOfMagnitudeNum = (n: number) => {
-  return Math.pow(10, n);
-};
-
-// 105 -> 3
-const orderOfMagnitude = (n: number) => {
-  return Math.floor(Math.log(n) / Math.LN10 + 0.000000001);
-};
-
-function withXSigFigs(number: number, sigFigs: number) {
-  const withPrecision = number.toPrecision(sigFigs);
-  const formatted = Number(withPrecision);
-  return `${formatted}`;
-}
-
-class NumberShower {
-  number: number;
-  precision: number;
-
-  constructor(number: number, precision = 2) {
-    this.number = number;
-    this.precision = precision;
-  }
-
-  convert() {
-    const number = Math.abs(this.number);
-    const response = this.evaluate(number);
-    if (this.number < 0) {
-      response.value = "-" + response.value;
-    }
-    return response;
-  }
-
-  metricSystem(number: number, order: number) {
-    const newNumber = number / orderOfMagnitudeNum(order);
-    const precision = this.precision;
-    return `${withXSigFigs(newNumber, precision)}`;
-  }
-
-  evaluate(number: number) {
-    if (number === 0) {
-      return { value: this.metricSystem(0, 0) };
-    }
-
-    const order = orderOfMagnitude(number);
-    if (order < -2) {
-      return { value: this.metricSystem(number, order), power: order };
-    } else if (order < 4) {
-      return { value: this.metricSystem(number, 0) };
-    } else if (order < 6) {
-      return { value: this.metricSystem(number, 3), symbol: "K" };
-    } else if (order < 9) {
-      return { value: this.metricSystem(number, 6), symbol: "M" };
-    } else if (order < 12) {
-      return { value: this.metricSystem(number, 9), symbol: "B" };
-    } else if (order < 15) {
-      return { value: this.metricSystem(number, 12), symbol: "T" };
-    } else {
-      return { value: this.metricSystem(number, order), power: order };
-    }
-  }
-}
-
-export function numberShow(number: number, precision = 2) {
-  const ns = new NumberShower(number, precision);
-  return ns.convert();
-}
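With the props now destructured and defaulted, a caller can size the chart and listen for environment changes roughly like this. The snippet is an illustrative sketch; the expression string is borrowed from the stories elsewhere in this commit, and the surrounding `Example` component is assumed.

```tsx
import * as React from "react";
import { SquiggleChart } from "./SquiggleChart";
import type { exportEnv } from "@quri/squiggle-lang";

// Every prop is optional; width/height default to 500/60 per the defaults above.
export const Example: React.FC = () => (
  <SquiggleChart
    squiggleString="mm(0, 5 to 10, [0.5, 0.5])"
    width={500}
    height={100}
    onEnvChange={(env: exportEnv) => console.log(env)}
  />
);
```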
@@ -1,8 +1,9 @@
 import * as React from "react";
 import * as ReactDOM from "react-dom";
 import { SquiggleChart } from "./SquiggleChart";
-import { ReactCodeJar } from "react-codejar";
+import { CodeEditor } from "./CodeEditor";
 import type { exportEnv } from "@quri/squiggle-lang";
+import styled from 'styled-components'
 
 export interface SquiggleEditorProps {
   /** The input string for squiggle */
@@ -23,71 +24,57 @@
   environment?: exportEnv;
   /** when the environment changes. Used again for notebook magic*/
   onEnvChange?(env: exportEnv): void;
+  /** The width of the element */
+  width: number;
 }
 
-const highlight = (editor: HTMLInputElement) => {
-  let code = editor.textContent;
-  code = code.replace(/\((\w+?)(\b)/g, '(<font color="#8a2be2">$1</font>$2');
-  editor.innerHTML = code;
+const Input = styled.div`
+  border: 1px solid #ddd;
+  padding: 0.3em 0.3em;
+  margin-bottom: 1em;
+`;
+
+export let SquiggleEditor: React.FC<SquiggleEditorProps> = ({
+  initialSquiggleString = "",
+  width = 500,
+  sampleCount,
+  outputXYPoints,
+  kernelWidth,
+  pointDistLength,
+  diagramStart,
+  diagramStop,
+  diagramCount,
+  onEnvChange,
+  environment,
+}: SquiggleEditorProps) => {
+  let [expression, setExpression] = React.useState(initialSquiggleString);
+  return (
+    <div>
+      <Input>
+        <CodeEditor
+          value={expression}
+          onChange={setExpression}
+          oneLine={true}
+        />
+      </Input>
+      <SquiggleChart
+        width={width}
+        squiggleString={expression}
+        sampleCount={sampleCount}
+        outputXYPoints={outputXYPoints}
+        kernelWidth={kernelWidth}
+        pointDistLength={pointDistLength}
+        diagramStart={diagramStart}
+        diagramStop={diagramStop}
+        diagramCount={diagramCount}
+        environment={environment}
+        onEnvChange={onEnvChange}
+      />
+    </div>
+  );
 };
 
-interface SquiggleEditorState {
-  expression: string;
-  env: exportEnv;
-}
-
-export class SquiggleEditor extends React.Component<
-  SquiggleEditorProps,
-  SquiggleEditorState
-> {
-  constructor(props: SquiggleEditorProps) {
-    super(props);
-    let code = props.initialSquiggleString ? props.initialSquiggleString : "";
-    this.state = { expression: code, env: props.environment };
-  }
-  render() {
-    let { expression, env } = this.state;
-    let props = this.props;
-    return (
-      <div>
-        <ReactCodeJar
-          code={expression}
-          onUpdate={(e) => {
-            this.setState({ expression: e });
-          }}
-          style={{
-            borderRadius: "6px",
-            width: "530px",
-            border: "1px solid grey",
-            fontFamily: "'Source Code Pro', monospace",
-            fontSize: "14px",
-            fontWeight: "400",
-            letterSpacing: "normal",
-            lineHeight: "20px",
-            padding: "10px",
-            tabSize: "4",
-          }}
-          highlight={highlight}
-          lineNumbers={false}
-        />
-        <SquiggleChart
-          squiggleString={expression}
-          sampleCount={props.sampleCount}
-          outputXYPoints={props.outputXYPoints}
-          kernelWidth={props.kernelWidth}
-          pointDistLength={props.pointDistLength}
-          diagramStart={props.diagramStart}
-          diagramStop={props.diagramStop}
-          diagramCount={props.diagramCount}
-          environment={env}
-          onEnvChange={props.onEnvChange}
-        />
-      </div>
-    );
-  }
-}
-
-export function renderSquiggleEditor(props: SquiggleEditorProps) {
+export function renderSquiggleEditorToDom(props: SquiggleEditorProps) {
   let parent = document.createElement("div");
   ReactDOM.render(
     <SquiggleEditor
packages/components/src/SquigglePlayground.tsx (new file, +131)
@@ -0,0 +1,131 @@
import _ from "lodash";
import React, { FC, useState } from "react";
import ReactDOM from "react-dom";
import { SquiggleChart } from "./SquiggleChart";
import CodeEditor from "./CodeEditor";
import { Form, Input, Card, Row, Col } from "antd";
import "antd/dist/antd.css";

interface FieldFloatProps {
  label: string;
  className?: string;
  value: number;
  onChange: (value: number) => void;
}

function FieldFloat(Props: FieldFloatProps) {
  let [contents, setContents] = useState(Props.value + "");
  return (
    <Form.Item label={Props.label}>
      <Input
        value={contents}
        className={Props.className ? Props.className : ""}
        onChange={(e) => {
          setContents(e.target.value);
          let result = parseFloat(contents);
          if (_.isFinite(result)) {
            Props.onChange(result);
          }
        }}
      />
    </Form.Item>
  );
}

interface Props {
  initialSquiggleString: string;
}

let SquigglePlayground: FC<Props> = (props) => {
  let [squiggleString, setSquiggleString] = useState(
    props.initialSquiggleString
  );
  let [sampleCount, setSampleCount] = useState(1000);
  let [outputXYPoints, setOutputXYPoints] = useState(1000);
  let [pointDistLength, setPointDistLength] = useState(1000);
  let [diagramStart, setDiagramStart] = useState(0);
  let [diagramStop, setDiagramStop] = useState(10);
  let [diagramCount, setDiagramCount] = useState(20);
  var demoDist = (
    <SquiggleChart
      squiggleString={squiggleString}
      sampleCount={sampleCount}
      outputXYPoints={outputXYPoints}
      diagramStart={diagramStart}
      diagramStop={diagramStop}
      diagramCount={diagramCount}
      pointDistLength={pointDistLength}
      height={150}
    />
  );
  return (
    <Row>
      <Col span={12}>
        <Card title="Distribution Form">
          <Form>
            <Row gutter={16}>
              <Col span={24}>
                <CodeEditor
                  value={squiggleString}
                  onChange={setSquiggleString}
                  oneLine={false}
                />
              </Col>
            </Row>
            <Row gutter={16}>
              <Col span={12}>
                <FieldFloat
                  value={sampleCount}
                  label="Sample Count"
                  onChange={setSampleCount}
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={outputXYPoints}
                  onChange={setOutputXYPoints}
                  label="Output XY-points"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={pointDistLength}
                  onChange={setPointDistLength}
                  label="Downsample To"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramStart}
                  onChange={setDiagramStart}
                  label="Diagram Start"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramStop}
                  onChange={setDiagramStop}
                  label="Diagram Stop"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramCount}
                  onChange={setDiagramCount}
                  label="Diagram Count"
                />
              </Col>
            </Row>
          </Form>
        </Card>
      </Col>
      <Col span={12}>{demoDist}</Col>
    </Row>
  );
};
export default SquigglePlayground;
export function renderSquigglePlaygroundToDom(props: Props) {
  let parent = document.createElement("div");
  ReactDOM.render(<SquigglePlayground {...props} />, parent);
  return parent;
}
@@ -1,2 +1,6 @@
 export { SquiggleChart } from "./SquiggleChart";
-export { SquiggleEditor, renderSquiggleEditor } from "./SquiggleEditor";
+export { SquiggleEditor, renderSquiggleEditorToDom } from "./SquiggleEditor";
+import SquigglePlayground, {
+  renderSquigglePlaygroundToDom,
+} from "./SquigglePlayground";
+export { SquigglePlayground, renderSquigglePlaygroundToDom };
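The playground helper renders into a detached `<div>` and returns it, so a plain-DOM caller attaches it wherever it wants; the renamed `renderSquiggleEditorToDom` is used the same way. A sketch under that assumption, with a placeholder initial string:

```ts
import { renderSquigglePlaygroundToDom } from "@quri/squiggle-components";

// Returns a detached <div> with the playground rendered inside it.
const playground = renderSquigglePlaygroundToDom({
  initialSquiggleString: "mm(0, 5 to 10, [0.5, 0.5])",
});
document.body.appendChild(playground);
```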
@@ -1,123 +1,181 @@
 {
   "$schema": "https://vega.github.io/schema/vega/v5.json",
-  "description": "A basic area chart example.",
+  "description": "A basic area chart example",
   "width": 500,
-  "height": 200,
+  "height": 100,
   "padding": 5,
   "data": [{ "name": "con" }, { "name": "dis" }],
   "signals": [
-    {
-      "name": "mousex",
-      "description": "x position of mouse",
-      "update": "0",
-      "on": [{ "events": "mousemove", "update": "1-x()/width" }]
-    },
     {
       "name": "xscale",
       "description": "The transform of the x scale",
-      "value": 1.0,
-      "bind": { "input": "range", "min": 0.1, "max": 1 }
+      "value": false,
+      "bind": { "input": "checkbox", "name": "log x scale" }
     },
     {
       "name": "yscale",
       "description": "The transform of the y scale",
-      "value": 1.0,
-      "bind": { "input": "range", "min": 0.1, "max": 1 }
+      "value": false,
+      "bind": { "input": "checkbox", "name": "log y scale" }
     }
   ],
   "scales": [
     {
       "name": "xscale",
       "type": "pow",
-      "exponent": { "signal": "xscale" },
+      "exponent": { "signal": "xscale ? 0.1 : 1" },
       "range": "width",
       "zero": false,
       "nice": false,
       "domain": {
         "fields": [{ "data": "con", "field": "x" }, { "data": "dis", "field": "x" }]
       }
     },
     {
       "name": "yscale",
       "type": "pow",
-      "exponent": { "signal": "yscale" },
+      "exponent": { "signal": "yscale ? 0.1 : 1" },
       "range": "height",
       "nice": true,
       "zero": true,
       "domain": {
         "fields": [{ "data": "con", "field": "y" }, { "data": "dis", "field": "y" }]
       }
     }
   ],
   "axes": [
-    { "orient": "bottom", "scale": "xscale", "tickCount": 20 },
-    { "orient": "left", "scale": "yscale" }
+    { "orient": "bottom", "scale": "xscale", "labelColor": "#666", "tickColor": "#ddd", "format": "~s", "tickCount": 20 }
   ],
   "marks": [
     {
       "type": "area",
       "from": { "data": "con" },
       "encode": {
-        "enter": {
-          "tooltip": { "signal": "datum.cdf" }
-        },
         "update": {
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "y" },
           "y2": { "scale": "yscale", "value": 0 },
           "fill": {
-            "signal": "{gradient: 'linear', x1: 1, y1: 1, x2: 0, y2: 1, stops: [ {offset: 0.0, color: '#11ac8f'}, {offset: clamp(mousex, 0, 1), color: '#11ac8f'}, {offset: clamp(mousex, 0, 1), color: '#1b6fac'}, {offset: 1.0, color: '#1b6fac'} ] }",
-            "color": "#000"
+            "signal": "{gradient: 'linear', x1: 1, y1: 1, x2: 0, y2: 1, stops: [ {offset: 0.0, color: '#4C78A8'}] }"
           },
           "interpolate": { "value": "monotone" },
           "fillOpacity": { "value": 1 }
         }
       }
     },
     {
       "type": "rect",
       "from": { "data": "dis" },
       "encode": {
         "enter": {
           "y2": { "scale": "yscale", "value": 0 },
           "width": { "value": 1 }
         },
         "update": {
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "y" }
         }
       }
     },
     {
       "type": "symbol",
       "from": { "data": "dis" },
       "encode": {
         "enter": {
           "shape": { "value": "circle" },
           "width": { "value": 5 },
           "tooltip": { "signal": "datum.y" }
         },
         "update": {
           "x": { "scale": "xscale", "field": "x" },
-          "y": { "scale": "yscale", "field": "y" }
+          "y": { "scale": "yscale", "field": "y" },
+          "fill": { "value": "#1e4577" }
         }
       }
     }
   ]
 }
@@ -7,7 +7,12 @@
     {
       "name": "facet",
       "values": [],
       "format": { "type": "json", "parse": { "timestamp": "date" } }
     },
     {
       "name": "table",
@@ -70,7 +75,10 @@
       "name": "xscale",
       "type": "linear",
       "nice": true,
       "domain": { "data": "facet", "field": "x" },
       "range": "width"
     },
@@ -79,7 +87,10 @@
       "range": "height",
       "nice": true,
       "zero": true,
       "domain": { "data": "facet", "field": "p99" }
     }
   ],
   "axes": [
@@ -89,8 +100,20 @@
       "grid": false,
       "tickSize": 2,
       "encode": {
         "grid": { "enter": { "stroke": { "value": "#ccc" } } },
         "ticks": { "enter": { "stroke": { "value": "#ccc" } } }
       }
     },
@@ -100,107 +123,249 @@
       "domain": false,
       "tickSize": 2,
       "encode": {
         "grid": { "enter": { "stroke": { "value": "#ccc" } } },
         "ticks": { "enter": { "stroke": { "value": "#ccc" } } }
       }
     }
   ],
   "marks": [
     {
       "type": "area",
       "from": { "data": "table" },
       "encode": {
         "enter": { "fill": { "value": "#4C78A8" } },
         "update": {
           "interpolate": { "value": "monotone" },
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "p1" },
           "y2": { "scale": "yscale", "field": "p99" },
           "opacity": { "value": 0.05 }
         }
       }
     },
     {
       "type": "area",
       "from": { "data": "table" },
       "encode": {
         "enter": { "fill": { "value": "#4C78A8" } },
         "update": {
           "interpolate": { "value": "monotone" },
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "p5" },
           "y2": { "scale": "yscale", "field": "p95" },
           "opacity": { "value": 0.1 }
         }
       }
     },
     {
       "type": "area",
       "from": { "data": "table" },
       "encode": {
         "enter": { "fill": { "value": "#4C78A8" } },
         "update": {
           "interpolate": { "value": "monotone" },
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "p10" },
           "y2": { "scale": "yscale", "field": "p90" },
           "opacity": { "value": 0.15 }
         }
       }
     },
     {
       "type": "area",
       "from": { "data": "table" },
       "encode": {
         "enter": { "fill": { "value": "#4C78A8" } },
         "update": {
           "interpolate": { "value": "monotone" },
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "p20" },
           "y2": { "scale": "yscale", "field": "p80" },
           "opacity": { "value": 0.2 }
         }
       }
     },
     {
       "type": "area",
       "from": { "data": "table" },
       "encode": {
         "enter": { "fill": { "value": "#4C78A8" } },
         "update": {
           "interpolate": { "value": "monotone" },
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "p30" },
           "y2": { "scale": "yscale", "field": "p70" },
           "opacity": { "value": 0.2 }
         }
       }
     },
     {
       "type": "area",
       "from": { "data": "table" },
       "encode": {
         "enter": { "fill": { "value": "#4C78A8" } },
         "update": {
           "interpolate": { "value": "monotone" },
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "p40" },
           "y2": { "scale": "yscale", "field": "p60" },
           "opacity": { "value": 0.2 }
         }
       }
     },
     {
       "type": "line",
       "from": { "data": "table" },
       "encode": {
         "update": {
           "interpolate": { "value": "monotone" },
           "stroke": { "value": "#4C78A8" },
           "strokeWidth": { "value": 2 },
           "opacity": { "value": 0.8 },
           "x": { "scale": "xscale", "field": "x" },
           "y": { "scale": "yscale", "field": "p50" }
         }
       }
     }
@@ -2,8 +2,5 @@ import { Meta } from "@storybook/addon-docs";
 
 <Meta title="Squiggle/Introduction" />
 
-This is the component library for Squiggle. All of these components are react
+This is the component library for Squiggle. These are React
 components, and can be used in any application that you see fit.
-
-Currently, the only component that is provided is the SquiggleChart component.
-This component allows you to render the result of a squiggle expression.
packages/components/src/stories/NumberShower.stories.mdx (new file, 60 lines)
@@ -0,0 +1,60 @@
import { NumberShower } from "../NumberShower";
import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";

<Meta title="Squiggle/NumberShower" component={NumberShower} />

# Number Shower

The number shower is a simple component to display a number.

It uses the symbols "K", "M", "B", and "T", to represent thousands, millions, billions, and trillions. Outside of that range, it uses scientific notation.

<Canvas>
  <Story
    name="Ten Thousand"
    args={{
      number: 10000,
      precision: 2
    }}
  >
    {args => <NumberShower {...args}/>}
  </Story>
</Canvas>

<Canvas>
  <Story
    name="Ten Billion"
    args={{
      number: 10000000000,
      precision: 2
    }}
  >
    {args => <NumberShower {...args}/>}
  </Story>
</Canvas>

<Canvas>
  <Story
    name="1.2*10^15"
    args={{
      number: 1200000000000000,
      precision: 2
    }}
  >
    {args => <NumberShower {...args}/>}
  </Story>
</Canvas>

<Canvas>
  <Story
    name="1.35*10^-13"
    args={{
      number: 0.000000000000135,
      precision: 2
    }}
  >
    {args => <NumberShower {...args}/>}
  </Story>
</Canvas>

<Props of={NumberShower} />
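For reference, the same stories condensed into a plain React usage sketch. The `number` and `precision` props come from the stories above; the import path and the exact rendered strings are assumptions, not confirmed output.

```tsx
// Hedged sketch, not part of this commit: the NumberShower props used by the stories above.
import React from "react";
// Assumed relative path, mirroring the story file's own import.
import { NumberShower } from "../NumberShower";

export const Examples = () => (
  <>
    {/* Within the K/M/B/T range the prose above suggests suffix formatting (thousands, billions, ...). */}
    <NumberShower number={10000} precision={2} />
    <NumberShower number={10000000000} precision={2} />
    {/* Outside that range the component is described as falling back to scientific notation. */}
    <NumberShower number={1200000000000000} precision={2} />
    <NumberShower number={0.000000000000135} precision={2} />
  </>
);
```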
@@ -18,7 +18,7 @@ could be continuous, discrete or mixed.
 
 ## Distributions
 
-An example of a normal distribution is:
+### Continuous Distributions
 
 <Canvas>
   <Story
@@ -31,26 +31,26 @@ An example of a normal distribution is:
   </Story>
 </Canvas>
 
-An example of a Discrete distribution is:
+### Discrete Distributions
 
 <Canvas>
   <Story
     name="Discrete"
     args={{
-      squiggleString: "mm(0, 1, [0.5, 0.5])",
+      squiggleString: "mm(0, 1, 3, 5, 8, 10, [0.1, 0.8, 0.5, 0.3, 0.2, 0.1])",
     }}
   >
     {Template.bind({})}
   </Story>
 </Canvas>
 
-An example of a Mixed distribution is:
+## Mixed distributions
 
 <Canvas>
   <Story
     name="Mixed"
     args={{
-      squiggleString: "mm(0, 5 to 10, [0.5, 0.5])",
+      squiggleString: "mm(0, 1, 3, 5, 8, normal(8, 1), [0.1, 0.3, 0.4, 0.35, 0.2, 0.8])",
     }}
   >
     {Template.bind({})}
@@ -66,7 +66,7 @@ to allow large and small numbers being printed cleanly.
   <Story
     name="Constant"
     args={{
-      squiggleString: "500000 * 5000000",
+      squiggleString: "500000000",
     }}
   >
     {Template.bind({})}
@@ -75,14 +75,28 @@ to allow large and small numbers being printed cleanly.
 
 ## Functions
 
-Finally, a function can be returned, and this shows how the distribution changes
-over the axis between x = 0 and 10.
+Full functions can be returned. These plot out the results of distributions between a set of x-coordinates.
+
+The default is to show 10 points between 0 and 10.
 
 <Canvas>
   <Story
     name="Function"
     args={{
-      squiggleString: "f(x) = normal(x,x)\nf",
+      squiggleString: "f(x) = normal(x^2,x^1.8)\nf",
+    }}
+  >
+    {Template.bind({})}
+  </Story>
+</Canvas>
+
+## Errors
+
+<Canvas>
+  <Story
+    name="Error"
+    args={{
+      squiggleString: "f(x) = normal(",
     }}
   >
     {Template.bind({})}
packages/components/src/stories/SquiggleEditor.stories.mdx (new file, 35 lines)
@@ -0,0 +1,35 @@
import { SquiggleEditor } from "../SquiggleEditor";
import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";

<Meta title="Squiggle/SquiggleEditor" component={SquiggleEditor} />

export const Template = (props) => <SquiggleEditor {...props} />;

# Squiggle Editor

Squiggle Editor is a Squiggle chart with a text editor included for changing
the distribution.

<Canvas>
  <Story
    name="Normal"
    args={{
      initialSquiggleString: "normal(5,2)",
    }}
  >
    {Template.bind({})}
  </Story>
</Canvas>

You can also name variables like so:

<Canvas>
  <Story
    name="Variables"
    args={{
      initialSquiggleString: "x = 2\nnormal(x,2)",
    }}
  >
    {Template.bind({})}
  </Story>
</Canvas>
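The same two stories as a plain React sketch: `initialSquiggleString` is the prop the stories pass; the import path and the surrounding component are assumed for illustration only.

```tsx
// Hedged sketch, not part of this commit: embedding SquiggleEditor directly.
import React from "react";
// Assumed relative path, mirroring the story file's own import.
import { SquiggleEditor } from "../SquiggleEditor";

export const EditorExamples = () => (
  <>
    {/* A single distribution, editable in the text box. */}
    <SquiggleEditor initialSquiggleString="normal(5,2)" />
    {/* Named variables work too, as in the "Variables" story. */}
    <SquiggleEditor initialSquiggleString={"x = 2\nnormal(x,2)"} />
  </>
);
```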
@@ -0,0 +1,22 @@
import SquigglePlayground from "../SquigglePlayground";
import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";

<Meta title="Squiggle/SquigglePlayground" component={SquigglePlayground} />

export const Template = (props) => <SquigglePlayground {...props} />;

# Squiggle Playground

A Squiggle playground is an environment where you can play around with all settings,
including sampling settings, in squiggle.

<Canvas>
  <Story
    name="Normal"
    args={{
      initialSquiggleString: "normal(5,2)",
    }}
  >
    {Template.bind({})}
  </Story>
</Canvas>
@@ -16,10 +16,10 @@
     "declaration": true,
     "sourceMap": true
   },
-  "files": ["src/spec-distributions.json","src/spec-percentiles.json"],
+  "files": ["src/spec-distributions.json", "src/spec-percentiles.json"],
   "target": "ES6",
   "include": ["src/**/*", "src/*"],
-  "exclude": ["node_modules", "**/*.spec.ts"],
+  "exclude": ["node_modules", "**/*.spec.ts", "webpack.config.js"],
   "references": [
     {
       "path": "../squiggle-lang"
@@ -12,12 +12,16 @@ module.exports = {
         options: { projectReferences: true },
         exclude: /node_modules/,
       },
+      {
+        test: /\.css$/i,
+        use: ["style-loader", "css-loader"],
+      },
     ],
   },
   resolve: {
     extensions: [".js", ".tsx", ".ts"],
     alias: {
-      "@quri/squiggle-lang": path.resolve(__dirname, '../squiggle-lang/src/js')
+      "@quri/squiggle-lang": path.resolve(__dirname, "../squiggle-lang/src/js"),
     },
   },
   output: {
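The added `test: /\.css$/i` rule chains css-loader and style-loader, so component files can import stylesheets directly and webpack will bundle and inject them. A minimal sketch of what that enables (the file name is hypothetical):

```ts
// Hedged sketch, not part of this commit: with the css rule above in place,
// an import like this is resolved and injected at runtime by style-loader.
// "./styles.css" is a placeholder file name used only for illustration.
import "./styles.css";
```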
packages/playground/.gitignore (deleted, 16 lines)
@@ -1,16 +0,0 @@
.DS_Store
.merlin
.bsb.lock
npm-debug.log
/node_modules/
.cache
.cache/*
dist
lib/*
*.cache
build
yarn-error.log
*.bs.js
# Local Netlify folder
.netlify
.idea
@@ -1,21 +0,0 @@
# TODO: REVIVE PLAYGROUND.

# Squiggle Playground

This repository contains the squiggle playground, a small web interface
for playing around with squiggle concepts.

It depends on `@quri/squiggle-components` and `@quri/squiggle-lang` so both of them will
need to be packaged for this to work. This can be done from the root directory
with

```
yarn build:lang
yarn build:components
```

Then, starting the playground can be done with:

```
yarn parcel
```
@@ -1,4 +0,0 @@
[[redirects]]
from = "/*"
to = "/index.html"
status = 200
@@ -1,54 +0,0 @@
{
  "name": "@quri/squiggle-playground",
  "version": "0.1.0",
  "homepage": "https://foretold-app.github.io/estiband/",
  "scripts": {
    "parcel": "parcel ./src/index.html",
    "parcel-build": "parcel build ./src/index.html --no-source-maps --no-autoinstall --no-scope-hoist",
    "deploy": "gh-pages -d dist",
    "ci": "yarn parcel-build"
  },
  "keywords": [],
  "author": "",
  "license": "MIT",
  "dependencies": {
    "@emotion/react": "^11.8.2",
    "@quri/squiggle-lang": "^0.2.2",
    "ace-builds": "^1.4.12",
    "antd": "^4.19.3",
    "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
    "binary-search-tree": "0.2.6",
    "css-loader": "^6.7.1",
    "gh-pages": "3.2.3",
    "jstat": "1.9.5",
    "lenses-ppx": "6.1.10",
    "less": "4.1.2",
    "lodash": "4.17.21",
    "mathjs": "10.4.3",
    "moduleserve": "0.9.1",
    "moment": "2.29.1",
    "pdfast": "^0.2.0",
    "rationale": "0.2.0",
    "react": "18.0.0",
    "react-ace": "^9.2.0",
    "react-dom": "^18.0.0",
    "react-use": "^17.3.2",
    "react-vega": "^7.5.0",
    "vega": "*",
    "vega-embed": "6.20.8",
    "vega-lite": "*"
  },
  "devDependencies": {
    "@emotion/babel-plugin": "^11.7.2",
    "@parcel/core": "^2.4.0",
    "@types/react": "^18.0.0",
    "autoprefixer": "^10.4.4",
    "docsify": "^4.12.2",
    "jest": "^27.5.1",
    "parcel": "^2.4.0",
    "postcss": "^8.4.7",
    "postcss-cli": "^9.1.0",
    "tailwindcss": "^3.0.23",
    "typescript": "^4.6.3"
  }
}
@@ -1,6 +0,0 @@
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}
@@ -1,9 +0,0 @@
import React from 'react'
import { render } from "react-dom"
import DistBuilder from "./components/DistBuilder"

var root = document.querySelector("#app")

if (!(root == null)) {
  render(<DistBuilder />, root)
}
@@ -1,34 +0,0 @@
import React, {FC} from "react";
import AceEditor from "react-ace";

import "ace-builds/src-noconflict/mode-golang";
import "ace-builds/src-noconflict/theme-github";
import "ace-builds/src-noconflict/ext-language_tools";
import "ace-builds/src-noconflict/keybinding-vim";

interface CodeEditorProps {
  value : string,
  onChange : (value: string) => void
}

export let CodeEditor : FC<CodeEditorProps> = (props) =>
  <AceEditor
    value={props.value}
    mode="golang"
    height="400px"
    width="100%"
    theme="github"
    showGutter={false}
    highlightActiveLine={false}
    showPrintMargin={false}
    onChange={props.onChange}
    name="UNIQUE_ID_OF_DIV"
    editorProps={{
      $blockScrolling: true,
    }}
    setOptions={{
      enableBasicAutocompletion: false,
      enableLiveAutocompletion: true,
      enableSnippets: true,
    }}
  />
@@ -1,171 +0,0 @@
import { FC, useState } from "react"
import { SquiggleChart } from "@quri/squiggle-components"
import { CodeEditor } from "./CodeEditor"
import { Form, Input, Card, Row, Col } from "antd"
import { css } from '@emotion/react'

interface FieldFloatProps {
  label : string,
  className? : string,
  value : number,
  onChange : (value: number) => void,
}

function FieldFloat(Props: FieldFloatProps) {
  let [contents, setContents] = useState(Props.value + "");
  return <Form.Item label={Props.label}>
    <Input
      value={contents}
      className={Props.className ? Props.className : ""}
      onChange={(e) => setContents(e.target.value)}
      onBlur={(_) => {
        let result = parseFloat(contents);
        if(result != NaN) {
          Props.onChange(result)
        }
      }}
    />
  </Form.Item>
}

let rows = css`
  >.antCol:firstChild {
    paddingLeft: 0.25em;
    paddingRight: 0.125em;
  }
  >.antCol:lastChild {
    paddingLeft: 0.125em;
    paddingRight: 0.25em;
  }
  >.antCol:not(:lastChild):not(:lastChild) {
    paddingLeft: 0.125em;
    paddingRight: 0.125em;
  }
`

let parent = css`
  .antImportNumber {
    width: 100%;
  }

  .anticon {
    verticalAlign: "zero";
  }
`

var form = css`
  backgroundColor: #eee;
  padding: 1em;
`

var dist = css`
  padding: 1em;
`

var spacer = css`
  marginTop: 1em;
`

var groupA = css`
  .antInputNumberInputs {
    backgroundColor: #fff7db;
  }
`

var groupB = css`
  .antInputNumberInput {
    backgroundColor: #eaf4ff;
  }
`

var Styles = {
  rows: rows,
  parent: parent,
  form: form,
  dist: dist,
  spacer: spacer,
  groupA: groupA,
  groupB: groupB
};

let DistBuilder : FC<{}> = (_: {}) => {
  let [squiggleString, setSquiggleString] = useState("mm(normal(5,2), normal(10,2))")
  let [sampleCount, setSampleCount] = useState(1000)
  let [outputXYPoints, setOutputXYPoints] = useState(1000)
  let [pointDistLength, setPointDistLength] = useState(undefined)
  let [kernelWidth, setKernelWidth] = useState(undefined)
  let [diagramStart, setDiagramStart] = useState(0)
  let [diagramStop, setDiagramStop] = useState(10)
  let [diagramCount, setDiagramCount] = useState(20)
  var demoDist =
    <SquiggleChart
      squiggleString={squiggleString}
      sampleCount={sampleCount}
      outputXYPoints={outputXYPoints}
      diagramStart={diagramStart}
      diagramStop={diagramStop}
      diagramCount={diagramCount}
      pointDistLength={pointDistLength}
    />
  return (
    <div className="grid grid-cols-2 gap-4">
      <div>
        <Card
          title="Distribution Form">
          <Form>
            <Row css={Styles.rows}>
              <Col span={24}>
                <CodeEditor value={squiggleString} onChange={setSquiggleString} /> </Col>
            </Row>
            <Row css={Styles.rows}>
              <Col span={12}>
                <FieldFloat
                  value={sampleCount}
                  label="Sample Count"
                  onChange={setSampleCount}
                /> </Col>
              <Col span={12}>
                <FieldFloat
                  value={outputXYPoints}
                  onChange={setOutputXYPoints}
                  label="Output XY-points" />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={pointDistLength}
                  onChange={setPointDistLength}
                  label="Downsample To"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={kernelWidth}
                  onChange={setKernelWidth}
                  label="Kernel Width"
                /> </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramStart}
                  onChange={setDiagramStart}
                  label="Diagram Start"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramStop}
                  onChange={setDiagramStop}
                  label="Diagram Stop"
                /> </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramCount}
                  onChange={setDiagramCount}
                  label="Diagram Count"
                />
              </Col>
            </Row>
          </Form>
        </Card>
      </div>
      {demoDist}
    </div>
  )
}
export default DistBuilder
@@ -1,17 +0,0 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8">
  <title>Squiggle Language</title>
  <link href="https://fonts.googleapis.com/css?family=Lato:300,400,700,900" rel="stylesheet">
  <link href="./styles/antd.css" rel="stylesheet">
  <link href="./styles/index.css" rel="stylesheet">
  <script type="module" src="./Index.tsx" defer></script>
</head>

<body>
  <div id="app" style="height: 100%"></div>
</body>

</html>
(File diff suppressed because it is too large.)
@@ -1,3 +0,0 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
@@ -1,9 +0,0 @@
module.exports = {
  content: [
    "./src/components/*.tsx"
  ],
  theme: {
    extend: {},
  },
  plugins: [],
}
@@ -1,19 +0,0 @@
{
  "compilerOptions": {
    "module": "commonjs",
    "jsx": "react-jsx",
    "jsxImportSource": "@emotion/react",
    "noImplicitAny": false,
    "removeComments": true,
    "preserveConstEnums": true,
    "esModuleInterop": true,
    "resolveJsonModule": true,
    "outDir": "./dist",
    "declarationDir": "./dist",
    "declaration": true,
    "sourceMap": true
  },
  "target": "ES6",
  "include": ["src/**/*"],
  "exclude": ["node_modules", "**/*.spec.ts"]
}
(File diff suppressed because it is too large.)
packages/squiggle-lang/.gitignore
@@ -17,3 +17,5 @@ yarn-error.log
 *.gen.tsx
 *.gen.js
 dist
+*.coverage
+_coverage
@@ -13,6 +13,9 @@ Other:
 yarn start # listens to files and recompiles at every mutation
 yarn test
 yarn test:watch # keeps an active session and runs all tests at every mutation
+
+# where o := open in osx and o := xdg-open in linux,
+yarn coverage; o _coverage/index.html # produces coverage report and opens it in browser
 ```
 
 ## Information
@@ -4,10 +4,10 @@ open Expect
 describe("Bandwidth", () => {
   test("nrd0()", () => {
     let data = [1., 4., 3., 2.]
-    expect(Bandwidth.nrd0(data)) -> toEqual(0.7625801874014622)
+    expect(SampleSetDist_Bandwidth.nrd0(data)) -> toEqual(0.7625801874014622)
   })
   test("nrd()", () => {
     let data = [1., 4., 3., 2.]
-    expect(Bandwidth.nrd(data)) -> toEqual(0.8981499984950554)
+    expect(SampleSetDist_Bandwidth.nrd(data)) -> toEqual(0.8981499984950554)
   })
 })
@@ -1,89 +0,0 @@
open Jest
open Expect

let makeTest = (~only=false, str, item1, item2) =>
  only
    ? Only.test(str, () => expect(item1) -> toEqual(item2))
    : test(str, () => expect(item1) -> toEqual(item2))

describe("PointSetTypes", () =>
  describe("Domain", () => {
    let makeComplete = (yPoint, expectation) =>
      makeTest(
        "With input: " ++ Js.Float.toString(yPoint),
        PointSetTypes.Domain.yPointToSubYPoint(Complete, yPoint),
        expectation,
      )
    let makeSingle = (direction: [#left | #right], excludingProbabilityMass, yPoint, expectation) =>
      makeTest(
        "Excluding: " ++
        (Js.Float.toString(excludingProbabilityMass) ++
        (" and yPoint: " ++ Js.Float.toString(yPoint))),
        PointSetTypes.Domain.yPointToSubYPoint(
          direction == #left
            ? LeftLimited({xPoint: 3.0, excludingProbabilityMass: excludingProbabilityMass})
            : RightLimited({xPoint: 3.0, excludingProbabilityMass: excludingProbabilityMass}),
          yPoint,
        ),
        expectation,
      )
    let makeDouble = (domain, yPoint, expectation) =>
      makeTest("Excluding: limits", PointSetTypes.Domain.yPointToSubYPoint(domain, yPoint), expectation)

    describe("With Complete Domain", () => {
      makeComplete(0.0, Some(0.0))
      makeComplete(0.6, Some(0.6))
      makeComplete(1.0, Some(1.0))
    })
    describe("With Left Limit", () => {
      makeSingle(#left, 0.5, 1.0, Some(1.0))
      makeSingle(#left, 0.5, 0.75, Some(0.5))
      makeSingle(#left, 0.8, 0.9, Some(0.5))
      makeSingle(#left, 0.5, 0.4, None)
      makeSingle(#left, 0.5, 0.5, Some(0.0))
    })
    describe("With Right Limit", () => {
      makeSingle(#right, 0.5, 1.0, None)
      makeSingle(#right, 0.5, 0.25, Some(0.5))
      makeSingle(#right, 0.8, 0.5, None)
      makeSingle(#right, 0.2, 0.2, Some(0.25))
      makeSingle(#right, 0.5, 0.5, Some(1.0))
      makeSingle(#right, 0.5, 0.0, Some(0.0))
      makeSingle(#right, 0.5, 0.5, Some(1.0))
    })
    describe("With Left and Right Limit", () => {
      makeDouble(
        LeftAndRightLimited(
          {excludingProbabilityMass: 0.25, xPoint: 3.0},
          {excludingProbabilityMass: 0.25, xPoint: 10.0},
        ),
        0.5,
        Some(0.5),
      )
      makeDouble(
        LeftAndRightLimited(
          {excludingProbabilityMass: 0.1, xPoint: 3.0},
          {excludingProbabilityMass: 0.1, xPoint: 10.0},
        ),
        0.2,
        Some(0.125),
      )
      makeDouble(
        LeftAndRightLimited(
          {excludingProbabilityMass: 0.1, xPoint: 3.0},
          {excludingProbabilityMass: 0.1, xPoint: 10.0},
        ),
        0.1,
        Some(0.0),
      )
      makeDouble(
        LeftAndRightLimited(
          {excludingProbabilityMass: 0.1, xPoint: 3.0},
          {excludingProbabilityMass: 0.1, xPoint: 10.0},
        ),
        0.05,
        None,
      )
    })
  })
)
@@ -0,0 +1,103 @@
open Jest
open Expect

let env: DistributionOperation.env = {
  sampleCount: 100,
  xyPointLength: 100,
}

let {
  normalDist5,
  normalDist10,
  normalDist20,
  normalDist,
  uniformDist,
  betaDist,
  lognormalDist,
  cauchyDist,
  triangularDist,
  exponentialDist,
} = module(GenericDist_Fixtures)
let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))

let {toFloat, toDist, toString, toError} = module(DistributionOperation.Output)
let {run} = module(DistributionOperation)
let {fmap} = module(DistributionOperation.Output)
let run = run(~env)
let outputMap = fmap(~env)
let toExt: option<'a> => 'a = E.O.toExt(
  "Should be impossible to reach (This error is in test file)",
)

describe("sparkline", () => {
  let runTest = (
    name: string,
    dist: GenericDist_Types.genericDist,
    expected: DistributionOperation.outputType,
  ) => {
    test(name, () => {
      let result = DistributionOperation.run(~env, FromDist(ToString(ToSparkline(20)), dist))
      expect(result)->toEqual(expected)
    })
  }

  runTest(
    "normal",
    normalDist,
    String(`▁▁▁▁▁▂▄▆▇██▇▆▄▂▁▁▁▁▁`),
  )

  runTest(
    "uniform",
    uniformDist,
    String(`████████████████████`),
  )

  runTest("beta", betaDist, String(`▁▄▇████▇▆▅▄▃▃▂▁▁▁▁▁▁`))

  runTest(
    "lognormal",
    lognormalDist,
    String(`▁█▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁`),
  )

  runTest(
    "cauchy",
    cauchyDist,
    String(`▁▁▁▁▁▁▁▁▁██▁▁▁▁▁▁▁▁▁`),
  )

  runTest(
    "triangular",
    triangularDist,
    String(`▁▁▂▃▄▅▆▇████▇▆▅▄▃▂▁▁`),
  )

  runTest(
    "exponential",
    exponentialDist,
    String(`█▅▄▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁`),
  )
})

describe("toPointSet", () => {
  test("on symbolic normal distribution", () => {
    let result =
      run(FromDist(ToDist(ToPointSet), normalDist5))
      ->outputMap(FromDist(ToFloat(#Mean)))
      ->toFloat
      ->toExt
    expect(result)->toBeSoCloseTo(5.0, ~digits=0)
  })

  test("on sample set", () => {
    let result =
      run(FromDist(ToDist(ToPointSet), normalDist5))
      ->outputMap(FromDist(ToDist(ToSampleSet(1000))))
      ->outputMap(FromDist(ToDist(ToPointSet)))
      ->outputMap(FromDist(ToFloat(#Mean)))
      ->toFloat
      ->toExt
    expect(result)->toBeSoCloseTo(5.0, ~digits=-1)
  })
})
@@ -0,0 +1,11 @@
let normalDist5: GenericDist_Types.genericDist = Symbolic(#Normal({mean: 5.0, stdev: 2.0}))
let normalDist10: GenericDist_Types.genericDist = Symbolic(#Normal({mean: 10.0, stdev: 2.0}))
let normalDist20: GenericDist_Types.genericDist = Symbolic(#Normal({mean: 20.0, stdev: 2.0}))
let normalDist: GenericDist_Types.genericDist = normalDist5

let betaDist: GenericDist_Types.genericDist = Symbolic(#Beta({alpha: 2.0, beta: 5.0}))
let lognormalDist: GenericDist_Types.genericDist = Symbolic(#Lognormal({mu: 0.0, sigma: 1.0}))
let cauchyDist: GenericDist_Types.genericDist = Symbolic(#Cauchy({local: 1.0, scale: 1.0}))
let triangularDist: GenericDist_Types.genericDist = Symbolic(#Triangular({low: 1.0, medium: 2.0, high: 3.0}))
let exponentialDist: GenericDist_Types.genericDist = Symbolic(#Exponential({rate: 2.0}))
let uniformDist: GenericDist_Types.genericDist = Symbolic(#Uniform({low: 9.0, high: 10.0}))
@@ -0,0 +1,70 @@
open Jest
open Expect
open TestHelpers

// TODO: use Normal.make (etc.), but preferably after the new validation dispatch is in.
let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
let mkBeta = (alpha, beta) => GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
let mkExponential = rate => GenericDist_Types.Symbolic(#Exponential({rate: rate}))
let mkUniform = (low, high) => GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
let mkCauchy = (local, scale) => GenericDist_Types.Symbolic(#Cauchy({local: local, scale: scale}))
let mkLognormal = (mu, sigma) => GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))

describe("mixture", () => {
  testAll("fair mean of two normal distributions", list{(0.0, 1e2), (-1e1, -1e-4), (-1e1, 1e2), (-1e1, 1e1)}, tup => { // should be property
    let (mean1, mean2) = tup
    let meanValue = {
      run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))
      -> outputMap(FromDist(ToFloat(#Mean)))
    }
    meanValue -> unpackFloat -> expect -> toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
  })
  testAll(
    "weighted mean of a beta and an exponential",
    // This would not survive property testing, it was easy for me to find cases that NaN'd out.
    list{((128.0, 1.0), 2.0), ((2e-1, 64.0), 16.0), ((1e0, 1e0), 64.0)},
    tup => {
      let ((alpha, beta), rate) = tup
      let betaWeight = 0.25
      let exponentialWeight = 0.75
      let meanValue = {
        run(Mixture(
          [
            (mkBeta(alpha, beta), betaWeight),
            (mkExponential(rate), exponentialWeight)
          ]
        )) -> outputMap(FromDist(ToFloat(#Mean)))
      }
      let betaMean = 1.0 /. (1.0 +. beta /. alpha)
      let exponentialMean = 1.0 /. rate
      meanValue
      -> unpackFloat
      -> expect
      -> toBeSoCloseTo(
        betaWeight *. betaMean +. exponentialWeight *. exponentialMean,
        ~digits=-1
      )
    }
  )
  testAll(
    "weighted mean of lognormal and uniform",
    // Would not survive property tests: very easy to find cases that NaN out.
    list{((-1e2,1e1), (2e0,1e0)), ((-1e-16,1e-16), (1e-8,1e0)), ((0.0,1e0), (1e0,1e-2))},
    tup => {
      let ((low, high), (mu, sigma)) = tup
      let uniformWeight = 0.6
      let lognormalWeight = 0.4
      let meanValue = {
        run(Mixture([(mkUniform(low, high), uniformWeight), (mkLognormal(mu, sigma), lognormalWeight)]))
        -> outputMap(FromDist(ToFloat(#Mean)))
      }
      let uniformMean = (low +. high) /. 2.0
      let lognormalMean = mu +. sigma ** 2.0 /. 2.0
      meanValue
      -> unpackFloat
      -> expect
      -> toBeSoCloseTo(uniformWeight *. uniformMean +. lognormalWeight *. lognormalMean, ~digits=-1)
    }
  )
})
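For orientation (not part of the diff): the expected values in the mixture tests above follow from the mean of a mixture being the weighted average of the component means, together with the standard closed forms the tests spell out.

```latex
% Mean of a k-component mixture with weights w_i summing to 1,
% plus the component means used in the expectations above:
\mathbb{E}[X_{\mathrm{mix}}] = \sum_{i=1}^{k} w_i \,\mathbb{E}[X_i], \qquad
\mathbb{E}[\mathrm{Beta}(\alpha,\beta)] = \frac{\alpha}{\alpha+\beta} = \frac{1}{1+\beta/\alpha}, \qquad
\mathbb{E}[\mathrm{Exp}(\lambda)] = \frac{1}{\lambda}, \qquad
\mathbb{E}[\mathrm{Uniform}(a,b)] = \frac{a+b}{2}
```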
@@ -0,0 +1,41 @@
open Jest
open TestHelpers

describe("Continuous and discrete splits", () => {
  makeTest(
    "splits (1)",
    SampleSetDist_ToPointSet.Internals.T.splitContinuousAndDiscrete([1.432, 1.33455, 2.0]),
    ([1.432, 1.33455, 2.0], E.FloatFloatMap.empty()),
  )
  makeTest(
    "splits (2)",
    SampleSetDist_ToPointSet.Internals.T.splitContinuousAndDiscrete([
      1.432,
      1.33455,
      2.0,
      2.0,
      2.0,
      2.0,
    ]) |> (((c, disc)) => (c, disc |> E.FloatFloatMap.toArray)),
    ([1.432, 1.33455], [(2.0, 4.0)]),
  )

  let makeDuplicatedArray = count => {
    let arr = Belt.Array.range(1, count) |> E.A.fmap(float_of_int)
    let sorted = arr |> Belt.SortArray.stableSortBy(_, compare)
    E.A.concatMany([sorted, sorted, sorted, sorted]) |> Belt.SortArray.stableSortBy(_, compare)
  }

  let (_, discrete1) = SampleSetDist_ToPointSet.Internals.T.splitContinuousAndDiscrete(
    makeDuplicatedArray(10),
  )
  let toArr1 = discrete1 |> E.FloatFloatMap.toArray
  makeTest("splitMedium at count=10", toArr1 |> Belt.Array.length, 10)

  let (_c, discrete2) = SampleSetDist_ToPointSet.Internals.T.splitContinuousAndDiscrete(
    makeDuplicatedArray(500),
  )
  let toArr2 = discrete2 |> E.FloatFloatMap.toArray
  makeTest("splitMedium at count=500", toArr2 |> Belt.Array.length, 500)
})
packages/squiggle-lang/__tests__/Distributions/Symbolic_test.res (new file, 161 lines)
@@ -0,0 +1,161 @@
open Jest
open Expect
open TestHelpers

// TODO: use Normal.make (but preferably after the new validation dispatch is in)
let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))

describe("(Symbolic) normalize", () => {
  testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
    let normalValue = mkNormal(mean, 2.0)
    let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
    normalizedValue
    -> unpackDist
    -> expect
    -> toEqual(normalValue)
  })
})

describe("(Symbolic) mean", () => {
  testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
    run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))
    -> unpackFloat
    -> expect
    -> toBeCloseTo(mean)
  })

  Skip.test("of normal(0, -1) (it NaNs out)", () => {
    run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))
    -> unpackFloat
    -> expect
    -> ExpectJs.toBeFalsy
  })

  test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
    run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))
    -> unpackFloat
    -> expect
    -> toBeCloseTo(0.0)
  })

  testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
    let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Exponential({rate: rate}))))
    meanValue -> unpackFloat -> expect -> toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
  })

  test("of a cauchy distribution", () => {
    let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))))
    meanValue
    -> unpackFloat
    -> expect
    -> toBeCloseTo(2.01868297874546)
    //-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
  })

  testAll("of triangular distributions", list{(1.0,2.0,3.0), (-1e7,-1e-7,1e-7), (-1e-7,1e0,1e7), (-1e-16,0.0,1e-16)}, tup => {
    let (low, medium, high) = tup
    let meanValue = run(FromDist(
      ToFloat(#Mean),
      GenericDist_Types.Symbolic(#Triangular({low: low, medium: medium, high: high}))
    ))
    meanValue
    -> unpackFloat
    -> expect
    -> toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
  })

  // TODO: nonpositive inputs are SUPPOSED to crash.
  testAll("of beta distributions", list{(1e-4, 6.4e1), (1.28e2, 1e0), (1e-16, 1e-16), (1e16, 1e16), (-1e4, 1e1), (1e1, -1e4)}, tup => {
    let (alpha, beta) = tup
    let meanValue = run(FromDist(
      ToFloat(#Mean),
      GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
    ))
    meanValue
    -> unpackFloat
    -> expect
    -> toBeCloseTo(1.0 /. (1.0 +. (beta /. alpha))) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
  })

  // TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
  test("of beta(0, 0)", () => {
    let meanValue = run(FromDist(
      ToFloat(#Mean),
      GenericDist_Types.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))
    ))
    meanValue
    -> unpackFloat
    -> expect
    -> ExpectJs.toBeFalsy
  })

  testAll("of lognormal distributions", list{(2.0, 4.0), (1e-7, 1e-2), (-1e6, 10.0), (1e3, -1e2), (-1e8, -1e4), (1e2, 1e-5)}, tup => {
    let (mu, sigma) = tup
    let meanValue = run(FromDist(
      ToFloat(#Mean),
      GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
    ))
    meanValue
    -> unpackFloat
    -> expect
    -> toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0 )) // https://brilliant.org/wiki/log-normal-distribution/
  })

  testAll("of uniform distributions", list{(1e-5, 12.345), (-1e4, 1e4), (-1e16, -1e2), (5.3e3, 9e9)}, tup => {
    let (low, high) = tup
    let meanValue = run(FromDist(
      ToFloat(#Mean),
      GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
    ))
    meanValue
    -> unpackFloat
    -> expect
    -> toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
  })

  test("of a float", () => {
    let meanValue = run(FromDist(
      ToFloat(#Mean),
      GenericDist_Types.Symbolic(#Float(7.7))
    ))
    meanValue -> unpackFloat -> expect -> toBeCloseTo(7.7)
  })
})

describe("Normal distribution with sparklines", () => {

  let parameterWiseAdditionPdf = (n1: SymbolicDistTypes.normal, n2: SymbolicDistTypes.normal) => {
    let normalDistAtSumMeanConstr = SymbolicDist.Normal.add(n1, n2)
    let normalDistAtSumMean: SymbolicDistTypes.normal = switch normalDistAtSumMeanConstr {
    | #Normal(params) => params
    }
    x => SymbolicDist.Normal.pdf(x, normalDistAtSumMean)
  }

  let normalDistAtMean5: SymbolicDistTypes.normal = {mean: 5.0, stdev: 2.0}
  let normalDistAtMean10: SymbolicDistTypes.normal = {mean: 10.0, stdev: 2.0}
  let range20Float = E.A.Floats.range(0.0, 20.0, 20) // [0.0,1.0,2.0,3.0,4.0,...19.0,]

  test("mean=5 pdf", () => {
    let pdfNormalDistAtMean5 = x => SymbolicDist.Normal.pdf(x, normalDistAtMean5)
    let sparklineMean5 = fnImage(pdfNormalDistAtMean5, range20Float)
    Sparklines.create(sparklineMean5, ())
    -> expect
    -> toEqual(`▁▂▃▆██▇▅▂▁▁▁▁▁▁▁▁▁▁▁`)
  })

  test("parameter-wise addition of two normal distributions", () => {
    let sparklineMean15 = normalDistAtMean5 -> parameterWiseAdditionPdf(normalDistAtMean10) -> fnImage(range20Float)
    Sparklines.create(sparklineMean15, ())
    -> expect
    -> toEqual(`▁▁▁▁▁▁▁▁▁▂▃▄▆███▇▅▄▂`)
  })

  test("mean=10 cdf", () => {
    let cdfNormalDistAtMean10 = x => SymbolicDist.Normal.cdf(x, normalDistAtMean10)
    let sparklineMean10 = fnImage(cdfNormalDistAtMean10, range20Float)
    Sparklines.create(sparklineMean10, ())
    -> expect
    -> toEqual(`▁▁▁▁▁▁▁▁▂▄▅▇████████`)
  })
})
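For orientation (not part of the diff): the closed-form means these symbolic tests check against are the standard ones, matching the source links in the code comments above.

```latex
% Closed-form means exercised by the tests above:
\mathbb{E}[\mathrm{Exp}(\lambda)] = \frac{1}{\lambda}, \qquad
\mathbb{E}[\mathrm{Triangular}(a,m,b)] = \frac{a+m+b}{3}, \qquad
\mathbb{E}[\mathrm{Beta}(\alpha,\beta)] = \frac{\alpha}{\alpha+\beta}, \qquad
\mathbb{E}[\mathrm{Lognormal}(\mu,\sigma)] = e^{\mu+\sigma^{2}/2}, \qquad
\mathbb{E}[\mathrm{Uniform}(a,b)] = \frac{a+b}{2}
```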
@@ -1,34 +1,87 @@
-import { run } from '../src/js/index';
+import { run, GenericDist, resultMap, makeSampleSetDist } from "../src/js/index";
 
 let testRun = (x: string) => {
-  let result = run(x)
-  if(result.tag == 'Ok'){
-    return { tag: 'Ok', value: result.value.exports }
-  }
-  else {
-    return result
-  }
-}
+  let result = run(x);
+  if (result.tag == "Ok") {
+    return { tag: "Ok", value: result.value.exports };
+  } else {
+    return result;
+  }
+};
 
 describe("Simple calculations and results", () => {
   test("mean(normal(5,2))", () => {
-    expect(testRun("mean(normal(5,2))")).toEqual({ tag: 'Ok', value: [ { NAME: 'Float', VAL: 5 } ] })
-  })
-  test("10+10", () => {
-    let foo = testRun("10 + 10")
-    expect(foo).toEqual({ tag: 'Ok', value: [ { NAME: 'Float', VAL: 20 } ] })
-  })
-})
+    expect(testRun("mean(normal(5,2))")).toEqual({
+      tag: "Ok",
+      value: [{ NAME: "Float", VAL: 5 }],
+    });
+  });
+  test("10+10", () => {
+    let foo = testRun("10 + 10");
+    expect(foo).toEqual({ tag: "Ok", value: [{ NAME: "Float", VAL: 20 }] });
+  });
+});
 describe("Log function", () => {
   test("log(1) = 0", () => {
-    let foo = testRun("log(1)")
-    expect(foo).toEqual({ tag: 'Ok', value: [ { NAME: 'Float', VAL: 0} ]})
-  })
-})
+    let foo = testRun("log(1)");
+    expect(foo).toEqual({ tag: "Ok", value: [{ NAME: "Float", VAL: 0 }] });
+  });
+});
 
 describe("Multimodal too many weights error", () => {
   test("mm(0,0,[0,0,0])", () => {
-    let foo = testRun("mm(0,0,[0,0,0])")
-    expect(foo).toEqual({ "tag": "Error", "value": "Function multimodal error: Too many weights provided" })
-  })
-})
+    let foo = testRun("mm(0,0,[0,0,0])");
+    expect(foo).toEqual({
+      tag: "Error",
+      value: "Function multimodal error: Too many weights provided",
+    });
+  });
+});
+
+describe("GenericDist", () => {
+  //It's important that sampleCount is less than 9. If it's more, then that will create randomness
+  //Also, note, the value should be created using makeSampleSetDist() later on.
+  let env = { sampleCount: 8, xyPointLength: 100 };
+  let dist = new GenericDist(
+    { tag: "SampleSet", value: [3, 4, 5, 6, 6, 7, 10, 15, 30] },
+    env
+  );
+  let dist2 = new GenericDist(
+    { tag: "SampleSet", value: [20, 22, 24, 29, 30, 35, 38, 44, 52] },
+    env
+  );
+
+  test("mean", () => {
+    expect(dist.mean().value).toBeCloseTo(3.737);
+  });
+  test("pdf", () => {
+    expect(dist.pdf(5.0).value).toBeCloseTo(0.0431);
+  });
+  test("cdf", () => {
+    expect(dist.cdf(5.0).value).toBeCloseTo(0.155);
+  });
+  test("inv", () => {
+    expect(dist.inv(0.5).value).toBeCloseTo(9.458);
+  });
+  test("toPointSet", () => {
+    expect(
+      resultMap(dist.toPointSet(), (r: GenericDist) => r.toString()).value.value
+    ).toBe("Point Set Distribution");
+  });
+  test("toSparkline", () => {
+    expect(dist.toSparkline(20).value).toBe("▁▁▃▅███▆▄▃▂▁▁▂▂▃▂▁▁▁");
+  });
+  test("algebraicAdd", () => {
+    expect(
+      resultMap(dist.algebraicAdd(dist2), (r: GenericDist) => r.toSparkline(20))
+        .value.value
+    ).toBe("▁▁▂▄▆████▇▆▄▄▃▃▃▂▁▁▁");
+  });
+  test("pointwiseAdd", () => {
+    expect(
+      resultMap(dist.pointwiseAdd(dist2), (r: GenericDist) => r.toSparkline(20))
+        .value.value
+    ).toBe("▁▂▅██▅▅▅▆▇█▆▅▃▃▂▂▁▁▁");
+  });
+});
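The `resultMap` calls above read naturally as mapping over a tagged Ok/Error result, the same `{ tag, value }` shape that `testRun` pattern-matches on. A hedged sketch of that shape, for orientation only; the library's actual exported types may differ.

```ts
// Hedged sketch, not the library's actual definitions: a minimal result type
// matching the { tag, value } objects asserted on in the tests above.
type Result<T, E> = { tag: "Ok"; value: T } | { tag: "Error"; value: E };

// Apply fn to the payload of an Ok; pass an Error through untouched.
function resultMapSketch<T, U, E>(
  r: Result<T, E>,
  fn: (t: T) => U
): Result<U, E> {
  return r.tag === "Ok" ? { tag: "Ok", value: fn(r.value) } : r;
}
```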
@@ -0,0 +1,25 @@
module ExpressionValue = ReducerInterface.ExpressionValue

open Jest
open Expect

let expectEvalToBe = (expr: string, answer: string) =>
  Reducer.eval(expr)->ExpressionValue.toStringResult->expect->toBe(answer)

describe("builtin", () => {
  // All MathJs operators and functions are available for string, number and boolean
  // e.g. + - / * > >= < <= == /= not and or
  // See https://mathjs.org/docs/expressions/syntax.html
  // See https://mathjs.org/docs/reference/functions.html
  test("-1", () => expectEvalToBe("-1", "Ok(-1)"))
  test("1-1", () => expectEvalToBe("1-1", "Ok(0)"))
  test("2>1", () => expectEvalToBe("2>1", "Ok(true)"))
  test("concat('a','b')", () => expectEvalToBe("concat('a','b')", "Ok('ab')"))
})

describe("builtin exception", () => {
  //It's a pity that MathJs does not return error position
  test("MathJs Exception", () =>
    expectEvalToBe("testZadanga()", "Error(JS Exception: Error: Undefined function testZadanga)")
  )
})
@@ -0,0 +1,32 @@
open ReducerInterface.ExpressionValue
module MathJs = Reducer.MathJs
module ErrorValue = Reducer.ErrorValue

open Jest
open ExpectJs

describe("eval", () => {
  test("Number", () => expect(MathJs.Eval.eval("1"))->toEqual(Ok(EvNumber(1.))))
  test("Number expr", () => expect(MathJs.Eval.eval("1-1"))->toEqual(Ok(EvNumber(0.))))
  test("String", () => expect(MathJs.Eval.eval("'hello'"))->toEqual(Ok(EvString("hello"))))
  test("String expr", () =>
    expect(MathJs.Eval.eval("concat('hello ','world')"))->toEqual(Ok(EvString("hello world")))
  )
  test("Boolean", () => expect(MathJs.Eval.eval("true"))->toEqual(Ok(EvBool(true))))
  test("Boolean expr", () => expect(MathJs.Eval.eval("2>1"))->toEqual(Ok(EvBool(true))))
})

describe("errors", () => {
  // All those errors propagate up and are returned by the resolver
  test("unknown function", () =>
    expect(MathJs.Eval.eval("testZadanga()"))->toEqual(
      Error(ErrorValue.REJavaScriptExn(Some("Undefined function testZadanga"), Some("Error"))),
    )
  )

  test("unknown answer type", () =>
    expect(MathJs.Eval.eval("1+1i"))->toEqual(
      Error(ErrorValue.RETodo("Unhandled MathJs literal type: object")),
    )
  )
})
@@ -0,0 +1,51 @@
module Parse = Reducer.MathJs.Parse
module Result = Belt.Result

open Jest
open Expect

let expectParseToBe = (expr, answer) =>
  Parse.parse(expr)->Result.flatMap(Parse.castNodeType)->Parse.toStringResult->expect->toBe(answer)

describe("MathJs parse", () => {
  describe("literals operators paranthesis", () => {
    test("1", () => expectParseToBe("1", "1"))
    test("'hello'", () => expectParseToBe("'hello'", "'hello'"))
    test("true", () => expectParseToBe("true", "true"))
    test("1+2", () => expectParseToBe("1+2", "add(1, 2)"))
    test("add(1,2)", () => expectParseToBe("add(1,2)", "add(1, 2)"))
    test("(1)", () => expectParseToBe("(1)", "(1)"))
    test("(1+2)", () => expectParseToBe("(1+2)", "(add(1, 2))"))
  })

  describe("variables", () => {
    Skip.test("define", () => expectParseToBe("x = 1", "???"))
    Skip.test("use", () => expectParseToBe("x", "???"))
  })

  describe("functions", () => {
    Skip.test("define", () => expectParseToBe("identity(x) = x", "???"))
    Skip.test("use", () => expectParseToBe("identity(x)", "???"))
  })

  describe("arrays", () => {
    test("empty", () => expectParseToBe("[]", "[]"))
    test("define", () => expectParseToBe("[0, 1, 2]", "[0, 1, 2]"))
    test("define with strings", () => expectParseToBe("['hello', 'world']", "['hello', 'world']"))
    Skip.test("range", () => expectParseToBe("range(0, 4)", "range(0, 4)"))
    test("index", () => expectParseToBe("([0,1,2])[1]", "([0, 1, 2])[1]"))
  })

  describe("records", () => {
    test("define", () => expectParseToBe("{a: 1, b: 2}", "{a: 1, b: 2}"))
    test("use", () => expectParseToBe("record.property", "record['property']"))
  })

  describe("comments", () => {
    Skip.test("define", () => expectParseToBe("# This is a comment", "???"))
  })

  describe("if statement", () => {
    Skip.test("define", () => expectParseToBe("if (true) { 1 } else { 0 }", "???"))
  })
})
@@ -0,0 +1,11 @@
module Expression = Reducer.Expression
module ExpressionValue = ReducerInterface.ExpressionValue

open Jest
open Expect

let expectParseToBe = (expr: string, answer: string) =>
  Reducer.parse(expr)->Expression.toStringResult->expect->toBe(answer)

let expectEvalToBe = (expr: string, answer: string) =>
  Reducer.eval(expr)->ExpressionValue.toStringResult->expect->toBe(answer)
packages/squiggle-lang/__tests__/Reducer/Reducer_test.res (new file, 81 lines)
@@ -0,0 +1,81 @@
open Jest
open Reducer_TestHelpers

describe("reducer using mathjs parse", () => {
  // Test the MathJs parser compatibility
  // Those tests toString that there is a semantic mapping from MathJs to Expression
  // Reducer.parse is called by Reducer.eval
  // See https://mathjs.org/docs/expressions/syntax.html
  // See https://mathjs.org/docs/reference/functions.html
  // Those tests toString that we are converting mathjs parse tree to what we need

  describe("expressions", () => {
    test("1", () => expectParseToBe("1", "Ok(1)"))
    test("(1)", () => expectParseToBe("(1)", "Ok(1)"))
    test("1+2", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
    test("(1+2)", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
    test("add(1,2)", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
    test("1+2*3", () => expectParseToBe("1+2*3", "Ok((:add 1 (:multiply 2 3)))"))
  })
  describe("arrays", () => {
    //Note. () is a empty list in Lisp
    // The only builtin structure in Lisp is list. There are no arrays
    // [1,2,3] becomes (1 2 3)
    test("empty", () => expectParseToBe("[]", "Ok(())"))
    test("[1, 2, 3]", () => expectParseToBe("[1, 2, 3]", "Ok((1 2 3))"))
    test("['hello', 'world']", () => expectParseToBe("['hello', 'world']", "Ok(('hello' 'world'))"))
    test("index", () => expectParseToBe("([0,1,2])[1]", "Ok((:$atIndex (0 1 2) (1)))"))
  })
  describe("records", () => {
    test("define", () =>
      expectParseToBe("{a: 1, b: 2}", "Ok((:$constructRecord (('a' 1) ('b' 2))))")
    )
    test("use", () =>
      expectParseToBe(
        "{a: 1, b: 2}.a",
        "Ok((:$atIndex (:$constructRecord (('a' 1) ('b' 2))) ('a')))",
      )
    )
  })
})

describe("eval", () => {
  // All MathJs operators and functions are builtin for string, float and boolean
  // e.g. + - / * > >= < <= == /= not and or
  // See https://mathjs.org/docs/expressions/syntax.html
  // See https://mathjs.org/docs/reference/functions.html
  describe("expressions", () => {
    test("1", () => expectEvalToBe("1", "Ok(1)"))
    test("1+2", () => expectEvalToBe("1+2", "Ok(3)"))
    test("(1+2)*3", () => expectEvalToBe("(1+2)*3", "Ok(9)"))
    test("2>1", () => expectEvalToBe("2>1", "Ok(true)"))
    test("concat('a ', 'b')", () => expectEvalToBe("concat('a ', 'b')", "Ok('a b')"))
    test("log(10)", () => expectEvalToBe("log(10)", "Ok(2.302585092994046)"))
    test("cos(10)", () => expectEvalToBe("cos(10)", "Ok(-0.8390715290764524)"))
    // TODO more built ins
  })
  describe("arrays", () => {
    test("empty array", () => expectEvalToBe("[]", "Ok([])"))
    test("[1, 2, 3]", () => expectEvalToBe("[1, 2, 3]", "Ok([1, 2, 3])"))
    test("['hello', 'world']", () => expectEvalToBe("['hello', 'world']", "Ok(['hello', 'world'])"))
    test("index", () => expectEvalToBe("([0,1,2])[1]", "Ok(1)"))
    test("index not found", () =>
      expectEvalToBe("([0,1,2])[10]", "Error(Array index not found: 10)")
    )
  })
  describe("records", () => {
    test("define", () => expectEvalToBe("{a: 1, b: 2}", "Ok({a: 1, b: 2})"))
    test("index", () => expectEvalToBe("{a: 1}.a", "Ok(1)"))
    test("index not found", () => expectEvalToBe("{a: 1}.b", "Error(Record property not found: b)"))
  })
})

describe("test exceptions", () => {
  test("javascript exception", () =>
    expectEvalToBe("jsraise('div by 0')", "Error(JS Exception: Error: 'div by 0')")
  )

  test("rescript exception", () =>
    expectEvalToBe("resraise()", "Error(TODO: unhandled rescript exception)")
  )
})
@ -0,0 +1,128 @@
|
||||||
|
open Jest
|
||||||
|
|
||||||
|
let testSkip: (bool, string, unit => assertion) => unit = (skip: bool) =>
|
||||||
|
if skip {
|
||||||
|
Skip.test
|
||||||
|
} else {
|
||||||
|
test
|
||||||
|
}
|
||||||
|
let testEval = (~skip=false, str, result) =>
|
||||||
|
testSkip(skip)(str, () => Reducer_TestHelpers.expectEvalToBe(str, result))
|
||||||
|
let testParse = (~skip=false, str, result) =>
|
||||||
|
testSkip(skip)(str, () => Reducer_TestHelpers.expectParseToBe(str, result))
|
||||||
|
|
||||||
|
describe("eval on distribution functions", () => {
|
||||||
|
describe("normal distribution", () => {
|
||||||
|
testEval("normal(5,2)", "Ok(Normal(5,2))")
|
||||||
|
})
|
||||||
|
describe("lognormal distribution", () => {
|
||||||
|
testEval("lognormal(5,2)", "Ok(Lognormal(5,2))")
|
||||||
|
})
|
||||||
|
describe("unaryMinus", () => {
|
||||||
|
testEval("mean(-normal(5,2))", "Ok(-5.002887370380851)")
|
||||||
|
})
|
||||||
|
describe("to", () => {
|
||||||
|
testEval("5 to 2", "Error(TODO: Low value must be less than high value.)")
|
||||||
|
testEval("to(2,5)", "Ok(Lognormal(1.1512925464970227,0.278507821238345))")
|
||||||
|
testEval("to(-2,2)", "Ok(Normal(0,1.215913388057542))")
|
||||||
|
})
|
||||||
|
describe("mean", () => {
|
||||||
|
testEval("mean(normal(5,2))", "Ok(5)")
|
||||||
|
testEval("mean(lognormal(1,2))", "Ok(20.085536923187668)")
|
||||||
|
})
|
||||||
|
describe("toString", () => {
|
||||||
|
testEval("toString(normal(5,2))", "Ok('Normal(5,2)')")
|
||||||
|
})
|
||||||
|
describe("normalize", () => {
|
||||||
|
testEval("normalize(normal(5,2))", "Ok(Normal(5,2))")
|
||||||
|
})
|
||||||
|
describe("toPointSet", () => {
|
||||||
|
testEval("toPointSet(normal(5,2))", "Ok(Point Set Distribution)")
|
||||||
|
})
|
||||||
|
describe("toSampleSet", () => {
|
||||||
|
testEval("toSampleSet(normal(5,2), 100)", "Ok(Sample Set Distribution)")
|
||||||
|
})
|
||||||
|
describe("add", () => {
|
||||||
|
testEval("add(normal(5,2), normal(10,2))", "Ok(Normal(15,2.8284271247461903))")
|
||||||
|
testEval("add(normal(5,2), lognormal(10,2))", "Ok(Sample Set Distribution)")
|
||||||
|
testEval("add(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
||||||
|
testEval("add(3, normal(5,2))", "Ok(Point Set Distribution)")
|
||||||
|
testEval("3+normal(5,2)", "Ok(Point Set Distribution)")
|
||||||
|
testEval("normal(5,2)+3", "Ok(Point Set Distribution)")
|
||||||
|
})
|
||||||
|
describe("truncate", () => {
|
||||||
|
testEval("truncateLeft(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
||||||
|
testEval("truncateRight(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
||||||
|
testEval("truncate(normal(5,2), 3, 8)", "Ok(Point Set Distribution)")
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("exp", () => {
|
||||||
|
testEval("exp(normal(5,2))", "Ok(Point Set Distribution)")
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("pow", () => {
|
||||||
|
testEval("pow(3, uniform(5,8))", "Ok(Point Set Distribution)")
|
||||||
|
testEval("pow(uniform(5,8), 3)", "Ok(Point Set Distribution)")
|
||||||
|
testEval("pow(uniform(5,8), uniform(9, 10))", "Ok(Sample Set Distribution)")
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("log", () => {
|
||||||
|
testEval("log(2, uniform(5,8))", "Ok(Point Set Distribution)")
|
||||||
|
testEval("log(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
||||||
|
testEval("log(normal(5,2), normal(10,1))", "Ok(Sample Set Distribution)")
|
||||||
|
testEval("log(uniform(5,8))", "Ok(Point Set Distribution)")
|
||||||
|
testEval("log10(uniform(5,8))", "Ok(Point Set Distribution)")
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("dotLog", () => {
|
||||||
|
testEval("dotLog(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
||||||
|
testEval("dotLog(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
||||||
|
testEval("dotLog(normal(5,2), normal(10,1))", "Ok(Point Set Distribution)")
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("dotAdd", () => {
|
||||||
|
testEval("dotAdd(normal(5,2), lognormal(10,2))", "Ok(Point Set Distribution)")
|
||||||
|
testEval("dotAdd(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("equality", () => {
|
||||||
|
testEval(~skip=true, "normal(5,2) == normal(5,2)", "Ok(true)")
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("mixture", () => {
|
||||||
|
testEval(
|
||||||
|
~skip=true,
|
||||||
|
"mx(normal(5,2), normal(10,1), normal(15, 1))",
|
||||||
|
"Ok(Point Set Distribution)",
|
||||||
|
)
|
||||||
|
testEval(
|
||||||
|
~skip=true,
|
||||||
|
"mixture(normal(5,2), normal(10,1), [.2,, .4])",
|
||||||
|
"Ok(Point Set Distribution)",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("parse on distribution functions", () => {
|
||||||
|
describe("power", () => {
|
||||||
|
testParse("normal(5,2) ^ normal(5,1)", "Ok((:pow (:normal 5 2) (:normal 5 1)))")
|
||||||
|
testParse("3 ^ normal(5,1)", "Ok((:pow 3 (:normal 5 1)))")
|
||||||
|
testParse("normal(5,2) ^ 3", "Ok((:pow (:normal 5 2) 3))")
|
||||||
|
})
|
||||||
|
describe("pointwise arithmetic expressions", () => {
|
||||||
|
testParse(~skip=true, "normal(5,2) .+ normal(5,1)", "Ok((:dotAdd (:normal 5 2) (:normal 5 1)))")
|
||||||
|
testParse(~skip=true, "normal(5,2) .- normal(5,1)", "Ok((:dotSubtract (:normal 5 2) (:normal 5 1)))")
|
||||||
|
testParse("normal(5,2) .* normal(5,1)", "Ok((:dotMultiply (:normal 5 2) (:normal 5 1)))")
|
||||||
|
testParse("normal(5,2) ./ normal(5,1)", "Ok((:dotDivide (:normal 5 2) (:normal 5 1)))")
|
||||||
|
testParse("normal(5,2) .^ normal(5,1)", "Ok((:dotPow (:normal 5 2) (:normal 5 1)))")
|
||||||
|
})
|
||||||
|
describe("equality", () => {
|
||||||
|
testParse("5 == normal(5,2)", "Ok((:equal 5 (:normal 5 2)))")
|
||||||
|
})
|
||||||
|
describe("pointwise adding two normals", () => {
|
||||||
|
testParse(~skip=true, "normal(5,2) .+ normal(5,1)", "Ok((:dotAdd (:normal 5 2) (:normal 5 1)))")
|
||||||
|
})
|
||||||
|
describe("exponential of one distribution", () => {
|
||||||
|
testParse(~skip=true, "exp(normal(5,2)", "Ok((:pow (:normal 5 2) 3))")
|
||||||
|
})
|
||||||
|
})
@@ -0,0 +1,11 @@
open ReducerInterface.ExpressionValue
open Jest
open Expect

describe("ExpressionValue", () => {
  test("argsToString", () => expect([EvNumber(1.), EvString("a")]->argsToString)->toBe("1, 'a'"))

  test("toStringFunctionCall", () =>
    expect(("fn", [EvNumber(1.), EvString("a")])->toStringFunctionCall)->toBe("fn(1, 'a')")
  )
})
@@ -1,47 +0,0 @@
open Jest
open Expect

let makeTest = (~only=false, str, item1, item2) =>
  only
    ? Only.test(str, () => expect(item1) -> toEqual(item2))
    : test(str, () => expect(item1) -> toEqual(item2))

describe("Lodash", () =>
  describe("Lodash", () => {
    makeTest(
      "split",
      SampleSet.Internals.T.splitContinuousAndDiscrete([1.432, 1.33455, 2.0]),
      ([1.432, 1.33455, 2.0], E.FloatFloatMap.empty()),
    )
    makeTest(
      "split",
      SampleSet.Internals.T.splitContinuousAndDiscrete([
        1.432,
        1.33455,
        2.0,
        2.0,
        2.0,
        2.0,
      ]) |> (((c, disc)) => (c, disc |> E.FloatFloatMap.toArray)),
      ([1.432, 1.33455], [(2.0, 4.0)]),
    )

    let makeDuplicatedArray = count => {
      let arr = Belt.Array.range(1, count) |> E.A.fmap(float_of_int)
      let sorted = arr |> Belt.SortArray.stableSortBy(_, compare)
      E.A.concatMany([sorted, sorted, sorted, sorted]) |> Belt.SortArray.stableSortBy(_, compare)
    }

    let (_, discrete) = SampleSet.Internals.T.splitContinuousAndDiscrete(
      makeDuplicatedArray(10),
    )
    let toArr = discrete |> E.FloatFloatMap.toArray
    makeTest("splitMedium", toArr |> Belt.Array.length, 10)

    let (_c, discrete) = SampleSet.Internals.T.splitContinuousAndDiscrete(
      makeDuplicatedArray(500),
    )
    let toArr = discrete |> E.FloatFloatMap.toArray
    makeTest("splitMedium", toArr |> Belt.Array.length, 500)
  })
)
26
packages/squiggle-lang/__tests__/TestHelpers.res
Normal file
@@ -0,0 +1,26 @@
open Jest
open Expect

let makeTest = (~only=false, str, item1, item2) =>
  only
    ? Only.test(str, () => expect(item1) -> toEqual(item2))
    : test(str, () => expect(item1) -> toEqual(item2))

let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Output)

let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)

let env: DistributionOperation.env = {
  sampleCount: 100,
  xyPointLength: 100,
}

let run = DistributionOperation.run(~env)
let outputMap = fmap(~env)
let unreachableInTestFileMessage = "Should be impossible to reach (This error is in test file)"
let toExtFloat: option<float> => float = E.O.toExt(unreachableInTestFileMessage)
let toExtDist: option<GenericDist_Types.genericDist> => GenericDist_Types.genericDist = E.O.toExt(unreachableInTestFileMessage)
// let toExt: option<'a> => 'a = E.O.toExt(unreachableInTestFileMessage)
let unpackFloat = x => x -> toFloat -> toExtFloat
let unpackDist = y => y -> toDist -> toExtDist
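
A minimal sketch (not part of the diff) of how a test file elsewhere in `__tests__` might use these helpers: `run` is the partially applied `DistributionOperation.run(~env)` defined above, `unpackFloat` unwraps its `Float` output, and `FromDist`/`ToFloat` are the operation constructors added later in this commit. The module and describe names here are illustrative.

```rescript
open Jest
open Expect
open TestHelpers // the helper file above
open GenericDist_Types.Operation // brings FromDist / ToFloat constructors into scope

describe("DistributionOperation via TestHelpers (illustrative)", () => {
  test("mean of a point-mass distribution is the point itself", () => {
    // GenericDist.fromFloat builds a symbolic float distribution, per GenericDist.res in this commit.
    let result = run(FromDist(ToFloat(#Mean), GenericDist.fromFloat(5.0)))->unpackFloat
    expect(result)->toBe(5.0)
  })
})
```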
@@ -1,9 +1,8 @@
{
  "name": "@quri/squiggle-lang",
  "reason": {},
  "sources": [
    {
      "dir": "src",
      "dir": "src/rescript",
      "subdirs": true
    },
    {
@@ -28,12 +27,13 @@
  "bs-dependencies": [
    "@glennsl/rescript-jest",
    "@glennsl/bs-json",
    "rationale"
    "rationale",
    "bisect_ppx"
  ],
  "gentypeconfig": {
    "language": "typescript",
    "module": "commonjs",
    "shims": {},
    "shims": {"Js": "Js"},
    "debug": {
      "all": false,
      "basic": false
@@ -41,7 +41,13 @@
  },
  "refmt": 3,
  "warnings": {
    "number": "+A-42-48-9-30-4-102"
    "number": "+A-42-48-9-30-4-102-20-27-41"
  },
  "ppx-flags": []
  "ppx-flags": [
    [
      "../../node_modules/bisect_ppx/ppx",
      "--exclude-files",
      ".*_test\\.res$$"
    ]
  ]
}
@@ -1,5 +1,13 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
module.exports = {
  preset: 'ts-jest',
  preset: "ts-jest",
  testEnvironment: 'node',
  testEnvironment: "node",
  setupFilesAfterEnv: [
    "<rootdir>/../../node_modules/bisect_ppx/src/runtime/js/jest.bs.js",
  ],
  testPathIgnorePatterns: [
    ".*Fixtures.bs.js",
    "/node_modules/",
    ".*Helpers.bs.js",
  ],
};
@@ -7,8 +7,10 @@
    "bundle": "webpack",
    "start": "rescript build -w -with-deps",
    "clean": "rescript clean",
    "test:reducer": "jest --testPathPattern '.*__tests__/Reducer.*'",
    "test": "jest",
    "test:watch": "jest --watchAll",
    "coverage": "rm -f *.coverage; yarn clean; BISECT_ENABLE=yes yarn build; yarn test; bisect-ppx-report html",
    "all": "yarn build && yarn bundle && yarn test"
  },
  "keywords": [
@@ -23,12 +25,12 @@
    "mathjs": "10.4.3",
    "pdfast": "^0.2.0",
    "rationale": "0.2.0",
    "rescript": "^9.1.4"
    "rescript": "^9.1.4",
    "bisect_ppx": "^2.7.1"
  },
  "devDependencies": {
    "@glennsl/rescript-jest": "^0.9.0",
    "@types/jest": "^27.4.0",
    "@types/webpack": "^5.28.0",
    "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
    "docsify": "^4.12.2",
    "gentype": "^4.3.0",
|
@ -1,17 +1,228 @@
|
||||||
import {runAll} from '../rescript/ProgramEvaluator.gen';
|
import { runAll } from "../rescript/ProgramEvaluator.gen";
|
||||||
import type { Inputs_SamplingInputs_t as SamplingInputs, exportEnv, exportType, exportDistribution} from '../rescript/ProgramEvaluator.gen';
|
import type {
|
||||||
export type { SamplingInputs, exportEnv, exportDistribution }
|
Inputs_SamplingInputs_t as SamplingInputs,
|
||||||
export type {t as DistPlus} from '../rescript/pointSetDist/DistPlus.gen';
|
exportEnv,
|
||||||
|
exportType,
|
||||||
|
exportDistribution,
|
||||||
|
} from "../rescript/ProgramEvaluator.gen";
|
||||||
|
export type { SamplingInputs, exportEnv, exportDistribution };
|
||||||
|
export type { t as DistPlus } from "../rescript/OldInterpreter/DistPlus.gen";
|
||||||
|
import {
|
||||||
|
genericDist,
|
||||||
|
env,
|
||||||
|
resultDist,
|
||||||
|
resultFloat,
|
||||||
|
resultString,
|
||||||
|
} from "../rescript/TypescriptInterface.gen";
|
||||||
|
export {makeSampleSetDist} from "../rescript/TypescriptInterface.gen";
|
||||||
|
import {
|
||||||
|
Constructors_mean,
|
||||||
|
Constructors_sample,
|
||||||
|
Constructors_pdf,
|
||||||
|
Constructors_cdf,
|
||||||
|
Constructors_inv,
|
||||||
|
Constructors_normalize,
|
||||||
|
Constructors_toPointSet,
|
||||||
|
Constructors_toSampleSet,
|
||||||
|
Constructors_truncate,
|
||||||
|
Constructors_inspect,
|
||||||
|
Constructors_toString,
|
||||||
|
Constructors_toSparkline,
|
||||||
|
Constructors_algebraicAdd,
|
||||||
|
Constructors_algebraicMultiply,
|
||||||
|
Constructors_algebraicDivide,
|
||||||
|
Constructors_algebraicSubtract,
|
||||||
|
Constructors_algebraicLogarithm,
|
||||||
|
Constructors_algebraicPower,
|
||||||
|
Constructors_pointwiseAdd,
|
||||||
|
Constructors_pointwiseMultiply,
|
||||||
|
Constructors_pointwiseDivide,
|
||||||
|
Constructors_pointwiseSubtract,
|
||||||
|
Constructors_pointwiseLogarithm,
|
||||||
|
Constructors_pointwisePower,
|
||||||
|
} from "../rescript/Distributions/DistributionOperation/DistributionOperation.gen";
|
||||||
|
|
||||||
export let defaultSamplingInputs : SamplingInputs = {
|
export let defaultSamplingInputs: SamplingInputs = {
|
||||||
sampleCount : 10000,
|
sampleCount: 10000,
|
||||||
outputXYPoints : 10000,
|
outputXYPoints: 10000,
|
||||||
pointDistLength : 1000
|
pointDistLength: 1000,
|
||||||
|
};
|
||||||
|
|
||||||
|
export function run(
|
||||||
|
squiggleString: string,
|
||||||
|
samplingInputs?: SamplingInputs,
|
||||||
|
environment?: exportEnv
|
||||||
|
): { tag: "Ok"; value: exportType } | { tag: "Error"; value: string } {
|
||||||
|
let si: SamplingInputs = samplingInputs
|
||||||
|
? samplingInputs
|
||||||
|
: defaultSamplingInputs;
|
||||||
|
let env: exportEnv = environment ? environment : [];
|
||||||
|
return runAll(squiggleString, si, env);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function run(squiggleString : string, samplingInputs? : SamplingInputs, environment?: exportEnv) : { tag: "Ok"; value: exportType }
|
//This is clearly not fully typed. I think later we should use a functional library to
|
||||||
| { tag: "Error"; value: string } {
|
// provide a better Either type and corresponding functions.
|
||||||
let si : SamplingInputs = samplingInputs ? samplingInputs : defaultSamplingInputs
|
type result =
|
||||||
let env : exportEnv = environment ? environment : []
|
| {
|
||||||
return runAll(squiggleString, si, env)
|
tag: "Ok";
|
||||||
|
value: any;
|
||||||
|
}
|
||||||
|
| {
|
||||||
|
tag: "Error";
|
||||||
|
value: any;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function resultMap(r: result, mapFn: any): result {
|
||||||
|
if (r.tag === "Ok") {
|
||||||
|
return { tag: "Ok", value: mapFn(r.value) };
|
||||||
|
} else {
|
||||||
|
return r;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function resultExn(r: result): any {
|
||||||
|
r.value
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GenericDist {
|
||||||
|
t: genericDist;
|
||||||
|
env: env;
|
||||||
|
|
||||||
|
constructor(t: genericDist, env: env) {
|
||||||
|
this.t = t;
|
||||||
|
this.env = env;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
mapResultDist(r: resultDist) {
|
||||||
|
return resultMap(r, (v: genericDist) => new GenericDist(v, this.env));
|
||||||
|
}
|
||||||
|
|
||||||
|
mean() {
|
||||||
|
return Constructors_mean({ env: this.env }, this.t);
|
||||||
|
}
|
||||||
|
|
||||||
|
sample(): resultFloat {
|
||||||
|
return Constructors_sample({ env: this.env }, this.t);
|
||||||
|
}
|
||||||
|
|
||||||
|
pdf(n: number): resultFloat {
|
||||||
|
return Constructors_pdf({ env: this.env }, this.t, n);
|
||||||
|
}
|
||||||
|
|
||||||
|
cdf(n: number): resultFloat {
|
||||||
|
return Constructors_cdf({ env: this.env }, this.t, n);
|
||||||
|
}
|
||||||
|
|
||||||
|
inv(n: number): resultFloat {
|
||||||
|
return Constructors_inv({ env: this.env }, this.t, n);
|
||||||
|
}
|
||||||
|
|
||||||
|
normalize() {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_normalize({ env: this.env }, this.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
toPointSet() {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_toPointSet({ env: this.env }, this.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
toSampleSet(n: number) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_toSampleSet({ env: this.env }, this.t, n)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
truncate(left: number, right: number) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_truncate({ env: this.env }, this.t, left, right)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
inspect() {
|
||||||
|
return this.mapResultDist(Constructors_inspect({ env: this.env }, this.t));
|
||||||
|
}
|
||||||
|
|
||||||
|
toString(): resultString {
|
||||||
|
return Constructors_toString({ env: this.env }, this.t);
|
||||||
|
}
|
||||||
|
|
||||||
|
toSparkline(n: number): resultString {
|
||||||
|
return Constructors_toSparkline({ env: this.env }, this.t, n);
|
||||||
|
}
|
||||||
|
|
||||||
|
algebraicAdd(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_algebraicAdd({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
algebraicMultiply(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_algebraicMultiply({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
algebraicDivide(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_algebraicDivide({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
algebraicSubtract(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_algebraicSubtract({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
algebraicLogarithm(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_algebraicLogarithm({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
algebraicPower(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_algebraicPower({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pointwiseAdd(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_pointwiseAdd({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pointwiseMultiply(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_pointwiseMultiply({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pointwiseDivide(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_pointwiseDivide({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pointwiseSubtract(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_pointwiseSubtract({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pointwiseLogarithm(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_pointwiseLogarithm({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pointwisePower(d2: GenericDist) {
|
||||||
|
return this.mapResultDist(
|
||||||
|
Constructors_pointwisePower({ env: this.env }, this.t, d2.t)
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,230 @@
|
||||||
|
type functionCallInfo = GenericDist_Types.Operation.genericFunctionCallInfo
|
||||||
|
type genericDist = GenericDist_Types.genericDist
|
||||||
|
type error = GenericDist_Types.error
|
||||||
|
|
||||||
|
// TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.
|
||||||
|
|
||||||
|
type env = {
|
||||||
|
sampleCount: int,
|
||||||
|
xyPointLength: int,
|
||||||
|
}
|
||||||
|
|
||||||
|
type outputType =
|
||||||
|
| Dist(genericDist)
|
||||||
|
| Float(float)
|
||||||
|
| String(string)
|
||||||
|
| GenDistError(error)
|
||||||
|
|
||||||
|
/*
|
||||||
|
We're going to add another function to this module later, so first define a
|
||||||
|
local version, which is not exported.
|
||||||
|
*/
|
||||||
|
module OutputLocal = {
|
||||||
|
type t = outputType
|
||||||
|
|
||||||
|
let toError = (t: outputType) =>
|
||||||
|
switch t {
|
||||||
|
| GenDistError(d) => Some(d)
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
|
||||||
|
let toErrorOrUnreachable = (t: t): error => t->toError->E.O2.default((Unreachable: error))
|
||||||
|
|
||||||
|
let toDistR = (t: t): result<genericDist, error> =>
|
||||||
|
switch t {
|
||||||
|
| Dist(r) => Ok(r)
|
||||||
|
| e => Error(toErrorOrUnreachable(e))
|
||||||
|
}
|
||||||
|
|
||||||
|
let toDist = (t: t) =>
|
||||||
|
switch t {
|
||||||
|
| Dist(d) => Some(d)
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
|
||||||
|
let toFloat = (t: t) =>
|
||||||
|
switch t {
|
||||||
|
| Float(d) => Some(d)
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
|
||||||
|
let toFloatR = (t: t): result<float, error> =>
|
||||||
|
switch t {
|
||||||
|
| Float(r) => Ok(r)
|
||||||
|
| e => Error(toErrorOrUnreachable(e))
|
||||||
|
}
|
||||||
|
|
||||||
|
let toString = (t: t) =>
|
||||||
|
switch t {
|
||||||
|
| String(d) => Some(d)
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
|
||||||
|
let toStringR = (t: t): result<string, error> =>
|
||||||
|
switch t {
|
||||||
|
| String(r) => Ok(r)
|
||||||
|
| e => Error(toErrorOrUnreachable(e))
|
||||||
|
}
|
||||||
|
|
||||||
|
//This is used to catch errors in other switch statements.
|
||||||
|
let fromResult = (r: result<t, error>): outputType =>
|
||||||
|
switch r {
|
||||||
|
| Ok(t) => t
|
||||||
|
| Error(e) => GenDistError(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
||||||
|
let {sampleCount, xyPointLength} = env
|
||||||
|
|
||||||
|
let reCall = (~env=env, ~functionCallInfo=functionCallInfo, ()) => {
|
||||||
|
run(~env, functionCallInfo)
|
||||||
|
}
|
||||||
|
|
||||||
|
let toPointSetFn = r => {
|
||||||
|
switch reCall(~functionCallInfo=FromDist(ToDist(ToPointSet), r), ()) {
|
||||||
|
| Dist(PointSet(p)) => Ok(p)
|
||||||
|
| e => Error(OutputLocal.toErrorOrUnreachable(e))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let toSampleSetFn = r => {
|
||||||
|
switch reCall(~functionCallInfo=FromDist(ToDist(ToSampleSet(sampleCount)), r), ()) {
|
||||||
|
| Dist(SampleSet(p)) => Ok(p)
|
||||||
|
| e => Error(OutputLocal.toErrorOrUnreachable(e))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let scaleMultiply = (r, weight) =>
|
||||||
|
reCall(
|
||||||
|
~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
|
||||||
|
(),
|
||||||
|
)->OutputLocal.toDistR
|
||||||
|
|
||||||
|
let pointwiseAdd = (r1, r2) =>
|
||||||
|
reCall(
|
||||||
|
~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
|
||||||
|
(),
|
||||||
|
)->OutputLocal.toDistR
|
||||||
|
|
||||||
|
let fromDistFn = (subFnName: GenericDist_Types.Operation.fromDist, dist: genericDist) =>
|
||||||
|
switch subFnName {
|
||||||
|
| ToFloat(distToFloatOperation) =>
|
||||||
|
GenericDist.toFloatOperation(dist, ~toPointSetFn, ~distToFloatOperation)
|
||||||
|
->E.R2.fmap(r => Float(r))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
| ToString(ToString) => dist->GenericDist.toString->String
|
||||||
|
| ToString(ToSparkline(bucketCount)) =>
|
||||||
|
GenericDist.toSparkline(dist, ~sampleCount, ~bucketCount, ())
|
||||||
|
->E.R2.fmap(r => String(r))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
| ToDist(Inspect) => {
|
||||||
|
Js.log2("Console log requested: ", dist)
|
||||||
|
Dist(dist)
|
||||||
|
}
|
||||||
|
| ToDist(Normalize) => dist->GenericDist.normalize->Dist
|
||||||
|
| ToDist(Truncate(leftCutoff, rightCutoff)) =>
|
||||||
|
GenericDist.truncate(~toPointSetFn, ~leftCutoff, ~rightCutoff, dist, ())
|
||||||
|
->E.R2.fmap(r => Dist(r))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
| ToDist(ToSampleSet(n)) =>
|
||||||
|
dist
|
||||||
|
->GenericDist.toSampleSetDist(n)
|
||||||
|
->E.R2.fmap(r => Dist(SampleSet(r)))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
| ToDist(ToPointSet) =>
|
||||||
|
dist
|
||||||
|
->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
|
||||||
|
->E.R2.fmap(r => Dist(PointSet(r)))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
| ToDistCombination(Algebraic, _, #Float(_)) => GenDistError(NotYetImplemented)
|
||||||
|
| ToDistCombination(Algebraic, arithmeticOperation, #Dist(t2)) =>
|
||||||
|
dist
|
||||||
|
->GenericDist.algebraicCombination(~toPointSetFn, ~toSampleSetFn, ~arithmeticOperation, ~t2)
|
||||||
|
->E.R2.fmap(r => Dist(r))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
| ToDistCombination(Pointwise, arithmeticOperation, #Dist(t2)) =>
|
||||||
|
dist
|
||||||
|
->GenericDist.pointwiseCombination(~toPointSetFn, ~arithmeticOperation, ~t2)
|
||||||
|
->E.R2.fmap(r => Dist(r))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
| ToDistCombination(Pointwise, arithmeticOperation, #Float(float)) =>
|
||||||
|
dist
|
||||||
|
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~arithmeticOperation, ~float)
|
||||||
|
->E.R2.fmap(r => Dist(r))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
}
|
||||||
|
|
||||||
|
switch functionCallInfo {
|
||||||
|
| FromDist(subFnName, dist) => fromDistFn(subFnName, dist)
|
||||||
|
| FromFloat(subFnName, float) =>
|
||||||
|
reCall(~functionCallInfo=FromDist(subFnName, GenericDist.fromFloat(float)), ())
|
||||||
|
| Mixture(dists) =>
|
||||||
|
dists
|
||||||
|
->GenericDist.mixture(~scaleMultiplyFn=scaleMultiply, ~pointwiseAddFn=pointwiseAdd)
|
||||||
|
->E.R2.fmap(r => Dist(r))
|
||||||
|
->OutputLocal.fromResult
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let runFromDist = (~env, ~functionCallInfo, dist) => run(~env, FromDist(functionCallInfo, dist))
|
||||||
|
let runFromFloat = (~env, ~functionCallInfo, float) => run(~env, FromFloat(functionCallInfo, float))
|
||||||
|
|
||||||
|
module Output = {
|
||||||
|
include OutputLocal
|
||||||
|
|
||||||
|
let fmap = (
|
||||||
|
~env,
|
||||||
|
input: outputType,
|
||||||
|
functionCallInfo: GenericDist_Types.Operation.singleParamaterFunction,
|
||||||
|
): outputType => {
|
||||||
|
let newFnCall: result<functionCallInfo, error> = switch (functionCallInfo, input) {
|
||||||
|
| (FromDist(fromDist), Dist(o)) => Ok(FromDist(fromDist, o))
|
||||||
|
| (FromFloat(fromDist), Float(o)) => Ok(FromFloat(fromDist, o))
|
||||||
|
| (_, GenDistError(r)) => Error(r)
|
||||||
|
| (FromDist(_), _) => Error(Other("Expected dist, got something else"))
|
||||||
|
| (FromFloat(_), _) => Error(Other("Expected float, got something else"))
|
||||||
|
}
|
||||||
|
newFnCall->E.R2.fmap(run(~env))->OutputLocal.fromResult
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// See comment above GenericDist_Types.Constructors to explain the purpose of this module.
|
||||||
|
// I tried having another internal module called UsingDists, similar to how its done in
|
||||||
|
// GenericDist_Types.Constructors. However, this broke GenType for me, so beware.
|
||||||
|
module Constructors = {
|
||||||
|
module C = GenericDist_Types.Constructors.UsingDists
|
||||||
|
open OutputLocal
|
||||||
|
let mean = (~env, dist) => C.mean(dist)->run(~env)->toFloatR
|
||||||
|
let sample = (~env, dist) => C.sample(dist)->run(~env)->toFloatR
|
||||||
|
let cdf = (~env, dist, f) => C.cdf(dist, f)->run(~env)->toFloatR
|
||||||
|
let inv = (~env, dist, f) => C.inv(dist, f)->run(~env)->toFloatR
|
||||||
|
let pdf = (~env, dist, f) => C.pdf(dist, f)->run(~env)->toFloatR
|
||||||
|
let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
|
||||||
|
let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
|
||||||
|
let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
|
||||||
|
let truncate = (~env, dist, leftCutoff, rightCutoff) =>
|
||||||
|
C.truncate(dist, leftCutoff, rightCutoff)->run(~env)->toDistR
|
||||||
|
let inspect = (~env, dist) => C.inspect(dist)->run(~env)->toDistR
|
||||||
|
let toString = (~env, dist) => C.toString(dist)->run(~env)->toStringR
|
||||||
|
let toSparkline = (~env, dist, bucketCount) =>
|
||||||
|
C.toSparkline(dist, bucketCount)->run(~env)->toStringR
|
||||||
|
let algebraicAdd = (~env, dist1, dist2) => C.algebraicAdd(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let algebraicMultiply = (~env, dist1, dist2) =>
|
||||||
|
C.algebraicMultiply(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let algebraicDivide = (~env, dist1, dist2) => C.algebraicDivide(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let algebraicSubtract = (~env, dist1, dist2) =>
|
||||||
|
C.algebraicSubtract(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let algebraicLogarithm = (~env, dist1, dist2) =>
|
||||||
|
C.algebraicLogarithm(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let algebraicPower = (~env, dist1, dist2) => C.algebraicPower(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let pointwiseAdd = (~env, dist1, dist2) => C.pointwiseAdd(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let pointwiseMultiply = (~env, dist1, dist2) =>
|
||||||
|
C.pointwiseMultiply(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let pointwiseDivide = (~env, dist1, dist2) => C.pointwiseDivide(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let pointwiseSubtract = (~env, dist1, dist2) =>
|
||||||
|
C.pointwiseSubtract(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let pointwiseLogarithm = (~env, dist1, dist2) =>
|
||||||
|
C.pointwiseLogarithm(dist1, dist2)->run(~env)->toDistR
|
||||||
|
let pointwisePower = (~env, dist1, dist2) => C.pointwisePower(dist1, dist2)->run(~env)->toDistR
|
||||||
|
}
|
@@ -0,0 +1,95 @@
@genType
type env = {
  sampleCount: int,
  xyPointLength: int,
}

open GenericDist_Types

@genType
type outputType =
  | Dist(genericDist)
  | Float(float)
  | String(string)
  | GenDistError(error)

@genType
let run: (~env: env, GenericDist_Types.Operation.genericFunctionCallInfo) => outputType
let runFromDist: (
  ~env: env,
  ~functionCallInfo: GenericDist_Types.Operation.fromDist,
  genericDist,
) => outputType
let runFromFloat: (
  ~env: env,
  ~functionCallInfo: GenericDist_Types.Operation.fromDist,
  float,
) => outputType

module Output: {
  type t = outputType
  let toDist: t => option<genericDist>
  let toDistR: t => result<genericDist, error>
  let toFloat: t => option<float>
  let toFloatR: t => result<float, error>
  let toString: t => option<string>
  let toStringR: t => result<string, error>
  let toError: t => option<error>
  let fmap: (~env: env, t, GenericDist_Types.Operation.singleParamaterFunction) => t
}

module Constructors: {
  @genType
  let mean: (~env: env, genericDist) => result<float, error>
  @genType
  let sample: (~env: env, genericDist) => result<float, error>
  @genType
  let cdf: (~env: env, genericDist, float) => result<float, error>
  @genType
  let inv: (~env: env, genericDist, float) => result<float, error>
  @genType
  let pdf: (~env: env, genericDist, float) => result<float, error>
  @genType
  let normalize: (~env: env, genericDist) => result<genericDist, error>
  @genType
  let toPointSet: (~env: env, genericDist) => result<genericDist, error>
  @genType
  let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
  @genType
  let truncate: (
    ~env: env,
    genericDist,
    option<float>,
    option<float>,
  ) => result<genericDist, error>
  @genType
  let inspect: (~env: env, genericDist) => result<genericDist, error>
  @genType
  let toString: (~env: env, genericDist) => result<string, error>
  @genType
  let toSparkline: (~env: env, genericDist, int) => result<string, error>
  @genType
  let algebraicAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwisePower: (~env: env, genericDist, genericDist) => result<genericDist, error>
}
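
As a quick orientation (not part of the diff), the `Constructors` interface above can be driven from ReScript roughly like this. The `env` values and the point-mass distributions are arbitrary; `GenericDist.fromFloat` is the constructor defined in GenericDist.res later in this commit.

```rescript
// A minimal sketch of calling the Constructors interface above.
let env: DistributionOperation.env = {sampleCount: 1000, xyPointLength: 1000}
let d1 = GenericDist.fromFloat(1.0)
let d2 = GenericDist.fromFloat(2.0)

// algebraicAdd and toString both return `result` values, per the signatures above,
// so they chain with Belt.Result.flatMap.
let sum = DistributionOperation.Constructors.algebraicAdd(~env, d1, d2)
let summary = sum->Belt.Result.flatMap(d => DistributionOperation.Constructors.toString(~env, d))
```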
|
@ -0,0 +1,92 @@
|
||||||
|
type genericDist =
|
||||||
|
| PointSet(PointSetTypes.pointSetDist)
|
||||||
|
| SampleSet(array<float>)
|
||||||
|
| Symbolic(SymbolicDistTypes.symbolicDist)
|
||||||
|
|
||||||
|
type error =
|
||||||
|
| NotYetImplemented
|
||||||
|
| Unreachable
|
||||||
|
| DistributionVerticalShiftIsInvalid
|
||||||
|
| Other(string)
|
||||||
|
|
||||||
|
module Operation = {
|
||||||
|
type direction =
|
||||||
|
| Algebraic
|
||||||
|
| Pointwise
|
||||||
|
|
||||||
|
type arithmeticOperation = [
|
||||||
|
| #Add
|
||||||
|
| #Multiply
|
||||||
|
| #Subtract
|
||||||
|
| #Divide
|
||||||
|
| #Power
|
||||||
|
| #Logarithm
|
||||||
|
]
|
||||||
|
|
||||||
|
let arithmeticToFn = (arithmetic: arithmeticOperation) =>
|
||||||
|
switch arithmetic {
|
||||||
|
| #Add => \"+."
|
||||||
|
| #Multiply => \"*."
|
||||||
|
| #Subtract => \"-."
|
||||||
|
| #Power => \"**"
|
||||||
|
| #Divide => \"/."
|
||||||
|
| #Logarithm => (a, b) => log(a) /. log(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
type toFloat = [
|
||||||
|
| #Cdf(float)
|
||||||
|
| #Inv(float)
|
||||||
|
| #Pdf(float)
|
||||||
|
| #Mean
|
||||||
|
| #Sample
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
module DistributionOperation = {
|
||||||
|
type toDist =
|
||||||
|
| Normalize
|
||||||
|
| ToPointSet
|
||||||
|
| ToSampleSet(int)
|
||||||
|
| Truncate(option<float>, option<float>)
|
||||||
|
| Inspect
|
||||||
|
|
||||||
|
type toFloatArray = Sample(int)
|
||||||
|
|
||||||
|
type fromDist =
|
||||||
|
| ToFloat(Operation.toFloat)
|
||||||
|
| ToDist(toDist)
|
||||||
|
| ToDistCombination(Operation.direction, Operation.arithmeticOperation, [#Dist(genericDist) | #Float(float)])
|
||||||
|
| ToString
|
||||||
|
|
||||||
|
type singleParamaterFunction =
|
||||||
|
| FromDist(fromDist)
|
||||||
|
| FromFloat(fromDist)
|
||||||
|
|
||||||
|
type genericFunctionCallInfo =
|
||||||
|
| FromDist(fromDist, genericDist)
|
||||||
|
| FromFloat(fromDist, float)
|
||||||
|
| Mixture(array<(genericDist, float)>)
|
||||||
|
|
||||||
|
let distCallToString = (distFunction: fromDist): string =>
|
||||||
|
switch distFunction {
|
||||||
|
| ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
|
||||||
|
| ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
|
||||||
|
| ToFloat(#Mean) => `mean`
|
||||||
|
| ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
|
||||||
|
| ToFloat(#Sample) => `sample`
|
||||||
|
| ToDist(Normalize) => `normalize`
|
||||||
|
| ToDist(ToPointSet) => `toPointSet`
|
||||||
|
| ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
|
||||||
|
| ToDist(Truncate(_, _)) => `truncate`
|
||||||
|
| ToDist(Inspect) => `inspect`
|
||||||
|
| ToString => `toString`
|
||||||
|
| ToDistCombination(Algebraic, _, _) => `algebraic`
|
||||||
|
| ToDistCombination(Pointwise, _, _) => `pointwise`
|
||||||
|
}
|
||||||
|
|
||||||
|
let toString = (d: genericFunctionCallInfo): string =>
|
||||||
|
switch d {
|
||||||
|
| FromDist(f, _) | FromFloat(f, _) => distCallToString(f)
|
||||||
|
| Mixture(_) => `mixture`
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,284 @@
|
||||||
|
//TODO: multimodal, add interface, test somehow, track performance, refactor sampleSet, refactor ASTEvaluator.res.
|
||||||
|
type t = GenericDist_Types.genericDist
|
||||||
|
type error = GenericDist_Types.error
|
||||||
|
type toPointSetFn = t => result<PointSetTypes.pointSetDist, error>
|
||||||
|
type toSampleSetFn = t => result<SampleSetDist.t, error>
|
||||||
|
type scaleMultiplyFn = (t, float) => result<t, error>
|
||||||
|
type pointwiseAddFn = (t, t) => result<t, error>
|
||||||
|
|
||||||
|
let sampleN = (t: t, n) =>
|
||||||
|
switch t {
|
||||||
|
| PointSet(r) => PointSetDist.sampleNRendered(n, r)
|
||||||
|
| Symbolic(r) => SymbolicDist.T.sampleN(n, r)
|
||||||
|
| SampleSet(r) => SampleSetDist.sampleN(r, n)
|
||||||
|
}
|
||||||
|
|
||||||
|
let toSampleSetDist = (t: t, n) =>
|
||||||
|
SampleSetDist.make(sampleN(t, n))->GenericDist_Types.Error.resultStringToResultError
|
||||||
|
|
||||||
|
let fromFloat = (f: float): t => Symbolic(SymbolicDist.Float.make(f))
|
||||||
|
|
||||||
|
let toString = (t: t) =>
|
||||||
|
switch t {
|
||||||
|
| PointSet(_) => "Point Set Distribution"
|
||||||
|
| Symbolic(r) => SymbolicDist.T.toString(r)
|
||||||
|
| SampleSet(_) => "Sample Set Distribution"
|
||||||
|
}
|
||||||
|
|
||||||
|
let normalize = (t: t): t =>
|
||||||
|
switch t {
|
||||||
|
| PointSet(r) => PointSet(PointSetDist.T.normalize(r))
|
||||||
|
| Symbolic(_) => t
|
||||||
|
| SampleSet(_) => t
|
||||||
|
}
|
||||||
|
|
||||||
|
let toFloatOperation = (
|
||||||
|
t,
|
||||||
|
~toPointSetFn: toPointSetFn,
|
||||||
|
~distToFloatOperation: Operation.distToFloatOperation,
|
||||||
|
) => {
|
||||||
|
let symbolicSolution = switch (t: t) {
|
||||||
|
| Symbolic(r) =>
|
||||||
|
switch SymbolicDist.T.operate(distToFloatOperation, r) {
|
||||||
|
| Ok(f) => Some(f)
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
|
||||||
|
switch symbolicSolution {
|
||||||
|
| Some(r) => Ok(r)
|
||||||
|
| None => toPointSetFn(t)->E.R2.fmap(PointSetDist.operate(distToFloatOperation))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Todo: If it's a pointSet, but the xyPointLength is different from what it has, it should change.
|
||||||
|
// This is tricky because the case of discrete distributions.
|
||||||
|
// Also, change the outputXYPoints/pointSetDistLength details
|
||||||
|
let toPointSet = (
|
||||||
|
t,
|
||||||
|
~xyPointLength,
|
||||||
|
~sampleCount,
|
||||||
|
~xSelection: GenericDist_Types.Operation.pointsetXSelection=#ByWeight,
|
||||||
|
unit,
|
||||||
|
): result<PointSetTypes.pointSetDist, error> => {
|
||||||
|
switch (t: t) {
|
||||||
|
| PointSet(pointSet) => Ok(pointSet)
|
||||||
|
| Symbolic(r) => Ok(SymbolicDist.T.toPointSetDist(~xSelection, xyPointLength, r))
|
||||||
|
| SampleSet(r) =>
|
||||||
|
SampleSetDist.toPointSetDist(
|
||||||
|
~samples=r,
|
||||||
|
~samplingInputs={
|
||||||
|
sampleCount: sampleCount,
|
||||||
|
outputXYPoints: xyPointLength,
|
||||||
|
pointSetDistLength: xyPointLength,
|
||||||
|
kernelWidth: None,
|
||||||
|
},
|
||||||
|
)->GenericDist_Types.Error.resultStringToResultError
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
PointSetDist.toSparkline calls "downsampleEquallyOverX", which downsamples it to n=bucketCount.
|
||||||
|
It first needs a pointSetDist, so we convert to a pointSetDist. In this process we want the
|
||||||
|
xyPointLength to be a bit longer than the eventual toSparkline downsampling. I chose 3
|
||||||
|
fairly arbitrarily.
|
||||||
|
*/
|
||||||
|
let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, unit): result<string, error> =>
|
||||||
|
t
|
||||||
|
->toPointSet(~xSelection=#Linear, ~xyPointLength=bucketCount * 3, ~sampleCount, ())
|
||||||
|
->E.R.bind(r =>
|
||||||
|
r->PointSetDist.toSparkline(bucketCount)->GenericDist_Types.Error.resultStringToResultError
|
||||||
|
)
|
||||||
|
|
||||||
|
module Truncate = {
|
||||||
|
let trySymbolicSimplification = (leftCutoff, rightCutoff, t: t): option<t> =>
|
||||||
|
switch (leftCutoff, rightCutoff, t) {
|
||||||
|
| (None, None, _) => None
|
||||||
|
| (lc, rc, Symbolic(#Uniform(u))) if lc < rc =>
|
||||||
|
Some(Symbolic(#Uniform(SymbolicDist.Uniform.truncate(lc, rc, u))))
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
|
||||||
|
let run = (
|
||||||
|
t: t,
|
||||||
|
~toPointSetFn: toPointSetFn,
|
||||||
|
~leftCutoff=None: option<float>,
|
||||||
|
~rightCutoff=None: option<float>,
|
||||||
|
(),
|
||||||
|
): result<t, error> => {
|
||||||
|
let doesNotNeedCutoff = E.O.isNone(leftCutoff) && E.O.isNone(rightCutoff)
|
||||||
|
if doesNotNeedCutoff {
|
||||||
|
Ok(t)
|
||||||
|
} else {
|
||||||
|
switch trySymbolicSimplification(leftCutoff, rightCutoff, t) {
|
||||||
|
| Some(r) => Ok(r)
|
||||||
|
| None =>
|
||||||
|
toPointSetFn(t)->E.R2.fmap(t => {
|
||||||
|
GenericDist_Types.PointSet(PointSetDist.T.truncate(leftCutoff, rightCutoff, t))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let truncate = Truncate.run
|
||||||
|
|
||||||
|
/* Given two random variables A and B, this returns the distribution
|
||||||
|
of a new variable that is the result of the operation on A and B.
|
||||||
|
For instance, normal(0, 1) + normal(1, 1) -> normal(1, 2).
|
||||||
|
In general, this is implemented via convolution.
|
||||||
|
|
||||||
|
TODO: It would be useful to be able to pass in a paramater to get this to run either with convolution or monte carlo.
|
||||||
|
*/
|
||||||
|
module AlgebraicCombination = {
|
||||||
|
let tryAnalyticalSimplification = (
|
||||||
|
arithmeticOperation: GenericDist_Types.Operation.arithmeticOperation,
|
||||||
|
t1: t,
|
||||||
|
t2: t,
|
||||||
|
): option<result<SymbolicDistTypes.symbolicDist, string>> =>
|
||||||
|
switch (arithmeticOperation, t1, t2) {
|
||||||
|
| (arithmeticOperation, Symbolic(d1), Symbolic(d2)) =>
|
||||||
|
switch SymbolicDist.T.tryAnalyticalSimplification(d1, d2, arithmeticOperation) {
|
||||||
|
| #AnalyticalSolution(symbolicDist) => Some(Ok(symbolicDist))
|
||||||
|
| #Error(er) => Some(Error(er))
|
||||||
|
| #NoSolution => None
|
||||||
|
}
|
||||||
|
| _ => None
|
||||||
|
}
|
||||||
|
|
||||||
|
let runConvolution = (
|
||||||
|
toPointSet: toPointSetFn,
|
||||||
|
arithmeticOperation: GenericDist_Types.Operation.arithmeticOperation,
|
||||||
|
t1: t,
|
||||||
|
t2: t,
|
||||||
|
) =>
|
||||||
|
E.R.merge(toPointSet(t1), toPointSet(t2))->E.R2.fmap(((a, b)) =>
|
||||||
|
PointSetDist.combineAlgebraically(arithmeticOperation, a, b)
|
||||||
|
)
|
||||||
|
|
||||||
|
let runMonteCarlo = (
|
||||||
|
toSampleSet: toSampleSetFn,
|
||||||
|
arithmeticOperation: GenericDist_Types.Operation.arithmeticOperation,
|
||||||
|
t1: t,
|
||||||
|
t2: t,
|
||||||
|
) => {
|
||||||
|
let fn = Operation.Algebraic.toFn(arithmeticOperation)
|
||||||
|
E.R.merge(toSampleSet(t1), toSampleSet(t2))
|
||||||
|
->E.R.bind(((t1, t2)) => {
|
||||||
|
SampleSetDist.map2(~fn, ~t1, ~t2)->GenericDist_Types.Error.resultStringToResultError
|
||||||
|
})
|
||||||
|
->E.R2.fmap(r => GenericDist_Types.SampleSet(r))
|
||||||
|
}
|
||||||
|
|
||||||
|
//I'm (Ozzie) really just guessing here, very little idea what's best
|
||||||
|
let expectedConvolutionCost: t => int = x =>
|
||||||
|
switch x {
|
||||||
|
| Symbolic(#Float(_)) => 1
|
||||||
|
| Symbolic(_) => 1000
|
||||||
|
| PointSet(Discrete(m)) => m.xyShape->XYShape.T.length
|
||||||
|
| PointSet(Mixed(_)) => 1000
|
||||||
|
| PointSet(Continuous(_)) => 1000
|
||||||
|
| _ => 1000
|
||||||
|
}
|
||||||
|
|
||||||
|
let chooseConvolutionOrMonteCarlo = (t2: t, t1: t) =>
|
||||||
|
expectedConvolutionCost(t1) * expectedConvolutionCost(t2) > 10000
|
||||||
|
? #CalculateWithMonteCarlo
|
||||||
|
: #CalculateWithConvolution
|
||||||
|
|
||||||
|
let run = (
|
||||||
|
t1: t,
|
||||||
|
~toPointSetFn: toPointSetFn,
|
||||||
|
~toSampleSetFn: toSampleSetFn,
|
||||||
|
~arithmeticOperation,
|
||||||
|
~t2: t,
|
||||||
|
): result<t, error> => {
|
||||||
|
switch tryAnalyticalSimplification(arithmeticOperation, t1, t2) {
|
||||||
|
| Some(Ok(symbolicDist)) => Ok(Symbolic(symbolicDist))
|
||||||
|
| Some(Error(e)) => Error(Other(e))
|
||||||
|
| None =>
|
||||||
|
switch chooseConvolutionOrMonteCarlo(t1, t2) {
|
||||||
|
| #CalculateWithMonteCarlo => runMonteCarlo(toSampleSetFn, arithmeticOperation, t1, t2)
|
||||||
|
| #CalculateWithConvolution =>
|
||||||
|
runConvolution(
|
||||||
|
toPointSetFn,
|
||||||
|
arithmeticOperation,
|
||||||
|
t1,
|
||||||
|
t2,
|
||||||
|
)->E.R2.fmap(r => GenericDist_Types.PointSet(r))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let algebraicCombination = AlgebraicCombination.run
|
||||||
|
|
||||||
|
//TODO: Add faster pointwiseCombine fn
|
||||||
|
let pointwiseCombination = (
|
||||||
|
t1: t,
|
||||||
|
~toPointSetFn: toPointSetFn,
|
||||||
|
~arithmeticOperation,
|
||||||
|
~t2: t,
|
||||||
|
): result<t, error> => {
|
||||||
|
E.R.merge(toPointSetFn(t1), toPointSetFn(t2))
|
||||||
|
->E.R2.fmap(((t1, t2)) =>
|
||||||
|
PointSetDist.combinePointwise(
|
||||||
|
GenericDist_Types.Operation.arithmeticToFn(arithmeticOperation),
|
||||||
|
t1,
|
||||||
|
t2,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
->E.R2.fmap(r => GenericDist_Types.PointSet(r))
|
||||||
|
}
|
||||||
|
|
||||||
|
let pointwiseCombinationFloat = (
|
||||||
|
t: t,
|
||||||
|
~toPointSetFn: toPointSetFn,
|
||||||
|
~arithmeticOperation: GenericDist_Types.Operation.arithmeticOperation,
|
||||||
|
~float: float,
|
||||||
|
): result<t, error> => {
|
||||||
|
let m = switch arithmeticOperation {
|
||||||
|
| #Add | #Subtract => Error(GenericDist_Types.DistributionVerticalShiftIsInvalid)
|
||||||
|
| (#Multiply | #Divide | #Power | #Logarithm) as arithmeticOperation =>
|
||||||
|
toPointSetFn(t)->E.R2.fmap(t => {
|
||||||
|
//TODO: Move to PointSet codebase
|
||||||
|
let fn = (secondary, main) => Operation.Scale.toFn(arithmeticOperation, main, secondary)
|
||||||
|
let integralSumCacheFn = Operation.Scale.toIntegralSumCacheFn(arithmeticOperation)
|
||||||
|
let integralCacheFn = Operation.Scale.toIntegralCacheFn(arithmeticOperation)
|
||||||
|
PointSetDist.T.mapY(
|
||||||
|
~integralSumCacheFn=integralSumCacheFn(float),
|
||||||
|
~integralCacheFn=integralCacheFn(float),
|
||||||
|
~fn=fn(float),
|
||||||
|
t,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
m->E.R2.fmap(r => GenericDist_Types.PointSet(r))
|
||||||
|
}
|
||||||
|
|
||||||
|
//Note: The result should always cumulatively sum to 1. This would be good to test.
|
||||||
|
//Note: If the inputs are not normalized, this will return poor results. The weights probably refer to the post-normalized forms. It would be good to apply a catch to this.
|
||||||
|
let mixture = (
|
||||||
|
values: array<(t, float)>,
|
||||||
|
~scaleMultiplyFn: scaleMultiplyFn,
|
||||||
|
~pointwiseAddFn: pointwiseAddFn,
|
||||||
|
) => {
|
||||||
|
if E.A.length(values) == 0 {
|
||||||
|
Error(GenericDist_Types.Other("Mixture error: mixture must have at least 1 element"))
|
||||||
|
} else {
|
||||||
|
let totalWeight = values->E.A2.fmap(E.Tuple2.second)->E.A.Floats.sum
|
||||||
|
let properlyWeightedValues =
|
||||||
|
values
|
||||||
|
->E.A2.fmap(((dist, weight)) => scaleMultiplyFn(dist, weight /. totalWeight))
|
||||||
|
->E.A.R.firstErrorOrOpen
|
||||||
|
properlyWeightedValues->E.R.bind(values => {
|
||||||
|
values
|
||||||
|
|> Js.Array.sliceFrom(1)
|
||||||
|
|> E.A.fold_left(
|
||||||
|
(acc, x) => E.R.bind(acc, acc => pointwiseAddFn(acc, x)),
|
||||||
|
Ok(E.A.unsafe_get(values, 0)),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,67 @@
type t = GenericDist_Types.genericDist
type error = GenericDist_Types.error
type toPointSetFn = t => result<PointSetTypes.pointSetDist, error>
type toSampleSetFn = t => result<SampleSetDist.t, error>
type scaleMultiplyFn = (t, float) => result<t, error>
type pointwiseAddFn = (t, t) => result<t, error>

let sampleN: (t, int) => array<float>

let toSampleSetDist: (t, int) => Belt.Result.t<QuriSquiggleLang.SampleSetDist.t, error>

let fromFloat: float => t

let toString: t => string

let normalize: t => t

let toFloatOperation: (
  t,
  ~toPointSetFn: toPointSetFn,
  ~distToFloatOperation: Operation.distToFloatOperation,
) => result<float, error>

let toPointSet: (
  t,
  ~xyPointLength: int,
  ~sampleCount: int,
  ~xSelection: GenericDist_Types.Operation.pointsetXSelection=?,
  unit,
) => result<PointSetTypes.pointSetDist, error>
let toSparkline: (t, ~sampleCount: int, ~bucketCount: int=?, unit) => result<string, error>

let truncate: (
  t,
  ~toPointSetFn: toPointSetFn,
  ~leftCutoff: option<float>=?,
  ~rightCutoff: option<float>=?,
  unit,
) => result<t, error>

let algebraicCombination: (
  t,
  ~toPointSetFn: toPointSetFn,
  ~toSampleSetFn: toSampleSetFn,
  ~arithmeticOperation: GenericDist_Types.Operation.arithmeticOperation,
  ~t2: t,
) => result<t, error>

let pointwiseCombination: (
  t,
  ~toPointSetFn: toPointSetFn,
  ~arithmeticOperation: GenericDist_Types.Operation.arithmeticOperation,
  ~t2: t,
) => result<t, error>

let pointwiseCombinationFloat: (
  t,
  ~toPointSetFn: toPointSetFn,
  ~arithmeticOperation: GenericDist_Types.Operation.arithmeticOperation,
  ~float: float,
) => result<t, error>

let mixture: (
  array<(t, float)>,
  ~scaleMultiplyFn: scaleMultiplyFn,
  ~pointwiseAddFn: pointwiseAddFn,
) => result<t, error>
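
Note that `mixture` (like `algebraicCombination`) takes its scaling and pointwise-add operations as arguments rather than computing them itself; `DistributionOperation.run` wires these up internally via its `reCall` helper. A rough sketch (not part of the diff) of supplying them directly, using only functions declared above; the numeric settings and point-mass inputs are arbitrary.

```rescript
// Build the helper functions that mixture expects, from the interface above.
let toPointSetFn = dist => GenericDist.toPointSet(dist, ~xyPointLength=1000, ~sampleCount=1000, ())
let scaleMultiplyFn = (dist, weight) =>
  GenericDist.pointwiseCombinationFloat(dist, ~toPointSetFn, ~arithmeticOperation=#Multiply, ~float=weight)
let pointwiseAddFn = (d1, d2) =>
  GenericDist.pointwiseCombination(d1, ~toPointSetFn, ~arithmeticOperation=#Add, ~t2=d2)

// A 50/50 mixture of two point masses.
let mix = GenericDist.mixture(
  [(GenericDist.fromFloat(0.0), 0.5), (GenericDist.fromFloat(1.0), 0.5)],
  ~scaleMultiplyFn,
  ~pointwiseAddFn,
)
```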
|
@ -0,0 +1,184 @@
|
||||||
|
type genericDist =
|
||||||
|
| PointSet(PointSetTypes.pointSetDist)
|
||||||
|
| SampleSet(SampleSetDist.t)
|
||||||
|
| Symbolic(SymbolicDistTypes.symbolicDist)
|
||||||
|
|
||||||
|
@genType
|
||||||
|
type error =
|
||||||
|
| NotYetImplemented
|
||||||
|
| Unreachable
|
||||||
|
| DistributionVerticalShiftIsInvalid
|
||||||
|
| Other(string)
|
||||||
|
|
||||||
|
module Error = {
|
||||||
|
type t = error
|
||||||
|
|
||||||
|
let fromString = (s: string): t => Other(s)
|
||||||
|
|
||||||
|
let resultStringToResultError: result<'a, string> => result<'a, error> = n =>
|
||||||
|
n->E.R2.errMap(r => r->fromString->Error)
|
||||||
|
}
|
||||||
|
|
||||||
|
module Operation = {
|
||||||
|
type direction =
|
||||||
|
| Algebraic
|
||||||
|
| Pointwise
|
||||||
|
|
||||||
|
type arithmeticOperation = [
|
||||||
|
| #Add
|
||||||
|
| #Multiply
|
||||||
|
| #Subtract
|
||||||
|
| #Divide
|
||||||
|
| #Power
|
||||||
|
| #Logarithm
|
||||||
|
]
|
||||||
|
|
||||||
|
let arithmeticToFn = (arithmetic: arithmeticOperation) =>
|
||||||
|
switch arithmetic {
|
||||||
|
| #Add => \"+."
|
||||||
|
| #Multiply => \"*."
|
||||||
|
| #Subtract => \"-."
|
||||||
|
| #Power => \"**"
|
||||||
|
| #Divide => \"/."
|
||||||
|
| #Logarithm => (a, b) => log(a) /. log(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
type toFloat = [
|
||||||
|
| #Cdf(float)
|
||||||
|
| #Inv(float)
|
||||||
|
| #Mean
|
||||||
|
| #Pdf(float)
|
||||||
|
| #Sample
|
||||||
|
]
|
||||||
|
|
||||||
|
type pointsetXSelection = [#Linear | #ByWeight]
|
||||||
|
|
||||||
|
type toDist =
|
||||||
|
| Normalize
|
||||||
|
| ToPointSet
|
||||||
|
| ToSampleSet(int)
|
||||||
|
| Truncate(option<float>, option<float>)
|
||||||
|
| Inspect
|
||||||
|
|
||||||
|
type toFloatArray = Sample(int)
|
||||||
|
|
||||||
|
type toString =
|
||||||
|
| ToString
|
||||||
|
| ToSparkline(int)
|
||||||
|
|
||||||
|
type fromDist =
|
||||||
|
| ToFloat(toFloat)
|
||||||
|
| ToDist(toDist)
|
||||||
|
| ToDistCombination(direction, arithmeticOperation, [#Dist(genericDist) | #Float(float)])
|
||||||
|
| ToString(toString)
|
||||||
|
|
||||||
|
type singleParamaterFunction =
|
||||||
|
| FromDist(fromDist)
|
||||||
|
| FromFloat(fromDist)
|
||||||
|
|
||||||
|
@genType
|
||||||
|
type genericFunctionCallInfo =
|
||||||
|
| FromDist(fromDist, genericDist)
|
||||||
|
| FromFloat(fromDist, float)
|
||||||
|
| Mixture(array<(genericDist, float)>)
|
||||||
|
|
||||||
|
let distCallToString = (distFunction: fromDist): string =>
|
||||||
|
switch distFunction {
|
||||||
|
| ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
|
||||||
|
  | ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
  | ToFloat(#Mean) => `mean`
  | ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
  | ToFloat(#Sample) => `sample`
  | ToDist(Normalize) => `normalize`
  | ToDist(ToPointSet) => `toPointSet`
  | ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
  | ToDist(Truncate(_, _)) => `truncate`
  | ToDist(Inspect) => `inspect`
  | ToString(ToString) => `toString`
  | ToString(ToSparkline(n)) => `toSparkline(${E.I.toString(n)})`
  | ToDistCombination(Algebraic, _, _) => `algebraic`
  | ToDistCombination(Pointwise, _, _) => `pointwise`
  }

  let toString = (d: genericFunctionCallInfo): string =>
    switch d {
    | FromDist(f, _) | FromFloat(f, _) => distCallToString(f)
    | Mixture(_) => `mixture`
    }
}

/*
It can be a pain to write out the genericFunctionCallInfo. The constructors help with this.

This code only covers some of genericFunctionCallInfo: many arguments could be called with either a
float or a distribution. The "UsingDists" module assumes that everything is a distribution.
This is a tradeoff of some generality in order to get a bit more simplicity.
I could see having a longer interface in the future, but it could be messy.
Like, algebraicAddDistFloat vs. algebraicAddDistDist
*/
module Constructors = {
  type t = Operation.genericFunctionCallInfo

  module UsingDists = {
    @genType
    let mean = (dist): t => FromDist(ToFloat(#Mean), dist)
    let sample = (dist): t => FromDist(ToFloat(#Sample), dist)
    let cdf = (dist, x): t => FromDist(ToFloat(#Cdf(x)), dist)
    let inv = (dist, x): t => FromDist(ToFloat(#Inv(x)), dist)
    let pdf = (dist, x): t => FromDist(ToFloat(#Pdf(x)), dist)
    let normalize = (dist): t => FromDist(ToDist(Normalize), dist)
    let toPointSet = (dist): t => FromDist(ToDist(ToPointSet), dist)
    let toSampleSet = (dist, r): t => FromDist(ToDist(ToSampleSet(r)), dist)
    let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
    let inspect = (dist): t => FromDist(ToDist(Inspect), dist)
    let toString = (dist): t => FromDist(ToString(ToString), dist)
    let toSparkline = (dist, n): t => FromDist(ToString(ToSparkline(n)), dist)
    let algebraicAdd = (dist1, dist2: genericDist): t => FromDist(
      ToDistCombination(Algebraic, #Add, #Dist(dist2)),
      dist1,
    )
    let algebraicMultiply = (dist1, dist2): t => FromDist(
      ToDistCombination(Algebraic, #Multiply, #Dist(dist2)),
      dist1,
    )
    let algebraicDivide = (dist1, dist2): t => FromDist(
      ToDistCombination(Algebraic, #Divide, #Dist(dist2)),
      dist1,
    )
    let algebraicSubtract = (dist1, dist2): t => FromDist(
      ToDistCombination(Algebraic, #Subtract, #Dist(dist2)),
      dist1,
    )
    let algebraicLogarithm = (dist1, dist2): t => FromDist(
      ToDistCombination(Algebraic, #Logarithm, #Dist(dist2)),
      dist1,
    )
    let algebraicPower = (dist1, dist2): t => FromDist(
      ToDistCombination(Algebraic, #Power, #Dist(dist2)),
      dist1,
    )
    let pointwiseAdd = (dist1, dist2): t => FromDist(
      ToDistCombination(Pointwise, #Add, #Dist(dist2)),
      dist1,
    )
    let pointwiseMultiply = (dist1, dist2): t => FromDist(
      ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
      dist1,
    )
    let pointwiseDivide = (dist1, dist2): t => FromDist(
      ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
      dist1,
    )
    let pointwiseSubtract = (dist1, dist2): t => FromDist(
      ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
      dist1,
    )
    let pointwiseLogarithm = (dist1, dist2): t => FromDist(
      ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
      dist1,
    )
    let pointwisePower = (dist1, dist2): t => FromDist(
      ToDistCombination(Pointwise, #Power, #Dist(dist2)),
      dist1,
    )
  }
}
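For illustration, a caller can build call descriptions with these constructors like so (a minimal sketch; `dist` stands in for some `genericDist` value and is not defined here):

```rescript
// Sketch only: `dist` is a hypothetical genericDist value.
let meanCall = Constructors.UsingDists.mean(dist)
let cdfAt5 = Constructors.UsingDists.cdf(dist, 5.0)
let sumCall = Constructors.UsingDists.algebraicAdd(dist, dist)
```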
@@ -0,0 +1,48 @@
# Generic Distribution Library

This library provides one interface to generic distributions. These distributions can either be symbolic, point set (xy-coordinates of the shape), or sample set (arrays of random samples).

Different internal formats (symbolic, point set, sample set) allow for different benefits and features. It's common for distributions to be converted into either point sets or sample sets to enable certain functions.

In addition to this interface, there's a second, generic function for calling functions on this generic distribution type. This ``genericOperation`` standardizes the inputs and outputs for these various function calls. See its ``run()`` function.

Performance is very important. Some operations can take a long time to run, and even then be inaccurate. Because of this, we plan to eventually build in a lot of logging and stack-tracing functionality.
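As a rough sketch of the intended flow (the runner name and environment below are assumptions for illustration, not the actual API; the call-description constructors appear elsewhere in this change):

```rescript
// Hypothetical: describe the operation, then let the generic runner execute it.
let call = FromDist(ToFloat(#Mean), dist) // `dist` is some genericDist value (assumed)
// A runner along the lines of `run(~env, call)` would dispatch on the internal format
// (symbolic / point set / sample set) and return a standardized result.
```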
## Diagram of Distribution Types

```mermaid
graph TD
    A[Generic Distribution] --> B{Point Set}
    A --> C{Sample Set}
    A --> D{Symbolic}
    B ---> continuous(Continuous)
    B ---> discrete(Discrete)
    B --> mixed(Mixed)
    continuous -.-> XYshape(XYshape)
    discrete -.-> XYshape(XYshape)
    mixed -.-> continuous
    mixed -.-> discrete
    D --> Normal(Normal)
    D --> Lognormal(Lognormal)
    D --> Triangular(Triangular)
    D --> Beta(Beta)
    D --> Uniform(Uniform)
    D --> Float(Float)
    D --> Exponential(Exponential)
    D --> Cauchy(Cauchy)
```

## Diagram of Generic Distribution Types

## Todo

- [ ] Lots of cleanup
- [ ] Simple test story
- [ ] Provide decent stack traces for key calls in GenericOperation. This could be very useful for debugging.
- [ ] Clean up the Sample Set library
- [ ] Add memoization for calculations
- [ ] Performance benchmarking reports
- [ ] Remove most of DistPlus, much of which is not needed anymore
- [ ] More functions for Sample Set, which is very minimal now
- [ ] Allow these functions to be run on web workers
- [ ] Refactor the interpreter to use GenericDist. This might not be necessary, as the new reducer-inspired interpreter is integrated.

## More todos
@@ -114,16 +114,18 @@ let combineShapesContinuousContinuous = (
   | #Subtract => (m1, m2) => m1 -. m2
   | #Multiply => (m1, m2) => m1 *. m2
   | #Divide => (m1, mInv2) => m1 *. mInv2
-  | #Exponentiate => (m1, mInv2) => m1 ** mInv2
+  | #Power => (m1, mInv2) => m1 ** mInv2
+  | #Logarithm => (m1, m2) => log(m1) /. log(m2)
   } // note: here, mInv2 = mean(1 / t2) ~= 1 / mean(t2)

-  // TODO: I don't know what the variances are for exponentatiation
+  // TODO: Variances for exponentiation or logarithms are almost totally made up and very likely very wrong.
   // converts the variances and means of the two inputs into the variance of the output
   let combineVariancesFn = switch op {
   | #Add => (v1, v2, _, _) => v1 +. v2
   | #Subtract => (v1, v2, _, _) => v1 +. v2
   | #Multiply => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
-  | #Exponentiate => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
+  | #Power => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
+  | #Logarithm => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
  | #Divide => (v1, vInv2, m1, mInv2) => v1 *. vInv2 +. v1 *. mInv2 ** 2. +. vInv2 *. m1 ** 2.
  }
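For reference, the `#Multiply` variance arm above is the standard product-variance identity for independent inputs; the `#Power` and `#Logarithm` arms simply reuse it as a placeholder, which is what the TODO flags:

```latex
\operatorname{Var}(XY) = \operatorname{Var}(X)\operatorname{Var}(Y)
  + \operatorname{Var}(X)\,\mathbb{E}[Y]^2 + \operatorname{Var}(Y)\,\mathbb{E}[X]^2
```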
@@ -231,7 +233,8 @@ let combineShapesContinuousDiscrete = (
       ()
     }
   | #Multiply
-  | #Exponentiate
+  | #Power
+  | #Logarithm
  | #Divide =>
    for j in 0 to t2n - 1 {
      // creates a new continuous shape for each one of the discrete points, and collects them in outXYShapes.
@@ -1,6 +1,47 @@
 open Distributions

 type t = PointSetTypes.continuousShape

+module Analysis = {
+  let integrate = (
+    ~indefiniteIntegralStepwise=(p, h1) => h1 *. p,
+    ~indefiniteIntegralLinear=(p, a, b) => a *. p +. b *. p ** 2.0 /. 2.0,
+    t: t,
+  ): float => {
+    let xs = t.xyShape.xs
+    let ys = t.xyShape.ys
+
+    E.A.reducei(xs, 0.0, (acc, _x, i) => {
+      let areaUnderIntegral = // TODO Take this switch statement out of the loop body
+        switch (t.interpolation, i) {
+        | (_, 0) => 0.0
+        | (#Stepwise, _) =>
+          indefiniteIntegralStepwise(xs[i], ys[i - 1]) -.
+          indefiniteIntegralStepwise(xs[i - 1], ys[i - 1])
+        | (#Linear, _) =>
+          let x1 = xs[i - 1]
+          let x2 = xs[i]
+          if x1 == x2 {
+            0.0
+          } else {
+            let h1 = ys[i - 1]
+            let h2 = ys[i]
+            let b = (h1 -. h2) /. (x1 -. x2)
+            let a = h1 -. b *. x1
+            indefiniteIntegralLinear(x2, a, b) -. indefiniteIntegralLinear(x1, a, b)
+          }
+        }
+      acc +. areaUnderIntegral
+    })
+  }
+
+  let getMeanOfSquares = (t: t) => {
+    let indefiniteIntegralLinear = (p, a, b) => a *. p ** 3.0 /. 3.0 +. b *. p ** 4.0 /. 4.0
+    let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 3.0 /. 3.0
+    integrate(~indefiniteIntegralStepwise, ~indefiniteIntegralLinear, t)
+  }
+}
+
 let getShape = (t: t) => t.xyShape
 let interpolation = (t: t) => t.interpolation
 let make = (~interpolation=#Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
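The default `~indefiniteIntegralLinear` argument above is just the antiderivative of the linear interpolant between two points; for a segment from `x1` to `x2` with heights `h1` and `h2`:

```latex
y(x) = a + bx, \qquad b = \frac{h_1 - h_2}{x_1 - x_2}, \qquad a = h_1 - b\,x_1,
\qquad \int y(x)\,dx = a\,x + \frac{b\,x^2}{2}
```

`getMeanOfSquares` swaps in the antiderivatives of `x^2 * y(x)` (`a*x^3/3 + b*x^4/4` in the linear case), which is how the same loop is reused for second moments.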
@@ -194,7 +235,7 @@ module T = Dist({
     let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0
     let indefiniteIntegralLinear = (p, a, b) => a *. p ** 2.0 /. 2.0 +. b *. p ** 3.0 /. 3.0

-    XYShape.Analysis.integrateContinuousShape(
+    Analysis.integrate(
       ~indefiniteIntegralStepwise,
       ~indefiniteIntegralLinear,
       t,
@@ -204,10 +245,13 @@ module T = Dist({
     XYShape.Analysis.getVarianceDangerously(
       t,
       mean,
-      XYShape.Analysis.getMeanOfSquaresContinuousShape,
+      Analysis.getMeanOfSquares,
     )
 })

+let downsampleEquallyOverX = (length, t): t =>
+  t |> shapeMap(XYShape.XsConversion.proportionEquallyOverX(length))
+
 /* This simply creates multiple copies of the continuous distribution, scaled and shifted according to
    each discrete data point, and then adds them all together. */
 let combineAlgebraicallyWithDiscrete = (
@@ -209,8 +209,9 @@ module T = Dist({
     let s = getShape(t)
     E.A.reducei(s.xs, 0.0, (acc, x, i) => acc +. x *. s.ys[i])
   }

   let variance = (t: t): float => {
-    let getMeanOfSquares = t => t |> shapeMap(XYShape.Analysis.squareXYShape) |> mean
+    let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
     XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
   }
 })
@@ -213,8 +213,8 @@ module T = Dist({

   let getMeanOfSquares = ({discrete, continuous}: t) => {
     let discreteMean =
-      discrete |> Discrete.shapeMap(XYShape.Analysis.squareXYShape) |> Discrete.T.mean
+      discrete |> Discrete.shapeMap(XYShape.T.square) |> Discrete.T.mean
-    let continuousMean = continuous |> XYShape.Analysis.getMeanOfSquaresContinuousShape
+    let continuousMean = continuous |> Continuous.Analysis.getMeanOfSquares
     (discreteMean *. discreteIntegralSum +. continuousMean *. continuousIntegralSum) /.
       totalIntegralSum
   }
@@ -34,6 +34,7 @@ let toMixed = mapToAll((
   ),
 ))

+//TODO WARNING: The combineAlgebraicallyWithDiscrete will break for subtraction and division, like discrete - continuous
 let combineAlgebraically = (op: Operation.algebraicOperation, t1: t, t2: t): t =>
   switch (t1, t2) {
   | (Continuous(m1), Continuous(m2)) =>
@@ -41,7 +42,8 @@ let combineAlgebraically = (op: Operation.algebraicOperation, t1: t, t2: t): t =
   | (Continuous(m1), Discrete(m2))
   | (Discrete(m2), Continuous(m1)) =>
     Continuous.combineAlgebraicallyWithDiscrete(op, m1, m2) |> Continuous.T.toPointSetDist
-  | (Discrete(m1), Discrete(m2)) => Discrete.combineAlgebraically(op, m1, m2) |> Discrete.T.toPointSetDist
+  | (Discrete(m1), Discrete(m2)) =>
+    Discrete.combineAlgebraically(op, m1, m2) |> Discrete.T.toPointSetDist
   | (m1, m2) => Mixed.combineAlgebraically(op, toMixed(m1), toMixed(m2)) |> Mixed.T.toPointSetDist
  }
@@ -189,15 +191,20 @@ let isFloat = (t: t) =>
 let sampleNRendered = (n, dist) => {
   let integralCache = T.Integral.get(dist)
   let distWithUpdatedIntegralCache = T.updateIntegralCache(Some(integralCache), dist)

   doN(n, () => sample(distWithUpdatedIntegralCache))
 }

 let operate = (distToFloatOp: Operation.distToFloatOperation, s): float =>
   switch distToFloatOp {
   | #Pdf(f) => pdf(f, s)
-  | #Cdf(f) => pdf(f, s)
+  | #Cdf(f) => cdf(f, s)
   | #Inv(f) => inv(f, s)
   | #Sample => sample(s)
   | #Mean => T.mean(s)
   }

+let toSparkline = (t: t, bucketCount) =>
+  T.toContinuous(t)
+  ->E.O2.fmap(Continuous.downsampleEquallyOverX(bucketCount))
+  ->E.O2.toResult("toContinuous Error: Could not convert into continuous distribution")
+  ->E.R2.fmap(r => Continuous.getShape(r).ys->Sparklines.create())
@@ -0,0 +1,93 @@
type domainLimit = {
  xPoint: float,
  excludingProbabilityMass: float,
}

type domain =
  | Complete
  | LeftLimited(domainLimit)
  | RightLimited(domainLimit)
  | LeftAndRightLimited(domainLimit, domainLimit)

type distributionType = [
  | #PDF
  | #CDF
]

type xyShape = XYShape.xyShape;
type interpolationStrategy = XYShape.interpolationStrategy;
type extrapolationStrategy = XYShape.extrapolationStrategy;
type interpolator = XYShape.extrapolationStrategy;

type rec continuousShape = {
  xyShape: xyShape,
  interpolation: interpolationStrategy,
  integralSumCache: option<float>,
  integralCache: option<continuousShape>,
}

type discreteShape = {
  xyShape: xyShape,
  integralSumCache: option<float>,
  integralCache: option<continuousShape>,
}

type mixedShape = {
  continuous: continuousShape,
  discrete: discreteShape,
  integralSumCache: option<float>,
  integralCache: option<continuousShape>,
}

type pointSetDistMonad<'a, 'b, 'c> =
  | Mixed('a)
  | Discrete('b)
  | Continuous('c)

@genType
type pointSetDist = pointSetDistMonad<mixedShape, discreteShape, continuousShape>

module ShapeMonad = {
  let fmap = (t: pointSetDistMonad<'a, 'b, 'c>, (fn1, fn2, fn3)): pointSetDistMonad<'d, 'e, 'f> =>
    switch t {
    | Mixed(m) => Mixed(fn1(m))
    | Discrete(m) => Discrete(fn2(m))
    | Continuous(m) => Continuous(fn3(m))
    }
}

type generationSource =
  | SquiggleString(string)
  | Shape(pointSetDist)

@genType
type distPlus = {
  pointSetDist: pointSetDist,
  integralCache: continuousShape,
  squiggleString: option<string>,
}

type mixedPoint = {
  continuous: float,
  discrete: float,
}

module MixedPoint = {
  type t = mixedPoint
  let toContinuousValue = (t: t) => t.continuous
  let toDiscreteValue = (t: t) => t.discrete
  let makeContinuous = (continuous: float): t => {continuous: continuous, discrete: 0.0}
  let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete: discrete}

  let fmap = (fn: float => float, t: t) => {
    continuous: fn(t.continuous),
    discrete: fn(t.discrete),
  }

  let combine2 = (fn, c: t, d: t): t => {
    continuous: fn(c.continuous, d.continuous),
    discrete: fn(c.discrete, d.discrete),
  }

  let add = combine2((a, b) => a +. b)
}
@@ -0,0 +1,68 @@
/*
This is used as a smart constructor. The only way to create a SampleSetDist.t is to call
this constructor.
https://stackoverflow.com/questions/66909578/how-to-make-a-type-constructor-private-in-rescript-except-in-current-module
*/
module T: {
  //This really should be hidden (remove the array<float>). The reason it isn't is to act as an escape hatch in JS__Test.ts.
  //When we get a good functional library in TS, we could refactor that out.
  @genType
  type t = array<float>
  let make: array<float> => result<t, string>
  let get: t => array<float>
} = {
  type t = array<float>
  let make = (a: array<float>) =>
    if E.A.length(a) > 5 {
      Ok(a)
    } else {
      Error("too small")
    }
  let get = (a: t) => a
}

include T
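A quick illustration of the smart-constructor contract above (a sketch; it assumes this file is exposed as `SampleSetDist`, and the sample values are arbitrary):

```rescript
// Sketch: construction only succeeds with more than five samples.
let ok = SampleSetDist.make([1.0, 2.0, 3.0, 4.0, 5.0, 6.0]) // Ok(_)
let tooSmall = SampleSetDist.make([1.0, 2.0]) // Error("too small")
```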
let length = (t: t) => get(t)->E.A.length

/*
TODO: Refactor to get a more precise estimate. Also, this code is just fairly messy and could use
some refactoring.
*/
let toPointSetDist = (~samples: t, ~samplingInputs: SamplingInputs.samplingInputs): result<
  PointSetTypes.pointSetDist,
  string,
> =>
  SampleSetDist_ToPointSet.toPointSetDist(
    ~samples=get(samples),
    ~samplingInputs,
    (),
  ).pointSetDist->E.O2.toResult("Failed to convert to PointSetDist")

//Randomly get one sample from the distribution
let sample = (t: t): float => {
  let i = E.Int.random(~min=0, ~max=E.A.length(get(t)) - 1)
  E.A.unsafe_get(get(t), i)
}

/*
If asked for a number of samples shorter than or equal to the length of the distribution,
return the first n samples of this distribution.
Else, return n random samples of the distribution.
The former helps in cases where multiple distributions are correlated.
However, if n > length(t), then there's no clear right answer, so we just randomly
sample everything.
*/
let sampleN = (t: t, n) => {
  if n <= E.A.length(get(t)) {
    E.A.slice(get(t), ~offset=0, ~len=n)
  } else {
    Belt.Array.makeBy(n, _ => sample(t))
  }
}
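To illustrate the sampling rule described in the comment above `sampleN` (a sketch; `dist` is a hypothetical value holding 1000 samples, and the module is assumed to be exposed as `SampleSetDist`):

```rescript
// n <= stored length: the deterministic prefix is returned (preserves correlations across distributions).
let firstHundred = SampleSetDist.sampleN(dist, 100)
// n > stored length: falls back to random resampling.
let resampled = SampleSetDist.sampleN(dist, 5000)
```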

//TODO: Figure out what to do if distributions are different lengths. ``zip`` is kind of inelegant for this.
let map2 = (~fn: (float, float) => float, ~t1: t, ~t2: t) => {
  let samples = Belt.Array.zip(get(t1), get(t2))->E.A2.fmap(((a, b)) => fn(a, b))
  make(samples)
}
@@ -1,4 +1,4 @@
-//The math here was taken from https://github.com/jasondavies/science.js/blob/master/src/stats/bandwidth.js
+//The math here was taken from https://github.com/jasondavies/science.js/blob/master/src/stats/SampleSetDist_Bandwidth.js

 let len = x => E.A.length(x) |> float_of_int

@@ -55,6 +55,7 @@ module Internals = {
         : {
             let _ = Js.Array.push(element, continuous)
           }

       ()
     })
     (continuous, discrete)
@@ -69,7 +70,7 @@ module Internals = {
   let formatUnitWidth = w => Jstat.max([w, 1.0]) |> int_of_float

   let suggestedUnitWidth = (samples, outputXYPoints) => {
-    let suggestedXWidth = Bandwidth.nrd0(samples)
+    let suggestedXWidth = SampleSetDist_Bandwidth.nrd0(samples)
     xWidthToUnitWidth(samples, outputXYPoints, suggestedXWidth)
   }

@@ -96,7 +97,7 @@ let toPointSetDist = (
   let pdf =
     continuousPart |> E.A.length > 5
       ? {
-          let _suggestedXWidth = Bandwidth.nrd0(continuousPart)
+          let _suggestedXWidth = SampleSetDist_Bandwidth.nrd0(continuousPart)
           // todo: This does some recalculating from the last step.
           let _suggestedUnitWidth = Internals.T.suggestedUnitWidth(
             continuousPart,
@@ -138,4 +139,4 @@ let toPointSetDist = (
   }

   samplesParse
 }
@@ -2,7 +2,7 @@ open SymbolicDistTypes

 module Normal = {
   type t = normal
-  let make = (mean: float, stdev: float): result<symbolicDist,string> =>
+  let make = (mean: float, stdev: float): result<symbolicDist, string> =>
     stdev > 0.0
       ? Ok(#Normal({mean: mean, stdev: stdev}))
       : Error("Standard deviation of normal distribution must be larger than 0")
@@ -48,12 +48,14 @@ module Normal = {

 module Exponential = {
   type t = exponential
-  let make = (rate: float): result<symbolicDist,string> =>
+  let make = (rate: float): result<symbolicDist, string> =>
     rate > 0.0
-      ? Ok(#Exponential({
-          rate: rate,
-        }))
-      : Error("Exponential distributions mean must be larger than 0")
+      ? Ok(
+          #Exponential({
+            rate: rate,
+          }),
+        )
+      : Error("Exponential distributions rate must be larger than 0.")
   let pdf = (x, t: t) => Jstat.Exponential.pdf(x, t.rate)
   let cdf = (x, t: t) => Jstat.Exponential.cdf(x, t.rate)
   let inv = (p, t: t) => Jstat.Exponential.inv(p, t.rate)
@@ -69,7 +71,7 @@ module Cauchy = {
   let cdf = (x, t: t) => Jstat.Cauchy.cdf(x, t.local, t.scale)
   let inv = (p, t: t) => Jstat.Cauchy.inv(p, t.local, t.scale)
   let sample = (t: t) => Jstat.Cauchy.sample(t.local, t.scale)
-  let mean = (_: t) => Error("Cauchy distributions have no mean value.")
+  let mean = (_: t) => Error("Cauchy distributions may have no mean value.")
   let toString = ({local, scale}: t) => j`Cauchy($local, $scale)`
 }

@@ -78,8 +80,8 @@ module Triangular = {
   let make = (low, medium, high): result<symbolicDist, string> =>
     low < medium && medium < high
       ? Ok(#Triangular({low: low, medium: medium, high: high}))
-      : Error("Triangular values must be increasing order")
-  let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium)
+      : Error("Triangular values must be increasing order.")
+  let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
   let cdf = (x, t: t) => Jstat.Triangular.cdf(x, t.low, t.high, t.medium)
   let inv = (p, t: t) => Jstat.Triangular.inv(p, t.low, t.high, t.medium)
   let sample = (t: t) => Jstat.Triangular.sample(t.low, t.high, t.medium)
@@ -89,7 +91,7 @@ module Triangular = {

 module Beta = {
   type t = beta
   let make = (alpha, beta) =>
     alpha > 0.0 && beta > 0.0
       ? Ok(#Beta({alpha: alpha, beta: beta}))
       : Error("Beta distribution parameters must be positive")
@@ -103,10 +105,10 @@ module Beta = {

 module Lognormal = {
   type t = lognormal
   let make = (mu, sigma) =>
     sigma > 0.0
       ? Ok(#Lognormal({mu: mu, sigma: sigma}))
       : Error("Lognormal standard deviation must be larger than 0")
   let pdf = (x, t: t) => Jstat.Lognormal.pdf(x, t.mu, t.sigma)
   let cdf = (x, t: t) => Jstat.Lognormal.cdf(x, t.mu, t.sigma)
   let inv = (p, t: t) => Jstat.Lognormal.inv(p, t.mu, t.sigma)
@@ -127,8 +129,7 @@ module Lognormal = {
       let mu = Js.Math.log(mean) -. 0.5 *. Js.Math.log(variance /. meanSquared +. 1.0)
       let sigma = Js.Math.pow_float(~base=Js.Math.log(variance /. meanSquared +. 1.0), ~exp=0.5)
       Ok(#Lognormal({mu: mu, sigma: sigma}))
-    }
-    else {
+    } else {
       Error("Lognormal standard deviation must be larger than 0")
     }
   }
@@ -154,9 +155,7 @@ module Lognormal = {

 module Uniform = {
   type t = uniform
   let make = (low, high) =>
-    high > low
-      ? Ok(#Uniform({low: low, high: high}))
-      : Error("High must be larger than low")
+    high > low ? Ok(#Uniform({low: low, high: high})) : Error("High must be larger than low")

   let pdf = (x, t: t) => Jstat.Uniform.pdf(x, t.low, t.high)
   let cdf = (x, t: t) => Jstat.Uniform.cdf(x, t.low, t.high)
@@ -165,6 +164,7 @@ module Uniform = {
   let mean = (t: t) => Ok(Jstat.Uniform.mean(t.low, t.high))
   let toString = ({low, high}: t) => j`Uniform($low,$high)`
   let truncate = (low, high, t: t): t => {
+    //todo: add check
     let newLow = max(E.O.default(neg_infinity, low), t.low)
     let newHigh = min(E.O.default(infinity, high), t.high)
     {low: newLow, high: newHigh}
@@ -182,6 +182,15 @@ module Float = {
   let toString = Js.Float.toString
 }

+module From90thPercentile = {
+  let make = (low, high) =>
+    switch (low, high) {
+    | (low, high) if low <= 0.0 && low < high => Ok(Normal.from90PercentCI(low, high))
+    | (low, high) if low < high => Ok(Lognormal.from90PercentCI(low, high))
+    | (_, _) => Error("Low value must be less than high value.")
+    }
+}
+
 module T = {
   let minCdfValue = 0.0001
   let maxCdfValue = 0.9999
@@ -337,11 +346,11 @@ module T = {
   | _ => #NoSolution
   }

-  let toPointSetDist = (sampleCount, d: symbolicDist): PointSetTypes.pointSetDist =>
+  let toPointSetDist = (~xSelection=#ByWeight, sampleCount, d: symbolicDist): PointSetTypes.pointSetDist =>
     switch d {
     | #Float(v) => Discrete(Discrete.make(~integralSumCache=Some(1.0), {xs: [v], ys: [1.0]}))
     | _ =>
-      let xs = interpolateXs(~xSelection=#ByWeight, d, sampleCount)
+      let xs = interpolateXs(~xSelection, d, sampleCount)
       let ys = xs |> E.A.fmap(x => pdf(x, d))
       Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs: xs, ys: ys}))
     }
@@ -118,7 +118,7 @@ module PointwiseCombination = {
   switch pointwiseOp {
   | #Add => pointwiseAdd(evaluationParams, t1, t2)
   | #Multiply => pointwiseCombine(\"*.", evaluationParams, t1, t2)
-  | #Exponentiate => pointwiseCombine(\"**", evaluationParams, t1, t2)
+  | #Power => pointwiseCombine(\"**", evaluationParams, t1, t2)
   }
 }

@@ -218,15 +218,14 @@ module SamplingDistribution = {
       algebraicOp,
       a,
       b,
-    )
+    ) |> E.O.toResult("Could not get samples")

-    let pointSetDist =
-      samples
-      |> E.O.fmap(r =>
-        SampleSet.toPointSetDist(~samplingInputs=evaluationParams.samplingInputs, ~samples=r, ())
-      )
-      |> E.O.bind(_, r => r.pointSetDist)
-      |> E.O.toResult("No response")
+    let sampleSetDist = samples -> E.R.bind(SampleSetDist.make)
+
+    let pointSetDist =
+      sampleSetDist
+      -> E.R.bind(r =>
+        SampleSetDist.toPointSetDist(~samplingInputs=evaluationParams.samplingInputs, ~samples=r));
+
     pointSetDist |> E.R.fmap(r => #Normalize(#RenderedDist(r)))
   })
 }
@@ -8,28 +8,22 @@ let make =
     (
       ~pointSetDist,
       ~squiggleString,
-      ~domain=Complete,
-      ~unit=UnspecifiedDistribution,
       (),
     )
     : t => {
   let integral = pointSetDistIntegral(pointSetDist);
-  {pointSetDist, domain, integralCache: integral, unit, squiggleString};
+  {pointSetDist, integralCache: integral, squiggleString};
 };

 let update =
     (
       ~pointSetDist=?,
       ~integralCache=?,
-      ~domain=?,
-      ~unit=?,
       ~squiggleString=?,
       t: t,
     ) => {
   pointSetDist: E.O.default(t.pointSetDist, pointSetDist),
   integralCache: E.O.default(t.integralCache, integralCache),
-  domain: E.O.default(t.domain, domain),
-  unit: E.O.default(t.unit, unit),
   squiggleString: E.O.default(t.squiggleString, squiggleString),
 };

@@ -38,12 +32,6 @@ let updateShape = (pointSetDist, t) => {
   update(~pointSetDist, ~integralCache, t);
 };

-let domainIncludedProbabilityMass = (t: t) =>
-  Domain.includedProbabilityMass(t.domain);
-
-let domainIncludedProbabilityMassAdjustment = (t: t, f) =>
-  f *. Domain.includedProbabilityMass(t.domain);
-
 let toPointSetDist = ({pointSetDist, _}: t) => pointSetDist;

 let pointSetDistFn = (fn, {pointSetDist}: t) => fn(pointSetDist);

@@ -73,8 +61,7 @@ module T =
   let xToY = (f, t: t) =>
     t
     |> toPointSetDist
-    |> PointSetDist.T.xToY(f)
-    |> MixedPoint.fmap(domainIncludedProbabilityMassAdjustment(t));
+    |> PointSetDist.T.xToY(f);

   let minX = pointSetDistFn(PointSetDist.T.minX);
   let maxX = pointSetDistFn(PointSetDist.T.maxX);

@@ -115,7 +102,6 @@ module T =
       f,
       toPointSetDist(t),
     )
-    |> domainIncludedProbabilityMassAdjustment(t);
   };

   // TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
@@ -227,8 +227,8 @@ let all = [
     },
     (),
   ),
-  makeRenderedDistFloat("scaleExp", (dist, float) => verticalScaling(#Exponentiate, dist, float)),
+  makeRenderedDistFloat("scaleExp", (dist, float) => verticalScaling(#Power, dist, float)),
   makeRenderedDistFloat("scaleMultiply", (dist, float) => verticalScaling(#Multiply, dist, float)),
-  makeRenderedDistFloat("scaleLog", (dist, float) => verticalScaling(#Log, dist, float)),
+  makeRenderedDistFloat("scaleLog", (dist, float) => verticalScaling(#Logarithm, dist, float)),
   Multimodal._function,
 ]
@@ -144,11 +144,11 @@ module MathAdtToDistDst = {
   | ("subtract", _) => Error("Subtraction needs two operands")
   | ("multiply", [l, r]) => toOkAlgebraic((#Multiply, l, r))
   | ("multiply", _) => Error("Multiplication needs two operands")
-  | ("pow", [l, r]) => toOkAlgebraic((#Exponentiate, l, r))
+  | ("pow", [l, r]) => toOkAlgebraic((#Power, l, r))
   | ("pow", _) => Error("Exponentiation needs two operands")
   | ("dotMultiply", [l, r]) => toOkPointwise((#Multiply, l, r))
   | ("dotMultiply", _) => Error("Dotwise multiplication needs two operands")
-  | ("dotPow", [l, r]) => toOkPointwise((#Exponentiate, l, r))
+  | ("dotPow", [l, r]) => toOkPointwise((#Power, l, r))
   | ("dotPow", _) => Error("Dotwise exponentiation needs two operands")
   | ("rightLogShift", [l, r]) => toOkPointwise((#Add, l, r))
   | ("rightLogShift", _) => Error("Dotwise addition needs two operands")

packages/squiggle-lang/src/rescript/Reducer/README.md (new file, 17 lines)
@@ -0,0 +1,17 @@
To interface your library, there are only 2 files to be modified:

- Reducer/ReducerInterface/ReducerInterface_ExpressionValue.res

  This is where your additional types are referred for the dispatcher.

- Reducer/ReducerInterface/ReducerInterface_ExternalLibrary.res

  This is where dispatching to your library is done. If the dispatcher becomes beastly, feel free to divide it into submodules.

The Reducer is built to use different external libraries as well as different external parsers. Both external parsers and external libraries are plugins.

And finally, try using Reducer.eval to see how your extensions look:

```rescript
test("1+2", () => expectEvalToBe( "1+2", "Ok(3)"))
```
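For instance, a dispatch arm added in `ReducerInterface_ExternalLibrary.res` might look roughly like this (a sketch under assumptions: the return type and chaining convention are inferred from `Reducer_Dispatch_BuiltIn` later in this change, and `"double"` is a made-up function name):

```rescript
// Hypothetical sketch of an ExternalLibrary.dispatch arm; names and exact types are assumptions.
let dispatch = (call: functionCall, chain): result<expressionValue, errorValue> =>
  switch call {
  | ("double", [EvNumber(x)]) => EvNumber(x *. 2.0)->Ok
  | call => chain(call) // anything else falls through to the default (MathJs) implementation
  }
```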
packages/squiggle-lang/src/rescript/Reducer/Reducer.res (new file, 9 lines)

@@ -0,0 +1,9 @@
module Dispatch = Reducer_Dispatch
module ErrorValue = Reducer_ErrorValue
module Expression = Reducer_Expression
module Extra = Reducer_Extra
module Js = Reducer_Js
module MathJs = Reducer_MathJs

let eval = Expression.eval
let parse = Expression.parse

packages/squiggle-lang/src/rescript/Reducer/Reducer.resi (new file, 8 lines)

@@ -0,0 +1,8 @@
module Dispatch = Reducer_Dispatch
module ErrorValue = Reducer_ErrorValue
module Expression = Reducer_Expression
module Extra = Reducer_Extra
module Js = Reducer_Js
module MathJs = Reducer_MathJs
let eval: string => result<Expression.expressionValue, ErrorValue.errorValue>
let parse: string => result<Expression.expression, ErrorValue.errorValue>
@@ -0,0 +1 @@
module Builtin = Reducer_Dispatch_BuiltIn

@@ -0,0 +1,72 @@
module ExternalLibrary = ReducerInterface.ExternalLibrary
module MathJs = Reducer_MathJs
open ReducerInterface.ExpressionValue
open Reducer_ErrorValue

/*
  MathJs provides default implementations for builtins
  This is where all the expected builtins like + = * / sin cos log ln etc are handled
  DO NOT try to add external function mapping here!
*/

exception TestRescriptException

let callInternal = (call: functionCall): result<'b, errorValue> => {
  let callMathJs = (call: functionCall): result<'b, errorValue> =>
    switch call {
    | ("jsraise", [msg]) => Js.Exn.raiseError(toString(msg)) // For Tests
    | ("resraise", _) => raise(TestRescriptException) // For Tests
    | call => call->toStringFunctionCall->MathJs.Eval.eval
    }

  let constructRecord = arrayOfPairs => {
    Belt.Array.map(arrayOfPairs, pairValue => {
      switch pairValue {
      | EvArray([EvString(key), valueValue]) => (key, valueValue)
      | _ => ("wrong key type", pairValue->toStringWithType->EvString)
      }
    })
    ->Js.Dict.fromArray
    ->EvRecord
    ->Ok
  }

  let arrayAtIndex = (aValueArray: array<expressionValue>, fIndex: float) =>
    switch Belt.Array.get(aValueArray, Belt.Int.fromFloat(fIndex)) {
    | Some(value) => value->Ok
    | None => REArrayIndexNotFound("Array index not found", Belt.Int.fromFloat(fIndex))->Error
    }

  let recordAtIndex = (dict: Js.Dict.t<expressionValue>, sIndex) =>
    switch Js.Dict.get(dict, sIndex) {
    | Some(value) => value->Ok
    | None => RERecordPropertyNotFound("Record property not found", sIndex)->Error
    }

  switch call {
  // | ("$constructRecord", pairArray)
  // | ("$atIndex", [EvArray(anArray), EvNumber(fIndex)]) => arrayAtIndex(anArray, fIndex)
  // | ("$atIndex", [EvRecord(aRecord), EvString(sIndex)]) => recordAtIndex(aRecord, sIndex)
  | ("$constructRecord", [EvArray(arrayOfPairs)]) => constructRecord(arrayOfPairs)
  | ("$atIndex", [EvArray(aValueArray), EvArray([EvNumber(fIndex)])]) =>
    arrayAtIndex(aValueArray, fIndex)
  | ("$atIndex", [EvRecord(dict), EvArray([EvString(sIndex)])]) => recordAtIndex(dict, sIndex)
  | ("$atIndex", [obj, index]) =>
    (toStringWithType(obj) ++ "??~~~~" ++ toStringWithType(index))->EvString->Ok
  | call => callMathJs(call)
  }
}

/*
  Lisp engine uses Result monad while reducing expressions
*/
let dispatch = (call: functionCall): result<expressionValue, errorValue> =>
  try {
    let (fn, args) = call
    // There is a bug that prevents string match in patterns
    // So we have to recreate a copy of the string
    ExternalLibrary.dispatch((Js.String.make(fn), args), callInternal)
  } catch {
  | Js.Exn.Error(obj) => REJavaScriptExn(Js.Exn.message(obj), Js.Exn.name(obj))->Error
  | _ => RETodo("unhandled rescript exception")->Error
  }
@@ -0,0 +1,28 @@
type errorValue =
  | REArrayIndexNotFound(string, int)
  | REFunctionExpected(string)
  | REJavaScriptExn(option<string>, option<string>) // Javascript Exception
  | RERecordPropertyNotFound(string, string)
  | RETodo(string) // To do

type t = errorValue

let errorToString = err =>
  switch err {
  | REArrayIndexNotFound(msg, index) => `${msg}: ${Js.String.make(index)}`
  | REFunctionExpected(msg) => `Function expected: ${msg}`
  | REJavaScriptExn(omsg, oname) => {
      let answer = "JS Exception:"
      let answer = switch oname {
      | Some(name) => `${answer} ${name}`
      | _ => answer
      }
      let answer = switch omsg {
      | Some(msg) => `${answer}: ${msg}`
      | _ => answer
      }
      answer
    }
  | RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
  | RETodo(msg) => `TODO: ${msg}`
  }
@@ -0,0 +1,91 @@
module BuiltIn = Reducer_Dispatch_BuiltIn
module ExpressionValue = ReducerInterface.ExpressionValue
module Extra = Reducer_Extra
module MathJs = Reducer_MathJs
module Result = Belt.Result
module T = Reducer_Expression_T
open Reducer_ErrorValue

type expression = T.expression
type expressionValue = ExpressionValue.expressionValue
type t = expression

/*
  Shows the Lisp code as text lisp code
*/
let rec toString = expression =>
  switch expression {
  | T.EList(aList) =>
    `(${Belt.List.map(aList, aValue => toString(aValue))
      ->Extra.List.interperse(" ")
      ->Belt.List.toArray
      ->Js.String.concatMany("")})`
  | EValue(aValue) => ExpressionValue.toString(aValue)
  }

let toStringResult = codeResult =>
  switch codeResult {
  | Ok(a) => `Ok(${toString(a)})`
  | Error(m) => `Error(${Js.String.make(m)})`
  }

/*
  Converts MathJs code to Lisp code
*/
let parse_ = (expr: string, parser, converter): result<t, errorValue> =>
  expr->parser->Result.flatMap(node => converter(node))

let parse = (mathJsCode: string): result<t, errorValue> =>
  mathJsCode->parse_(MathJs.Parse.parse, MathJs.ToExpression.fromNode)

module MapString = Belt.Map.String
type bindings = MapString.t<unit>
let defaultBindings: bindings = MapString.fromArray([])
// TODO Define bindings for function execution context

/*
  After reducing each level of the code tree, we have a value list to evaluate
*/
let reduceValueList = (valueList: list<expressionValue>): result<expressionValue, 'e> =>
  switch valueList {
  | list{EvSymbol(fName), ...args} => (fName, args->Belt.List.toArray)->BuiltIn.dispatch
  | _ => valueList->Belt.List.toArray->ExpressionValue.EvArray->Ok
  }

/*
  Recursively evaluate/reduce the code tree
*/
let rec reduceExpression = (expression: t, bindings): result<expressionValue, 'e> =>
  switch expression {
  | T.EValue(value) => value->Ok
  | T.EList(list) => {
      let racc: result<list<expressionValue>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
        racc,
        each: expression,
      ) =>
        racc->Result.flatMap(acc => {
          each
          ->reduceExpression(bindings)
          ->Result.flatMap(newNode => {
            acc->Belt.List.add(newNode)->Ok
          })
        })
      )
      racc->Result.flatMap(acc => acc->reduceValueList)
    }
  }

let evalWBindingsExpression = (aExpression, bindings): result<expressionValue, 'e> =>
  reduceExpression(aExpression, bindings)

/*
  Evaluates MathJs code via Lisp using bindings and answers the result
*/
let evalWBindings = (codeText: string, bindings: bindings) => {
  parse(codeText)->Result.flatMap(code => code->evalWBindingsExpression(bindings))
}

/*
  Evaluates MathJs code via Lisp and answers the result
*/
let eval = (code: string) => evalWBindings(code, defaultBindings)
@@ -0,0 +1,28 @@
module Result = Belt.Result
module T = Reducer_Expression_T
type expression = T.expression
type expressionValue = ReducerInterface.ExpressionValue.expressionValue
type t = expression
let toString: T.expression => Js.String.t
let toStringResult: result<T.expression, 'a> => string
let parse: string => result<expression, Reducer_ErrorValue.t>
module MapString = Belt.Map.String
type bindings = MapString.t<unit>
let defaultBindings: bindings
let reduceValueList: list<expressionValue> => result<
  expressionValue,
  Reducer_ErrorValue.t,
>
let reduceExpression: (expression, 'a) => result<
  expressionValue,
  Reducer_ErrorValue.t,
>
let evalWBindingsExpression: (expression, 'a) => result<
  expressionValue,
  Reducer_ErrorValue.t,
>
let evalWBindings: (string, bindings) => Result.t<
  expressionValue,
  Reducer_ErrorValue.t,
>
let eval: string => Result.t<expressionValue, Reducer_ErrorValue.t>