Merge branch 'develop' into namespace-integration
* develop: (61 commits)
  Minor UI refactors
  tweak: Indicate that the configuration step is optional
  tweak: Show what to click in the extension
  tweak: Better documentation
  Formatted code
  Moved env to GenericDist, and used it
  Replaced some scoring functions with functionRegistry functions
  Prior can't be a point
  v0.2.23
  Made new test call polymorphic constructor #ToFloat
  ⬆️ Bump @typescript-eslint/parser from 5.30.5 to 5.30.6
  ⬆️ Bump postcss-cli from 9.1.0 to 10.0.0
  ⬆️ Bump @typescript-eslint/eslint-plugin from 5.30.5 to 5.30.6
  ⬆️ Bump @docusaurus/preset-classic
  ⬆️ Bump clsx from 1.2.0 to 1.2.1
  ⬆️ Bump postcss-loader from 7.0.0 to 7.0.1
  ⬆️ Bump @hookform/resolvers from 2.9.3 to 2.9.5
  ⬆️ Bump @types/node from 18.0.0 to 18.0.3
  ⬆️ Bump @docusaurus/core from 2.0.0-beta.21 to 2.0.0-beta.22
  ⬆️ Bump @typescript-eslint/parser from 5.30.4 to 5.30.5
  ...

commit 04accf0075
@@ -1,29 +1,29 @@
 {
   "name": "@quri/squiggle-components",
-  "version": "0.2.20",
+  "version": "0.2.23",
   "license": "MIT",
   "dependencies": {
-    "@headlessui/react": "^1.6.5",
+    "@headlessui/react": "^1.6.6",
     "@heroicons/react": "^1.0.6",
-    "@hookform/resolvers": "^2.9.3",
+    "@hookform/resolvers": "^2.9.5",
     "@quri/squiggle-lang": "^0.2.8",
     "@react-hook/size": "^2.1.2",
-    "clsx": "^1.1.1",
-    "framer-motion": "^6.4.1",
+    "clsx": "^1.2.1",
+    "framer-motion": "^6.4.3",
     "lodash": "^4.17.21",
     "react": "^18.1.0",
     "react-ace": "^10.1.0",
-    "react-hook-form": "^7.33.0",
+    "react-hook-form": "^7.33.1",
     "react-use": "^17.4.0",
-    "react-vega": "^7.5.1",
+    "react-vega": "^7.6.0",
     "vega": "^5.22.1",
     "vega-embed": "^6.21.0",
-    "vega-lite": "^5.2.0",
+    "vega-lite": "^5.3.0",
     "vscode-uri": "^3.0.3",
     "yup": "^0.32.11"
   },
   "devDependencies": {
-    "@babel/plugin-proposal-private-property-in-object": "^7.17.12",
+    "@babel/plugin-proposal-private-property-in-object": "^7.18.6",
     "@storybook/addon-actions": "^6.5.9",
     "@storybook/addon-essentials": "^6.5.9",
     "@storybook/addon-links": "^6.5.9",
@@ -37,26 +37,26 @@
     "@testing-library/user-event": "^14.2.1",
     "@types/jest": "^27.5.0",
     "@types/lodash": "^4.14.182",
-    "@types/node": "^18.0.0",
+    "@types/node": "^18.0.3",
     "@types/react": "^18.0.9",
     "@types/styled-components": "^5.1.24",
     "@types/webpack": "^5.28.0",
     "cross-env": "^7.0.3",
     "mini-css-extract-plugin": "^2.6.1",
-    "postcss-cli": "^9.1.0",
+    "postcss-cli": "^10.0.0",
     "postcss-import": "^14.1.0",
-    "postcss-loader": "^7.0.0",
+    "postcss-loader": "^7.0.1",
     "react": "^18.1.0",
     "react-scripts": "^5.0.1",
     "style-loader": "^3.3.1",
-    "tailwindcss": "^3.1.3",
+    "tailwindcss": "^3.1.5",
     "ts-loader": "^9.3.0",
     "tsconfig-paths-webpack-plugin": "^3.5.2",
     "typescript": "^4.7.4",
     "web-vitals": "^2.1.4",
     "webpack": "^5.73.0",
     "webpack-cli": "^4.10.0",
-    "webpack-dev-server": "^4.9.2"
+    "webpack-dev-server": "^4.9.3"
   },
   "peerDependencies": {
     "react": "^16.8.0 || ^17 || ^18",
@@ -5,18 +5,15 @@ import {
   distributionError,
   distributionErrorToString,
 } from "@quri/squiggle-lang";
-import { Vega, VisualizationSpec } from "react-vega";
-import * as chartSpecification from "../vega-specs/spec-distributions.json";
+import { Vega } from "react-vega";
 import { ErrorAlert } from "./Alert";
 import { useSize } from "react-use";
 import clsx from "clsx";

 import {
-  linearXScale,
-  logXScale,
-  linearYScale,
-  expYScale,
-} from "./DistributionVegaScales";
+  buildVegaSpec,
+  DistributionChartSpecOptions,
+} from "../lib/distributionSpecBuilder";
 import { NumberShower } from "./NumberShower";

 export type DistributionPlottingSettings = {
@@ -24,19 +21,17 @@ export type DistributionPlottingSettings = {
   showSummary: boolean;
   /** Whether to show the user graph controls (scale etc) */
   showControls: boolean;
-  /** Set the x scale to be logarithmic by deault */
-  logX: boolean;
-  /** Set the y scale to be exponential by deault */
-  expY: boolean;
-};
+} & DistributionChartSpecOptions;

 export type DistributionChartProps = {
   distribution: Distribution;
   width?: number;
   height: number;
+  actions?: boolean;
 } & DistributionPlottingSettings;

-export const DistributionChart: React.FC<DistributionChartProps> = ({
+export const DistributionChart: React.FC<DistributionChartProps> = (props) => {
+  const {
     distribution,
     height,
     showSummary,
@@ -44,7 +39,8 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
     showControls,
     logX,
     expY,
-}) => {
+    actions = false,
+  } = props;
   const [isLogX, setLogX] = React.useState(logX);
   const [isExpY, setExpY] = React.useState(expY);

@@ -64,7 +60,7 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
       const massBelow0 =
         shape.value.continuous.some((x) => x.x <= 0) ||
         shape.value.discrete.some((x) => x.x <= 0);
-      const spec = buildVegaSpec(isLogX, isExpY);
+      const spec = buildVegaSpec(props);

       let widthProp = width ? width : size.width;
       if (widthProp < 20) {
@@ -82,7 +78,7 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
             data={{ con: shape.value.continuous, dis: shape.value.discrete }}
             width={widthProp - 10}
             height={height}
-            actions={false}
+            actions={actions}
           />
         ) : (
           <ErrorAlert heading="Log Domain Error">
@@ -116,16 +112,6 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
   return sized;
 };

-function buildVegaSpec(isLogX: boolean, isExpY: boolean): VisualizationSpec {
-  return {
-    ...chartSpecification,
-    scales: [
-      isLogX ? logXScale : linearXScale,
-      isExpY ? expYScale : linearYScale,
-    ],
-  } as VisualizationSpec;
-}
-
 interface CheckBoxProps {
   label: string;
   onChange: (x: boolean) => void;
@@ -44,6 +44,18 @@ export interface SquiggleChartProps {
   logX?: boolean;
   /** Set the y scale to be exponential by deault */
   expY?: boolean;
+  /** How to format numbers on the x axis */
+  tickFormat?: string;
+  /** Title of the graphed distribution */
+  title?: string;
+  /** Color of the graphed distribution */
+  color?: string;
+  /** Specify the lower bound of the x scale */
+  minX?: number;
+  /** Specify the upper bound of the x scale */
+  maxX?: number;
+  /** Whether to show vega actions to the user, so they can copy the chart spec */
+  distributionChartActions?: boolean;
 }

 const defaultOnChange = () => {};
@@ -65,6 +77,12 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
     diagramStart = 0,
     diagramStop = 10,
     diagramCount = 100,
+    tickFormat,
+    minX,
+    maxX,
+    color,
+    title,
+    distributionChartActions,
   }) => {
     const result = useSquiggle({
       code,
@@ -83,6 +101,12 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
       showSummary,
       logX,
       expY,
+      format: tickFormat,
+      minX,
+      maxX,
+      color,
+      title,
+      actions: distributionChartActions,
     };

     let chartSettings = {
@@ -18,7 +18,7 @@ import clsx from "clsx";

 import { defaultBindings, environment } from "@quri/squiggle-lang";

-import { SquiggleChart } from "./SquiggleChart";
+import { SquiggleChart, SquiggleChartProps } from "./SquiggleChart";
 import { CodeEditor } from "./CodeEditor";
 import { JsonEditor } from "./JsonEditor";
 import { ErrorAlert, SuccessAlert } from "./Alert";
@@ -27,28 +27,16 @@ import { Toggle } from "./ui/Toggle";
 import { Checkbox } from "./ui/Checkbox";
 import { StyledTab } from "./ui/StyledTab";

-interface PlaygroundProps {
+type PlaygroundProps = SquiggleChartProps & {
   /** The initial squiggle string to put in the playground */
   defaultCode?: string;
   /** How many pixels high is the playground */
-  height?: number;
-  /** Whether to show the types of outputs in the playground */
-  showTypes?: boolean;
-  /** Whether to show the log scale controls in the playground */
-  showControls?: boolean;
-  /** Whether to show the summary table in the playground */
-  showSummary?: boolean;
-  /** Whether to log the x coordinate on distribution charts */
-  logX?: boolean;
-  /** Whether to exp the y coordinate on distribution charts */
-  expY?: boolean;
-  /** If code is set, component becomes controlled */
-  code?: string;
   onCodeChange?(expr: string): void;
+  /* When settings change */
   onSettingsChange?(settings: any): void;
   /** Should we show the editor? */
   showEditor?: boolean;
-}
+};

 const schema = yup.object({}).shape({
   sampleCount: yup
@@ -82,6 +70,12 @@ const schema = yup.object({}).shape({
   showEditor: yup.boolean().required(),
   logX: yup.boolean().required(),
   expY: yup.boolean().required(),
+  tickFormat: yup.string().default(".9~s"),
+  title: yup.string(),
+  color: yup.string().default("#739ECC").required(),
+  minX: yup.number(),
+  maxX: yup.number(),
+  distributionChartActions: yup.boolean(),
   showSettingsPage: yup.boolean().default(false),
   diagramStart: yup.number().required().positive().integer().default(0).min(0),
   diagramStop: yup.number().required().positive().integer().default(10).min(0),
@@ -114,7 +108,7 @@ function InputItem<T>({
 }: {
   name: Path<T>;
   label: string;
-  type: "number";
+  type: "number" | "text" | "color";
   register: UseFormRegister<T>;
 }) {
   return (
@@ -122,7 +116,7 @@ function InputItem<T>({
       <div className="text-sm font-medium text-gray-600 mb-1">{label}</div>
       <input
         type={type}
-        {...register(name)}
+        {...register(name, { valueAsNumber: type === "number" })}
         className="form-input max-w-lg block w-full shadow-sm focus:ring-indigo-500 focus:border-indigo-500 sm:max-w-xs sm:text-sm border-gray-300 rounded-md"
       />
     </label>
@@ -202,6 +196,11 @@ const ViewSettings: React.FC<{ register: UseFormRegister<FormFields> }> = ({
             name="expY"
             label="Show y scale exponentially"
           />
+          <Checkbox
+            register={register}
+            name="distributionChartActions"
+            label="Show vega chart controls"
+          />
           <Checkbox
             register={register}
             name="showControls"
@@ -212,6 +211,36 @@ const ViewSettings: React.FC<{ register: UseFormRegister<FormFields> }> = ({
             name="showSummary"
             label="Show summary statistics"
           />
+          <InputItem
+            name="minX"
+            type="number"
+            register={register}
+            label="Min X Value"
+          />
+          <InputItem
+            name="maxX"
+            type="number"
+            register={register}
+            label="Max X Value"
+          />
+          <InputItem
+            name="title"
+            type="text"
+            register={register}
+            label="Title"
+          />
+          <InputItem
+            name="tickFormat"
+            type="text"
+            register={register}
+            label="Tick Format"
+          />
+          <InputItem
+            name="color"
+            type="color"
+            register={register}
+            label="Color"
+          />
         </div>
       </HeadedSection>
     </div>
@@ -385,6 +414,12 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
   showSummary = false,
   logX = false,
   expY = false,
+  title,
+  minX,
+  maxX,
+  color = "#739ECC",
+  tickFormat = ".9~s",
+  distributionChartActions,
   code: controlledCode,
   onCodeChange,
   onSettingsChange,
@@ -408,6 +443,12 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
     showControls,
     logX,
     expY,
+    title,
+    minX,
+    maxX,
+    color,
+    tickFormat,
+    distributionChartActions,
     showSummary,
     showEditor,
     leftSizePercent: 50,
@@ -440,15 +481,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
     <SquiggleChart
       code={renderedCode}
       environment={env}
-      diagramStart={Number(vars.diagramStart)}
-      diagramStop={Number(vars.diagramStop)}
-      diagramCount={Number(vars.diagramCount)}
-      height={vars.chartHeight}
-      showTypes={vars.showTypes}
-      showControls={vars.showControls}
-      showSummary={vars.showSummary}
-      logX={vars.logX}
-      expY={vars.expY}
+      {...vars}
       bindings={defaultBindings}
       jsImports={imports}
     />
@@ -488,7 +521,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
   );

   const withEditor = (
-    <div className="flex mt-1">
+    <div className="flex mt-2">
       <div className="w-1/2">{tabs}</div>
       <div className="w-1/2 p-2 pl-4">{squiggleChart}</div>
     </div>
@@ -496,6 +529,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({

   const withoutEditor = <div className="mt-3">{tabs}</div>;

+  console.log(vars);
   return (
     <SquiggleContainer>
       <StyledTab.Group>
@@ -23,8 +23,8 @@ export const Toggle: React.FC<Props> = ({
       layout
       transition={{ duration: 0.2 }}
       className={clsx(
-        "rounded-full py-1 bg-indigo-500 text-white text-xs font-semibold flex items-center space-x-1",
-        status ? "bg-indigo-500" : "bg-gray-400",
+        "rounded-md py-0.5 bg-slate-500 text-white text-xs font-semibold flex items-center space-x-1",
+        status ? "bg-slate-500" : "bg-gray-400",
         status ? "pl-1 pr-3" : "pl-3 pr-1",
         !status && "flex-row-reverse space-x-reverse"
       )}
packages/components/src/lib/distributionSpecBuilder.ts (new file, 256 lines)
@@ -0,0 +1,256 @@
import { VisualizationSpec } from "react-vega";
import type { LogScale, LinearScale, PowScale } from "vega";

export type DistributionChartSpecOptions = {
  /** Set the x scale to be logarithmic by deault */
  logX: boolean;
  /** Set the y scale to be exponential by deault */
  expY: boolean;
  /** The minimum x coordinate shown on the chart */
  minX?: number;
  /** The maximum x coordinate shown on the chart */
  maxX?: number;
  /** The color of the chart */
  color?: string;
  /** The title of the chart */
  title?: string;
  /** The formatting of the ticks */
  format?: string;
};

export let linearXScale: LinearScale = {
  name: "xscale",
  clamp: true,
  type: "linear",
  range: "width",
  zero: false,
  nice: false,
  domain: {
    fields: [
      {
        data: "con",
        field: "x",
      },
      {
        data: "dis",
        field: "x",
      },
    ],
  },
};
export let linearYScale: LinearScale = {
  name: "yscale",
  type: "linear",
  range: "height",
  zero: false,
  domain: {
    fields: [
      {
        data: "con",
        field: "y",
      },
      {
        data: "dis",
        field: "y",
      },
    ],
  },
};

export let logXScale: LogScale = {
  name: "xscale",
  type: "log",
  range: "width",
  zero: false,
  base: 10,
  nice: false,
  clamp: true,
  domain: {
    fields: [
      {
        data: "con",
        field: "x",
      },
      {
        data: "dis",
        field: "x",
      },
    ],
  },
};

export let expYScale: PowScale = {
  name: "yscale",
  type: "pow",
  exponent: 0.1,
  range: "height",
  zero: false,
  nice: false,
  domain: {
    fields: [
      {
        data: "con",
        field: "y",
      },
      {
        data: "dis",
        field: "y",
      },
    ],
  },
};

export function buildVegaSpec(
  specOptions: DistributionChartSpecOptions
): VisualizationSpec {
  let {
    format = ".9~s",
    color = "#739ECC",
    title,
    minX,
    maxX,
    logX,
    expY,
  } = specOptions;

  let xScale = logX ? logXScale : linearXScale;
  if (minX !== undefined && Number.isFinite(minX)) {
    xScale = { ...xScale, domainMin: minX };
  }

  if (maxX !== undefined && Number.isFinite(maxX)) {
    xScale = { ...xScale, domainMax: maxX };
  }

  let spec: VisualizationSpec = {
    $schema: "https://vega.github.io/schema/vega/v5.json",
    description: "A basic area chart example",
    width: 500,
    height: 100,
    padding: 5,
    data: [
      {
        name: "con",
      },
      {
        name: "dis",
      },
    ],
    signals: [],
    scales: [xScale, expY ? expYScale : linearYScale],
    axes: [
      {
        orient: "bottom",
        scale: "xscale",
        labelColor: "#727d93",
        tickColor: "#fff",
        tickOpacity: 0.0,
        domainColor: "#fff",
        domainOpacity: 0.0,
        format: format,
        tickCount: 10,
      },
    ],
    marks: [
      {
        type: "area",
        from: {
          data: "con",
        },
        encode: {
          update: {
            interpolate: { value: "linear" },
            x: {
              scale: "xscale",
              field: "x",
            },
            y: {
              scale: "yscale",
              field: "y",
            },
            y2: {
              scale: "yscale",
              value: 0,
            },
            fill: {
              value: color,
            },
            fillOpacity: {
              value: 1,
            },
          },
        },
      },
      {
        type: "rect",
        from: {
          data: "dis",
        },
        encode: {
          enter: {
            width: {
              value: 1,
            },
          },
          update: {
            x: {
              scale: "xscale",
              field: "x",
            },
            y: {
              scale: "yscale",
              field: "y",
            },
            y2: {
              scale: "yscale",
              value: 0,
            },
            fill: {
              value: "#2f65a7",
            },
          },
        },
      },
      {
        type: "symbol",
        from: {
          data: "dis",
        },
        encode: {
          enter: {
            shape: {
              value: "circle",
            },
            size: [{ value: 100 }],
            tooltip: {
              signal: "datum.y",
            },
          },
          update: {
            x: {
              scale: "xscale",
              field: "x",
            },
            y: {
              scale: "yscale",
              field: "y",
            },
            fill: {
              value: "#1e4577",
            },
          },
        },
      },
    ],
  };
  if (title) {
    spec = {
      ...spec,
      title: {
        text: title,
      },
    };
  }

  return spec;
}
@@ -3,7 +3,7 @@ import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";

 <Meta title="Squiggle/SquiggleChart" component={SquiggleChart} />

-export const Template = SquiggleChart;
+export const Template = (props) => <SquiggleChart {...props} />;
 /*
 We have to hardcode a width here, because otherwise some interaction with
 Storybook creates an infinite loop with the internal width
@@ -1,7 +1,7 @@
 open Jest
 open Expect

-let env: DistributionOperation.env = {
+let env: GenericDist.env = {
   sampleCount: 100,
   xyPointLength: 100,
 }
@@ -34,7 +34,7 @@ describe("sparkline", () => {
     expected: DistributionOperation.outputType,
   ) => {
     test(name, () => {
-      let result = DistributionOperation.run(~env, FromDist(ToString(ToSparkline(20)), dist))
+      let result = DistributionOperation.run(~env, FromDist(#ToString(ToSparkline(20)), dist))
       expect(result)->toEqual(expected)
     })
   }
@@ -81,8 +81,8 @@ describe("sparkline", () => {
 describe("toPointSet", () => {
   test("on symbolic normal distribution", () => {
     let result =
-      run(FromDist(ToDist(ToPointSet), normalDist5))
-      ->outputMap(FromDist(ToFloat(#Mean)))
+      run(FromDist(#ToDist(ToPointSet), normalDist5))
+      ->outputMap(FromDist(#ToFloat(#Mean)))
       ->toFloat
       ->toExt
     expect(result)->toBeSoCloseTo(5.0, ~digits=0)
@@ -90,10 +90,10 @@ describe("toPointSet", () => {

   test("on sample set", () => {
     let result =
-      run(FromDist(ToDist(ToPointSet), normalDist5))
-      ->outputMap(FromDist(ToDist(ToSampleSet(1000))))
-      ->outputMap(FromDist(ToDist(ToPointSet)))
-      ->outputMap(FromDist(ToFloat(#Mean)))
+      run(FromDist(#ToDist(ToPointSet), normalDist5))
+      ->outputMap(FromDist(#ToDist(ToSampleSet(1000))))
+      ->outputMap(FromDist(#ToDist(ToPointSet)))
+      ->outputMap(FromDist(#ToFloat(#Mean)))
       ->toFloat
       ->toExt
     expect(result)->toBeSoCloseTo(5.0, ~digits=-1)
@@ -19,7 +19,6 @@ exception MixtureFailed
 let float1 = 1.0
 let float2 = 2.0
 let float3 = 3.0
-let {mkDelta} = module(TestHelpers)
-let point1 = mkDelta(float1)
-let point2 = mkDelta(float2)
-let point3 = mkDelta(float3)
+let point1 = TestHelpers.mkDelta(float1)
+let point2 = TestHelpers.mkDelta(float2)
+let point3 = TestHelpers.mkDelta(float3)
@@ -11,7 +11,7 @@ describe("mixture", () => {
     let (mean1, mean2) = tup
     let meanValue = {
       run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))->outputMap(
-        FromDist(ToFloat(#Mean)),
+        FromDist(#ToFloat(#Mean)),
       )
     }
     meanValue->unpackFloat->expect->toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
@@ -28,7 +28,7 @@ describe("mixture", () => {
     let meanValue = {
       run(
         Mixture([(mkBeta(alpha, beta), betaWeight), (mkExponential(rate), exponentialWeight)]),
-      )->outputMap(FromDist(ToFloat(#Mean)))
+      )->outputMap(FromDist(#ToFloat(#Mean)))
     }
     let betaMean = 1.0 /. (1.0 +. beta /. alpha)
     let exponentialMean = 1.0 /. rate
@@ -52,7 +52,7 @@ describe("mixture", () => {
           (mkUniform(low, high), uniformWeight),
           (mkLognormal(mu, sigma), lognormalWeight),
         ]),
-      )->outputMap(FromDist(ToFloat(#Mean)))
+      )->outputMap(FromDist(#ToFloat(#Mean)))
     }
     let uniformMean = (low +. high) /. 2.0
     let lognormalMean = mu +. sigma ** 2.0 /. 2.0
@@ -3,6 +3,7 @@ open Expect
 open TestHelpers
 open GenericDist_Fixtures

+let klDivergence = DistributionOperation.Constructors.LogScore.distEstimateDistAnswer(~env)
 // integral from low to high of 1 / (high - low) log(normal(mean, stdev)(x) / (1 / (high - low))) dx
 let klNormalUniform = (mean, stdev, low, high): float =>
   -.Js.Math.log((high -. low) /. Js.Math.sqrt(2.0 *. MagicNumbers.Math.pi *. stdev ** 2.0)) +.
@@ -11,8 +12,6 @@ let klNormalUniform = (mean, stdev, low, high): float =>
   (mean ** 2.0 -. (high +. low) *. mean +. (low ** 2.0 +. high *. low +. high ** 2.0) /. 3.0)

 describe("klDivergence: continuous -> continuous -> float", () => {
-  let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
-
   let testUniform = (lowAnswer, highAnswer, lowPrediction, highPrediction) => {
     test("of two uniforms is equal to the analytic expression", () => {
       let answer =
@@ -58,7 +57,7 @@ describe("klDivergence: continuous -> continuous -> float", () => {
     let kl = E.R.liftJoin2(klDivergence, prediction, answer)

     switch kl {
-    | Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=3)
+    | Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=2)
     | Error(err) => {
         Js.Console.log(DistributionTypes.Error.toString(err))
         raise(KlFailed)
@@ -82,7 +81,6 @@ describe("klDivergence: continuous -> continuous -> float", () => {
 })

 describe("klDivergence: discrete -> discrete -> float", () => {
-  let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
   let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
   let a' = [(point1, 1e0), (point2, 1e0)]->mixture->run
   let b' = [(point1, 1e0), (point2, 1e0), (point3, 1e0)]->mixture->run
@@ -117,7 +115,6 @@ describe("klDivergence: discrete -> discrete -> float", () => {
 })

 describe("klDivergence: mixed -> mixed -> float", () => {
-  let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
   let mixture' = a => DistributionTypes.DistributionOperation.Mixture(a)
   let mixture = a => {
     let dist' = a->mixture'->run
@@ -189,15 +186,15 @@ describe("combineAlongSupportOfSecondArgument0", () => {
     uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
       s,
     ))
-  let answerWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), answer)
-  let predictionWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), prediction)
+  let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
+  let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)

   let interpolator = XYShape.XtoY.continuousInterpolator(#Stepwise, #UseZero)
-  let integrand = PointSetDist_Scoring.KLDivergence.integrand
+  let integrand = PointSetDist_Scoring.WithDistAnswer.integrand

   let result = switch (answerWrapped, predictionWrapped) {
   | (Ok(Dist(PointSet(Continuous(a)))), Ok(Dist(PointSet(Continuous(b))))) =>
-    Some(combineAlongSupportOfSecondArgument(integrand, interpolator, a.xyShape, b.xyShape))
+    Some(combineAlongSupportOfSecondArgument(interpolator, integrand, a.xyShape, b.xyShape))
   | _ => None
   }
   result
@@ -0,0 +1,68 @@
open Jest
open Expect
open TestHelpers
open GenericDist_Fixtures
exception ScoreFailed

describe("WithScalarAnswer: discrete -> scalar -> score", () => {
  let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
  let pointA = mkDelta(3.0)
  let pointB = mkDelta(2.0)
  let pointC = mkDelta(1.0)
  let pointD = mkDelta(0.0)

  test("score: agrees with analytical answer when finite", () => {
    let prediction' = [(pointA, 0.25), (pointB, 0.25), (pointC, 0.25), (pointD, 0.25)]->mixture->run
    let prediction = switch prediction' {
    | Dist(PointSet(p)) => p
    | _ => raise(MixtureFailed)
    }

    let answer = 2.0 // So this is: assigning 100% probability to 2.0
    let result = PointSetDist_Scoring.WithScalarAnswer.score(~estimate=prediction, ~answer)
    switch result {
    | Ok(x) => x->expect->toEqual(-.Js.Math.log(0.25 /. 1.0))
    | _ => raise(ScoreFailed)
    }
  })

  test("score: agrees with analytical answer when finite", () => {
    let prediction' = [(pointA, 0.75), (pointB, 0.25)]->mixture->run
    let prediction = switch prediction' {
    | Dist(PointSet(p)) => p
    | _ => raise(MixtureFailed)
    }
    let answer = 3.0 // So this is: assigning 100% probability to 2.0
    let result = PointSetDist_Scoring.WithScalarAnswer.score(~estimate=prediction, ~answer)
    switch result {
    | Ok(x) => x->expect->toEqual(-.Js.Math.log(0.75 /. 1.0))
    | _ => raise(ScoreFailed)
    }
  })

  test("scoreWithPrior: agrees with analytical answer when finite", () => {
    let prior' = [(pointA, 0.5), (pointB, 0.5)]->mixture->run
    let prediction' = [(pointA, 0.75), (pointB, 0.25)]->mixture->run

    let prediction = switch prediction' {
    | Dist(PointSet(p)) => p
    | _ => raise(MixtureFailed)
    }

    let prior = switch prior' {
    | Dist(PointSet(p)) => p
    | _ => raise(MixtureFailed)
    }

    let answer = 3.0 // So this is: assigning 100% probability to 2.0
    let result = PointSetDist_Scoring.WithScalarAnswer.scoreWithPrior(
      ~estimate=prediction,
      ~answer,
      ~prior,
    )
    switch result {
    | Ok(x) => x->expect->toEqual(-.Js.Math.log(0.75 /. 1.0) -. -.Js.Math.log(0.5 /. 1.0))
    | _ => raise(ScoreFailed)
    }
  })
})
@@ -8,34 +8,34 @@ let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean,
 describe("(Symbolic) normalize", () => {
   testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
     let normalValue = mkNormal(mean, 2.0)
-    let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
+    let normalizedValue = run(FromDist(#ToDist(Normalize), normalValue))
     normalizedValue->unpackDist->expect->toEqual(normalValue)
   })
 })

 describe("(Symbolic) mean", () => {
   testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
-    run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
+    run(FromDist(#ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
   })

   Skip.test("of normal(0, -1) (it NaNs out)", () => {
-    run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
+    run(FromDist(#ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
   })

   test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
-    run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
+    run(FromDist(#ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
   })

   testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
+      FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
     )
     meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
   })

   test("of a cauchy distribution", () => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
+      FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
     )
     meanValue->unpackFloat->expect->toBeSoCloseTo(1.0098094001641797, ~digits=5)
     //-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
@@ -48,7 +48,7 @@ describe("(Symbolic) mean", () => {
     let (low, medium, high) = tup
     let meanValue = run(
       FromDist(
-        ToFloat(#Mean),
+        #ToFloat(#Mean),
         DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
       ),
     )
@@ -63,7 +63,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (alpha, beta) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
     },
@@ -72,7 +72,7 @@ describe("(Symbolic) mean", () => {
   // TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
   test("of beta(0, 0)", () => {
     let meanValue = run(
-      FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
+      FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
     )
     meanValue->unpackFloat->expect->ExpectJs.toBeFalsy
   })
@@ -85,7 +85,7 @@ describe("(Symbolic) mean", () => {
       let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
       let meanValue =
         betaDistribution->E.R2.fmap(d =>
-          run(FromDist(ToFloat(#Mean), d->DistributionTypes.Symbolic))
+          run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic))
         )
       switch meanValue {
       | Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)
@@ -100,7 +100,7 @@ describe("(Symbolic) mean", () => {
     tup => {
      let (mu, sigma) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
     },
@@ -112,14 +112,14 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (low, high) = tup
       let meanValue = run(
-        FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
     },
   )

   test("of a float", () => {
-    let meanValue = run(FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
+    let meanValue = run(FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
     meanValue->unpackFloat->expect->toBeCloseTo(7.7)
   })
 })
@@ -22,6 +22,13 @@ module FooImplementation = {
   let makeFoo = (a: string, b: string, _environment): string => `I am ${a}-foo and I am ${b}-foo`
   let makeBar = (a: float, b: float, _environment): string =>
     `I am ${a->Js.Float.toString}-bar and I am ${b->Js.Float.toString}-bar`
+  // You can also define functions that has their internal errors
+  let makeReturningError = (_a: float, _b: float, _environment): result<float, ErrorValue.t> =>
+    if false {
+      0.->Ok
+    } else {
+      ErrorValue.RETodo("test error")->Error
+    }
 }

 // There is a potential for type modules to define lift functions
@@ -38,6 +45,17 @@ module FooFFI = {
     | [IEvNumber(a), IEvNumber(b)] => FooImplementation.makeBar(a, b, environment)->IEvString->Some
     | _ => None
     }
+  let makeReturningError: ExpressionT.optionFfiFnReturningResult = (
+    args: array<InternalExpressionValue.t>,
+    environment,
+  ) =>
+    switch args {
+    | [IEvNumber(a), IEvNumber(b)] =>
+      FooImplementation.makeReturningError(a, b, environment)
+      ->Belt.Result.map(v => v->InternalExpressionValue.IEvNumber)
+      ->Some
+    | _ => None
+    }
 }

 let fooModule: Module.t =
@@ -47,6 +65,7 @@ let fooModule: Module.t =
     ->Module.defineBool("fooBool", FooImplementation.fooBool)
     ->Module.defineFunction("makeFoo", FooFFI.makeFoo)
     ->Module.defineFunction("makeBar", FooFFI.makeBar)
+    ->Module.defineFunctionReturningResult("makeReturningError", FooFFI.makeReturningError)

 let makeBindings = (prevBindings: Bindings.t): Bindings.t =>
   prevBindings->Module.defineModule("Foo", fooModule)
@@ -29,7 +29,7 @@ let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Ou

 let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)

-let env: DistributionOperation.env = {
+let env: GenericDist.env = {
   sampleCount: MagicNumbers.Environment.defaultSampleCount,
   xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
 }
@@ -65,7 +65,7 @@
     "rescript-fast-check": "^1.1.1",
     "ts-jest": "^27.1.4",
     "ts-loader": "^9.3.0",
-    "ts-node": "^10.8.1",
+    "ts-node": "^10.8.2",
     "typescript": "^4.7.4",
     "webpack": "^5.73.0",
     "webpack-cli": "^4.10.0"
@@ -4,12 +4,9 @@ type error = DistributionTypes.error

 // TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.

-type env = {
-  sampleCount: int,
-  xyPointLength: int,
-}
+type env = GenericDist.env

-let defaultEnv = {
+let defaultEnv: env = {
   sampleCount: MagicNumbers.Environment.defaultSampleCount,
   xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
 }
@@ -93,7 +90,7 @@ module OutputLocal = {
   }
 }

-let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
+let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
   let {sampleCount, xyPointLength} = env

   let reCall = (~env=env, ~functionCallInfo=functionCallInfo, ()) => {
@@ -101,14 +98,14 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
   }

   let toPointSetFn = r => {
-    switch reCall(~functionCallInfo=FromDist(ToDist(ToPointSet), r), ()) {
+    switch reCall(~functionCallInfo=FromDist(#ToDist(ToPointSet), r), ()) {
     | Dist(PointSet(p)) => Ok(p)
     | e => Error(OutputLocal.toErrorOrUnreachable(e))
     }
   }

   let toSampleSetFn = r => {
-    switch reCall(~functionCallInfo=FromDist(ToDist(ToSampleSet(sampleCount)), r), ()) {
+    switch reCall(~functionCallInfo=FromDist(#ToDist(ToSampleSet(sampleCount)), r), ()) {
     | Dist(SampleSet(p)) => Ok(p)
     | e => Error(OutputLocal.toErrorOrUnreachable(e))
     }
@@ -116,13 +113,13 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {

   let scaleMultiply = (r, weight) =>
     reCall(
-      ~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
+      ~functionCallInfo=FromDist(#ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
       (),
     )->OutputLocal.toDistR

   let pointwiseAdd = (r1, r2) =>
     reCall(
-      ~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
+      ~functionCallInfo=FromDist(#ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
       (),
     )->OutputLocal.toDistR

@@ -131,49 +128,40 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
     dist: genericDist,
   ): outputType => {
     let response = switch subFnName {
-    | ToFloat(distToFloatOperation) =>
+    | #ToFloat(distToFloatOperation) =>
       GenericDist.toFloatOperation(dist, ~toPointSetFn, ~distToFloatOperation)
       ->E.R2.fmap(r => Float(r))
       ->OutputLocal.fromResult
-    | ToString(ToString) => dist->GenericDist.toString->String
-    | ToString(ToSparkline(bucketCount)) =>
+    | #ToString(ToString) => dist->GenericDist.toString->String
+    | #ToString(ToSparkline(bucketCount)) =>
      GenericDist.toSparkline(dist, ~sampleCount, ~bucketCount, ())
      ->E.R2.fmap(r => String(r))
      ->OutputLocal.fromResult
-    | ToDist(Inspect) => {
+    | #ToDist(Inspect) => {
        Js.log2("Console log requested: ", dist)
        Dist(dist)
      }
-    | ToDist(Normalize) => dist->GenericDist.normalize->Dist
-    | ToScore(KLDivergence(t2)) =>
-      GenericDist.Score.klDivergence(dist, t2, ~toPointSetFn)
-      ->E.R2.fmap(r => Float(r))
+    | #ToDist(Normalize) => dist->GenericDist.normalize->Dist
+    | #ToScore(LogScore(answer, prior)) =>
+      GenericDist.Score.logScore(~estimate=dist, ~answer, ~prior, ~env)
+      ->E.R2.fmap(s => Float(s))
       ->OutputLocal.fromResult
-    | ToScore(LogScore(answer, prior)) =>
-      GenericDist.Score.logScoreWithPointResolution(
-        ~prediction=dist,
-        ~answer,
-        ~prior,
-        ~toPointSetFn,
-      )
-      ->E.R2.fmap(r => Float(r))
-      ->OutputLocal.fromResult
-    | ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
-    | ToDist(Truncate(leftCutoff, rightCutoff)) =>
+    | #ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
+    | #ToDist(Truncate(leftCutoff, rightCutoff)) =>
      GenericDist.truncate(~toPointSetFn, ~leftCutoff, ~rightCutoff, dist, ())
      ->E.R2.fmap(r => Dist(r))
      ->OutputLocal.fromResult
-    | ToDist(ToSampleSet(n)) =>
+    | #ToDist(ToSampleSet(n)) =>
      dist
      ->GenericDist.toSampleSetDist(n)
      ->E.R2.fmap(r => Dist(SampleSet(r)))
      ->OutputLocal.fromResult
-    | ToDist(ToPointSet) =>
+    | #ToDist(ToPointSet) =>
      dist
      ->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
      ->E.R2.fmap(r => Dist(PointSet(r)))
      ->OutputLocal.fromResult
-    | ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
+    | #ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
      dist
      ->GenericDist.pointwiseCombinationFloat(
        ~toPointSetFn,
@@ -182,23 +170,23 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
      )
      ->E.R2.fmap(r => Dist(r))
      ->OutputLocal.fromResult
-    | ToDist(Scale(#Multiply, f)) =>
+    | #ToDist(Scale(#Multiply, f)) =>
      dist
      ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Multiply, ~f)
      ->E.R2.fmap(r => Dist(r))
      ->OutputLocal.fromResult
-    | ToDist(Scale(#Logarithm, f)) =>
+    | #ToDist(Scale(#Logarithm, f)) =>
      dist
      ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Logarithm, ~f)
      ->E.R2.fmap(r => Dist(r))
      ->OutputLocal.fromResult
-    | ToDist(Scale(#Power, f)) =>
+    | #ToDist(Scale(#Power, f)) =>
      dist
      ->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Power, ~f)
      ->E.R2.fmap(r => Dist(r))
      ->OutputLocal.fromResult
-    | ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
-    | ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
+    | #ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
+    | #ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
      dist
      ->GenericDist.algebraicCombination(
        ~strategy,
@@ -209,12 +197,12 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
      )
      ->E.R2.fmap(r => Dist(r))
      ->OutputLocal.fromResult
-    | ToDistCombination(Pointwise, algebraicCombination, #Dist(t2)) =>
+    | #ToDistCombination(Pointwise, algebraicCombination, #Dist(t2)) =>
      dist
      ->GenericDist.pointwiseCombination(~toPointSetFn, ~algebraicCombination, ~t2)
|
||||||
->E.R2.fmap(r => Dist(r))
|
->E.R2.fmap(r => Dist(r))
|
||||||
->OutputLocal.fromResult
|
->OutputLocal.fromResult
|
||||||
| ToDistCombination(Pointwise, algebraicCombination, #Float(f)) =>
|
| #ToDistCombination(Pointwise, algebraicCombination, #Float(f)) =>
|
||||||
dist
|
dist
|
||||||
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination, ~f)
|
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination, ~f)
|
||||||
->E.R2.fmap(r => Dist(r))
|
->E.R2.fmap(r => Dist(r))
|
||||||
|
@ -225,8 +213,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
||||||
|
|
||||||
switch functionCallInfo {
|
switch functionCallInfo {
|
||||||
| FromDist(subFnName, dist) => fromDistFn(subFnName, dist)
|
| FromDist(subFnName, dist) => fromDistFn(subFnName, dist)
|
||||||
| FromFloat(subFnName, float) =>
|
| FromFloat(subFnName, x) => reCall(~functionCallInfo=FromFloat(subFnName, x), ())
|
||||||
reCall(~functionCallInfo=FromDist(subFnName, GenericDist.fromFloat(float)), ())
|
|
||||||
| Mixture(dists) =>
|
| Mixture(dists) =>
|
||||||
dists
|
dists
|
||||||
->GenericDist.mixture(~scaleMultiplyFn=scaleMultiply, ~pointwiseAddFn=pointwiseAdd)
|
->GenericDist.mixture(~scaleMultiplyFn=scaleMultiply, ~pointwiseAddFn=pointwiseAdd)
|
||||||
|
@@ -278,13 +265,16 @@ module Constructors = {
   let pdf = (~env, dist, f) => C.pdf(dist, f)->run(~env)->toFloatR
   let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
   let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
-  let klDivergence = (~env, dist1, dist2) => C.klDivergence(dist1, dist2)->run(~env)->toFloatR
-  let logScoreWithPointResolution = (
-    ~env,
-    ~prediction: DistributionTypes.genericDist,
-    ~answer: float,
-    ~prior: option<DistributionTypes.genericDist>,
-  ) => C.logScoreWithPointResolution(~prediction, ~answer, ~prior)->run(~env)->toFloatR
+  module LogScore = {
+    let distEstimateDistAnswer = (~env, estimate, answer) =>
+      C.LogScore.distEstimateDistAnswer(estimate, answer)->run(~env)->toFloatR
+    let distEstimateDistAnswerWithPrior = (~env, estimate, answer, prior) =>
+      C.LogScore.distEstimateDistAnswerWithPrior(estimate, answer, prior)->run(~env)->toFloatR
+    let distEstimateScalarAnswer = (~env, estimate, answer) =>
+      C.LogScore.distEstimateScalarAnswer(estimate, answer)->run(~env)->toFloatR
+    let distEstimateScalarAnswerWithPrior = (~env, estimate, answer, prior) =>
+      C.LogScore.distEstimateScalarAnswerWithPrior(estimate, answer, prior)->run(~env)->toFloatR
+  }
   let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
   let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
   let fromSamples = (~env, xs) => C.fromSamples(xs)->run(~env)->toDistR

@@ -1,11 +1,5 @@
 @genType
-type env = {
-  sampleCount: int,
-  xyPointLength: int,
-}
-
-@genType
-let defaultEnv: env
+let defaultEnv: GenericDist.env
 
 open DistributionTypes
 
@@ -19,15 +13,18 @@ type outputType =
 | GenDistError(error)
 
 @genType
-let run: (~env: env, DistributionTypes.DistributionOperation.genericFunctionCallInfo) => outputType
+let run: (
+  ~env: GenericDist.env,
+  DistributionTypes.DistributionOperation.genericFunctionCallInfo,
+) => outputType
 let runFromDist: (
-  ~env: env,
+  ~env: GenericDist.env,
   ~functionCallInfo: DistributionTypes.DistributionOperation.fromDist,
   genericDist,
 ) => outputType
 let runFromFloat: (
-  ~env: env,
-  ~functionCallInfo: DistributionTypes.DistributionOperation.fromDist,
+  ~env: GenericDist.env,
+  ~functionCallInfo: DistributionTypes.DistributionOperation.fromFloat,
   float,
 ) => outputType
 
@ -42,79 +39,147 @@ module Output: {
|
||||||
let toBool: t => option<bool>
|
let toBool: t => option<bool>
|
||||||
let toBoolR: t => result<bool, error>
|
let toBoolR: t => result<bool, error>
|
||||||
let toError: t => option<error>
|
let toError: t => option<error>
|
||||||
let fmap: (~env: env, t, DistributionTypes.DistributionOperation.singleParamaterFunction) => t
|
let fmap: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
t,
|
||||||
|
DistributionTypes.DistributionOperation.singleParamaterFunction,
|
||||||
|
) => t
|
||||||
}
|
}
|
||||||
|
|
||||||
module Constructors: {
|
module Constructors: {
|
||||||
@genType
|
@genType
|
||||||
let mean: (~env: env, genericDist) => result<float, error>
|
let mean: (~env: GenericDist.env, genericDist) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let stdev: (~env: env, genericDist) => result<float, error>
|
let stdev: (~env: GenericDist.env, genericDist) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let variance: (~env: env, genericDist) => result<float, error>
|
let variance: (~env: GenericDist.env, genericDist) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let sample: (~env: env, genericDist) => result<float, error>
|
let sample: (~env: GenericDist.env, genericDist) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let cdf: (~env: env, genericDist, float) => result<float, error>
|
let cdf: (~env: GenericDist.env, genericDist, float) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let inv: (~env: env, genericDist, float) => result<float, error>
|
let inv: (~env: GenericDist.env, genericDist, float) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let pdf: (~env: env, genericDist, float) => result<float, error>
|
let pdf: (~env: GenericDist.env, genericDist, float) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let normalize: (~env: env, genericDist) => result<genericDist, error>
|
let normalize: (~env: GenericDist.env, genericDist) => result<genericDist, error>
|
||||||
@genType
|
@genType
|
||||||
let isNormalized: (~env: env, genericDist) => result<bool, error>
|
let isNormalized: (~env: GenericDist.env, genericDist) => result<bool, error>
|
||||||
|
module LogScore: {
|
||||||
@genType
|
@genType
|
||||||
let klDivergence: (~env: env, genericDist, genericDist) => result<float, error>
|
let distEstimateDistAnswer: (
|
||||||
@genType
|
~env: GenericDist.env,
|
||||||
let logScoreWithPointResolution: (
|
genericDist,
|
||||||
~env: env,
|
genericDist,
|
||||||
~prediction: genericDist,
|
|
||||||
~answer: float,
|
|
||||||
~prior: option<genericDist>,
|
|
||||||
) => result<float, error>
|
) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let toPointSet: (~env: env, genericDist) => result<genericDist, error>
|
let distEstimateDistAnswerWithPrior: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
|
let distEstimateScalarAnswer: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
float,
|
||||||
|
) => result<float, error>
|
||||||
@genType
|
@genType
|
||||||
let fromSamples: (~env: env, SampleSetDist.t) => result<genericDist, error>
|
let distEstimateScalarAnswerWithPrior: (
|
||||||
@genType
|
~env: GenericDist.env,
|
||||||
let truncate: (~env: env, genericDist, option<float>, option<float>) => result<genericDist, error>
|
genericDist,
|
||||||
@genType
|
float,
|
||||||
let inspect: (~env: env, genericDist) => result<genericDist, error>
|
genericDist,
|
||||||
@genType
|
) => result<float, error>
|
||||||
let toString: (~env: env, genericDist) => result<string, error>
|
}
|
||||||
@genType
|
@genType
|
||||||
let toSparkline: (~env: env, genericDist, int) => result<string, error>
|
let toPointSet: (~env: GenericDist.env, genericDist) => result<genericDist, error>
|
||||||
@genType
|
@genType
|
||||||
let algebraicAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
let toSampleSet: (~env: GenericDist.env, genericDist, int) => result<genericDist, error>
|
||||||
@genType
|
@genType
|
||||||
let algebraicMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
let fromSamples: (~env: GenericDist.env, SampleSetDist.t) => result<genericDist, error>
|
||||||
@genType
|
@genType
|
||||||
let algebraicDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
let truncate: (
|
||||||
@genType
|
~env: GenericDist.env,
|
||||||
let algebraicSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
genericDist,
|
||||||
@genType
|
option<float>,
|
||||||
let algebraicLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
option<float>,
|
||||||
@genType
|
) => result<genericDist, error>
|
||||||
let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
@genType
|
||||||
@genType
|
let inspect: (~env: GenericDist.env, genericDist) => result<genericDist, error>
|
||||||
let scaleLogarithm: (~env: env, genericDist, float) => result<genericDist, error>
|
@genType
|
||||||
@genType
|
let toString: (~env: GenericDist.env, genericDist) => result<string, error>
|
||||||
let scaleMultiply: (~env: env, genericDist, float) => result<genericDist, error>
|
@genType
|
||||||
@genType
|
let toSparkline: (~env: GenericDist.env, genericDist, int) => result<string, error>
|
||||||
let scalePower: (~env: env, genericDist, float) => result<genericDist, error>
|
@genType
|
||||||
@genType
|
let algebraicAdd: (~env: GenericDist.env, genericDist, genericDist) => result<genericDist, error>
|
||||||
let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
@genType
|
||||||
@genType
|
let algebraicMultiply: (
|
||||||
let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
~env: GenericDist.env,
|
||||||
@genType
|
genericDist,
|
||||||
let pointwiseDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
genericDist,
|
||||||
@genType
|
) => result<genericDist, error>
|
||||||
let pointwiseSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
@genType
|
||||||
@genType
|
let algebraicDivide: (
|
||||||
let pointwiseLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
~env: GenericDist.env,
|
||||||
@genType
|
genericDist,
|
||||||
let pointwisePower: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let algebraicSubtract: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let algebraicLogarithm: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let algebraicPower: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let scaleLogarithm: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let scaleMultiply: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let scalePower: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let pointwiseAdd: (~env: GenericDist.env, genericDist, genericDist) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let pointwiseMultiply: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let pointwiseDivide: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let pointwiseSubtract: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let pointwiseLogarithm: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
|
@genType
|
||||||
|
let pointwisePower: (
|
||||||
|
~env: GenericDist.env,
|
||||||
|
genericDist,
|
||||||
|
genericDist,
|
||||||
|
) => result<genericDist, error>
|
||||||
}
|
}
|
||||||
|
|
|
@@ -98,61 +98,86 @@ module DistributionOperation = {
    | ToString
    | ToSparkline(int)
 
-  type toScore = KLDivergence(genericDist) | LogScore(float, option<genericDist>)
+  type genericDistOrScalar = Score_Dist(genericDist) | Score_Scalar(float)
 
-  type fromDist =
-    | ToFloat(toFloat)
-    | ToDist(toDist)
-    | ToScore(toScore)
-    | ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
-    | ToString(toString)
-    | ToBool(toBool)
+  type toScore = LogScore(genericDistOrScalar, option<genericDist>)
+
+  type fromFloat = [
+    | #ToFloat(toFloat)
+    | #ToDist(toDist)
+    | #ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
+    | #ToString(toString)
+    | #ToBool(toBool)
+  ]
+
+  type fromDist = [
+    | fromFloat
+    | #ToScore(toScore)
+  ]
 
   type singleParamaterFunction =
     | FromDist(fromDist)
-    | FromFloat(fromDist)
+    | FromFloat(fromFloat)
 
   type genericFunctionCallInfo =
     | FromDist(fromDist, genericDist)
-    | FromFloat(fromDist, float)
+    | FromFloat(fromFloat, float)
     | FromSamples(array<float>)
     | Mixture(array<(genericDist, float)>)
 
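Aside (added for clarity; not part of the commit): the move from ordinary variants (`ToFloat(...)`) to polymorphic variants (`#ToFloat(...)`) is what makes the `fromDist = [ fromFloat | #ToScore(toScore) ]` definition above possible, since polymorphic variant types can be composed by inclusion. A minimal, hypothetical ReScript sketch of the same pattern, with made-up tags:

    // Hypothetical example, not from the codebase.
    type fromFloat = [#ToString | #ToBool]
    // fromDist reuses every tag of fromFloat and adds one of its own.
    type fromDist = [fromFloat | #ToScore]

    let describe = (x: fromDist) =>
      switch x {
      | #ToString => "string op"
      | #ToBool => "bool op"
      | #ToScore => "score op"
      }

    // Any fromFloat tag is also a valid fromDist, so no wrapping is needed:
    let _ = describe(#ToString)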
let distCallToString = (distFunction: fromDist): string =>
|
let floatCallToString = (floatFunction: fromFloat): string =>
|
||||||
switch distFunction {
|
switch floatFunction {
|
||||||
| ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
|
| #ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
|
||||||
| ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
|
| #ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
|
||||||
| ToFloat(#Mean) => `mean`
|
| #ToFloat(#Mean) => `mean`
|
||||||
| ToFloat(#Min) => `min`
|
| #ToFloat(#Min) => `min`
|
||||||
| ToFloat(#Max) => `max`
|
| #ToFloat(#Max) => `max`
|
||||||
| ToFloat(#Stdev) => `stdev`
|
| #ToFloat(#Stdev) => `stdev`
|
||||||
| ToFloat(#Variance) => `variance`
|
| #ToFloat(#Variance) => `variance`
|
||||||
| ToFloat(#Mode) => `mode`
|
| #ToFloat(#Mode) => `mode`
|
||||||
| ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
|
| #ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
|
||||||
| ToFloat(#Sample) => `sample`
|
| #ToFloat(#Sample) => `sample`
|
||||||
| ToFloat(#IntegralSum) => `integralSum`
|
| #ToFloat(#IntegralSum) => `integralSum`
|
||||||
| ToScore(KLDivergence(_)) => `klDivergence`
|
| #ToDist(Normalize) => `normalize`
|
||||||
| ToScore(LogScore(x, _)) => `logScore against ${E.Float.toFixed(x)}`
|
| #ToDist(ToPointSet) => `toPointSet`
|
||||||
| ToDist(Normalize) => `normalize`
|
| #ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
|
||||||
| ToDist(ToPointSet) => `toPointSet`
|
| #ToDist(Truncate(_, _)) => `truncate`
|
||||||
| ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
|
| #ToDist(Inspect) => `inspect`
|
||||||
| ToDist(Truncate(_, _)) => `truncate`
|
| #ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
|
||||||
| ToDist(Inspect) => `inspect`
|
| #ToDist(Scale(#Multiply, r)) => `scaleMultiply(${E.Float.toFixed(r)})`
|
||||||
| ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
|
| #ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
|
||||||
| ToDist(Scale(#Multiply, r)) => `scaleMultiply(${E.Float.toFixed(r)})`
|
| #ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
|
||||||
| ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
|
|
||||||
| ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
|
|
||||||
`scaleLogWithThreshold(${E.Float.toFixed(r)}, epsilon=${E.Float.toFixed(eps)})`
|
`scaleLogWithThreshold(${E.Float.toFixed(r)}, epsilon=${E.Float.toFixed(eps)})`
|
||||||
| ToString(ToString) => `toString`
|
| #ToString(ToString) => `toString`
|
||||||
| ToString(ToSparkline(n)) => `sparkline(${E.I.toString(n)})`
|
| #ToString(ToSparkline(n)) => `sparkline(${E.I.toString(n)})`
|
||||||
| ToBool(IsNormalized) => `isNormalized`
|
| #ToBool(IsNormalized) => `isNormalized`
|
||||||
| ToDistCombination(Algebraic(_), _, _) => `algebraic`
|
| #ToDistCombination(Algebraic(_), _, _) => `algebraic`
|
||||||
| ToDistCombination(Pointwise, _, _) => `pointwise`
|
| #ToDistCombination(Pointwise, _, _) => `pointwise`
|
||||||
|
}
|
||||||
|
|
||||||
|
let distCallToString = (
|
||||||
|
distFunction: [
|
||||||
|
| #ToFloat(toFloat)
|
||||||
|
| #ToDist(toDist)
|
||||||
|
| #ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
|
||||||
|
| #ToString(toString)
|
||||||
|
| #ToBool(toBool)
|
||||||
|
| #ToScore(toScore)
|
||||||
|
],
|
||||||
|
): string =>
|
||||||
|
switch distFunction {
|
||||||
|
| #ToScore(_) => `logScore`
|
||||||
|
| #ToFloat(x) => floatCallToString(#ToFloat(x))
|
||||||
|
| #ToDist(x) => floatCallToString(#ToDist(x))
|
||||||
|
| #ToString(x) => floatCallToString(#ToString(x))
|
||||||
|
| #ToBool(x) => floatCallToString(#ToBool(x))
|
||||||
|
| #ToDistCombination(x, y, z) => floatCallToString(#ToDistCombination(x, y, z))
|
||||||
}
|
}
|
||||||
|
|
||||||
let toString = (d: genericFunctionCallInfo): string =>
|
let toString = (d: genericFunctionCallInfo): string =>
|
||||||
switch d {
|
switch d {
|
||||||
| FromDist(f, _) | FromFloat(f, _) => distCallToString(f)
|
| FromDist(f, _) => distCallToString(f)
|
||||||
|
| FromFloat(f, _) => floatCallToString(f)
|
||||||
| Mixture(_) => `mixture`
|
| Mixture(_) => `mixture`
|
||||||
| FromSamples(_) => `fromSamples`
|
| FromSamples(_) => `fromSamples`
|
||||||
}
|
}
|
||||||
|
@ -162,80 +187,93 @@ module Constructors = {
|
||||||
|
|
||||||
module UsingDists = {
|
module UsingDists = {
|
||||||
@genType
|
@genType
|
||||||
let mean = (dist): t => FromDist(ToFloat(#Mean), dist)
|
let mean = (dist): t => FromDist(#ToFloat(#Mean), dist)
|
||||||
let stdev = (dist): t => FromDist(ToFloat(#Stdev), dist)
|
let stdev = (dist): t => FromDist(#ToFloat(#Stdev), dist)
|
||||||
let variance = (dist): t => FromDist(ToFloat(#Variance), dist)
|
let variance = (dist): t => FromDist(#ToFloat(#Variance), dist)
|
||||||
let sample = (dist): t => FromDist(ToFloat(#Sample), dist)
|
let sample = (dist): t => FromDist(#ToFloat(#Sample), dist)
|
||||||
let cdf = (dist, x): t => FromDist(ToFloat(#Cdf(x)), dist)
|
let cdf = (dist, x): t => FromDist(#ToFloat(#Cdf(x)), dist)
|
||||||
let inv = (dist, x): t => FromDist(ToFloat(#Inv(x)), dist)
|
let inv = (dist, x): t => FromDist(#ToFloat(#Inv(x)), dist)
|
||||||
let pdf = (dist, x): t => FromDist(ToFloat(#Pdf(x)), dist)
|
let pdf = (dist, x): t => FromDist(#ToFloat(#Pdf(x)), dist)
|
||||||
let normalize = (dist): t => FromDist(ToDist(Normalize), dist)
|
let normalize = (dist): t => FromDist(#ToDist(Normalize), dist)
|
||||||
let isNormalized = (dist): t => FromDist(ToBool(IsNormalized), dist)
|
let isNormalized = (dist): t => FromDist(#ToBool(IsNormalized), dist)
|
||||||
let toPointSet = (dist): t => FromDist(ToDist(ToPointSet), dist)
|
let toPointSet = (dist): t => FromDist(#ToDist(ToPointSet), dist)
|
||||||
let toSampleSet = (dist, r): t => FromDist(ToDist(ToSampleSet(r)), dist)
|
let toSampleSet = (dist, r): t => FromDist(#ToDist(ToSampleSet(r)), dist)
|
||||||
let fromSamples = (xs): t => FromSamples(xs)
|
let fromSamples = (xs): t => FromSamples(xs)
|
||||||
let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
|
let truncate = (dist, left, right): t => FromDist(#ToDist(Truncate(left, right)), dist)
|
||||||
let inspect = (dist): t => FromDist(ToDist(Inspect), dist)
|
let inspect = (dist): t => FromDist(#ToDist(Inspect), dist)
|
||||||
let klDivergence = (dist1, dist2): t => FromDist(ToScore(KLDivergence(dist2)), dist1)
|
module LogScore = {
|
||||||
let logScoreWithPointResolution = (~prediction, ~answer, ~prior): t => FromDist(
|
let distEstimateDistAnswer = (estimate, answer): t => FromDist(
|
||||||
ToScore(LogScore(answer, prior)),
|
#ToScore(LogScore(Score_Dist(answer), None)),
|
||||||
prediction,
|
estimate,
|
||||||
)
|
)
|
||||||
let scaleMultiply = (dist, n): t => FromDist(ToDist(Scale(#Multiply, n)), dist)
|
let distEstimateDistAnswerWithPrior = (estimate, answer, prior): t => FromDist(
|
||||||
let scalePower = (dist, n): t => FromDist(ToDist(Scale(#Power, n)), dist)
|
#ToScore(LogScore(Score_Dist(answer), Some(prior))),
|
||||||
let scaleLogarithm = (dist, n): t => FromDist(ToDist(Scale(#Logarithm, n)), dist)
|
estimate,
|
||||||
|
)
|
||||||
|
let distEstimateScalarAnswer = (estimate, answer): t => FromDist(
|
||||||
|
#ToScore(LogScore(Score_Scalar(answer), None)),
|
||||||
|
estimate,
|
||||||
|
)
|
||||||
|
let distEstimateScalarAnswerWithPrior = (estimate, answer, prior): t => FromDist(
|
||||||
|
#ToScore(LogScore(Score_Scalar(answer), Some(prior))),
|
||||||
|
estimate,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
let scaleMultiply = (dist, n): t => FromDist(#ToDist(Scale(#Multiply, n)), dist)
|
||||||
|
let scalePower = (dist, n): t => FromDist(#ToDist(Scale(#Power, n)), dist)
|
||||||
|
let scaleLogarithm = (dist, n): t => FromDist(#ToDist(Scale(#Logarithm, n)), dist)
|
||||||
let scaleLogarithmWithThreshold = (dist, n, eps): t => FromDist(
|
let scaleLogarithmWithThreshold = (dist, n, eps): t => FromDist(
|
||||||
ToDist(Scale(#LogarithmWithThreshold(eps), n)),
|
#ToDist(Scale(#LogarithmWithThreshold(eps), n)),
|
||||||
dist,
|
dist,
|
||||||
)
|
)
|
||||||
let toString = (dist): t => FromDist(ToString(ToString), dist)
|
let toString = (dist): t => FromDist(#ToString(ToString), dist)
|
||||||
let toSparkline = (dist, n): t => FromDist(ToString(ToSparkline(n)), dist)
|
let toSparkline = (dist, n): t => FromDist(#ToString(ToSparkline(n)), dist)
|
||||||
let algebraicAdd = (dist1, dist2: genericDist): t => FromDist(
|
let algebraicAdd = (dist1, dist2: genericDist): t => FromDist(
|
||||||
ToDistCombination(Algebraic(AsDefault), #Add, #Dist(dist2)),
|
#ToDistCombination(Algebraic(AsDefault), #Add, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let algebraicMultiply = (dist1, dist2): t => FromDist(
|
let algebraicMultiply = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Algebraic(AsDefault), #Multiply, #Dist(dist2)),
|
#ToDistCombination(Algebraic(AsDefault), #Multiply, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let algebraicDivide = (dist1, dist2): t => FromDist(
|
let algebraicDivide = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Algebraic(AsDefault), #Divide, #Dist(dist2)),
|
#ToDistCombination(Algebraic(AsDefault), #Divide, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let algebraicSubtract = (dist1, dist2): t => FromDist(
|
let algebraicSubtract = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Algebraic(AsDefault), #Subtract, #Dist(dist2)),
|
#ToDistCombination(Algebraic(AsDefault), #Subtract, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let algebraicLogarithm = (dist1, dist2): t => FromDist(
|
let algebraicLogarithm = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Algebraic(AsDefault), #Logarithm, #Dist(dist2)),
|
#ToDistCombination(Algebraic(AsDefault), #Logarithm, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let algebraicPower = (dist1, dist2): t => FromDist(
|
let algebraicPower = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Algebraic(AsDefault), #Power, #Dist(dist2)),
|
#ToDistCombination(Algebraic(AsDefault), #Power, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let pointwiseAdd = (dist1, dist2): t => FromDist(
|
let pointwiseAdd = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Pointwise, #Add, #Dist(dist2)),
|
#ToDistCombination(Pointwise, #Add, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let pointwiseMultiply = (dist1, dist2): t => FromDist(
|
let pointwiseMultiply = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
|
#ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let pointwiseDivide = (dist1, dist2): t => FromDist(
|
let pointwiseDivide = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
|
#ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let pointwiseSubtract = (dist1, dist2): t => FromDist(
|
let pointwiseSubtract = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
|
#ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let pointwiseLogarithm = (dist1, dist2): t => FromDist(
|
let pointwiseLogarithm = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
|
#ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
let pointwisePower = (dist1, dist2): t => FromDist(
|
let pointwisePower = (dist1, dist2): t => FromDist(
|
||||||
ToDistCombination(Pointwise, #Power, #Dist(dist2)),
|
#ToDistCombination(Pointwise, #Power, #Dist(dist2)),
|
||||||
dist1,
|
dist1,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,6 +6,11 @@ type toSampleSetFn = t => result<SampleSetDist.t, error>
|
||||||
type scaleMultiplyFn = (t, float) => result<t, error>
|
type scaleMultiplyFn = (t, float) => result<t, error>
|
||||||
type pointwiseAddFn = (t, t) => result<t, error>
|
type pointwiseAddFn = (t, t) => result<t, error>
|
||||||
|
|
||||||
|
type env = {
|
||||||
|
sampleCount: int,
|
||||||
|
xyPointLength: int,
|
||||||
|
}
|
||||||
|
|
||||||
let isPointSet = (t: t) =>
|
let isPointSet = (t: t) =>
|
||||||
switch t {
|
switch t {
|
||||||
| PointSet(_) => true
|
| PointSet(_) => true
|
||||||
|
@ -61,46 +66,6 @@ let integralEndY = (t: t): float =>
|
||||||
|
|
||||||
let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7
|
let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7
|
||||||
|
|
||||||
module Score = {
|
|
||||||
let klDivergence = (prediction, answer, ~toPointSetFn: toPointSetFn): result<float, error> => {
|
|
||||||
let pointSets = E.R.merge(toPointSetFn(prediction), toPointSetFn(answer))
|
|
||||||
pointSets |> E.R2.bind(((predi, ans)) =>
|
|
||||||
PointSetDist.T.klDivergence(predi, ans)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let logScoreWithPointResolution = (
|
|
||||||
~prediction: DistributionTypes.genericDist,
|
|
||||||
~answer: float,
|
|
||||||
~prior: option<DistributionTypes.genericDist>,
|
|
||||||
~toPointSetFn: toPointSetFn,
|
|
||||||
): result<float, error> => {
|
|
||||||
switch prior {
|
|
||||||
| Some(prior') =>
|
|
||||||
E.R.merge(toPointSetFn(prior'), toPointSetFn(prediction))->E.R.bind(((
|
|
||||||
prior'',
|
|
||||||
prediction'',
|
|
||||||
)) =>
|
|
||||||
PointSetDist.T.logScoreWithPointResolution(
|
|
||||||
~prediction=prediction'',
|
|
||||||
~answer,
|
|
||||||
~prior=prior''->Some,
|
|
||||||
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
|
||||||
)
|
|
||||||
| None =>
|
|
||||||
prediction
|
|
||||||
->toPointSetFn
|
|
||||||
->E.R.bind(x =>
|
|
||||||
PointSetDist.T.logScoreWithPointResolution(
|
|
||||||
~prediction=x,
|
|
||||||
~answer,
|
|
||||||
~prior=None,
|
|
||||||
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let toFloatOperation = (
|
let toFloatOperation = (
|
||||||
t,
|
t,
|
||||||
~toPointSetFn: toPointSetFn,
|
~toPointSetFn: toPointSetFn,
|
||||||
|
@ -171,6 +136,70 @@ let toPointSet = (
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
module Score = {
|
||||||
|
type genericDistOrScalar = DistributionTypes.DistributionOperation.genericDistOrScalar
|
||||||
|
|
||||||
|
let argsMake = (~esti: t, ~answ: genericDistOrScalar, ~prior: option<t>, ~env: env): result<
|
||||||
|
PointSetDist_Scoring.scoreArgs,
|
||||||
|
error,
|
||||||
|
> => {
|
||||||
|
let toPointSetFn = t =>
|
||||||
|
toPointSet(
|
||||||
|
t,
|
||||||
|
~xyPointLength=env.xyPointLength,
|
||||||
|
~sampleCount=env.sampleCount,
|
||||||
|
~xSelection=#ByWeight,
|
||||||
|
(),
|
||||||
|
)
|
||||||
|
let prior': option<result<PointSetTypes.pointSetDist, error>> = switch prior {
|
||||||
|
| None => None
|
||||||
|
| Some(d) => toPointSetFn(d)->Some
|
||||||
|
}
|
||||||
|
let twoDists = (~toPointSetFn, esti': t, answ': t): result<
|
||||||
|
(PointSetTypes.pointSetDist, PointSetTypes.pointSetDist),
|
||||||
|
error,
|
||||||
|
> => E.R.merge(toPointSetFn(esti'), toPointSetFn(answ'))
|
||||||
|
switch (esti, answ, prior') {
|
||||||
|
| (esti', Score_Dist(answ'), None) =>
|
||||||
|
twoDists(~toPointSetFn, esti', answ')->E.R2.fmap(((esti'', answ'')) =>
|
||||||
|
{estimate: esti'', answer: answ'', prior: None}->PointSetDist_Scoring.DistAnswer
|
||||||
|
)
|
||||||
|
| (esti', Score_Dist(answ'), Some(Ok(prior''))) =>
|
||||||
|
twoDists(~toPointSetFn, esti', answ')->E.R2.fmap(((esti'', answ'')) =>
|
||||||
|
{
|
||||||
|
estimate: esti'',
|
||||||
|
answer: answ'',
|
||||||
|
prior: Some(prior''),
|
||||||
|
}->PointSetDist_Scoring.DistAnswer
|
||||||
|
)
|
||||||
|
| (esti', Score_Scalar(answ'), None) =>
|
||||||
|
toPointSetFn(esti')->E.R2.fmap(esti'' =>
|
||||||
|
{
|
||||||
|
estimate: esti'',
|
||||||
|
answer: answ',
|
||||||
|
prior: None,
|
||||||
|
}->PointSetDist_Scoring.ScalarAnswer
|
||||||
|
)
|
||||||
|
| (esti', Score_Scalar(answ'), Some(Ok(prior''))) =>
|
||||||
|
toPointSetFn(esti')->E.R2.fmap(esti'' =>
|
||||||
|
{
|
||||||
|
estimate: esti'',
|
||||||
|
answer: answ',
|
||||||
|
prior: Some(prior''),
|
||||||
|
}->PointSetDist_Scoring.ScalarAnswer
|
||||||
|
)
|
||||||
|
| (_, _, Some(Error(err))) => err->Error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let logScore = (~estimate: t, ~answer: genericDistOrScalar, ~prior: option<t>, ~env: env): result<
|
||||||
|
float,
|
||||||
|
error,
|
||||||
|
> =>
|
||||||
|
argsMake(~esti=estimate, ~answ=answer, ~prior, ~env)->E.R.bind(x =>
|
||||||
|
x->PointSetDist.logScore->E.R2.errMap(y => DistributionTypes.OperationError(y))
|
||||||
|
)
|
||||||
|
}
|
||||||
/*
|
/*
|
||||||
PointSetDist.toSparkline calls "downsampleEquallyOverX", which downsamples it to n=bucketCount.
|
PointSetDist.toSparkline calls "downsampleEquallyOverX", which downsamples it to n=bucketCount.
|
||||||
It first needs a pointSetDist, so we convert to a pointSetDist. In this process we want the
|
It first needs a pointSetDist, so we convert to a pointSetDist. In this process we want the
|
||||||
|
|
|
@@ -5,6 +5,9 @@ type toSampleSetFn = t => result<SampleSetDist.t, error>
 type scaleMultiplyFn = (t, float) => result<t, error>
 type pointwiseAddFn = (t, t) => result<t, error>
 
+@genType
+type env = {sampleCount: int, xyPointLength: int}
+
 let sampleN: (t, int) => array<float>
 let sample: t => float
 
@@ -25,12 +28,11 @@ let toFloatOperation: (
 ) => result<float, error>
 
 module Score: {
-  let klDivergence: (t, t, ~toPointSetFn: toPointSetFn) => result<float, error>
-  let logScoreWithPointResolution: (
-    ~prediction: t,
-    ~answer: float,
+  let logScore: (
+    ~estimate: t,
+    ~answer: DistributionTypes.DistributionOperation.genericDistOrScalar,
     ~prior: option<t>,
-    ~toPointSetFn: toPointSetFn,
+    ~env: env,
   ) => result<float, error>
 }
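Note (added for clarity; not part of the commit): the `logScore` signature above is backed by the new PointSetDist_Scoring module further down in this diff. Writing $e$ for the estimate density, $a$ for a distribution answer, $p$ for the optional prior and $x_0$ for a scalar answer, the scores it computes are, under that reading,

$$\mathrm{KL}(a \,\|\, e) = \int a(x)\,\log\frac{a(x)}{e(x)}\,dx, \qquad \mathrm{KL}(a \,\|\, e) - \mathrm{KL}(a \,\|\, p),$$
$$-\log e(x_0), \qquad \log\frac{p(x_0)}{e(x_0)},$$

for a distribution answer without/with a prior and a scalar answer without/with a prior, respectively; the integrand $-a\,\log(e/a) = a\,\log(a/e)$ in `WithDistAnswer.integrand` is exactly the pointwise KL term.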
 
@@ -120,7 +120,7 @@ let combinePointwise = (
 
   let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation)
 
-  combiner(fn, interpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
+  combiner(interpolator, fn, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
     make(~integralSumCache=combinedIntegralSum, x)
   )
 }
@ -270,20 +270,6 @@ module T = Dist({
|
||||||
}
|
}
|
||||||
let variance = (t: t): float =>
|
let variance = (t: t): float =>
|
||||||
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
|
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
|
||||||
|
|
||||||
let klDivergence = (prediction: t, answer: t) => {
|
|
||||||
let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
|
|
||||||
PointSetDist_Scoring.KLDivergence.integrand,
|
|
||||||
prediction.xyShape,
|
|
||||||
answer.xyShape,
|
|
||||||
)
|
|
||||||
newShape->E.R2.fmap(x => x->make->integralEndY)
|
|
||||||
}
|
|
||||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
|
||||||
let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
|
|
||||||
let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
|
|
||||||
PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
||||||
let isNormalized = (t: t): bool => {
|
let isNormalized = (t: t): bool => {
|
||||||
|
|
|
@ -49,7 +49,7 @@ let combinePointwise = (
|
||||||
// TODO: does it ever make sense to pointwise combine the integrals here?
|
// TODO: does it ever make sense to pointwise combine the integrals here?
|
||||||
// It could be done for pointwise additions, but is that ever needed?
|
// It could be done for pointwise additions, but is that ever needed?
|
||||||
|
|
||||||
combiner(fn, XYShape.XtoY.discreteInterpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
|
combiner(XYShape.XtoY.discreteInterpolator, fn, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
|
||||||
}
|
}
|
||||||
|
|
||||||
let reduce = (
|
let reduce = (
|
||||||
|
@ -222,15 +222,4 @@ module T = Dist({
|
||||||
let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
|
let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
|
||||||
XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
||||||
}
|
}
|
||||||
|
|
||||||
let klDivergence = (prediction: t, answer: t) => {
|
|
||||||
combinePointwise(
|
|
||||||
~fn=PointSetDist_Scoring.KLDivergence.integrand,
|
|
||||||
prediction,
|
|
||||||
answer,
|
|
||||||
)->E.R2.fmap(integralEndY)
|
|
||||||
}
|
|
||||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
|
||||||
Error(Operation.NotYetImplemented)
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
|
@@ -33,12 +33,6 @@ module type dist = {
 
   let mean: t => float
   let variance: t => float
-  let klDivergence: (t, t) => result<float, Operation.Error.t>
-  let logScoreWithPointResolution: (
-    ~prediction: t,
-    ~answer: float,
-    ~prior: option<t>,
-  ) => result<float, Operation.Error.t>
 }
 
 module Dist = (T: dist) => {
@@ -61,9 +55,6 @@ module Dist = (T: dist) => {
   let mean = T.mean
   let variance = T.variance
   let integralEndY = T.integralEndY
-  let klDivergence = T.klDivergence
-  let logScoreWithPointResolution = T.logScoreWithPointResolution
 
   let updateIntegralCache = T.updateIntegralCache
 
   module Integral = {
@ -302,15 +302,6 @@ module T = Dist({
|
||||||
| _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
| _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let klDivergence = (prediction: t, answer: t) => {
|
|
||||||
let klDiscretePart = Discrete.T.klDivergence(prediction.discrete, answer.discrete)
|
|
||||||
let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
|
|
||||||
E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
|
|
||||||
}
|
|
||||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
|
||||||
Error(Operation.NotYetImplemented)
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
||||||
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
|
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
|
||||||
|
|
|
@ -66,6 +66,7 @@ let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t
|
||||||
}
|
}
|
||||||
|
|
||||||
let combinePointwise = (
|
let combinePointwise = (
|
||||||
|
~combiner=XYShape.PointwiseCombination.combine,
|
||||||
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
|
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
|
||||||
~integralCachesFn: (
|
~integralCachesFn: (
|
||||||
PointSetTypes.continuousShape,
|
PointSetTypes.continuousShape,
|
||||||
|
@ -78,6 +79,7 @@ let combinePointwise = (
|
||||||
switch (t1, t2) {
|
switch (t1, t2) {
|
||||||
| (Continuous(m1), Continuous(m2)) =>
|
| (Continuous(m1), Continuous(m2)) =>
|
||||||
Continuous.combinePointwise(
|
Continuous.combinePointwise(
|
||||||
|
~combiner,
|
||||||
~integralSumCachesFn,
|
~integralSumCachesFn,
|
||||||
fn,
|
fn,
|
||||||
m1,
|
m1,
|
||||||
|
@ -85,6 +87,7 @@ let combinePointwise = (
|
||||||
)->E.R2.fmap(x => PointSetTypes.Continuous(x))
|
)->E.R2.fmap(x => PointSetTypes.Continuous(x))
|
||||||
| (Discrete(m1), Discrete(m2)) =>
|
| (Discrete(m1), Discrete(m2)) =>
|
||||||
Discrete.combinePointwise(
|
Discrete.combinePointwise(
|
||||||
|
~combiner,
|
||||||
~integralSumCachesFn,
|
~integralSumCachesFn,
|
||||||
~fn,
|
~fn,
|
||||||
m1,
|
m1,
|
||||||
|
@ -195,25 +198,16 @@ module T = Dist({
|
||||||
| Discrete(m) => Discrete.T.variance(m)
|
| Discrete(m) => Discrete.T.variance(m)
|
||||||
| Continuous(m) => Continuous.T.variance(m)
|
| Continuous(m) => Continuous.T.variance(m)
|
||||||
}
|
}
|
||||||
|
|
||||||
let klDivergence = (prediction: t, answer: t) =>
|
|
||||||
switch (prediction, answer) {
|
|
||||||
| (Continuous(t1), Continuous(t2)) => Continuous.T.klDivergence(t1, t2)
|
|
||||||
| (Discrete(t1), Discrete(t2)) => Discrete.T.klDivergence(t1, t2)
|
|
||||||
| (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
|
|
||||||
}
|
|
||||||
|
|
||||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
|
||||||
switch (prior, prediction) {
|
|
||||||
| (Some(Continuous(t1)), Continuous(t2)) =>
|
|
||||||
Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=t1->Some)
|
|
||||||
| (None, Continuous(t2)) =>
|
|
||||||
Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=None)
|
|
||||||
| _ => Error(Operation.NotYetImplemented)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
||||||
|
let logScore = (args: PointSetDist_Scoring.scoreArgs): result<float, Operation.Error.t> =>
|
||||||
|
PointSetDist_Scoring.logScore(
|
||||||
|
args,
|
||||||
|
~combineFn=combinePointwise,
|
||||||
|
~integrateFn=T.Integral.sum,
|
||||||
|
~toMixedFn=toMixed,
|
||||||
|
)
|
||||||
|
|
||||||
let pdf = (f: float, t: t) => {
|
let pdf = (f: float, t: t) => {
|
||||||
let mixedPoint: PointSetTypes.mixedPoint = T.xToY(f, t)
|
let mixedPoint: PointSetTypes.mixedPoint = T.xToY(f, t)
|
||||||
mixedPoint.continuous +. mixedPoint.discrete
|
mixedPoint.continuous +. mixedPoint.discrete
|
||||||
|
|
|
@ -1,46 +1,149 @@
|
||||||
module KLDivergence = {
|
type pointSetDist = PointSetTypes.pointSetDist
|
||||||
|
|
||||||
|
type scalar = float
|
||||||
|
type score = float
|
||||||
|
type abstractScoreArgs<'a, 'b> = {estimate: 'a, answer: 'b, prior: option<'a>}
|
||||||
|
type scoreArgs =
|
||||||
|
| DistAnswer(abstractScoreArgs<pointSetDist, pointSetDist>)
|
||||||
|
| ScalarAnswer(abstractScoreArgs<pointSetDist, scalar>)
|
||||||
|
|
||||||
let logFn = Js.Math.log // base e
|
let logFn = Js.Math.log // base e
|
||||||
let integrand = (predictionElement: float, answerElement: float): result<
|
let minusScaledLogOfQuotient = (~esti, ~answ): result<float, Operation.Error.t> => {
|
||||||
|
let quot = esti /. answ
|
||||||
|
quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answ *. logFn(quot))
|
||||||
|
}
|
||||||
|
|
||||||
|
module WithDistAnswer = {
|
||||||
|
// The Kullback-Leibler divergence
|
||||||
|
let integrand = (estimateElement: float, answerElement: float): result<
|
||||||
float,
|
float,
|
||||||
Operation.Error.t,
|
Operation.Error.t,
|
||||||
> =>
|
> =>
|
||||||
// We decided that negative infinity, not an error at answerElement = 0.0, is a desirable value.
|
// We decided that 0.0, not an error at answerElement = 0.0, is a desirable value.
|
||||||
if answerElement == 0.0 {
|
if answerElement == 0.0 {
|
||||||
Ok(0.0)
|
Ok(0.0)
|
||||||
} else if predictionElement == 0.0 {
|
} else if estimateElement == 0.0 {
|
||||||
Ok(infinity)
|
Ok(infinity)
|
||||||
} else {
|
} else {
|
||||||
let quot = predictionElement /. answerElement
|
minusScaledLogOfQuotient(~esti=estimateElement, ~answ=answerElement)
|
||||||
quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answerElement *. logFn(quot))
|
}
|
||||||
|
|
||||||
|
let sum = (
|
||||||
|
~estimate: pointSetDist,
|
||||||
|
~answer: pointSetDist,
|
||||||
|
~combineFn,
|
||||||
|
~integrateFn,
|
||||||
|
~toMixedFn,
|
||||||
|
): result<score, Operation.Error.t> => {
|
||||||
|
let combineAndIntegrate = (estimate, answer) =>
|
||||||
|
combineFn(integrand, estimate, answer)->E.R2.fmap(integrateFn)
|
||||||
|
|
||||||
|
let getMixedSums = (estimate: pointSetDist, answer: pointSetDist) => {
|
||||||
|
let esti = estimate->toMixedFn
|
||||||
|
let answ = answer->toMixedFn
|
||||||
|
switch (
|
||||||
|
Mixed.T.toContinuous(esti),
|
||||||
|
Mixed.T.toDiscrete(esti),
|
||||||
|
Mixed.T.toContinuous(answ),
|
||||||
|
Mixed.T.toDiscrete(answ),
|
||||||
|
) {
|
||||||
|
| (
|
||||||
|
Some(estiContinuousPart),
|
||||||
|
Some(estiDiscretePart),
|
||||||
|
Some(answContinuousPart),
|
||||||
|
Some(answDiscretePart),
|
||||||
|
) =>
|
||||||
|
E.R.merge(
|
||||||
|
combineAndIntegrate(
|
||||||
|
PointSetTypes.Discrete(estiDiscretePart),
|
||||||
|
PointSetTypes.Discrete(answDiscretePart),
|
||||||
|
),
|
||||||
|
combineAndIntegrate(Continuous(estiContinuousPart), Continuous(answContinuousPart)),
|
||||||
|
)
|
||||||
|
| (_, _, _, _) => `unreachable state`->Operation.Other->Error
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module LogScoreWithPointResolution = {
|
switch (estimate, answer) {
|
||||||
let logFn = Js.Math.log
|
| (Continuous(_), Continuous(_))
|
||||||
let score = (
|
| (Discrete(_), Discrete(_)) =>
|
||||||
~priorPdf: option<float => float>,
|
combineAndIntegrate(estimate, answer)
|
||||||
~predictionPdf: float => float,
|
| (_, _) =>
|
||||||
~answer: float,
|
getMixedSums(estimate, answer)->E.R2.fmap(((discretePart, continuousPart)) =>
|
||||||
): result<float, Operation.Error.t> => {
|
discretePart +. continuousPart
|
||||||
let numerator = answer->predictionPdf
|
)
|
||||||
if numerator < 0.0 {
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let sumWithPrior = (
|
||||||
|
~estimate: pointSetDist,
|
||||||
|
~answer: pointSetDist,
|
||||||
|
~prior: pointSetDist,
|
||||||
|
~combineFn,
|
||||||
|
~integrateFn,
|
||||||
|
~toMixedFn,
|
||||||
|
): result<score, Operation.Error.t> => {
|
||||||
|
let kl1 = sum(~estimate, ~answer, ~combineFn, ~integrateFn, ~toMixedFn)
|
||||||
|
let kl2 = sum(~estimate=prior, ~answer, ~combineFn, ~integrateFn, ~toMixedFn)
|
||||||
|
E.R.merge(kl1, kl2)->E.R2.fmap(((kl1', kl2')) => kl1' -. kl2')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module WithScalarAnswer = {
|
||||||
|
let sum = (mp: PointSetTypes.MixedPoint.t): float => mp.continuous +. mp.discrete
|
||||||
|
let score = (~estimate: pointSetDist, ~answer: scalar): result<score, Operation.Error.t> => {
|
||||||
|
let _score = (~estimatePdf: float => option<float>, ~answer: float): result<
|
||||||
|
score,
|
||||||
|
Operation.Error.t,
|
||||||
|
> => {
|
||||||
|
let density = answer->estimatePdf
|
||||||
|
switch density {
|
||||||
|
| None => Operation.PdfInvalidError->Error
|
||||||
|
| Some(density') =>
|
||||||
|
if density' < 0.0 {
|
||||||
Operation.PdfInvalidError->Error
|
Operation.PdfInvalidError->Error
|
||||||
} else if numerator == 0.0 {
|
} else if density' == 0.0 {
|
||||||
infinity->Ok
|
infinity->Ok
|
||||||
} else {
|
} else {
|
||||||
-.(
|
density'->logFn->(x => -.x)->Ok
|
||||||
switch priorPdf {
|
|
||||||
| None => numerator->logFn
|
|
||||||
| Some(f) => {
|
|
||||||
let priorDensityOfAnswer = f(answer)
|
|
||||||
if priorDensityOfAnswer == 0.0 {
|
|
||||||
neg_infinity
|
|
||||||
} else {
|
|
||||||
(numerator /. priorDensityOfAnswer)->logFn
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
)->Ok
|
|
||||||
|
let estimatePdf = x =>
|
||||||
|
switch estimate {
|
||||||
|
| Continuous(esti) => Continuous.T.xToY(x, esti)->sum->Some
|
||||||
|
| Discrete(esti) => Discrete.T.xToY(x, esti)->sum->Some
|
||||||
|
| Mixed(_) => None
|
||||||
|
}
|
||||||
|
_score(~estimatePdf, ~answer)
|
||||||
|
}
|
||||||
|
|
||||||
|
let scoreWithPrior = (~estimate: pointSetDist, ~answer: scalar, ~prior: pointSetDist): result<
|
||||||
|
score,
|
||||||
|
Operation.Error.t,
|
||||||
|
> => {
|
||||||
|
E.R.merge(score(~estimate, ~answer), score(~estimate=prior, ~answer))->E.R2.fmap(((s1, s2)) =>
|
||||||
|
s1 -. s2
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let twoGenericDistsToTwoPointSetDists = (~toPointSetFn, estimate, answer): result<
|
||||||
|
(pointSetDist, pointSetDist),
|
||||||
|
'e,
|
||||||
|
> => E.R.merge(toPointSetFn(estimate, ()), toPointSetFn(answer, ()))
|
||||||
|
|
||||||
|
let logScore = (args: scoreArgs, ~combineFn, ~integrateFn, ~toMixedFn): result<
|
||||||
|
score,
|
||||||
|
Operation.Error.t,
|
||||||
|
> =>
|
||||||
|
switch args {
|
||||||
|
| DistAnswer({estimate, answer, prior: None}) =>
|
||||||
|
WithDistAnswer.sum(~estimate, ~answer, ~integrateFn, ~combineFn, ~toMixedFn)
|
||||||
|
| DistAnswer({estimate, answer, prior: Some(prior)}) =>
|
||||||
|
WithDistAnswer.sumWithPrior(~estimate, ~answer, ~prior, ~integrateFn, ~combineFn, ~toMixedFn)
|
||||||
|
| ScalarAnswer({estimate, answer, prior: None}) => WithScalarAnswer.score(~estimate, ~answer)
|
||||||
|
| ScalarAnswer({estimate, answer, prior: Some(prior)}) =>
|
||||||
|
WithScalarAnswer.scoreWithPrior(~estimate, ~answer, ~prior)
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,6 +8,7 @@ type rec frType =
|
||||||
| FRTypeNumber
|
| FRTypeNumber
|
||||||
| FRTypeNumeric
|
| FRTypeNumeric
|
||||||
| FRTypeDistOrNumber
|
| FRTypeDistOrNumber
|
||||||
|
| FRTypeDist
|
||||||
| FRTypeLambda
|
| FRTypeLambda
|
||||||
| FRTypeRecord(frTypeRecord)
|
| FRTypeRecord(frTypeRecord)
|
||||||
| FRTypeDict(frType)
|
| FRTypeDict(frType)
|
||||||
|
@ -43,7 +44,7 @@ type fnDefinition = {
|
||||||
requiresNamespace: bool,
|
requiresNamespace: bool,
|
||||||
name: string,
|
name: string,
|
||||||
inputs: array<frType>,
|
inputs: array<frType>,
|
||||||
run: (array<frValue>, DistributionOperation.env) => result<internalExpressionValue, string>,
|
run: (array<frValue>, GenericDist.env) => result<internalExpressionValue, string>,
|
||||||
}
|
}
|
||||||
|
|
||||||
type function = {
|
type function = {
|
||||||
|
@ -62,6 +63,7 @@ module FRType = {
|
||||||
switch t {
|
switch t {
|
||||||
| FRTypeNumber => "number"
|
| FRTypeNumber => "number"
|
||||||
| FRTypeNumeric => "numeric"
|
| FRTypeNumeric => "numeric"
|
||||||
|
| FRTypeDist => "distribution"
|
||||||
| FRTypeDistOrNumber => "distribution|number"
|
| FRTypeDistOrNumber => "distribution|number"
|
||||||
| FRTypeRecord(r) => {
|
| FRTypeRecord(r) => {
|
||||||
let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
|
let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
|
||||||
|
@ -100,6 +102,7 @@ module FRType = {
|
||||||
| (FRTypeDistOrNumber, IEvDistribution(Symbolic(#Float(f)))) =>
|
| (FRTypeDistOrNumber, IEvDistribution(Symbolic(#Float(f)))) =>
|
||||||
Some(FRValueDistOrNumber(FRValueNumber(f)))
|
Some(FRValueDistOrNumber(FRValueNumber(f)))
|
||||||
| (FRTypeDistOrNumber, IEvDistribution(f)) => Some(FRValueDistOrNumber(FRValueDist(f)))
|
| (FRTypeDistOrNumber, IEvDistribution(f)) => Some(FRValueDistOrNumber(FRValueDist(f)))
|
||||||
|
| (FRTypeDist, IEvDistribution(f)) => Some(FRValueDist(f))
|
||||||
| (FRTypeNumeric, IEvNumber(f)) => Some(FRValueNumber(f))
|
| (FRTypeNumeric, IEvNumber(f)) => Some(FRValueNumber(f))
|
||||||
| (FRTypeNumeric, IEvDistribution(Symbolic(#Float(f)))) => Some(FRValueNumber(f))
|
| (FRTypeNumeric, IEvDistribution(Symbolic(#Float(f)))) => Some(FRValueNumber(f))
|
||||||
| (FRTypeLambda, IEvLambda(f)) => Some(FRValueLambda(f))
|
| (FRTypeLambda, IEvLambda(f)) => Some(FRValueLambda(f))
|
||||||
|
@ -321,7 +324,7 @@ module FnDefinition = {
|
||||||
t.name ++ `(${inputs})`
|
t.name ++ `(${inputs})`
|
||||||
}
|
}
|
||||||
|
|
||||||
let run = (t: t, args: array<internalExpressionValue>, env: DistributionOperation.env) => {
|
let run = (t: t, args: array<internalExpressionValue>, env: GenericDist.env) => {
|
||||||
let argValues = FRType.matchWithExpressionValueArray(t.inputs, args)
|
let argValues = FRType.matchWithExpressionValueArray(t.inputs, args)
|
||||||
switch argValues {
|
switch argValues {
|
||||||
| Some(values) => t.run(values, env)
|
| Some(values) => t.run(values, env)
|
||||||
|
@ -385,7 +388,7 @@ module Registry = {
|
||||||
~registry: registry,
|
~registry: registry,
|
||||||
~fnName: string,
|
~fnName: string,
|
||||||
~args: array<internalExpressionValue>,
|
~args: array<internalExpressionValue>,
|
||||||
~env: DistributionOperation.env,
|
~env: GenericDist.env,
|
||||||
) => {
|
) => {
|
||||||
let matchToDef = m => Matcher.Registry.matchToDef(registry, m)
|
let matchToDef = m => Matcher.Registry.matchToDef(registry, m)
|
||||||
//Js.log(toSimple(registry))
|
//Js.log(toSimple(registry))
|
||||||
|
|
|
@@ -27,6 +27,12 @@ module Prepare = {
 | _ => Error(impossibleError)
 }
 
+let threeArgs = (inputs: ts): result<ts, err> =>
+switch inputs {
+| [FRValueRecord([(_, n1), (_, n2), (_, n3)])] => Ok([n1, n2, n3])
+| _ => Error(impossibleError)
+}
+
 let toArgs = (inputs: ts): result<ts, err> =>
 switch inputs {
 | [FRValueRecord(args)] => args->E.A2.fmap(((_, b)) => b)->Ok
@@ -57,6 +63,16 @@ module Prepare = {
 }
 }
 
+let twoDist = (values: ts): result<
+(DistributionTypes.genericDist, DistributionTypes.genericDist),
+err,
+> => {
+switch values {
+| [FRValueDist(a1), FRValueDist(a2)] => Ok(a1, a2)
+| _ => Error(impossibleError)
+}
+}
+
 let twoNumbers = (values: ts): result<(float, float), err> => {
 switch values {
 | [FRValueNumber(a1), FRValueNumber(a2)] => Ok(a1, a2)
@@ -81,6 +97,11 @@ module Prepare = {
 module Record = {
 let twoDistOrNumber = (values: ts): result<(frValueDistOrNumber, frValueDistOrNumber), err> =>
 values->ToValueArray.Record.twoArgs->E.R.bind(twoDistOrNumber)
+
+let twoDist = (values: ts): result<
+(DistributionTypes.genericDist, DistributionTypes.genericDist),
+err,
+> => values->ToValueArray.Record.twoArgs->E.R.bind(twoDist)
 }
 }
 
@@ -128,8 +149,7 @@ module Prepare = {
 module Process = {
 module DistOrNumberToDist = {
 module Helpers = {
-let toSampleSet = (r, env: DistributionOperation.env) =>
-GenericDist.toSampleSetDist(r, env.sampleCount)
+let toSampleSet = (r, env: GenericDist.env) => GenericDist.toSampleSetDist(r, env.sampleCount)
 
 let mapFnResult = r =>
 switch r {
@@ -166,7 +186,7 @@ module Process = {
 let oneValue = (
 ~fn: float => result<DistributionTypes.genericDist, string>,
 ~value: frValueDistOrNumber,
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ): result<DistributionTypes.genericDist, string> => {
 switch value {
 | FRValueNumber(a1) => fn(a1)
@@ -179,7 +199,7 @@ module Process = {
 let twoValues = (
 ~fn: ((float, float)) => result<DistributionTypes.genericDist, string>,
 ~values: (frValueDistOrNumber, frValueDistOrNumber),
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ): result<DistributionTypes.genericDist, string> => {
 switch values {
 | (FRValueNumber(a1), FRValueNumber(a2)) => fn((a1, a2))
|
||||||
expressionValue
|
expressionValue
|
||||||
}
|
}
|
||||||
|
|
||||||
let registry = [
|
let registryStart = [
|
||||||
Function.make(
|
Function.make(
|
||||||
~name="toContinuousPointSet",
|
~name="toContinuousPointSet",
|
||||||
~definitions=[
|
~definitions=[
|
||||||
|
@ -570,3 +570,67 @@ to(5,10)
|
||||||
(),
|
(),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
let runScoring = (estimate, answer, prior, env) => {
|
||||||
|
GenericDist.Score.logScore(~estimate, ~answer, ~prior, ~env)
|
||||||
|
->E.R2.fmap(FunctionRegistry_Helpers.Wrappers.evNumber)
|
||||||
|
->E.R2.errMap(DistributionTypes.Error.toString)
|
||||||
|
}
|
||||||
|
|
||||||
|
let scoreFunctions = [
|
||||||
|
Function.make(
|
||||||
|
~name="Score",
|
||||||
|
~definitions=[
|
||||||
|
FnDefinition.make(
|
||||||
|
~name="logScore",
|
||||||
|
~inputs=[
|
||||||
|
FRTypeRecord([
|
||||||
|
("estimate", FRTypeDist),
|
||||||
|
("answer", FRTypeDistOrNumber),
|
||||||
|
("prior", FRTypeDist),
|
||||||
|
]),
|
||||||
|
],
|
||||||
|
~run=(inputs, env) => {
|
||||||
|
switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.threeArgs(inputs) {
|
||||||
|
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueDist(d)), FRValueDist(prior)]) =>
|
||||||
|
runScoring(estimate, Score_Dist(d), Some(prior), env)
|
||||||
|
| Ok([
|
||||||
|
FRValueDist(estimate),
|
||||||
|
FRValueDistOrNumber(FRValueNumber(d)),
|
||||||
|
FRValueDist(prior),
|
||||||
|
]) =>
|
||||||
|
runScoring(estimate, Score_Scalar(d), Some(prior), env)
|
||||||
|
| Error(e) => Error(e)
|
||||||
|
| _ => Error(FunctionRegistry_Helpers.impossibleError)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
),
|
||||||
|
FnDefinition.make(
|
||||||
|
~name="logScore",
|
||||||
|
~inputs=[FRTypeRecord([("estimate", FRTypeDist), ("answer", FRTypeDistOrNumber)])],
|
||||||
|
~run=(inputs, env) => {
|
||||||
|
switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.twoArgs(inputs) {
|
||||||
|
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueDist(d))]) =>
|
||||||
|
runScoring(estimate, Score_Dist(d), None, env)
|
||||||
|
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueNumber(d))]) =>
|
||||||
|
runScoring(estimate, Score_Scalar(d), None, env)
|
||||||
|
| Error(e) => Error(e)
|
||||||
|
| _ => Error(FunctionRegistry_Helpers.impossibleError)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
),
|
||||||
|
FnDefinition.make(~name="klDivergence", ~inputs=[FRTypeDist, FRTypeDist], ~run=(
|
||||||
|
inputs,
|
||||||
|
env,
|
||||||
|
) => {
|
||||||
|
switch inputs {
|
||||||
|
| [FRValueDist(estimate), FRValueDist(d)] => runScoring(estimate, Score_Dist(d), None, env)
|
||||||
|
| _ => Error(FunctionRegistry_Helpers.impossibleError)
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
(),
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
let registry = E.A.append(registryStart, scoreFunctions)
|
||||||
|
|
|
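For illustration, the last line of the hunk composes the final registry by appending the new score functions to the existing entries. A tiny sketch of that composition, using Belt.Array.concat as a stand-in for the project's E.A.append and plain strings in place of Function.make values:

```rescript
// Stand-in values: the real arrays hold Function.make(...) records, not strings.
let registryStart = ["toContinuousPointSet"]
let scoreFunctions = ["Score"]

// E.A.append(registryStart, scoreFunctions) in the hunk above; concat is the Belt equivalent.
let registry = Belt.Array.concat(registryStart, scoreFunctions)
Js.log(registry) // ["toContinuousPointSet", "Score"]
```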
@@ -72,6 +72,10 @@ type ffiFn = (
 ) => result<internalExpressionValue, Reducer_ErrorValue.errorValue>
 
 type optionFfiFn = (array<internalExpressionValue>, environment) => option<internalExpressionValue>
+type optionFfiFnReturningResult = (
+array<internalExpressionValue>,
+environment,
+) => option<result<internalExpressionValue, Reducer_ErrorValue.errorValue>>
 
 type expressionOrFFI =
 | NotFFI(expression)
@@ -126,6 +126,17 @@ let functionNotFoundErrorFFIFn = (functionName: string): ExpressionT.ffiFn => {
 }
 }
 
+let convertOptionToFfiFnReturningResult = (
+myFunctionName: string,
+myFunction: ExpressionT.optionFfiFnReturningResult,
+): ExpressionT.ffiFn => {
+(args: array<InternalExpressionValue.t>, environment) => {
+myFunction(args, environment)->Belt.Option.getWithDefault(
+functionNotFoundErrorFFIFn(myFunctionName)(args, environment),
+)
+}
+}
+
 let convertOptionToFfiFn = (
 myFunctionName: string,
 myFunction: ExpressionT.optionFfiFn,
@@ -159,4 +170,15 @@ let defineFunction = (nameSpace: t, identifier: string, value: ExpressionT.optio
 nameSpace->define(identifier, convertOptionToFfiFn(identifier, value)->eLambdaFFIValue)
 }
 
-let emptyStdLib: t = emptyModule->defineBool("stdlib", true)
+let defineFunctionReturningResult = (
+nameSpace: t,
+identifier: string,
+value: ExpressionT.optionFfiFnReturningResult,
+): t => {
+nameSpace->define(
+identifier,
+convertOptionToFfiFnReturningResult(identifier, value)->eLambdaFFIValue,
+)
+}
+
+let emptyStdLib: t = emptyModule->defineBool("_standardLibrary", true)
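For illustration, convertOptionToFfiFnReturningResult turns an option-returning handler into a total FFI function by falling back to the "function not found" handler when the option is None. A minimal sketch of that defaulting pattern with simplified stand-in types:

```rescript
// Stand-in for the real FFI handler: only "known" calls are handled.
let tryHandler = (name: string): option<string> =>
  name == "known" ? Some("handled") : None

// Stand-in for functionNotFoundErrorFFIFn above.
let notFound = (name: string): string => `function ${name} not found`

// Total function: falls back to the error value whenever the handler returns None.
let total = (name: string): string =>
  tryHandler(name)->Belt.Option.getWithDefault(notFound(name))

Js.log(total("known")) // "handled"
Js.log(total("other")) // "function other not found"
```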
@@ -1,7 +1,7 @@
 module IEV = ReducerInterface_InternalExpressionValue
 type internalExpressionValue = IEV.t
 
-let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
+let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
 result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
 > => {
 switch call {

The same one-line signature change is applied in a second reducer-interface dispatch module:

@@ -1,7 +1,7 @@
 module IEV = ReducerInterface_InternalExpressionValue
 type internalExpressionValue = IEV.t
 
-let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
+let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
 result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
 > => {
 switch call {
@@ -86,7 +86,7 @@ let toStringResult = x =>
 }
 
 @genType
-type environment = DistributionOperation.env
+type environment = GenericDist.env
 
 @genType
 let defaultEnvironment: environment = DistributionOperation.defaultEnv
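For illustration, the GenericDist.env value that replaces DistributionOperation.env throughout this commit is a record of sampling settings. Only the sampleCount field is visible in these hunks (via env.sampleCount in toSampleSet), so the standalone record below is a stand-in, not the project's exact definition:

```rescript
// Stand-in env record; the real GenericDist.env may carry additional fields.
type env = {sampleCount: int}

let toSampleSetSketch = (env: env) => Js.log2("sampling with n =", env.sampleCount)
toSampleSetSketch({sampleCount: 1000})
```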
@@ -32,50 +32,38 @@ module Helpers = {
 let toFloatFn = (
 fnCall: DistributionTypes.DistributionOperation.toFloat,
 dist: DistributionTypes.genericDist,
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ) => {
-FromDist(DistributionTypes.DistributionOperation.ToFloat(fnCall), dist)
-->DistributionOperation.run(~env)
-->Some
+FromDist(#ToFloat(fnCall), dist)->DistributionOperation.run(~env)->Some
 }
 
 let toStringFn = (
 fnCall: DistributionTypes.DistributionOperation.toString,
 dist: DistributionTypes.genericDist,
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ) => {
-FromDist(DistributionTypes.DistributionOperation.ToString(fnCall), dist)
-->DistributionOperation.run(~env)
-->Some
+FromDist(#ToString(fnCall), dist)->DistributionOperation.run(~env)->Some
 }
 
 let toBoolFn = (
 fnCall: DistributionTypes.DistributionOperation.toBool,
 dist: DistributionTypes.genericDist,
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ) => {
-FromDist(DistributionTypes.DistributionOperation.ToBool(fnCall), dist)
-->DistributionOperation.run(~env)
-->Some
+FromDist(#ToBool(fnCall), dist)->DistributionOperation.run(~env)->Some
 }
 
 let toDistFn = (
 fnCall: DistributionTypes.DistributionOperation.toDist,
 dist,
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ) => {
-FromDist(DistributionTypes.DistributionOperation.ToDist(fnCall), dist)
-->DistributionOperation.run(~env)
-->Some
+FromDist(#ToDist(fnCall), dist)->DistributionOperation.run(~env)->Some
 }
 
-let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: DistributionOperation.env) => {
+let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: GenericDist.env) => {
 FromDist(
-DistributionTypes.DistributionOperation.ToDistCombination(
-direction,
-arithmeticMap(arithmetic),
-#Dist(dist2),
-),
+#ToDistCombination(direction, arithmeticMap(arithmetic), #Dist(dist2)),
 dist1,
 )->DistributionOperation.run(~env)
 }
@@ -109,7 +97,7 @@ module Helpers = {
 let mixtureWithGivenWeights = (
 distributions: array<DistributionTypes.genericDist>,
 weights: array<float>,
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ): DistributionOperation.outputType =>
 E.A.length(distributions) == E.A.length(weights)
 ? Mixture(Belt.Array.zip(distributions, weights))->DistributionOperation.run(~env)
@@ -119,7 +107,7 @@ module Helpers = {
 
 let mixtureWithDefaultWeights = (
 distributions: array<DistributionTypes.genericDist>,
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ): DistributionOperation.outputType => {
 let length = E.A.length(distributions)
 let weights = Belt.Array.make(length, 1.0 /. Belt.Int.toFloat(length))
@@ -128,7 +116,7 @@ module Helpers = {
 
 let mixture = (
 args: array<internalExpressionValue>,
-~env: DistributionOperation.env,
+~env: GenericDist.env,
 ): DistributionOperation.outputType => {
 let error = (err: string): DistributionOperation.outputType =>
 err->DistributionTypes.ArgumentError->GenDistError
@@ -167,20 +155,6 @@ module Helpers = {
 }
 }
 }
-
-let klDivergenceWithPrior = (
-prediction: DistributionTypes.genericDist,
-answer: DistributionTypes.genericDist,
-prior: DistributionTypes.genericDist,
-env: DistributionOperation.env,
-) => {
-let term1 = DistributionOperation.Constructors.klDivergence(~env, prediction, answer)
-let term2 = DistributionOperation.Constructors.klDivergence(~env, prior, answer)
-switch E.R.merge(term1, term2)->E.R2.fmap(((a, b)) => a -. b) {
-| Ok(x) => x->DistributionOperation.Float->Some
-| Error(_) => None
-}
-}
 }
 
 module SymbolicConstructors = {
@@ -199,7 +173,7 @@ module SymbolicConstructors = {
 }
 }
 
-let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperation.env): option<
+let dispatchToGenericOutput = (call: IEV.functionCall, env: GenericDist.env): option<
 DistributionOperation.outputType,
 > => {
 let (fnName, args) = call
@@ -239,35 +213,6 @@ let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperatio
 ~env,
 )->Some
 | ("normalize", [IEvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist, ~env)
-| ("klDivergence", [IEvDistribution(prediction), IEvDistribution(answer)]) =>
-Some(DistributionOperation.run(FromDist(ToScore(KLDivergence(answer)), prediction), ~env))
-| (
-"klDivergence",
-[IEvDistribution(prediction), IEvDistribution(answer), IEvDistribution(prior)],
-) =>
-Helpers.klDivergenceWithPrior(prediction, answer, prior, env)
-| (
-"logScoreWithPointAnswer",
-[IEvDistribution(prediction), IEvNumber(answer), IEvDistribution(prior)],
-)
-| (
-"logScoreWithPointAnswer",
-[
-IEvDistribution(prediction),
-IEvDistribution(Symbolic(#Float(answer))),
-IEvDistribution(prior),
-],
-) =>
-DistributionOperation.run(
-FromDist(ToScore(LogScore(answer, prior->Some)), prediction),
-~env,
-)->Some
-| ("logScoreWithPointAnswer", [IEvDistribution(prediction), IEvNumber(answer)])
-| (
-"logScoreWithPointAnswer",
-[IEvDistribution(prediction), IEvDistribution(Symbolic(#Float(answer)))],
-) =>
-DistributionOperation.run(FromDist(ToScore(LogScore(answer, None)), prediction), ~env)->Some
 | ("isNormalized", [IEvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist, ~env)
 | ("toPointSet", [IEvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist, ~env)
 | ("scaleLog", [IEvDistribution(dist)]) =>
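For illustration, the key idiom in this refactor is that the operation constructors become polymorphic variants (#ToFloat, #ToDist, and so on), so call sites no longer spell out the DistributionTypes.DistributionOperation module path. A small self-contained sketch with placeholder payload types:

```rescript
// Placeholder payloads: the real constructors carry fnCall values, not strings.
let describe = op =>
  switch op {
  | #ToFloat(name) => "to float: " ++ name
  | #ToDist(name) => "to dist: " ++ name
  }

Js.log(describe(#ToFloat("mean"))) // "to float: mean"
```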
@@ -24,7 +24,7 @@ module ScientificUnit = {
 }
 }
 
-let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
+let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
 result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
 > => {
 switch call {
@@ -8,7 +8,7 @@ The below few seem to work fine. In the future there's definitely more work to d
 */
 
 @genType
-type samplingParams = DistributionOperation.env
+type samplingParams = GenericDist.env
 
 @genType
 type genericDist = DistributionTypes.genericDist
@@ -547,6 +547,7 @@ module A = {
 let init = Array.init
 let reduce = Belt.Array.reduce
 let reducei = Belt.Array.reduceWithIndex
+let some = Belt.Array.some
 let isEmpty = r => length(r) < 1
 let stableSortBy = Belt.SortArray.stableSortBy
 let toNoneIfEmpty = r => isEmpty(r) ? None : Some(r)
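For reference, the added line simply re-exports Belt.Array.some under the E.A module; it returns true when any element satisfies the predicate:

```rescript
// Belt.Array.some(array, predicate): true if the predicate holds for at least one element.
Js.log(Belt.Array.some([1, 2, 3], x => x > 2)) // true
Js.log(Belt.Array.some([1, 2, 3], x => x > 5)) // false
```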
@@ -327,8 +327,8 @@ module Zipped = {
 module PointwiseCombination = {
 // t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
 let combine: (
-(float, float) => result<float, Operation.Error.t>,
 interpolator,
+(float, float) => result<float, Operation.Error.t>,
 T.t,
 T.t,
 ) => result<T.t, Operation.Error.t> = %raw(`
@@ -337,7 +337,7 @@ module PointwiseCombination = {
 // and interpolates the value on the other side, thus accumulating xs and ys.
 // This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.
 
-function(fn, interpolator, t1, t2) {
+function(interpolator, fn, t1, t2) {
 let t1n = t1.xs.length;
 let t2n = t2.xs.length;
 let outX = [];
@@ -399,11 +399,11 @@ module PointwiseCombination = {
 This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
 */
 let combineAlongSupportOfSecondArgument0: (
-(float, float) => result<float, Operation.Error.t>,
 interpolator,
+(float, float) => result<float, Operation.Error.t>,
 T.t,
 T.t,
-) => result<T.t, Operation.Error.t> = (fn, interpolator, t1, t2) => {
+) => result<T.t, Operation.Error.t> = (interpolator, fn, t1, t2) => {
 let newYs = []
 let newXs = []
 let (l1, l2) = (E.A.length(t1.xs), E.A.length(t2.xs))
@@ -496,29 +496,9 @@ module PointwiseCombination = {
 let newYs = E.A.fmap(x => XtoY.linear(x, t), newXs)
 {xs: newXs, ys: newYs}
 }
-// This function is used for klDivergence
-let combineAlongSupportOfSecondArgument: (
-(float, float) => result<float, Operation.Error.t>,
-T.t,
-T.t,
-) => result<T.t, Operation.Error.t> = (fn, prediction, answer) => {
-let combineWithFn = (answerX: float, i: int) => {
-let answerY = answer.ys[i]
-let predictionY = XtoY.linear(answerX, prediction)
-fn(predictionY, answerY)
-}
-let newYsWithError = Js.Array.mapi((x, i) => combineWithFn(x, i), answer.xs)
-let newYsOrError = E.A.R.firstErrorOrOpen(newYsWithError)
-let result = switch newYsOrError {
-| Ok(a) => Ok({xs: answer.xs, ys: a})
-| Error(b) => Error(b)
-}
-
-result
-}
 
 let addCombine = (interpolator: interpolator, t1: T.t, t2: T.t): T.t =>
-combine((a, b) => Ok(a +. b), interpolator, t1, t2)->E.R.toExn(
+combine(interpolator, (a, b) => Ok(a +. b), t1, t2)->E.R.toExn(
 "Add operation should never fail",
 _,
 )
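For illustration, addCombine now passes the interpolator first and the combining function second. When two shapes already share the same xs, the combining step reduces to a pointwise zip; the sketch below shows only that simplified case (the real combine also interpolates across mismatched supports):

```rescript
// Pointwise add of two y-arrays that are assumed to share the same xs.
let add = (ys1: array<float>, ys2: array<float>): array<float> =>
  Belt.Array.zipBy(ys1, ys2, (a, b) => a +. b)

Js.log(add([1., 2.], [3., 4.])) // [4., 6.]
```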
@@ -1,21 +1,35 @@
 # Squiggle For VS Code
 
-_[marketplace](https://marketplace.visualstudio.com/items?itemName=QURI.vscode-squiggle)_
+## About
 
-This extension provides support for [Squiggle](https://www.squiggle-language.com/) in VS Code.
+This extension provides support for [Squiggle](https://www.squiggle-language.com/) in VS Code. It can be found in the VS Code _[marketplace](https://marketplace.visualstudio.com/items?itemName=QURI.vscode-squiggle)_.
 
 Features:
 
 - Preview `.squiggle` files in a preview pane
 - Syntax highlighting for `.squiggle` and `.squiggleU` files
 
-# Configuration
+## Installation
 
-Some preview settings, e.g. whether to show the summary table or types of outputs, can be configurable on in the VS Code settings and persist between different preview sessions.
+You can install this extension by going to the "Extensions" tab, searching for "Squiggle", and then installing it.
+
+![](./images/vs-code-install.png)
+
+## Usage
+
+After loading a `.squiggle` file, an "Open Preview" button will appear. If you click it, the Squiggle model will be shown, and updated as you edit and save your file.
+
+![](./images/extension-screenshot.png)
+
+### Configuration (optional)
+
+Some preview settings, e.g. whether to show the summary table or the types of outputs, can be configured in the VS Code settings and persist between different preview sessions. The VS Code settings can be opened with the shortcut `Ctrl+,`, with `Ctrl+Shift+P` and searching for "Open Settings", or by editing a file such as `$HOME/.config/Code/User/settings.json` on Linux (see [here](https://stackoverflow.com/questions/65908987/how-can-i-open-visual-studio-codes-settings-json-file) for other operating systems).
+
+![](./images/vs-code-settings.png)
 
 Check out the full list of Squiggle settings in the main VS Code settings.
 
-# Build locally
+## Build locally
 
 We assume you ran `yarn` at the monorepo level for all dependencies.
BIN (new file, not shown): packages/vscode-ext/images/extension-screenshot.png (233 KiB)
BIN (new file, not shown): packages/vscode-ext/images/vs-code-install.png (187 KiB)
BIN (new file, not shown): packages/vscode-ext/images/vs-code-settings.png (77 KiB)
@@ -138,10 +138,10 @@
 "devDependencies": {
 "@types/glob": "^7.2.0",
 "@types/node": "18.x",
-"@types/vscode": "^1.68.0",
-"@typescript-eslint/eslint-plugin": "^5.30.4",
-"@typescript-eslint/parser": "^5.30.4",
-"eslint": "^8.18.0",
+"@types/vscode": "^1.69.0",
+"@typescript-eslint/eslint-plugin": "^5.30.6",
+"@typescript-eslint/parser": "^5.30.6",
+"eslint": "^8.19.0",
 "glob": "^8.0.3",
 "js-yaml": "^4.1.0",
 "typescript": "^4.7.4",
@@ -12,11 +12,11 @@
 "format": "prettier --write ."
 },
 "dependencies": {
-"@docusaurus/core": "2.0.0-beta.21",
-"@docusaurus/preset-classic": "2.0.0-beta.21",
+"@docusaurus/core": "2.0.0-beta.22",
+"@docusaurus/preset-classic": "2.0.0-beta.22",
 "@quri/squiggle-components": "^0.2.20",
 "base64-js": "^1.5.1",
-"clsx": "^1.2.0",
+"clsx": "^1.2.1",
 "hast-util-is-element": "2.1.2",
 "pako": "^2.0.4",
 "prism-react-renderer": "^1.3.5",