Merge branch 'develop' into collapsible
commit 8f4259cef3
@@ -13,7 +13,7 @@
"dependencies": {
"chalk": "^5.0.1",
"chokidar": "^3.5.3",
"commander": "^9.3.0",
"commander": "^9.4.0",
"fs": "^0.0.1-security",
"glob": "^8.0.3",
"indent-string": "^5.0.0"
@@ -1,31 +1,31 @@
{
"name": "@quri/squiggle-components",
"version": "0.2.20",
"version": "0.2.23",
"license": "MIT",
"dependencies": {
"@floating-ui/react-dom": "^0.7.2",
"@floating-ui/react-dom-interactions": "^0.6.6",
"@headlessui/react": "^1.6.5",
"@headlessui/react": "^1.6.6",
"@heroicons/react": "^1.0.6",
"@hookform/resolvers": "^2.9.3",
"@hookform/resolvers": "^2.9.6",
"@quri/squiggle-lang": "^0.2.8",
"@react-hook/size": "^2.1.2",
"clsx": "^1.1.1",
"framer-motion": "^6.4.1",
"clsx": "^1.2.1",
"framer-motion": "^6.5.1",
"lodash": "^4.17.21",
"react": "^18.1.0",
"react-ace": "^10.1.0",
"react-hook-form": "^7.33.0",
"react-hook-form": "^7.33.1",
"react-use": "^17.4.0",
"react-vega": "^7.5.1",
"react-vega": "^7.6.0",
"vega": "^5.22.1",
"vega-embed": "^6.21.0",
"vega-lite": "^5.2.0",
"vega-lite": "^5.3.0",
"vscode-uri": "^3.0.3",
"yup": "^0.32.11"
},
"devDependencies": {
"@babel/plugin-proposal-private-property-in-object": "^7.17.12",
"@babel/plugin-proposal-private-property-in-object": "^7.18.6",
"@storybook/addon-actions": "^6.5.9",
"@storybook/addon-essentials": "^6.5.9",
"@storybook/addon-links": "^6.5.9",
@@ -36,29 +36,29 @@
"@storybook/react": "^6.5.9",
"@testing-library/jest-dom": "^5.16.4",
"@testing-library/react": "^13.3.0",
"@testing-library/user-event": "^14.2.1",
"@testing-library/user-event": "^14.2.6",
"@types/jest": "^27.5.0",
"@types/lodash": "^4.14.182",
"@types/node": "^18.0.0",
"@types/node": "^18.0.6",
"@types/react": "^18.0.9",
"@types/styled-components": "^5.1.24",
"@types/webpack": "^5.28.0",
"cross-env": "^7.0.3",
"mini-css-extract-plugin": "^2.6.1",
"postcss-cli": "^9.1.0",
"postcss-cli": "^10.0.0",
"postcss-import": "^14.1.0",
"postcss-loader": "^7.0.0",
"postcss-loader": "^7.0.1",
"react": "^18.1.0",
"react-scripts": "^5.0.1",
"style-loader": "^3.3.1",
"tailwindcss": "^3.1.3",
"tailwindcss": "^3.1.6",
"ts-loader": "^9.3.0",
"tsconfig-paths-webpack-plugin": "^3.5.2",
"typescript": "^4.7.4",
"web-vitals": "^2.1.4",
"webpack": "^5.73.0",
"webpack-cli": "^4.10.0",
"webpack-dev-server": "^4.9.2"
"webpack-dev-server": "^4.9.3"
},
"peerDependencies": {
"react": "^16.8.0 || ^17 || ^18",
@@ -5,18 +5,15 @@ import {
distributionError,
distributionErrorToString,
} from "@quri/squiggle-lang";
import { Vega, VisualizationSpec } from "react-vega";
import * as chartSpecification from "../vega-specs/spec-distributions.json";
import { Vega } from "react-vega";
import { ErrorAlert } from "./Alert";
import { useSize } from "react-use";
import clsx from "clsx";
import {
linearXScale,
logXScale,
linearYScale,
expYScale,
} from "./DistributionVegaScales";
buildVegaSpec,
DistributionChartSpecOptions,
} from "../lib/distributionSpecBuilder";
import { NumberShower } from "./NumberShower";
export type DistributionPlottingSettings = {
@@ -24,19 +21,17 @@ export type DistributionPlottingSettings = {
showSummary: boolean;
/** Whether to show the user graph controls (scale etc) */
showControls: boolean;
/** Set the x scale to be logarithmic by deault */
logX: boolean;
/** Set the y scale to be exponential by deault */
expY: boolean;
};
} & DistributionChartSpecOptions;
export type DistributionChartProps = {
distribution: Distribution;
width?: number;
height: number;
actions?: boolean;
} & DistributionPlottingSettings;
export const DistributionChart: React.FC<DistributionChartProps> = ({
export const DistributionChart: React.FC<DistributionChartProps> = (props) => {
const {
distribution,
height,
showSummary,
@@ -44,7 +39,8 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
showControls,
logX,
expY,
}) => {
actions = false,
} = props;
const [isLogX, setLogX] = React.useState(logX);
const [isExpY, setExpY] = React.useState(expY);
@@ -64,7 +60,7 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
const massBelow0 =
shape.value.continuous.some((x) => x.x <= 0) ||
shape.value.discrete.some((x) => x.x <= 0);
const spec = buildVegaSpec(isLogX, isExpY);
const spec = buildVegaSpec(props);
let widthProp = width ? width : size.width;
if (widthProp < 20) {
@@ -82,7 +78,7 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
data={{ con: shape.value.continuous, dis: shape.value.discrete }}
width={widthProp - 10}
height={height}
actions={false}
actions={actions}
/>
) : (
<ErrorAlert heading="Log Domain Error">
@@ -116,16 +112,6 @@ export const DistributionChart: React.FC<DistributionChartProps> = ({
return sized;
};
function buildVegaSpec(isLogX: boolean, isExpY: boolean): VisualizationSpec {
return {
...chartSpecification,
scales: [
isLogX ? logXScale : linearXScale,
isExpY ? expYScale : linearYScale,
],
} as VisualizationSpec;
}
interface CheckBoxProps {
label: string;
onChange: (x: boolean) => void;
@@ -1,5 +1,10 @@
import * as React from "react";
import { lambdaValue, environment, runForeign } from "@quri/squiggle-lang";
import {
lambdaValue,
environment,
runForeign,
errorValueToString,
} from "@quri/squiggle-lang";
import { FunctionChart1Dist } from "./FunctionChart1Dist";
import { FunctionChart1Number } from "./FunctionChart1Number";
import { DistributionPlottingSettings } from "./DistributionChart";
@@ -45,10 +50,16 @@ export const FunctionChart: React.FC<FunctionChartProps> = ({
}
};
const validResult = getValidResult();
const resultType =
validResult.tag === "Ok" ? validResult.value.tag : ("Error" as const);
switch (resultType) {
if (validResult.tag === "Error") {
return (
<ErrorAlert heading="Error">
{errorValueToString(validResult.value)}
</ErrorAlert>
);
}
switch (validResult.value.tag) {
case "distribution":
return (
<FunctionChart1Dist
@@ -68,15 +79,11 @@ export const FunctionChart: React.FC<FunctionChartProps> = ({
height={height}
/>
);
case "Error":
return (
<ErrorAlert heading="Error">The function failed to be run</ErrorAlert>
);
default:
return (
<MessageAlert heading="Function Display Not Supported">
There is no function visualization for this type of output:{" "}
<span className="font-bold">{resultType}</span>
<span className="font-bold">{validResult.value.tag}</span>
</MessageAlert>
);
}
@@ -88,7 +88,7 @@ let getPercentiles = ({ chartSettings, fn, environment }) => {
let chartPointsData: point[] = chartPointsToRender.map((x) => {
let result = runForeign(fn, [x], environment);
if (result.tag === "Ok") {
if (result.value.tag == "distribution") {
if (result.value.tag === "distribution") {
return { x, value: { tag: "Ok", value: result.value.value } };
} else {
return {
@@ -165,12 +165,14 @@ export const FunctionChart1Dist: React.FC<FunctionChart1DistProps> = ({
setMouseOverlay(NaN);
}
const signalListeners = { mousemove: handleHover, mouseout: handleOut };
//TODO: This custom error handling is a bit hacky and should be improved.
let mouseItem: result<squiggleExpression, errorValue> = !!mouseOverlay
? runForeign(fn, [mouseOverlay], environment)
: {
tag: "Error",
value: {
tag: "REExpectedType",
tag: "RETodo",
value: "Hover x-coordinate returned NaN. Expected a number.",
},
};
@@ -41,6 +41,18 @@ export interface SquiggleChartProps {
logX?: boolean;
/** Set the y scale to be exponential by deault */
expY?: boolean;
/** How to format numbers on the x axis */
tickFormat?: string;
/** Title of the graphed distribution */
title?: string;
/** Color of the graphed distribution */
color?: string;
/** Specify the lower bound of the x scale */
minX?: number;
/** Specify the upper bound of the x scale */
maxX?: number;
/** Whether to show vega actions to the user, so they can copy the chart spec */
distributionChartActions?: boolean;
}
const defaultOnChange = () => {};
@@ -61,6 +73,12 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
diagramStart = 0,
diagramStop = 10,
diagramCount = 100,
tickFormat,
minX,
maxX,
color,
title,
distributionChartActions,
}) => {
const result = useSquiggle({
code,
@@ -75,6 +93,12 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
showSummary,
logX,
expY,
format: tickFormat,
minX,
maxX,
color,
title,
actions: distributionChartActions,
};
const chartSettings = {
@@ -18,7 +18,7 @@ import clsx from "clsx";
import { defaultBindings, environment } from "@quri/squiggle-lang";
import { SquiggleChart } from "./SquiggleChart";
import { SquiggleChart, SquiggleChartProps } from "./SquiggleChart";
import { CodeEditor } from "./CodeEditor";
import { JsonEditor } from "./JsonEditor";
import { ErrorAlert, SuccessAlert } from "./Alert";
@@ -27,26 +27,15 @@ import { Toggle } from "./ui/Toggle";
import { Checkbox } from "./ui/Checkbox";
import { StyledTab } from "./ui/StyledTab";
interface PlaygroundProps {
type PlaygroundProps = SquiggleChartProps & {
/** The initial squiggle string to put in the playground */
defaultCode?: string;
/** How many pixels high is the playground */
height?: number;
/** Whether to show the log scale controls in the playground */
showControls?: boolean;
/** Whether to show the summary table in the playground */
showSummary?: boolean;
/** Whether to log the x coordinate on distribution charts */
logX?: boolean;
/** Whether to exp the y coordinate on distribution charts */
expY?: boolean;
/** If code is set, component becomes controlled */
code?: string;
onCodeChange?(expr: string): void;
/* When settings change */
onSettingsChange?(settings: any): void;
/** Should we show the editor? */
showEditor?: boolean;
}
};
const schema = yup.object({}).shape({
sampleCount: yup
@@ -79,6 +68,12 @@ const schema = yup.object({}).shape({
showEditor: yup.boolean().required(),
logX: yup.boolean().required(),
expY: yup.boolean().required(),
tickFormat: yup.string().default(".9~s"),
title: yup.string(),
color: yup.string().default("#739ECC").required(),
minX: yup.number(),
maxX: yup.number(),
distributionChartActions: yup.boolean(),
showSettingsPage: yup.boolean().default(false),
diagramStart: yup.number().required().positive().integer().default(0).min(0),
diagramStop: yup.number().required().positive().integer().default(10).min(0),
@@ -111,7 +106,7 @@ function InputItem<T>({
}: {
name: Path<T>;
label: string;
type: "number";
type: "number" | "text" | "color";
register: UseFormRegister<T>;
}) {
return (
@@ -119,7 +114,7 @@ function InputItem<T>({
<div className="text-sm font-medium text-gray-600 mb-1">{label}</div>
<input
type={type}
{...register(name)}
{...register(name, { valueAsNumber: type === "number" })}
className="form-input max-w-lg block w-full shadow-sm focus:ring-indigo-500 focus:border-indigo-500 sm:max-w-xs sm:text-sm border-gray-300 rounded-md"
/>
</label>
@@ -194,6 +189,11 @@ const ViewSettings: React.FC<{ register: UseFormRegister<FormFields> }> = ({
name="expY"
label="Show y scale exponentially"
/>
<Checkbox
register={register}
name="distributionChartActions"
label="Show vega chart controls"
/>
<Checkbox
register={register}
name="showControls"
@@ -204,6 +204,36 @@ const ViewSettings: React.FC<{ register: UseFormRegister<FormFields> }> = ({
name="showSummary"
label="Show summary statistics"
/>
<InputItem
name="minX"
type="number"
register={register}
label="Min X Value"
/>
<InputItem
name="maxX"
type="number"
register={register}
label="Max X Value"
/>
<InputItem
name="title"
type="text"
register={register}
label="Title"
/>
<InputItem
name="tickFormat"
type="text"
register={register}
label="Tick Format"
/>
<InputItem
name="color"
type="color"
register={register}
label="Color"
/>
</div>
</HeadedSection>
</div>
@@ -376,6 +406,12 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
showSummary = false,
logX = false,
expY = false,
title,
minX,
maxX,
color = "#739ECC",
tickFormat = ".9~s",
distributionChartActions,
code: controlledCode,
onCodeChange,
onSettingsChange,
@@ -398,6 +434,12 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
showControls,
logX,
expY,
title,
minX,
maxX,
color,
tickFormat,
distributionChartActions,
showSummary,
showEditor,
leftSizePercent: 50,
@@ -430,14 +472,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
<SquiggleChart
code={renderedCode}
environment={env}
diagramStart={Number(vars.diagramStart)}
diagramStop={Number(vars.diagramStop)}
diagramCount={Number(vars.diagramCount)}
height={vars.chartHeight}
showControls={vars.showControls}
showSummary={vars.showSummary}
logX={vars.logX}
expY={vars.expY}
{...vars}
bindings={defaultBindings}
jsImports={imports}
/>
@@ -477,7 +512,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
);
const withEditor = (
<div className="flex mt-1">
<div className="flex mt-2">
<div className="w-1/2">{tabs}</div>
<div className="w-1/2 p-2 pl-4">{squiggleChart}</div>
</div>
@@ -23,8 +23,8 @@ export const Toggle: React.FC<Props> = ({
layout
transition={{ duration: 0.2 }}
className={clsx(
"rounded-full py-1 bg-indigo-500 text-white text-xs font-semibold flex items-center space-x-1",
status ? "bg-indigo-500" : "bg-gray-400",
"rounded-md py-0.5 bg-slate-500 text-white text-xs font-semibold flex items-center space-x-1",
status ? "bg-slate-500" : "bg-gray-400",
status ? "pl-1 pr-3" : "pl-3 pr-1",
!status && "flex-row-reverse space-x-reverse"
)}
packages/components/src/lib/distributionSpecBuilder.ts (new file, 256 lines)
@@ -0,0 +1,256 @@
|
|||
import { VisualizationSpec } from "react-vega";
|
||||
import type { LogScale, LinearScale, PowScale } from "vega";
|
||||
|
||||
export type DistributionChartSpecOptions = {
|
||||
/** Set the x scale to be logarithmic by deault */
|
||||
logX: boolean;
|
||||
/** Set the y scale to be exponential by deault */
|
||||
expY: boolean;
|
||||
/** The minimum x coordinate shown on the chart */
|
||||
minX?: number;
|
||||
/** The maximum x coordinate shown on the chart */
|
||||
maxX?: number;
|
||||
/** The color of the chart */
|
||||
color?: string;
|
||||
/** The title of the chart */
|
||||
title?: string;
|
||||
/** The formatting of the ticks */
|
||||
format?: string;
|
||||
};
|
||||
|
||||
export let linearXScale: LinearScale = {
|
||||
name: "xscale",
|
||||
clamp: true,
|
||||
type: "linear",
|
||||
range: "width",
|
||||
zero: false,
|
||||
nice: false,
|
||||
domain: {
|
||||
fields: [
|
||||
{
|
||||
data: "con",
|
||||
field: "x",
|
||||
},
|
||||
{
|
||||
data: "dis",
|
||||
field: "x",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
export let linearYScale: LinearScale = {
|
||||
name: "yscale",
|
||||
type: "linear",
|
||||
range: "height",
|
||||
zero: true,
|
||||
domain: {
|
||||
fields: [
|
||||
{
|
||||
data: "con",
|
||||
field: "y",
|
||||
},
|
||||
{
|
||||
data: "dis",
|
||||
field: "y",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export let logXScale: LogScale = {
|
||||
name: "xscale",
|
||||
type: "log",
|
||||
range: "width",
|
||||
zero: false,
|
||||
base: 10,
|
||||
nice: false,
|
||||
clamp: true,
|
||||
domain: {
|
||||
fields: [
|
||||
{
|
||||
data: "con",
|
||||
field: "x",
|
||||
},
|
||||
{
|
||||
data: "dis",
|
||||
field: "x",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export let expYScale: PowScale = {
|
||||
name: "yscale",
|
||||
type: "pow",
|
||||
exponent: 0.1,
|
||||
range: "height",
|
||||
zero: true,
|
||||
nice: false,
|
||||
domain: {
|
||||
fields: [
|
||||
{
|
||||
data: "con",
|
||||
field: "y",
|
||||
},
|
||||
{
|
||||
data: "dis",
|
||||
field: "y",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export function buildVegaSpec(
|
||||
specOptions: DistributionChartSpecOptions
|
||||
): VisualizationSpec {
|
||||
let {
|
||||
format = ".9~s",
|
||||
color = "#739ECC",
|
||||
title,
|
||||
minX,
|
||||
maxX,
|
||||
logX,
|
||||
expY,
|
||||
} = specOptions;
|
||||
|
||||
let xScale = logX ? logXScale : linearXScale;
|
||||
if (minX !== undefined && Number.isFinite(minX)) {
|
||||
xScale = { ...xScale, domainMin: minX };
|
||||
}
|
||||
|
||||
if (maxX !== undefined && Number.isFinite(maxX)) {
|
||||
xScale = { ...xScale, domainMax: maxX };
|
||||
}
|
||||
|
||||
let spec: VisualizationSpec = {
|
||||
$schema: "https://vega.github.io/schema/vega/v5.json",
|
||||
description: "A basic area chart example",
|
||||
width: 500,
|
||||
height: 100,
|
||||
padding: 5,
|
||||
data: [
|
||||
{
|
||||
name: "con",
|
||||
},
|
||||
{
|
||||
name: "dis",
|
||||
},
|
||||
],
|
||||
signals: [],
|
||||
scales: [xScale, expY ? expYScale : linearYScale],
|
||||
axes: [
|
||||
{
|
||||
orient: "bottom",
|
||||
scale: "xscale",
|
||||
labelColor: "#727d93",
|
||||
tickColor: "#fff",
|
||||
tickOpacity: 0.0,
|
||||
domainColor: "#fff",
|
||||
domainOpacity: 0.0,
|
||||
format: format,
|
||||
tickCount: 10,
|
||||
},
|
||||
],
|
||||
marks: [
|
||||
{
|
||||
type: "area",
|
||||
from: {
|
||||
data: "con",
|
||||
},
|
||||
encode: {
|
||||
update: {
|
||||
interpolate: { value: "linear" },
|
||||
x: {
|
||||
scale: "xscale",
|
||||
field: "x",
|
||||
},
|
||||
y: {
|
||||
scale: "yscale",
|
||||
field: "y",
|
||||
},
|
||||
y2: {
|
||||
scale: "yscale",
|
||||
value: 0,
|
||||
},
|
||||
fill: {
|
||||
value: color,
|
||||
},
|
||||
fillOpacity: {
|
||||
value: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
type: "rect",
|
||||
from: {
|
||||
data: "dis",
|
||||
},
|
||||
encode: {
|
||||
enter: {
|
||||
width: {
|
||||
value: 1,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
x: {
|
||||
scale: "xscale",
|
||||
field: "x",
|
||||
},
|
||||
y: {
|
||||
scale: "yscale",
|
||||
field: "y",
|
||||
},
|
||||
y2: {
|
||||
scale: "yscale",
|
||||
value: 0,
|
||||
},
|
||||
fill: {
|
||||
value: "#2f65a7",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
type: "symbol",
|
||||
from: {
|
||||
data: "dis",
|
||||
},
|
||||
encode: {
|
||||
enter: {
|
||||
shape: {
|
||||
value: "circle",
|
||||
},
|
||||
size: [{ value: 100 }],
|
||||
tooltip: {
|
||||
signal: "{ probability: datum.y, value: datum.x }",
|
||||
},
|
||||
},
|
||||
update: {
|
||||
x: {
|
||||
scale: "xscale",
|
||||
field: "x",
|
||||
},
|
||||
y: {
|
||||
scale: "yscale",
|
||||
field: "y",
|
||||
},
|
||||
fill: {
|
||||
value: "#1e4577",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
if (title) {
|
||||
spec = {
|
||||
...spec,
|
||||
title: {
|
||||
text: title,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return spec;
|
||||
}
|
|
@ -3,7 +3,7 @@ import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";
|
|||
|
||||
<Meta title="Squiggle/SquiggleChart" component={SquiggleChart} />
|
||||
|
||||
export const Template = SquiggleChart;
|
||||
export const Template = (props) => <SquiggleChart {...props} />;
|
||||
/*
|
||||
We have to hardcode a width here, because otherwise some interaction with
|
||||
Storybook creates an infinite loop with the internal width
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
open Jest
|
||||
open Expect
|
||||
|
||||
let env: DistributionOperation.env = {
|
||||
let env: GenericDist.env = {
|
||||
sampleCount: 100,
|
||||
xyPointLength: 100,
|
||||
}
|
||||
|
@ -34,7 +34,7 @@ describe("sparkline", () => {
|
|||
expected: DistributionOperation.outputType,
|
||||
) => {
|
||||
test(name, () => {
|
||||
let result = DistributionOperation.run(~env, FromDist(ToString(ToSparkline(20)), dist))
|
||||
let result = DistributionOperation.run(~env, FromDist(#ToString(ToSparkline(20)), dist))
|
||||
expect(result)->toEqual(expected)
|
||||
})
|
||||
}
|
||||
|
@ -81,8 +81,8 @@ describe("sparkline", () => {
|
|||
describe("toPointSet", () => {
|
||||
test("on symbolic normal distribution", () => {
|
||||
let result =
|
||||
run(FromDist(ToDist(ToPointSet), normalDist5))
|
||||
->outputMap(FromDist(ToFloat(#Mean)))
|
||||
run(FromDist(#ToDist(ToPointSet), normalDist5))
|
||||
->outputMap(FromDist(#ToFloat(#Mean)))
|
||||
->toFloat
|
||||
->toExt
|
||||
expect(result)->toBeSoCloseTo(5.0, ~digits=0)
|
||||
|
@ -90,10 +90,10 @@ describe("toPointSet", () => {
|
|||
|
||||
test("on sample set", () => {
|
||||
let result =
|
||||
run(FromDist(ToDist(ToPointSet), normalDist5))
|
||||
->outputMap(FromDist(ToDist(ToSampleSet(1000))))
|
||||
->outputMap(FromDist(ToDist(ToPointSet)))
|
||||
->outputMap(FromDist(ToFloat(#Mean)))
|
||||
run(FromDist(#ToDist(ToPointSet), normalDist5))
|
||||
->outputMap(FromDist(#ToDist(ToSampleSet(1000))))
|
||||
->outputMap(FromDist(#ToDist(ToPointSet)))
|
||||
->outputMap(FromDist(#ToFloat(#Mean)))
|
||||
->toFloat
|
||||
->toExt
|
||||
expect(result)->toBeSoCloseTo(5.0, ~digits=-1)
|
||||
|
|
|
@ -19,7 +19,6 @@ exception MixtureFailed
|
|||
let float1 = 1.0
|
||||
let float2 = 2.0
|
||||
let float3 = 3.0
|
||||
let {mkDelta} = module(TestHelpers)
|
||||
let point1 = mkDelta(float1)
|
||||
let point2 = mkDelta(float2)
|
||||
let point3 = mkDelta(float3)
|
||||
let point1 = TestHelpers.mkDelta(float1)
|
||||
let point2 = TestHelpers.mkDelta(float2)
|
||||
let point3 = TestHelpers.mkDelta(float3)
|
||||
|
|
|
@ -11,7 +11,7 @@ describe("mixture", () => {
|
|||
let (mean1, mean2) = tup
|
||||
let meanValue = {
|
||||
run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))->outputMap(
|
||||
FromDist(ToFloat(#Mean)),
|
||||
FromDist(#ToFloat(#Mean)),
|
||||
)
|
||||
}
|
||||
meanValue->unpackFloat->expect->toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
|
||||
|
@ -28,7 +28,7 @@ describe("mixture", () => {
|
|||
let meanValue = {
|
||||
run(
|
||||
Mixture([(mkBeta(alpha, beta), betaWeight), (mkExponential(rate), exponentialWeight)]),
|
||||
)->outputMap(FromDist(ToFloat(#Mean)))
|
||||
)->outputMap(FromDist(#ToFloat(#Mean)))
|
||||
}
|
||||
let betaMean = 1.0 /. (1.0 +. beta /. alpha)
|
||||
let exponentialMean = 1.0 /. rate
|
||||
|
@ -52,7 +52,7 @@ describe("mixture", () => {
|
|||
(mkUniform(low, high), uniformWeight),
|
||||
(mkLognormal(mu, sigma), lognormalWeight),
|
||||
]),
|
||||
)->outputMap(FromDist(ToFloat(#Mean)))
|
||||
)->outputMap(FromDist(#ToFloat(#Mean)))
|
||||
}
|
||||
let uniformMean = (low +. high) /. 2.0
|
||||
let lognormalMean = mu +. sigma ** 2.0 /. 2.0
|
||||
|
|
|
@ -3,6 +3,7 @@ open Expect
|
|||
open TestHelpers
|
||||
open GenericDist_Fixtures
|
||||
|
||||
let klDivergence = DistributionOperation.Constructors.LogScore.distEstimateDistAnswer(~env)
|
||||
// integral from low to high of 1 / (high - low) log(normal(mean, stdev)(x) / (1 / (high - low))) dx
|
||||
let klNormalUniform = (mean, stdev, low, high): float =>
|
||||
-.Js.Math.log((high -. low) /. Js.Math.sqrt(2.0 *. MagicNumbers.Math.pi *. stdev ** 2.0)) +.
|
||||
|
@ -11,8 +12,6 @@ let klNormalUniform = (mean, stdev, low, high): float =>
|
|||
(mean ** 2.0 -. (high +. low) *. mean +. (low ** 2.0 +. high *. low +. high ** 2.0) /. 3.0)
|
||||
|
||||
describe("klDivergence: continuous -> continuous -> float", () => {
|
||||
let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
|
||||
|
||||
let testUniform = (lowAnswer, highAnswer, lowPrediction, highPrediction) => {
|
||||
test("of two uniforms is equal to the analytic expression", () => {
|
||||
let answer =
|
||||
|
@ -58,7 +57,7 @@ describe("klDivergence: continuous -> continuous -> float", () => {
|
|||
let kl = E.R.liftJoin2(klDivergence, prediction, answer)
|
||||
|
||||
switch kl {
|
||||
| Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=3)
|
||||
| Ok(kl') => kl'->expect->toBeSoCloseTo(analyticalKl, ~digits=2)
|
||||
| Error(err) => {
|
||||
Js.Console.log(DistributionTypes.Error.toString(err))
|
||||
raise(KlFailed)
|
||||
|
@ -82,7 +81,6 @@ describe("klDivergence: continuous -> continuous -> float", () => {
|
|||
})
|
||||
|
||||
describe("klDivergence: discrete -> discrete -> float", () => {
|
||||
let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
|
||||
let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
|
||||
let a' = [(point1, 1e0), (point2, 1e0)]->mixture->run
|
||||
let b' = [(point1, 1e0), (point2, 1e0), (point3, 1e0)]->mixture->run
|
||||
|
@ -117,7 +115,6 @@ describe("klDivergence: discrete -> discrete -> float", () => {
|
|||
})
|
||||
|
||||
describe("klDivergence: mixed -> mixed -> float", () => {
|
||||
let klDivergence = DistributionOperation.Constructors.klDivergence(~env)
|
||||
let mixture' = a => DistributionTypes.DistributionOperation.Mixture(a)
|
||||
let mixture = a => {
|
||||
let dist' = a->mixture'->run
|
||||
|
@ -189,15 +186,15 @@ describe("combineAlongSupportOfSecondArgument0", () => {
|
|||
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
|
||||
s,
|
||||
))
|
||||
let answerWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), answer)
|
||||
let predictionWrapped = E.R.fmap(a => run(FromDist(ToDist(ToPointSet), a)), prediction)
|
||||
let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
|
||||
let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)
|
||||
|
||||
let interpolator = XYShape.XtoY.continuousInterpolator(#Stepwise, #UseZero)
|
||||
let integrand = PointSetDist_Scoring.KLDivergence.integrand
|
||||
let integrand = PointSetDist_Scoring.WithDistAnswer.integrand
|
||||
|
||||
let result = switch (answerWrapped, predictionWrapped) {
|
||||
| (Ok(Dist(PointSet(Continuous(a)))), Ok(Dist(PointSet(Continuous(b))))) =>
|
||||
Some(combineAlongSupportOfSecondArgument(integrand, interpolator, a.xyShape, b.xyShape))
|
||||
Some(combineAlongSupportOfSecondArgument(interpolator, integrand, a.xyShape, b.xyShape))
|
||||
| _ => None
|
||||
}
|
||||
result
|
|
@ -0,0 +1,68 @@
|
|||
open Jest
|
||||
open Expect
|
||||
open TestHelpers
|
||||
open GenericDist_Fixtures
|
||||
exception ScoreFailed
|
||||
|
||||
describe("WithScalarAnswer: discrete -> scalar -> score", () => {
|
||||
let mixture = a => DistributionTypes.DistributionOperation.Mixture(a)
|
||||
let pointA = mkDelta(3.0)
|
||||
let pointB = mkDelta(2.0)
|
||||
let pointC = mkDelta(1.0)
|
||||
let pointD = mkDelta(0.0)
|
||||
|
||||
test("score: agrees with analytical answer when finite", () => {
|
||||
let prediction' = [(pointA, 0.25), (pointB, 0.25), (pointC, 0.25), (pointD, 0.25)]->mixture->run
|
||||
let prediction = switch prediction' {
|
||||
| Dist(PointSet(p)) => p
|
||||
| _ => raise(MixtureFailed)
|
||||
}
|
||||
|
||||
let answer = 2.0 // So this is: assigning 100% probability to 2.0
|
||||
let result = PointSetDist_Scoring.WithScalarAnswer.score(~estimate=prediction, ~answer)
|
||||
switch result {
|
||||
| Ok(x) => x->expect->toEqual(-.Js.Math.log(0.25 /. 1.0))
|
||||
| _ => raise(ScoreFailed)
|
||||
}
|
||||
})
|
||||
|
||||
test("score: agrees with analytical answer when finite", () => {
|
||||
let prediction' = [(pointA, 0.75), (pointB, 0.25)]->mixture->run
|
||||
let prediction = switch prediction' {
|
||||
| Dist(PointSet(p)) => p
|
||||
| _ => raise(MixtureFailed)
|
||||
}
|
||||
let answer = 3.0 // So this is: assigning 100% probability to 2.0
|
||||
let result = PointSetDist_Scoring.WithScalarAnswer.score(~estimate=prediction, ~answer)
|
||||
switch result {
|
||||
| Ok(x) => x->expect->toEqual(-.Js.Math.log(0.75 /. 1.0))
|
||||
| _ => raise(ScoreFailed)
|
||||
}
|
||||
})
|
||||
|
||||
test("scoreWithPrior: agrees with analytical answer when finite", () => {
|
||||
let prior' = [(pointA, 0.5), (pointB, 0.5)]->mixture->run
|
||||
let prediction' = [(pointA, 0.75), (pointB, 0.25)]->mixture->run
|
||||
|
||||
let prediction = switch prediction' {
|
||||
| Dist(PointSet(p)) => p
|
||||
| _ => raise(MixtureFailed)
|
||||
}
|
||||
|
||||
let prior = switch prior' {
|
||||
| Dist(PointSet(p)) => p
|
||||
| _ => raise(MixtureFailed)
|
||||
}
|
||||
|
||||
let answer = 3.0 // So this is: assigning 100% probability to 2.0
|
||||
let result = PointSetDist_Scoring.WithScalarAnswer.scoreWithPrior(
|
||||
~estimate=prediction,
|
||||
~answer,
|
||||
~prior,
|
||||
)
|
||||
switch result {
|
||||
| Ok(x) => x->expect->toEqual(-.Js.Math.log(0.75 /. 1.0) -. -.Js.Math.log(0.5 /. 1.0))
|
||||
| _ => raise(ScoreFailed)
|
||||
}
|
||||
})
|
||||
})
|
|
@ -8,34 +8,34 @@ let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean,
|
|||
describe("(Symbolic) normalize", () => {
|
||||
testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
|
||||
let normalValue = mkNormal(mean, 2.0)
|
||||
let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
|
||||
let normalizedValue = run(FromDist(#ToDist(Normalize), normalValue))
|
||||
normalizedValue->unpackDist->expect->toEqual(normalValue)
|
||||
})
|
||||
})
|
||||
|
||||
describe("(Symbolic) mean", () => {
|
||||
testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
|
||||
run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
|
||||
run(FromDist(#ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
|
||||
})
|
||||
|
||||
Skip.test("of normal(0, -1) (it NaNs out)", () => {
|
||||
run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
|
||||
run(FromDist(#ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
|
||||
})
|
||||
|
||||
test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
|
||||
run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
|
||||
run(FromDist(#ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
|
||||
})
|
||||
|
||||
testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
|
||||
let meanValue = run(
|
||||
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
|
||||
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
|
||||
)
|
||||
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
|
||||
})
|
||||
|
||||
test("of a cauchy distribution", () => {
|
||||
let meanValue = run(
|
||||
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
|
||||
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
|
||||
)
|
||||
meanValue->unpackFloat->expect->toBeSoCloseTo(1.0098094001641797, ~digits=5)
|
||||
//-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
|
||||
|
@ -48,7 +48,7 @@ describe("(Symbolic) mean", () => {
|
|||
let (low, medium, high) = tup
|
||||
let meanValue = run(
|
||||
FromDist(
|
||||
ToFloat(#Mean),
|
||||
#ToFloat(#Mean),
|
||||
DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
|
||||
),
|
||||
)
|
||||
|
@ -63,7 +63,7 @@ describe("(Symbolic) mean", () => {
|
|||
tup => {
|
||||
let (alpha, beta) = tup
|
||||
let meanValue = run(
|
||||
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
|
||||
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
|
||||
)
|
||||
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
|
||||
},
|
||||
|
@ -72,7 +72,7 @@ describe("(Symbolic) mean", () => {
|
|||
// TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
|
||||
test("of beta(0, 0)", () => {
|
||||
let meanValue = run(
|
||||
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
|
||||
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
|
||||
)
|
||||
meanValue->unpackFloat->expect->ExpectJs.toBeFalsy
|
||||
})
|
||||
|
@ -85,7 +85,7 @@ describe("(Symbolic) mean", () => {
|
|||
let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
|
||||
let meanValue =
|
||||
betaDistribution->E.R2.fmap(d =>
|
||||
run(FromDist(ToFloat(#Mean), d->DistributionTypes.Symbolic))
|
||||
run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic))
|
||||
)
|
||||
switch meanValue {
|
||||
| Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)
|
||||
|
@ -100,7 +100,7 @@ describe("(Symbolic) mean", () => {
|
|||
tup => {
|
||||
let (mu, sigma) = tup
|
||||
let meanValue = run(
|
||||
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
|
||||
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
|
||||
)
|
||||
meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
|
||||
},
|
||||
|
@ -112,14 +112,14 @@ describe("(Symbolic) mean", () => {
|
|||
tup => {
|
||||
let (low, high) = tup
|
||||
let meanValue = run(
|
||||
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
|
||||
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
|
||||
)
|
||||
meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
|
||||
},
|
||||
)
|
||||
|
||||
test("of a float", () => {
|
||||
let meanValue = run(FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
|
||||
let meanValue = run(FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
|
||||
meanValue->unpackFloat->expect->toBeCloseTo(7.7)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -21,6 +21,10 @@ describe("builtin", () => {
|
|||
"addOne(t)=t+1; toList(mapSamples(fromSamples([1,2,3,4,5,6]), addOne))",
|
||||
"Ok([2,3,4,5,6,7])",
|
||||
)
|
||||
testEval(
|
||||
"toList(mapSamplesN([fromSamples([1,2,3,4,5,6]), fromSamples([6, 5, 4, 3, 2, 1])], {|x| x[0] > x[1] ? x[0] : x[1]}))",
|
||||
"Ok([6,5,4,4,5,6])",
|
||||
)
|
||||
})
|
||||
|
||||
describe("builtin exception", () => {
|
||||
|
|
|
@ -2,15 +2,15 @@
|
|||
module ErrorValue = Reducer_ErrorValue
|
||||
module ExternalExpressionValue = ReducerInterface.ExternalExpressionValue
|
||||
module InternalExpressionValue = ReducerInterface.InternalExpressionValue
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
|
||||
let removeDefaultsInternal = (iev: InternalExpressionValue.t) => {
|
||||
switch iev {
|
||||
| InternalExpressionValue.IEvModule(nameSpace) =>
|
||||
Module.removeOther(
|
||||
| InternalExpressionValue.IEvBindings(nameSpace) =>
|
||||
Bindings.removeOther(
|
||||
nameSpace,
|
||||
ReducerInterface.StdLib.internalStdLib,
|
||||
)->InternalExpressionValue.IEvModule
|
||||
)->InternalExpressionValue.IEvBindings
|
||||
| value => value
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,100 +0,0 @@
|
|||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Module
|
||||
module ErrorValue = Reducer_ErrorValue
|
||||
|
||||
open Jest
|
||||
open Expect
|
||||
|
||||
// ----------------------
|
||||
// --- Start of Module File
|
||||
// ----------------------
|
||||
|
||||
module FooImplementation = {
|
||||
// As this is a Rescript module, functions can use other functions in this module
|
||||
// and in other stdLib modules implemented this way.
|
||||
// Embedding function definitions in to switch statements is a bad practice
|
||||
// - to reduce line count or to
|
||||
let fooNumber = 0.0
|
||||
let fooString = "Foo String"
|
||||
let fooBool = true
|
||||
let makeFoo = (a: string, b: string, _environment): string => `I am ${a}-foo and I am ${b}-foo`
|
||||
let makeBar = (a: float, b: float, _environment): string =>
|
||||
`I am ${a->Js.Float.toString}-bar and I am ${b->Js.Float.toString}-bar`
|
||||
}
|
||||
|
||||
// There is a potential for type modules to define lift functions
|
||||
// for their own type to get rid of switch statements.
|
||||
module FooFFI = {
|
||||
let makeFoo: ExpressionT.optionFfiFn = (args: array<InternalExpressionValue.t>, environment) => {
|
||||
switch args {
|
||||
| [IEvString(a), IEvString(b)] => FooImplementation.makeFoo(a, b, environment)->IEvString->Some
|
||||
| _ => None
|
||||
}
|
||||
}
|
||||
let makeBar: ExpressionT.optionFfiFn = (args: array<InternalExpressionValue.t>, environment) =>
|
||||
switch args {
|
||||
| [IEvNumber(a), IEvNumber(b)] => FooImplementation.makeBar(a, b, environment)->IEvString->Some
|
||||
| _ => None
|
||||
}
|
||||
}
|
||||
|
||||
let fooModule: Module.t =
|
||||
Module.emptyStdLib
|
||||
->Module.defineNumber("fooNumber", FooImplementation.fooNumber)
|
||||
->Module.defineString("fooString", FooImplementation.fooString)
|
||||
->Module.defineBool("fooBool", FooImplementation.fooBool)
|
||||
->Module.defineFunction("makeFoo", FooFFI.makeFoo)
|
||||
->Module.defineFunction("makeBar", FooFFI.makeBar)
|
||||
|
||||
let makeBindings = (prevBindings: Bindings.t): Bindings.t =>
|
||||
prevBindings->Module.defineModule("Foo", fooModule)
|
||||
|
||||
// ----------------------
|
||||
// --- End of Module File
|
||||
// ----------------------
|
||||
|
||||
let stdLibWithFoo = Bindings.emptyBindings->makeBindings
|
||||
let evalWithFoo = sourceCode =>
|
||||
Reducer_Expression.parse(sourceCode)->Belt.Result.flatMap(expr =>
|
||||
Reducer_Expression.reduceExpression(
|
||||
expr,
|
||||
stdLibWithFoo,
|
||||
InternalExpressionValue.defaultEnvironment,
|
||||
)
|
||||
)
|
||||
let evalToStringResultWithFoo = sourceCode =>
|
||||
evalWithFoo(sourceCode)->InternalExpressionValue.toStringResult
|
||||
|
||||
describe("Module", () => {
|
||||
test("fooNumber", () => {
|
||||
let result = evalToStringResultWithFoo("Foo.fooNumber")
|
||||
expect(result)->toEqual("Ok(0)")
|
||||
})
|
||||
test("fooString", () => {
|
||||
let result = evalToStringResultWithFoo("Foo.fooString")
|
||||
expect(result)->toEqual("Ok('Foo String')")
|
||||
})
|
||||
test("fooBool", () => {
|
||||
let result = evalToStringResultWithFoo("Foo.fooBool")
|
||||
expect(result)->toEqual("Ok(true)")
|
||||
})
|
||||
test("fooBool", () => {
|
||||
let result = evalToStringResultWithFoo("Foo.fooBool")
|
||||
expect(result)->toEqual("Ok(true)")
|
||||
})
|
||||
test("makeFoo", () => {
|
||||
let result = evalToStringResultWithFoo("Foo.makeFoo('a', 'b')")
|
||||
expect(result)->toEqual("Ok('I am a-foo and I am b-foo')")
|
||||
})
|
||||
test("makeFoo wrong arguments", () => {
|
||||
let result = evalToStringResultWithFoo("Foo.makeFoo(1, 2)")
|
||||
// Notice the error with types
|
||||
expect(result)->toEqual("Error(Function not found: makeFoo(Number,Number))")
|
||||
})
|
||||
test("makeBar", () => {
|
||||
let result = evalToStringResultWithFoo("Foo.makeBar(1, 2)")
|
||||
expect(result)->toEqual("Ok('I am 1-bar and I am 2-bar')")
|
||||
})
|
||||
})
|
|
@ -236,7 +236,8 @@ describe("Peggy parse", () => {
|
|||
testParse("1m+2cm", "{(::add (::fromUnit_m 1) (::fromUnit_cm 2))}")
|
||||
})
|
||||
describe("Module", () => {
|
||||
testParse("Math.pi", "{(::$_atIndex_$ @Math 'pi')}")
|
||||
testParse("x", "{:x}")
|
||||
testParse("Math.pi", "{:Math.pi}")
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ describe("Peggy parse type", () => {
|
|||
"{(::$_typeOf_$ :f (::$_typeFunction_$ (::$_constructArray_$ (#number #number #number))))}",
|
||||
)
|
||||
})
|
||||
describe("high priority modifier", () => {
|
||||
describe("high priority contract", () => {
|
||||
testParse(
|
||||
"answer: number<-min<-max(100)|string",
|
||||
"{(::$_typeOf_$ :answer (::$_typeOr_$ (::$_constructArray_$ ((::$_typeModifier_max_$ (::$_typeModifier_min_$ #number) 100) #string))))}",
|
||||
|
@ -30,7 +30,7 @@ describe("Peggy parse type", () => {
|
|||
"{(::$_typeOf_$ :answer (::$_typeModifier_memberOf_$ #number (::$_constructArray_$ (1 3 5))))}",
|
||||
)
|
||||
})
|
||||
describe("low priority modifier", () => {
|
||||
describe("low priority contract", () => {
|
||||
testParse(
|
||||
"answer: number | string $ opaque",
|
||||
"{(::$_typeOf_$ :answer (::$_typeModifier_opaque_$ (::$_typeOr_$ (::$_constructArray_$ (#number #string)))))}",
|
||||
|
@ -63,14 +63,14 @@ describe("Peggy parse type", () => {
|
|||
"{(::$_typeOf_$ :weekend (::$_typeOr_$ (::$_constructArray_$ ((::$_typeConstructor_$ #Saturday (::$_constructArray_$ ())) (::$_typeConstructor_$ #Sunday (::$_constructArray_$ ()))))))}",
|
||||
)
|
||||
})
|
||||
describe("type paranthesis", () => {
|
||||
//$ is introduced to avoid paranthesis
|
||||
describe("type parenthesis", () => {
|
||||
//$ is introduced to avoid parenthesis
|
||||
testParse(
|
||||
"answer: (number|string)<-opaque",
|
||||
"{(::$_typeOf_$ :answer (::$_typeModifier_opaque_$ (::$_typeOr_$ (::$_constructArray_$ (#number #string)))))}",
|
||||
)
|
||||
})
|
||||
describe("squiggle expressions in type modifiers", () => {
|
||||
describe("squiggle expressions in type contracts", () => {
|
||||
testParse(
|
||||
"odds1 = [1,3,5]; odds2 = [7, 9]; type odds = number<-memberOf(concat(odds1, odds2))",
|
||||
"{:odds1 = {(::$_constructArray_$ (1 3 5))}; :odds2 = {(::$_constructArray_$ (7 9))}; (::$_typeAlias_$ #odds (::$_typeModifier_memberOf_$ #number (::concat :odds1 :odds2)))}",
|
||||
|
|
|
@ -4,6 +4,7 @@ module ExpressionValue = ReducerInterface.InternalExpressionValue
|
|||
module Parse = Reducer_Peggy_Parse
|
||||
module Result = Belt.Result
|
||||
module ToExpression = Reducer_Peggy_ToExpression
|
||||
module Bindings = Reducer_Bindings
|
||||
|
||||
open Jest
|
||||
open Expect
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
module Bindings = Reducer_Bindings
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
|
||||
open Jest
|
||||
open Reducer_Peggy_TestHelpers
|
||||
|
||||
describe("Peggy to Expression", () => {
|
||||
describe("literals operators parenthesis", () => {
|
||||
// Note that there is always an outer block. Otherwise, external bindings are ignrored at the first statement
|
||||
// Note that there is always an outer block. Otherwise, external bindings are ignored at the first statement
|
||||
testToExpression("1", "{1}", ~v="1", ())
|
||||
testToExpression("'hello'", "{'hello'}", ~v="'hello'", ())
|
||||
testToExpression("true", "{true}", ~v="true", ())
|
||||
|
@ -183,6 +186,14 @@ describe("Peggy to Expression", () => {
|
|||
})
|
||||
|
||||
describe("module", () => {
|
||||
testToExpression("Math.pi", "{(:$_atIndex_$ :Math 'pi')}", ~v="3.141592653589793", ())
|
||||
// testToExpression("Math.pi", "{:Math.pi}", ~v="3.141592653589793", ())
|
||||
// Only.test("stdlibrary", () => {
|
||||
// ReducerInterface_StdLib.internalStdLib
|
||||
// ->IEvBindings
|
||||
// ->InternalExpressionValue.toString
|
||||
// ->expect
|
||||
// ->toBe("")
|
||||
// })
|
||||
testToExpression("Math.pi", "{:Math.pi}", ~v="3.141592653589793", ())
|
||||
})
|
||||
})
|
||||
|
|
|
@ -40,7 +40,7 @@ describe("Peggy Types to Expression", () => {
|
|||
(),
|
||||
)
|
||||
})
|
||||
describe("high priority modifier", () => {
|
||||
describe("high priority contract", () => {
|
||||
testToExpression(
|
||||
"answer: number<-min(1)<-max(100)|string",
|
||||
"{(:$_typeOf_$ :answer (:$_typeOr_$ (:$_constructArray_$ ((:$_typeModifier_max_$ (:$_typeModifier_min_$ #number 1) 100) #string))))}",
|
||||
|
@ -78,7 +78,7 @@ describe("Peggy Types to Expression", () => {
|
|||
(),
|
||||
)
|
||||
})
|
||||
describe("low priority modifier", () => {
|
||||
describe("low priority contract", () => {
|
||||
testToExpression(
|
||||
"answer: number | string $ opaque",
|
||||
"{(:$_typeOf_$ :answer (:$_typeModifier_opaque_$ (:$_typeOr_$ (:$_constructArray_$ (#number #string)))))}",
|
||||
|
@ -86,7 +86,7 @@ describe("Peggy Types to Expression", () => {
|
|||
(),
|
||||
)
|
||||
})
|
||||
describe("squiggle expressions in type modifiers", () => {
|
||||
describe("squiggle expressions in type contracts", () => {
|
||||
testToExpression(
|
||||
"odds1 = [1,3,5]; odds2 = [7, 9]; type odds = number<-memberOf(concat(odds1, odds2))",
|
||||
"{(:$_let_$ :odds1 {(:$_constructArray_$ (1 3 5))}); (:$_let_$ :odds2 {(:$_constructArray_$ (7 9))}); (:$_typeAlias_$ #odds (:$_typeModifier_memberOf_$ #number (:concat :odds1 :odds2)))}",
|
||||
|
|
|
@ -8,7 +8,7 @@ module InternalExpressionValue = ReducerInterface.InternalExpressionValue
|
|||
module ExpressionWithContext = Reducer_ExpressionWithContext
|
||||
module Macro = Reducer_Expression_Macro
|
||||
module T = Reducer_Expression_T
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
|
||||
let testMacro_ = (
|
||||
tester,
|
||||
|
@ -16,7 +16,7 @@ let testMacro_ = (
|
|||
expr: T.expression,
|
||||
expectedCode: string,
|
||||
) => {
|
||||
let bindings = Module.fromArray(bindArray)
|
||||
let bindings = Bindings.fromArray(bindArray)
|
||||
tester(expr->T.toString, () =>
|
||||
expr
|
||||
->Macro.expandMacroCall(
|
||||
|
@ -36,7 +36,7 @@ let testMacroEval_ = (
|
|||
expr: T.expression,
|
||||
expectedValue: string,
|
||||
) => {
|
||||
let bindings = Module.fromArray(bindArray)
|
||||
let bindings = Bindings.fromArray(bindArray)
|
||||
tester(expr->T.toString, () =>
|
||||
expr
|
||||
->Macro.doMacroCall(
|
||||
|
|
|
@ -0,0 +1,52 @@
|
|||
module Expression = Reducer_Expression
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module Bindings = Reducer_Bindings
|
||||
module T = Reducer_Type_T
|
||||
module TypeCompile = Reducer_Type_Compile
|
||||
|
||||
open Jest
|
||||
open Expect
|
||||
|
||||
let myIevEval = (aTypeSourceCode: string) =>
|
||||
TypeCompile.ievFromTypeExpression(aTypeSourceCode, Expression.reduceExpression)
|
||||
let myIevEvalToString = (aTypeSourceCode: string) =>
|
||||
myIevEval(aTypeSourceCode)->InternalExpressionValue.toStringResult
|
||||
|
||||
let myIevExpectEqual = (aTypeSourceCode, answer) =>
|
||||
expect(myIevEvalToString(aTypeSourceCode))->toEqual(answer)
|
||||
|
||||
let myIevTest = (test, aTypeSourceCode, answer) =>
|
||||
test(aTypeSourceCode, () => myIevExpectEqual(aTypeSourceCode, answer))
|
||||
|
||||
let myTypeEval = (aTypeSourceCode: string) =>
|
||||
TypeCompile.fromTypeExpression(aTypeSourceCode, Expression.reduceExpression)
|
||||
let myTypeEvalToString = (aTypeSourceCode: string) => myTypeEval(aTypeSourceCode)->T.toStringResult
|
||||
|
||||
let myTypeExpectEqual = (aTypeSourceCode, answer) =>
|
||||
expect(myTypeEvalToString(aTypeSourceCode))->toEqual(answer)
|
||||
|
||||
let myTypeTest = (test, aTypeSourceCode, answer) =>
|
||||
test(aTypeSourceCode, () => myTypeExpectEqual(aTypeSourceCode, answer))
|
||||
|
||||
// | ItTypeIdentifier(string)
|
||||
myTypeTest(test, "number", "number")
|
||||
myTypeTest(test, "(number)", "number")
|
||||
// | ItModifiedType({modifiedType: iType})
|
||||
myIevTest(test, "number<-min(0)", "Ok({min: 0,typeIdentifier: #number,typeTag: 'typeIdentifier'})")
|
||||
myTypeTest(test, "number<-min(0)", "number<-min(0)")
|
||||
// | ItTypeOr({typeOr: array<iType>})
|
||||
myTypeTest(test, "number | string", "(number | string)")
|
||||
// | ItTypeFunction({inputs: array<iType>, output: iType})
|
||||
myTypeTest(test, "number => number => number", "(number => number => number)")
|
||||
// | ItTypeArray({element: iType})
|
||||
myIevTest(test, "[number]", "Ok({element: #number,typeTag: 'typeArray'})")
|
||||
myTypeTest(test, "[number]", "[number]")
|
||||
// | ItTypeTuple({elements: array<iType>})
|
||||
myTypeTest(test, "[number, string]", "[number, string]")
|
||||
// | ItTypeRecord({properties: Belt.Map.String.t<iType>})
|
||||
myIevTest(
|
||||
test,
|
||||
"{age: number, name: string}",
|
||||
"Ok({properties: {age: #number,name: #string},typeTag: 'typeRecord'})",
|
||||
)
|
||||
myTypeTest(test, "{age: number, name: string}", "{age: number, name: string}")
|
|
@ -0,0 +1,41 @@
|
|||
module Expression = Reducer_Expression
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
module ErrorValue = Reducer_ErrorValue
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module Bindings = Reducer_Bindings
|
||||
module T = Reducer_Type_T
|
||||
module TypeChecker = Reducer_Type_TypeChecker
|
||||
|
||||
open Jest
|
||||
open Expect
|
||||
|
||||
let checkArgumentsSourceCode = (aTypeSourceCode: string, sourceCode: string): result<
|
||||
'v,
|
||||
ErrorValue.t,
|
||||
> => {
|
||||
let reducerFn = Expression.reduceExpression
|
||||
let rResult =
|
||||
Reducer.parse(sourceCode)->Belt.Result.flatMap(expr =>
|
||||
reducerFn(expr, Bindings.emptyBindings, InternalExpressionValue.defaultEnvironment)
|
||||
)
|
||||
rResult->Belt.Result.flatMap(result =>
|
||||
switch result {
|
||||
| IEvArray(args) => TypeChecker.checkArguments(aTypeSourceCode, args, reducerFn)
|
||||
| _ => Js.Exn.raiseError("Arguments has to be an array")
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
let myCheckArguments = (aTypeSourceCode: string, sourceCode: string): string =>
|
||||
switch checkArgumentsSourceCode(aTypeSourceCode, sourceCode) {
|
||||
| Ok(_) => "Ok"
|
||||
| Error(error) => ErrorValue.errorToString(error)
|
||||
}
|
||||
|
||||
let myCheckArgumentsExpectEqual = (aTypeSourceCode, sourceCode, answer) =>
|
||||
expect(myCheckArguments(aTypeSourceCode, sourceCode))->toEqual(answer)
|
||||
|
||||
let myCheckArgumentsTest = (test, aTypeSourceCode, sourceCode, answer) =>
|
||||
test(aTypeSourceCode, () => myCheckArgumentsExpectEqual(aTypeSourceCode, sourceCode, answer))
|
||||
|
||||
myCheckArgumentsTest(test, "number=>number=>number", "[1,2]", "Ok")
|
|
@ -0,0 +1,70 @@
|
|||
module Expression = Reducer_Expression
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
module ErrorValue = Reducer_ErrorValue
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module Bindings = Reducer_Bindings
|
||||
module T = Reducer_Type_T
|
||||
module TypeChecker = Reducer_Type_TypeChecker
|
||||
|
||||
open Jest
|
||||
open Expect
|
||||
|
||||
// In development, you are expected to use TypeChecker.isTypeOf(aTypeSourceCode, result, reducerFn).
|
||||
// isTypeOfSourceCode is written to use strings instead of expression values.
|
||||
|
||||
let isTypeOfSourceCode = (aTypeSourceCode: string, sourceCode: string): result<
|
||||
'v,
|
||||
ErrorValue.t,
|
||||
> => {
|
||||
let reducerFn = Expression.reduceExpression
|
||||
let rResult =
|
||||
Reducer.parse(sourceCode)->Belt.Result.flatMap(expr =>
|
||||
reducerFn(expr, Bindings.emptyBindings, InternalExpressionValue.defaultEnvironment)
|
||||
)
|
||||
rResult->Belt.Result.flatMap(result => TypeChecker.isTypeOf(aTypeSourceCode, result, reducerFn))
|
||||
}
|
||||
|
||||
let myTypeCheck = (aTypeSourceCode: string, sourceCode: string): string =>
|
||||
switch isTypeOfSourceCode(aTypeSourceCode, sourceCode) {
|
||||
| Ok(_) => "Ok"
|
||||
| Error(error) => ErrorValue.errorToString(error)
|
||||
}
|
||||
|
||||
let myTypeCheckExpectEqual = (aTypeSourceCode, sourceCode, answer) =>
|
||||
expect(myTypeCheck(aTypeSourceCode, sourceCode))->toEqual(answer)
|
||||
|
||||
let myTypeCheckTest = (test, aTypeSourceCode, sourceCode, answer) =>
|
||||
test(aTypeSourceCode, () => myTypeCheckExpectEqual(aTypeSourceCode, sourceCode, answer))
|
||||
|
||||
myTypeCheckTest(test, "number", "1", "Ok")
|
||||
myTypeCheckTest(test, "number", "'2'", "Expected type: number but got: '2'")
|
||||
myTypeCheckTest(test, "string", "3", "Expected type: string but got: 3")
|
||||
myTypeCheckTest(test, "string", "'a'", "Ok")
|
||||
myTypeCheckTest(test, "[number]", "[1,2,3]", "Ok")
|
||||
myTypeCheckTest(test, "[number]", "['a','a','a']", "Expected type: number but got: 'a'")
|
||||
myTypeCheckTest(test, "[number]", "[1,'a',3]", "Expected type: number but got: 'a'")
|
||||
myTypeCheckTest(test, "[number, string]", "[1,'a']", "Ok")
|
||||
myTypeCheckTest(test, "[number, string]", "[1, 2]", "Expected type: string but got: 2")
|
||||
myTypeCheckTest(
|
||||
test,
|
||||
"[number, string, string]",
|
||||
"[1,'a']",
|
||||
"Expected type: [number, string, string] but got: [1,'a']",
|
||||
)
|
||||
myTypeCheckTest(
|
||||
test,
|
||||
"[number, string]",
|
||||
"[1,'a', 3]",
|
||||
"Expected type: [number, string] but got: [1,'a',3]",
|
||||
)
|
||||
myTypeCheckTest(test, "{age: number, name: string}", "{age: 1, name: 'a'}", "Ok")
|
||||
myTypeCheckTest(
|
||||
test,
|
||||
"{age: number, name: string}",
|
||||
"{age: 1, name: 'a', job: 'IT'}",
|
||||
"Expected type: {age: number, name: string} but got: {age: 1,job: 'IT',name: 'a'}",
|
||||
)
|
||||
myTypeCheckTest(test, "number | string", "1", "Ok")
|
||||
myTypeCheckTest(test, "date | string", "1", "Expected type: (date | string) but got: 1")
|
||||
myTypeCheckTest(test, "number<-min(10)", "10", "Ok")
|
||||
myTypeCheckTest(test, "number<-min(10)", "0", "Expected type: number<-min(10) but got: 0")
|
|
@ -10,5 +10,5 @@ describe("Evaluate ternary operator", () => {
|
|||
testEvalToBe("false ? 'YES' : 'NO'", "Ok('NO')")
|
||||
testEvalToBe("2 > 1 ? 'YES' : 'NO'", "Ok('YES')")
|
||||
testEvalToBe("2 <= 1 ? 'YES' : 'NO'", "Ok('NO')")
|
||||
testEvalToBe("1+1 ? 'YES' : 'NO'", "Error(Expected type: Boolean)")
|
||||
testEvalToBe("1+1 ? 'YES' : 'NO'", "Error(Expected type: Boolean but got: )")
|
||||
})
|
||||
|
|
|
@ -0,0 +1,80 @@
open Jest
open Expect
open Reducer_TestHelpers

let expectEvalToBeOk = (expr: string) =>
  Reducer.evaluate(expr)->Reducer_Helpers.rRemoveDefaultsExternal->E.R.isOk->expect->toBe(true)

let registry = FunctionRegistry_Library.registry
let examples = E.A.to_list(FunctionRegistry_Core.Registry.allExamples(registry))

describe("FunctionRegistry Library", () => {
  describe("Regular tests", () => {
    testEvalToBe("List.make(3, 'HI')", "Ok(['HI','HI','HI'])")
    testEvalToBe("make(3, 'HI')", "Error(Function not found: make(Number,String))")
    testEvalToBe("List.upTo(1,3)", "Ok([1,2,3])")
    testEvalToBe("List.first([3,5,8])", "Ok(3)")
    testEvalToBe("List.last([3,5,8])", "Ok(8)")
    testEvalToBe("List.reverse([3,5,8])", "Ok([8,5,3])")
    testEvalToBe("Dist.normal(5,2)", "Ok(Normal(5,2))")
    testEvalToBe("normal(5,2)", "Ok(Normal(5,2))")
    testEvalToBe("normal({mean:5,stdev:2})", "Ok(Normal(5,2))")
    testEvalToBe("-2 to 4", "Ok(Normal(1,1.8238704957353074))")
    testEvalToBe("pointMass(5)", "Ok(PointMass(5))")
    testEvalToBe("Number.floor(5.5)", "Ok(5)")
    testEvalToBe("Number.ceil(5.5)", "Ok(6)")
    testEvalToBe("floor(5.5)", "Ok(5)")
    testEvalToBe("ceil(5.5)", "Ok(6)")
    testEvalToBe("Number.abs(5.5)", "Ok(5.5)")
    testEvalToBe("abs(5.5)", "Ok(5.5)")
    testEvalToBe("Number.exp(10)", "Ok(22026.465794806718)")
    testEvalToBe("Number.log10(10)", "Ok(1)")
    testEvalToBe("Number.log2(10)", "Ok(3.321928094887362)")
    testEvalToBe("Number.sum([2,5,3])", "Ok(10)")
    testEvalToBe("sum([2,5,3])", "Ok(10)")
    testEvalToBe("Number.product([2,5,3])", "Ok(30)")
    testEvalToBe("Number.min([2,5,3])", "Ok(2)")
    testEvalToBe("Number.max([2,5,3])", "Ok(5)")
    testEvalToBe("Number.mean([0,5,10])", "Ok(5)")
    testEvalToBe("Number.geomean([1,5,18])", "Ok(4.481404746557164)")
    testEvalToBe("Number.stdev([0,5,10,15])", "Ok(5.5901699437494745)")
    testEvalToBe("Number.variance([0,5,10,15])", "Ok(31.25)")
    testEvalToBe("Number.sort([10,0,15,5])", "Ok([0,5,10,15])")
    testEvalToBe("Number.cumsum([1,5,3])", "Ok([1,6,9])")
    testEvalToBe("Number.cumprod([1,5,3])", "Ok([1,5,15])")
    testEvalToBe("Number.diff([1,5,3])", "Ok([4,-2])")
    testEvalToBe(
      "Dist.logScore({estimate: normal(5,2), answer: normal(5.2,1), prior: normal(5.5,3)})",
      "Ok(-0.33591375663884876)",
    )
    testEvalToBe(
      "Dist.logScore({estimate: normal(5,2), answer: normal(5.2,1)})",
      "Ok(0.32244107041564646)",
    )
    testEvalToBe("Dist.logScore({estimate: normal(5,2), answer: 4.5})", "Ok(1.6433360626394853)")
    testEvalToBe("Dist.klDivergence(normal(5,2), normal(5,1.5))", "Ok(0.06874342818671068)")
  })

  describe("Fn auto-testing", () => {
    testAll("tests of validity", examples, r => {
      expectEvalToBeOk(r)
    })

    testAll(
      "tests of type",
      E.A.to_list(
        FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(((fn, _)) =>
          E.O.isSome(fn.output)
        ),
      ),
      ((fn, example)) => {
        let responseType =
          example
          ->Reducer.evaluate
          ->E.R2.fmap(ReducerInterface_InternalExpressionValue.externalValueToValueType)
        let expectedOutputType = fn.output |> E.O.toExn("")
        expect(responseType)->toEqual(Ok(expectedOutputType))
      },
    )
  })
})
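For reference on the "-2 to 4" expectation above: with a non-positive lower bound, the `to` operator is (on the usual 90%-interval convention, which this diff does not restate) a normal distribution fitted to the 5th and 95th percentiles, so

\mu = \frac{-2 + 4}{2} = 1, \qquad \sigma = \frac{4 - (-2)}{2\,z_{0.95}} = \frac{6}{2 \times 1.6448536269514722} \approx 1.8238704957353074,

which is exactly the Normal(1,1.8238704957353074) the test expects.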
@ -29,7 +29,7 @@ let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Ou

let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)

let env: DistributionOperation.env = {
let env: GenericDist.env = {
  sampleCount: MagicNumbers.Environment.defaultSampleCount,
  xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
}
@ -54,7 +54,7 @@
    "chalk": "^5.0.1",
    "codecov": "^3.8.3",
    "fast-check": "^3.0.1",
    "gentype": "^4.4.0",
    "gentype": "^4.5.0",
    "jest": "^27.5.1",
    "moduleserve": "^0.9.1",
    "nyc": "^15.1.0",

@ -65,7 +65,7 @@
    "rescript-fast-check": "^1.1.1",
    "ts-jest": "^27.1.4",
    "ts-loader": "^9.3.0",
    "ts-node": "^10.8.1",
    "ts-node": "^10.9.1",
    "typescript": "^4.7.4",
    "webpack": "^5.73.0",
    "webpack-cli": "^4.10.0"
@ -4,12 +4,9 @@ type error = DistributionTypes.error

// TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.

type env = {
  sampleCount: int,
  xyPointLength: int,
}
type env = GenericDist.env

let defaultEnv = {
let defaultEnv: env = {
  sampleCount: MagicNumbers.Environment.defaultSampleCount,
  xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
}
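A minimal sketch of the memoization the TODO above mentions (illustrative only, not part of this commit; a plain string-keyed Js.Dict cache is assumed to be an adequate key scheme):

// Wrap an expensive pure function in a string-keyed cache.
let memoize = (fn: string => 'a): (string => 'a) => {
  let cache = Js.Dict.empty()
  key =>
    switch Js.Dict.get(cache, key) {
    | Some(value) => value // cache hit: reuse the earlier result
    | None => {
        let value = fn(key) // cache miss: compute once, then store
        Js.Dict.set(cache, key, value)
        value
      }
    }
}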
@ -93,7 +90,7 @@ module OutputLocal = {
  }
}

let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
  let {sampleCount, xyPointLength} = env

  let reCall = (~env=env, ~functionCallInfo=functionCallInfo, ()) => {
@ -101,14 +98,14 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
}
|
||||
|
||||
let toPointSetFn = r => {
|
||||
switch reCall(~functionCallInfo=FromDist(ToDist(ToPointSet), r), ()) {
|
||||
switch reCall(~functionCallInfo=FromDist(#ToDist(ToPointSet), r), ()) {
|
||||
| Dist(PointSet(p)) => Ok(p)
|
||||
| e => Error(OutputLocal.toErrorOrUnreachable(e))
|
||||
}
|
||||
}
|
||||
|
||||
let toSampleSetFn = r => {
|
||||
switch reCall(~functionCallInfo=FromDist(ToDist(ToSampleSet(sampleCount)), r), ()) {
|
||||
switch reCall(~functionCallInfo=FromDist(#ToDist(ToSampleSet(sampleCount)), r), ()) {
|
||||
| Dist(SampleSet(p)) => Ok(p)
|
||||
| e => Error(OutputLocal.toErrorOrUnreachable(e))
|
||||
}
|
||||
|
@ -116,13 +113,13 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
|
||||
let scaleMultiply = (r, weight) =>
|
||||
reCall(
|
||||
~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
|
||||
~functionCallInfo=FromDist(#ToDistCombination(Pointwise, #Multiply, #Float(weight)), r),
|
||||
(),
|
||||
)->OutputLocal.toDistR
|
||||
|
||||
let pointwiseAdd = (r1, r2) =>
|
||||
reCall(
|
||||
~functionCallInfo=FromDist(ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
|
||||
~functionCallInfo=FromDist(#ToDistCombination(Pointwise, #Add, #Dist(r2)), r1),
|
||||
(),
|
||||
)->OutputLocal.toDistR
|
||||
|
||||
|
@ -131,49 +128,40 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
dist: genericDist,
|
||||
): outputType => {
|
||||
let response = switch subFnName {
|
||||
| ToFloat(distToFloatOperation) =>
|
||||
| #ToFloat(distToFloatOperation) =>
|
||||
GenericDist.toFloatOperation(dist, ~toPointSetFn, ~distToFloatOperation)
|
||||
->E.R2.fmap(r => Float(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToString(ToString) => dist->GenericDist.toString->String
|
||||
| ToString(ToSparkline(bucketCount)) =>
|
||||
| #ToString(ToString) => dist->GenericDist.toString->String
|
||||
| #ToString(ToSparkline(bucketCount)) =>
|
||||
GenericDist.toSparkline(dist, ~sampleCount, ~bucketCount, ())
|
||||
->E.R2.fmap(r => String(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(Inspect) => {
|
||||
| #ToDist(Inspect) => {
|
||||
Js.log2("Console log requested: ", dist)
|
||||
Dist(dist)
|
||||
}
|
||||
| ToDist(Normalize) => dist->GenericDist.normalize->Dist
|
||||
| ToScore(KLDivergence(t2)) =>
|
||||
GenericDist.Score.klDivergence(dist, t2, ~toPointSetFn)
|
||||
->E.R2.fmap(r => Float(r))
|
||||
| #ToDist(Normalize) => dist->GenericDist.normalize->Dist
|
||||
| #ToScore(LogScore(answer, prior)) =>
|
||||
GenericDist.Score.logScore(~estimate=dist, ~answer, ~prior, ~env)
|
||||
->E.R2.fmap(s => Float(s))
|
||||
->OutputLocal.fromResult
|
||||
| ToScore(LogScore(answer, prior)) =>
|
||||
GenericDist.Score.logScoreWithPointResolution(
|
||||
~prediction=dist,
|
||||
~answer,
|
||||
~prior,
|
||||
~toPointSetFn,
|
||||
)
|
||||
->E.R2.fmap(r => Float(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
|
||||
| ToDist(Truncate(leftCutoff, rightCutoff)) =>
|
||||
| #ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
|
||||
| #ToDist(Truncate(leftCutoff, rightCutoff)) =>
|
||||
GenericDist.truncate(~toPointSetFn, ~leftCutoff, ~rightCutoff, dist, ())
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(ToSampleSet(n)) =>
|
||||
| #ToDist(ToSampleSet(n)) =>
|
||||
dist
|
||||
->GenericDist.toSampleSetDist(n)
|
||||
->E.R2.fmap(r => Dist(SampleSet(r)))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(ToPointSet) =>
|
||||
| #ToDist(ToPointSet) =>
|
||||
dist
|
||||
->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
|
||||
->E.R2.fmap(r => Dist(PointSet(r)))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
|
||||
| #ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombinationFloat(
|
||||
~toPointSetFn,
|
||||
|
@ -182,23 +170,23 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(Scale(#Multiply, f)) =>
|
||||
| #ToDist(Scale(#Multiply, f)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Multiply, ~f)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(Scale(#Logarithm, f)) =>
|
||||
| #ToDist(Scale(#Logarithm, f)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Logarithm, ~f)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDist(Scale(#Power, f)) =>
|
||||
| #ToDist(Scale(#Power, f)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination=#Power, ~f)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
|
||||
| ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
|
||||
| #ToDistCombination(Algebraic(_), _, #Float(_)) => GenDistError(NotYetImplemented)
|
||||
| #ToDistCombination(Algebraic(strategy), arithmeticOperation, #Dist(t2)) =>
|
||||
dist
|
||||
->GenericDist.algebraicCombination(
|
||||
~strategy,
|
||||
|
@ -209,12 +197,12 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDistCombination(Pointwise, algebraicCombination, #Dist(t2)) =>
|
||||
| #ToDistCombination(Pointwise, algebraicCombination, #Dist(t2)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombination(~toPointSetFn, ~algebraicCombination, ~t2)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
->OutputLocal.fromResult
|
||||
| ToDistCombination(Pointwise, algebraicCombination, #Float(f)) =>
|
||||
| #ToDistCombination(Pointwise, algebraicCombination, #Float(f)) =>
|
||||
dist
|
||||
->GenericDist.pointwiseCombinationFloat(~toPointSetFn, ~algebraicCombination, ~f)
|
||||
->E.R2.fmap(r => Dist(r))
|
||||
|
@ -225,8 +213,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
|
|||
|
||||
switch functionCallInfo {
|
||||
| FromDist(subFnName, dist) => fromDistFn(subFnName, dist)
|
||||
| FromFloat(subFnName, float) =>
|
||||
reCall(~functionCallInfo=FromDist(subFnName, GenericDist.fromFloat(float)), ())
|
||||
| FromFloat(subFnName, x) => reCall(~functionCallInfo=FromFloat(subFnName, x), ())
|
||||
| Mixture(dists) =>
|
||||
dists
|
||||
->GenericDist.mixture(~scaleMultiplyFn=scaleMultiply, ~pointwiseAddFn=pointwiseAdd)
|
||||
|
@ -278,13 +265,16 @@ module Constructors = {
|
|||
let pdf = (~env, dist, f) => C.pdf(dist, f)->run(~env)->toFloatR
|
||||
let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
|
||||
let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
|
||||
let klDivergence = (~env, dist1, dist2) => C.klDivergence(dist1, dist2)->run(~env)->toFloatR
|
||||
let logScoreWithPointResolution = (
|
||||
~env,
|
||||
~prediction: DistributionTypes.genericDist,
|
||||
~answer: float,
|
||||
~prior: option<DistributionTypes.genericDist>,
|
||||
) => C.logScoreWithPointResolution(~prediction, ~answer, ~prior)->run(~env)->toFloatR
|
||||
module LogScore = {
|
||||
let distEstimateDistAnswer = (~env, estimate, answer) =>
|
||||
C.LogScore.distEstimateDistAnswer(estimate, answer)->run(~env)->toFloatR
|
||||
let distEstimateDistAnswerWithPrior = (~env, estimate, answer, prior) =>
|
||||
C.LogScore.distEstimateDistAnswerWithPrior(estimate, answer, prior)->run(~env)->toFloatR
|
||||
let distEstimateScalarAnswer = (~env, estimate, answer) =>
|
||||
C.LogScore.distEstimateScalarAnswer(estimate, answer)->run(~env)->toFloatR
|
||||
let distEstimateScalarAnswerWithPrior = (~env, estimate, answer, prior) =>
|
||||
C.LogScore.distEstimateScalarAnswerWithPrior(estimate, answer, prior)->run(~env)->toFloatR
|
||||
}
|
||||
let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
|
||||
let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
|
||||
let fromSamples = (~env, xs) => C.fromSamples(xs)->run(~env)->toDistR
|
||||
|
|
|
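A caller-side sketch of the new scoring constructors above (illustrative; the wrapper names are made up, only the Constructors.LogScore entry points come from this diff):

// Score a distributional estimate against a distributional answer, or against
// an observed scalar; both return a result<float, error> like the old helpers.
let scoreAgainstDist = (~env, estimate, answer) =>
  DistributionOperation.Constructors.LogScore.distEstimateDistAnswer(~env, estimate, answer)
let scoreAgainstPoint = (~env, estimate, answer) =>
  DistributionOperation.Constructors.LogScore.distEstimateScalarAnswer(~env, estimate, answer)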
@ -1,11 +1,5 @@
|
|||
@genType
|
||||
type env = {
|
||||
sampleCount: int,
|
||||
xyPointLength: int,
|
||||
}
|
||||
|
||||
@genType
|
||||
let defaultEnv: env
|
||||
let defaultEnv: GenericDist.env
|
||||
|
||||
open DistributionTypes
|
||||
|
||||
|
@ -19,15 +13,18 @@ type outputType =
|
|||
| GenDistError(error)
|
||||
|
||||
@genType
|
||||
let run: (~env: env, DistributionTypes.DistributionOperation.genericFunctionCallInfo) => outputType
|
||||
let run: (
|
||||
~env: GenericDist.env,
|
||||
DistributionTypes.DistributionOperation.genericFunctionCallInfo,
|
||||
) => outputType
|
||||
let runFromDist: (
|
||||
~env: env,
|
||||
~env: GenericDist.env,
|
||||
~functionCallInfo: DistributionTypes.DistributionOperation.fromDist,
|
||||
genericDist,
|
||||
) => outputType
|
||||
let runFromFloat: (
|
||||
~env: env,
|
||||
~functionCallInfo: DistributionTypes.DistributionOperation.fromDist,
|
||||
~env: GenericDist.env,
|
||||
~functionCallInfo: DistributionTypes.DistributionOperation.fromFloat,
|
||||
float,
|
||||
) => outputType
|
||||
|
||||
|
@ -42,79 +39,147 @@ module Output: {
|
|||
let toBool: t => option<bool>
|
||||
let toBoolR: t => result<bool, error>
|
||||
let toError: t => option<error>
|
||||
let fmap: (~env: env, t, DistributionTypes.DistributionOperation.singleParamaterFunction) => t
|
||||
let fmap: (
|
||||
~env: GenericDist.env,
|
||||
t,
|
||||
DistributionTypes.DistributionOperation.singleParamaterFunction,
|
||||
) => t
|
||||
}
|
||||
|
||||
module Constructors: {
|
||||
@genType
|
||||
let mean: (~env: env, genericDist) => result<float, error>
|
||||
let mean: (~env: GenericDist.env, genericDist) => result<float, error>
|
||||
@genType
|
||||
let stdev: (~env: env, genericDist) => result<float, error>
|
||||
let stdev: (~env: GenericDist.env, genericDist) => result<float, error>
|
||||
@genType
|
||||
let variance: (~env: env, genericDist) => result<float, error>
|
||||
let variance: (~env: GenericDist.env, genericDist) => result<float, error>
|
||||
@genType
|
||||
let sample: (~env: env, genericDist) => result<float, error>
|
||||
let sample: (~env: GenericDist.env, genericDist) => result<float, error>
|
||||
@genType
|
||||
let cdf: (~env: env, genericDist, float) => result<float, error>
|
||||
let cdf: (~env: GenericDist.env, genericDist, float) => result<float, error>
|
||||
@genType
|
||||
let inv: (~env: env, genericDist, float) => result<float, error>
|
||||
let inv: (~env: GenericDist.env, genericDist, float) => result<float, error>
|
||||
@genType
|
||||
let pdf: (~env: env, genericDist, float) => result<float, error>
|
||||
let pdf: (~env: GenericDist.env, genericDist, float) => result<float, error>
|
||||
@genType
|
||||
let normalize: (~env: env, genericDist) => result<genericDist, error>
|
||||
let normalize: (~env: GenericDist.env, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let isNormalized: (~env: env, genericDist) => result<bool, error>
|
||||
let isNormalized: (~env: GenericDist.env, genericDist) => result<bool, error>
|
||||
module LogScore: {
|
||||
@genType
|
||||
let klDivergence: (~env: env, genericDist, genericDist) => result<float, error>
|
||||
@genType
|
||||
let logScoreWithPointResolution: (
|
||||
~env: env,
|
||||
~prediction: genericDist,
|
||||
~answer: float,
|
||||
~prior: option<genericDist>,
|
||||
let distEstimateDistAnswer: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<float, error>
|
||||
@genType
|
||||
let toPointSet: (~env: env, genericDist) => result<genericDist, error>
|
||||
let distEstimateDistAnswerWithPrior: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<float, error>
|
||||
@genType
|
||||
let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
|
||||
let distEstimateScalarAnswer: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
float,
|
||||
) => result<float, error>
|
||||
@genType
|
||||
let fromSamples: (~env: env, SampleSetDist.t) => result<genericDist, error>
|
||||
@genType
|
||||
let truncate: (~env: env, genericDist, option<float>, option<float>) => result<genericDist, error>
|
||||
@genType
|
||||
let inspect: (~env: env, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let toString: (~env: env, genericDist) => result<string, error>
|
||||
@genType
|
||||
let toSparkline: (~env: env, genericDist, int) => result<string, error>
|
||||
@genType
|
||||
let algebraicAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let scaleLogarithm: (~env: env, genericDist, float) => result<genericDist, error>
|
||||
@genType
|
||||
let scaleMultiply: (~env: env, genericDist, float) => result<genericDist, error>
|
||||
@genType
|
||||
let scalePower: (~env: env, genericDist, float) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwisePower: (~env: env, genericDist, genericDist) => result<genericDist, error>
|
||||
let distEstimateScalarAnswerWithPrior: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
float,
|
||||
genericDist,
|
||||
) => result<float, error>
|
||||
}
|
||||
@genType
|
||||
let toPointSet: (~env: GenericDist.env, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let toSampleSet: (~env: GenericDist.env, genericDist, int) => result<genericDist, error>
|
||||
@genType
|
||||
let fromSamples: (~env: GenericDist.env, SampleSetDist.t) => result<genericDist, error>
|
||||
@genType
|
||||
let truncate: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
option<float>,
|
||||
option<float>,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let inspect: (~env: GenericDist.env, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let toString: (~env: GenericDist.env, genericDist) => result<string, error>
|
||||
@genType
|
||||
let toSparkline: (~env: GenericDist.env, genericDist, int) => result<string, error>
|
||||
@genType
|
||||
let algebraicAdd: (~env: GenericDist.env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicMultiply: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicDivide: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicSubtract: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicLogarithm: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let algebraicPower: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let scaleLogarithm: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
|
||||
@genType
|
||||
let scaleMultiply: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
|
||||
@genType
|
||||
let scalePower: (~env: GenericDist.env, genericDist, float) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseAdd: (~env: GenericDist.env, genericDist, genericDist) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseMultiply: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseDivide: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseSubtract: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwiseLogarithm: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
@genType
|
||||
let pointwisePower: (
|
||||
~env: GenericDist.env,
|
||||
genericDist,
|
||||
genericDist,
|
||||
) => result<genericDist, error>
|
||||
}
|
||||
|
|
|
@ -98,61 +98,86 @@ module DistributionOperation = {
|
|||
| ToString
|
||||
| ToSparkline(int)
|
||||
|
||||
type toScore = KLDivergence(genericDist) | LogScore(float, option<genericDist>)
|
||||
type genericDistOrScalar = Score_Dist(genericDist) | Score_Scalar(float)
|
||||
|
||||
type fromDist =
|
||||
| ToFloat(toFloat)
|
||||
| ToDist(toDist)
|
||||
| ToScore(toScore)
|
||||
| ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
|
||||
| ToString(toString)
|
||||
| ToBool(toBool)
|
||||
type toScore = LogScore(genericDistOrScalar, option<genericDist>)
|
||||
|
||||
type fromFloat = [
|
||||
| #ToFloat(toFloat)
|
||||
| #ToDist(toDist)
|
||||
| #ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
|
||||
| #ToString(toString)
|
||||
| #ToBool(toBool)
|
||||
]
|
||||
|
||||
type fromDist = [
|
||||
| fromFloat
|
||||
| #ToScore(toScore)
|
||||
]
|
||||
|
||||
type singleParamaterFunction =
|
||||
| FromDist(fromDist)
|
||||
| FromFloat(fromDist)
|
||||
| FromFloat(fromFloat)
|
||||
|
||||
type genericFunctionCallInfo =
|
||||
| FromDist(fromDist, genericDist)
|
||||
| FromFloat(fromDist, float)
|
||||
| FromFloat(fromFloat, float)
|
||||
| FromSamples(array<float>)
|
||||
| Mixture(array<(genericDist, float)>)
|
||||
|
||||
let distCallToString = (distFunction: fromDist): string =>
|
||||
switch distFunction {
|
||||
| ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
|
||||
| ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
|
||||
| ToFloat(#Mean) => `mean`
|
||||
| ToFloat(#Min) => `min`
|
||||
| ToFloat(#Max) => `max`
|
||||
| ToFloat(#Stdev) => `stdev`
|
||||
| ToFloat(#Variance) => `variance`
|
||||
| ToFloat(#Mode) => `mode`
|
||||
| ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
|
||||
| ToFloat(#Sample) => `sample`
|
||||
| ToFloat(#IntegralSum) => `integralSum`
|
||||
| ToScore(KLDivergence(_)) => `klDivergence`
|
||||
| ToScore(LogScore(x, _)) => `logScore against ${E.Float.toFixed(x)}`
|
||||
| ToDist(Normalize) => `normalize`
|
||||
| ToDist(ToPointSet) => `toPointSet`
|
||||
| ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
|
||||
| ToDist(Truncate(_, _)) => `truncate`
|
||||
| ToDist(Inspect) => `inspect`
|
||||
| ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
|
||||
| ToDist(Scale(#Multiply, r)) => `scaleMultiply(${E.Float.toFixed(r)})`
|
||||
| ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
|
||||
| ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
|
||||
let floatCallToString = (floatFunction: fromFloat): string =>
|
||||
switch floatFunction {
|
||||
| #ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
|
||||
| #ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
|
||||
| #ToFloat(#Mean) => `mean`
|
||||
| #ToFloat(#Min) => `min`
|
||||
| #ToFloat(#Max) => `max`
|
||||
| #ToFloat(#Stdev) => `stdev`
|
||||
| #ToFloat(#Variance) => `variance`
|
||||
| #ToFloat(#Mode) => `mode`
|
||||
| #ToFloat(#Pdf(r)) => `pdf(${E.Float.toFixed(r)})`
|
||||
| #ToFloat(#Sample) => `sample`
|
||||
| #ToFloat(#IntegralSum) => `integralSum`
|
||||
| #ToDist(Normalize) => `normalize`
|
||||
| #ToDist(ToPointSet) => `toPointSet`
|
||||
| #ToDist(ToSampleSet(r)) => `toSampleSet(${E.I.toString(r)})`
|
||||
| #ToDist(Truncate(_, _)) => `truncate`
|
||||
| #ToDist(Inspect) => `inspect`
|
||||
| #ToDist(Scale(#Power, r)) => `scalePower(${E.Float.toFixed(r)})`
|
||||
| #ToDist(Scale(#Multiply, r)) => `scaleMultiply(${E.Float.toFixed(r)})`
|
||||
| #ToDist(Scale(#Logarithm, r)) => `scaleLog(${E.Float.toFixed(r)})`
|
||||
| #ToDist(Scale(#LogarithmWithThreshold(eps), r)) =>
|
||||
`scaleLogWithThreshold(${E.Float.toFixed(r)}, epsilon=${E.Float.toFixed(eps)})`
|
||||
| ToString(ToString) => `toString`
|
||||
| ToString(ToSparkline(n)) => `sparkline(${E.I.toString(n)})`
|
||||
| ToBool(IsNormalized) => `isNormalized`
|
||||
| ToDistCombination(Algebraic(_), _, _) => `algebraic`
|
||||
| ToDistCombination(Pointwise, _, _) => `pointwise`
|
||||
| #ToString(ToString) => `toString`
|
||||
| #ToString(ToSparkline(n)) => `sparkline(${E.I.toString(n)})`
|
||||
| #ToBool(IsNormalized) => `isNormalized`
|
||||
| #ToDistCombination(Algebraic(_), _, _) => `algebraic`
|
||||
| #ToDistCombination(Pointwise, _, _) => `pointwise`
|
||||
}
|
||||
|
||||
let distCallToString = (
|
||||
distFunction: [
|
||||
| #ToFloat(toFloat)
|
||||
| #ToDist(toDist)
|
||||
| #ToDistCombination(direction, Operation.Algebraic.t, [#Dist(genericDist) | #Float(float)])
|
||||
| #ToString(toString)
|
||||
| #ToBool(toBool)
|
||||
| #ToScore(toScore)
|
||||
],
|
||||
): string =>
|
||||
switch distFunction {
|
||||
| #ToScore(_) => `logScore`
|
||||
| #ToFloat(x) => floatCallToString(#ToFloat(x))
|
||||
| #ToDist(x) => floatCallToString(#ToDist(x))
|
||||
| #ToString(x) => floatCallToString(#ToString(x))
|
||||
| #ToBool(x) => floatCallToString(#ToBool(x))
|
||||
| #ToDistCombination(x, y, z) => floatCallToString(#ToDistCombination(x, y, z))
|
||||
}
|
||||
|
||||
let toString = (d: genericFunctionCallInfo): string =>
|
||||
switch d {
|
||||
| FromDist(f, _) | FromFloat(f, _) => distCallToString(f)
|
||||
| FromDist(f, _) => distCallToString(f)
|
||||
| FromFloat(f, _) => floatCallToString(f)
|
||||
| Mixture(_) => `mixture`
|
||||
| FromSamples(_) => `fromSamples`
|
||||
}
|
||||
|
@ -162,80 +187,93 @@ module Constructors = {
|
|||
|
||||
module UsingDists = {
|
||||
@genType
|
||||
let mean = (dist): t => FromDist(ToFloat(#Mean), dist)
|
||||
let stdev = (dist): t => FromDist(ToFloat(#Stdev), dist)
|
||||
let variance = (dist): t => FromDist(ToFloat(#Variance), dist)
|
||||
let sample = (dist): t => FromDist(ToFloat(#Sample), dist)
|
||||
let cdf = (dist, x): t => FromDist(ToFloat(#Cdf(x)), dist)
|
||||
let inv = (dist, x): t => FromDist(ToFloat(#Inv(x)), dist)
|
||||
let pdf = (dist, x): t => FromDist(ToFloat(#Pdf(x)), dist)
|
||||
let normalize = (dist): t => FromDist(ToDist(Normalize), dist)
|
||||
let isNormalized = (dist): t => FromDist(ToBool(IsNormalized), dist)
|
||||
let toPointSet = (dist): t => FromDist(ToDist(ToPointSet), dist)
|
||||
let toSampleSet = (dist, r): t => FromDist(ToDist(ToSampleSet(r)), dist)
|
||||
let mean = (dist): t => FromDist(#ToFloat(#Mean), dist)
|
||||
let stdev = (dist): t => FromDist(#ToFloat(#Stdev), dist)
|
||||
let variance = (dist): t => FromDist(#ToFloat(#Variance), dist)
|
||||
let sample = (dist): t => FromDist(#ToFloat(#Sample), dist)
|
||||
let cdf = (dist, x): t => FromDist(#ToFloat(#Cdf(x)), dist)
|
||||
let inv = (dist, x): t => FromDist(#ToFloat(#Inv(x)), dist)
|
||||
let pdf = (dist, x): t => FromDist(#ToFloat(#Pdf(x)), dist)
|
||||
let normalize = (dist): t => FromDist(#ToDist(Normalize), dist)
|
||||
let isNormalized = (dist): t => FromDist(#ToBool(IsNormalized), dist)
|
||||
let toPointSet = (dist): t => FromDist(#ToDist(ToPointSet), dist)
|
||||
let toSampleSet = (dist, r): t => FromDist(#ToDist(ToSampleSet(r)), dist)
|
||||
let fromSamples = (xs): t => FromSamples(xs)
|
||||
let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
|
||||
let inspect = (dist): t => FromDist(ToDist(Inspect), dist)
|
||||
let klDivergence = (dist1, dist2): t => FromDist(ToScore(KLDivergence(dist2)), dist1)
|
||||
let logScoreWithPointResolution = (~prediction, ~answer, ~prior): t => FromDist(
|
||||
ToScore(LogScore(answer, prior)),
|
||||
prediction,
|
||||
let truncate = (dist, left, right): t => FromDist(#ToDist(Truncate(left, right)), dist)
|
||||
let inspect = (dist): t => FromDist(#ToDist(Inspect), dist)
|
||||
module LogScore = {
|
||||
let distEstimateDistAnswer = (estimate, answer): t => FromDist(
|
||||
#ToScore(LogScore(Score_Dist(answer), None)),
|
||||
estimate,
|
||||
)
|
||||
let scaleMultiply = (dist, n): t => FromDist(ToDist(Scale(#Multiply, n)), dist)
|
||||
let scalePower = (dist, n): t => FromDist(ToDist(Scale(#Power, n)), dist)
|
||||
let scaleLogarithm = (dist, n): t => FromDist(ToDist(Scale(#Logarithm, n)), dist)
|
||||
let distEstimateDistAnswerWithPrior = (estimate, answer, prior): t => FromDist(
|
||||
#ToScore(LogScore(Score_Dist(answer), Some(prior))),
|
||||
estimate,
|
||||
)
|
||||
let distEstimateScalarAnswer = (estimate, answer): t => FromDist(
|
||||
#ToScore(LogScore(Score_Scalar(answer), None)),
|
||||
estimate,
|
||||
)
|
||||
let distEstimateScalarAnswerWithPrior = (estimate, answer, prior): t => FromDist(
|
||||
#ToScore(LogScore(Score_Scalar(answer), Some(prior))),
|
||||
estimate,
|
||||
)
|
||||
}
|
||||
let scaleMultiply = (dist, n): t => FromDist(#ToDist(Scale(#Multiply, n)), dist)
|
||||
let scalePower = (dist, n): t => FromDist(#ToDist(Scale(#Power, n)), dist)
|
||||
let scaleLogarithm = (dist, n): t => FromDist(#ToDist(Scale(#Logarithm, n)), dist)
|
||||
let scaleLogarithmWithThreshold = (dist, n, eps): t => FromDist(
|
||||
ToDist(Scale(#LogarithmWithThreshold(eps), n)),
|
||||
#ToDist(Scale(#LogarithmWithThreshold(eps), n)),
|
||||
dist,
|
||||
)
|
||||
let toString = (dist): t => FromDist(ToString(ToString), dist)
|
||||
let toSparkline = (dist, n): t => FromDist(ToString(ToSparkline(n)), dist)
|
||||
let toString = (dist): t => FromDist(#ToString(ToString), dist)
|
||||
let toSparkline = (dist, n): t => FromDist(#ToString(ToSparkline(n)), dist)
|
||||
let algebraicAdd = (dist1, dist2: genericDist): t => FromDist(
|
||||
ToDistCombination(Algebraic(AsDefault), #Add, #Dist(dist2)),
|
||||
#ToDistCombination(Algebraic(AsDefault), #Add, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let algebraicMultiply = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Algebraic(AsDefault), #Multiply, #Dist(dist2)),
|
||||
#ToDistCombination(Algebraic(AsDefault), #Multiply, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let algebraicDivide = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Algebraic(AsDefault), #Divide, #Dist(dist2)),
|
||||
#ToDistCombination(Algebraic(AsDefault), #Divide, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let algebraicSubtract = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Algebraic(AsDefault), #Subtract, #Dist(dist2)),
|
||||
#ToDistCombination(Algebraic(AsDefault), #Subtract, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let algebraicLogarithm = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Algebraic(AsDefault), #Logarithm, #Dist(dist2)),
|
||||
#ToDistCombination(Algebraic(AsDefault), #Logarithm, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let algebraicPower = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Algebraic(AsDefault), #Power, #Dist(dist2)),
|
||||
#ToDistCombination(Algebraic(AsDefault), #Power, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let pointwiseAdd = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Pointwise, #Add, #Dist(dist2)),
|
||||
#ToDistCombination(Pointwise, #Add, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let pointwiseMultiply = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
|
||||
#ToDistCombination(Pointwise, #Multiply, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let pointwiseDivide = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
|
||||
#ToDistCombination(Pointwise, #Divide, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let pointwiseSubtract = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
|
||||
#ToDistCombination(Pointwise, #Subtract, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let pointwiseLogarithm = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
|
||||
#ToDistCombination(Pointwise, #Logarithm, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
let pointwisePower = (dist1, dist2): t => FromDist(
|
||||
ToDistCombination(Pointwise, #Power, #Dist(dist2)),
|
||||
#ToDistCombination(Pointwise, #Power, #Dist(dist2)),
|
||||
dist1,
|
||||
)
|
||||
}
|
||||
|
|
|
@ -6,6 +6,11 @@ type toSampleSetFn = t => result<SampleSetDist.t, error>
|
|||
type scaleMultiplyFn = (t, float) => result<t, error>
|
||||
type pointwiseAddFn = (t, t) => result<t, error>
|
||||
|
||||
type env = {
|
||||
sampleCount: int,
|
||||
xyPointLength: int,
|
||||
}
|
||||
|
||||
let isPointSet = (t: t) =>
|
||||
switch t {
|
||||
| PointSet(_) => true
|
||||
|
@ -61,46 +66,6 @@ let integralEndY = (t: t): float =>
|
|||
|
||||
let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7
|
||||
|
||||
module Score = {
|
||||
let klDivergence = (prediction, answer, ~toPointSetFn: toPointSetFn): result<float, error> => {
|
||||
let pointSets = E.R.merge(toPointSetFn(prediction), toPointSetFn(answer))
|
||||
pointSets |> E.R2.bind(((predi, ans)) =>
|
||||
PointSetDist.T.klDivergence(predi, ans)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
||||
)
|
||||
}
|
||||
|
||||
let logScoreWithPointResolution = (
|
||||
~prediction: DistributionTypes.genericDist,
|
||||
~answer: float,
|
||||
~prior: option<DistributionTypes.genericDist>,
|
||||
~toPointSetFn: toPointSetFn,
|
||||
): result<float, error> => {
|
||||
switch prior {
|
||||
| Some(prior') =>
|
||||
E.R.merge(toPointSetFn(prior'), toPointSetFn(prediction))->E.R.bind(((
|
||||
prior'',
|
||||
prediction'',
|
||||
)) =>
|
||||
PointSetDist.T.logScoreWithPointResolution(
|
||||
~prediction=prediction'',
|
||||
~answer,
|
||||
~prior=prior''->Some,
|
||||
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
||||
)
|
||||
| None =>
|
||||
prediction
|
||||
->toPointSetFn
|
||||
->E.R.bind(x =>
|
||||
PointSetDist.T.logScoreWithPointResolution(
|
||||
~prediction=x,
|
||||
~answer,
|
||||
~prior=None,
|
||||
)->E.R2.errMap(x => DistributionTypes.OperationError(x))
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let toFloatOperation = (
|
||||
t,
|
||||
~toPointSetFn: toPointSetFn,
|
||||
|
@ -171,6 +136,70 @@ let toPointSet = (
|
|||
}
|
||||
}
|
||||
|
||||
module Score = {
|
||||
type genericDistOrScalar = DistributionTypes.DistributionOperation.genericDistOrScalar
|
||||
|
||||
let argsMake = (~esti: t, ~answ: genericDistOrScalar, ~prior: option<t>, ~env: env): result<
|
||||
PointSetDist_Scoring.scoreArgs,
|
||||
error,
|
||||
> => {
|
||||
let toPointSetFn = t =>
|
||||
toPointSet(
|
||||
t,
|
||||
~xyPointLength=env.xyPointLength,
|
||||
~sampleCount=env.sampleCount,
|
||||
~xSelection=#ByWeight,
|
||||
(),
|
||||
)
|
||||
let prior': option<result<PointSetTypes.pointSetDist, error>> = switch prior {
|
||||
| None => None
|
||||
| Some(d) => toPointSetFn(d)->Some
|
||||
}
|
||||
let twoDists = (~toPointSetFn, esti': t, answ': t): result<
|
||||
(PointSetTypes.pointSetDist, PointSetTypes.pointSetDist),
|
||||
error,
|
||||
> => E.R.merge(toPointSetFn(esti'), toPointSetFn(answ'))
|
||||
switch (esti, answ, prior') {
|
||||
| (esti', Score_Dist(answ'), None) =>
|
||||
twoDists(~toPointSetFn, esti', answ')->E.R2.fmap(((esti'', answ'')) =>
|
||||
{estimate: esti'', answer: answ'', prior: None}->PointSetDist_Scoring.DistAnswer
|
||||
)
|
||||
| (esti', Score_Dist(answ'), Some(Ok(prior''))) =>
|
||||
twoDists(~toPointSetFn, esti', answ')->E.R2.fmap(((esti'', answ'')) =>
|
||||
{
|
||||
estimate: esti'',
|
||||
answer: answ'',
|
||||
prior: Some(prior''),
|
||||
}->PointSetDist_Scoring.DistAnswer
|
||||
)
|
||||
| (esti', Score_Scalar(answ'), None) =>
|
||||
toPointSetFn(esti')->E.R2.fmap(esti'' =>
|
||||
{
|
||||
estimate: esti'',
|
||||
answer: answ',
|
||||
prior: None,
|
||||
}->PointSetDist_Scoring.ScalarAnswer
|
||||
)
|
||||
| (esti', Score_Scalar(answ'), Some(Ok(prior''))) =>
|
||||
toPointSetFn(esti')->E.R2.fmap(esti'' =>
|
||||
{
|
||||
estimate: esti'',
|
||||
answer: answ',
|
||||
prior: Some(prior''),
|
||||
}->PointSetDist_Scoring.ScalarAnswer
|
||||
)
|
||||
| (_, _, Some(Error(err))) => err->Error
|
||||
}
|
||||
}
|
||||
|
||||
let logScore = (~estimate: t, ~answer: genericDistOrScalar, ~prior: option<t>, ~env: env): result<
|
||||
float,
|
||||
error,
|
||||
> =>
|
||||
argsMake(~esti=estimate, ~answ=answer, ~prior, ~env)->E.R.bind(x =>
|
||||
x->PointSetDist.logScore->E.R2.errMap(y => DistributionTypes.OperationError(y))
|
||||
)
|
||||
}
|
||||
/*
|
||||
PointSetDist.toSparkline calls "downsampleEquallyOverX", which downsamples it to n=bucketCount.
|
||||
It first needs a pointSetDist, so we convert to a pointSetDist. In this process we want the
|
||||
|
|
|
@ -5,6 +5,9 @@ type toSampleSetFn = t => result<SampleSetDist.t, error>
|
|||
type scaleMultiplyFn = (t, float) => result<t, error>
|
||||
type pointwiseAddFn = (t, t) => result<t, error>
|
||||
|
||||
@genType
|
||||
type env = {sampleCount: int, xyPointLength: int}
|
||||
|
||||
let sampleN: (t, int) => array<float>
|
||||
let sample: t => float
|
||||
|
||||
|
@ -25,12 +28,11 @@ let toFloatOperation: (
|
|||
) => result<float, error>
|
||||
|
||||
module Score: {
|
||||
let klDivergence: (t, t, ~toPointSetFn: toPointSetFn) => result<float, error>
|
||||
let logScoreWithPointResolution: (
|
||||
~prediction: t,
|
||||
~answer: float,
|
||||
let logScore: (
|
||||
~estimate: t,
|
||||
~answer: DistributionTypes.DistributionOperation.genericDistOrScalar,
|
||||
~prior: option<t>,
|
||||
~toPointSetFn: toPointSetFn,
|
||||
~env: env,
|
||||
) => result<float, error>
|
||||
}
|
||||
|
||||
|
|
|
@ -120,7 +120,7 @@ let combinePointwise = (
|
|||
|
||||
let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation)
|
||||
|
||||
combiner(fn, interpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
|
||||
combiner(interpolator, fn, t1.xyShape, t2.xyShape)->E.R2.fmap(x =>
|
||||
make(~integralSumCache=combinedIntegralSum, x)
|
||||
)
|
||||
}
|
||||
|
@ -270,20 +270,6 @@ module T = Dist({
|
|||
}
|
||||
let variance = (t: t): float =>
|
||||
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
|
||||
|
||||
let klDivergence = (prediction: t, answer: t) => {
|
||||
let newShape = XYShape.PointwiseCombination.combineAlongSupportOfSecondArgument(
|
||||
PointSetDist_Scoring.KLDivergence.integrand,
|
||||
prediction.xyShape,
|
||||
answer.xyShape,
|
||||
)
|
||||
newShape->E.R2.fmap(x => x->make->integralEndY)
|
||||
}
|
||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
||||
let priorPdf = prior->E.O2.fmap((shape, x) => XYShape.XtoY.linear(x, shape.xyShape))
|
||||
let predictionPdf = x => XYShape.XtoY.linear(x, prediction.xyShape)
|
||||
PointSetDist_Scoring.LogScoreWithPointResolution.score(~priorPdf, ~predictionPdf, ~answer)
|
||||
}
|
||||
})
|
||||
|
||||
let isNormalized = (t: t): bool => {
|
||||
|
|
|
@ -49,7 +49,7 @@ let combinePointwise = (
|
|||
// TODO: does it ever make sense to pointwise combine the integrals here?
|
||||
// It could be done for pointwise additions, but is that ever needed?
|
||||
|
||||
combiner(fn, XYShape.XtoY.discreteInterpolator, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
|
||||
combiner(XYShape.XtoY.discreteInterpolator, fn, t1.xyShape, t2.xyShape)->E.R2.fmap(make)
|
||||
}
|
||||
|
||||
let reduce = (
|
||||
|
@ -222,15 +222,4 @@ module T = Dist({
|
|||
let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
|
||||
XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
||||
}
|
||||
|
||||
let klDivergence = (prediction: t, answer: t) => {
|
||||
combinePointwise(
|
||||
~fn=PointSetDist_Scoring.KLDivergence.integrand,
|
||||
prediction,
|
||||
answer,
|
||||
)->E.R2.fmap(integralEndY)
|
||||
}
|
||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
||||
Error(Operation.NotYetImplemented)
|
||||
}
|
||||
})
|
||||
|
|
|
@ -33,12 +33,6 @@ module type dist = {
|
|||
|
||||
let mean: t => float
|
||||
let variance: t => float
|
||||
let klDivergence: (t, t) => result<float, Operation.Error.t>
|
||||
let logScoreWithPointResolution: (
|
||||
~prediction: t,
|
||||
~answer: float,
|
||||
~prior: option<t>,
|
||||
) => result<float, Operation.Error.t>
|
||||
}
|
||||
|
||||
module Dist = (T: dist) => {
|
||||
|
@ -61,9 +55,6 @@ module Dist = (T: dist) => {
|
|||
let mean = T.mean
|
||||
let variance = T.variance
|
||||
let integralEndY = T.integralEndY
|
||||
let klDivergence = T.klDivergence
|
||||
let logScoreWithPointResolution = T.logScoreWithPointResolution
|
||||
|
||||
let updateIntegralCache = T.updateIntegralCache
|
||||
|
||||
module Integral = {
|
||||
|
|
|
@ -302,15 +302,6 @@ module T = Dist({
|
|||
| _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
|
||||
}
|
||||
}
|
||||
|
||||
let klDivergence = (prediction: t, answer: t) => {
|
||||
let klDiscretePart = Discrete.T.klDivergence(prediction.discrete, answer.discrete)
|
||||
let klContinuousPart = Continuous.T.klDivergence(prediction.continuous, answer.continuous)
|
||||
E.R.merge(klDiscretePart, klContinuousPart)->E.R2.fmap(t => fst(t) +. snd(t))
|
||||
}
|
||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
||||
Error(Operation.NotYetImplemented)
|
||||
}
|
||||
})
|
||||
|
||||
let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t => {
|
||||
|
|
|
@ -66,6 +66,7 @@ let combineAlgebraically = (op: Operation.convolutionOperation, t1: t, t2: t): t
|
|||
}
|
||||
|
||||
let combinePointwise = (
|
||||
~combiner=XYShape.PointwiseCombination.combine,
|
||||
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
|
||||
~integralCachesFn: (
|
||||
PointSetTypes.continuousShape,
|
||||
|
@ -78,6 +79,7 @@ let combinePointwise = (
|
|||
switch (t1, t2) {
|
||||
| (Continuous(m1), Continuous(m2)) =>
|
||||
Continuous.combinePointwise(
|
||||
~combiner,
|
||||
~integralSumCachesFn,
|
||||
fn,
|
||||
m1,
|
||||
|
@ -85,6 +87,7 @@ let combinePointwise = (
|
|||
)->E.R2.fmap(x => PointSetTypes.Continuous(x))
|
||||
| (Discrete(m1), Discrete(m2)) =>
|
||||
Discrete.combinePointwise(
|
||||
~combiner,
|
||||
~integralSumCachesFn,
|
||||
~fn,
|
||||
m1,
|
||||
|
@ -195,25 +198,16 @@ module T = Dist({
|
|||
| Discrete(m) => Discrete.T.variance(m)
|
||||
| Continuous(m) => Continuous.T.variance(m)
|
||||
}
|
||||
|
||||
let klDivergence = (prediction: t, answer: t) =>
|
||||
switch (prediction, answer) {
|
||||
| (Continuous(t1), Continuous(t2)) => Continuous.T.klDivergence(t1, t2)
|
||||
| (Discrete(t1), Discrete(t2)) => Discrete.T.klDivergence(t1, t2)
|
||||
| (m1, m2) => Mixed.T.klDivergence(m1->toMixed, m2->toMixed)
|
||||
}
|
||||
|
||||
let logScoreWithPointResolution = (~prediction: t, ~answer: float, ~prior: option<t>) => {
|
||||
switch (prior, prediction) {
|
||||
| (Some(Continuous(t1)), Continuous(t2)) =>
|
||||
Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=t1->Some)
|
||||
| (None, Continuous(t2)) =>
|
||||
Continuous.T.logScoreWithPointResolution(~prediction=t2, ~answer, ~prior=None)
|
||||
| _ => Error(Operation.NotYetImplemented)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
let logScore = (args: PointSetDist_Scoring.scoreArgs): result<float, Operation.Error.t> =>
|
||||
PointSetDist_Scoring.logScore(
|
||||
args,
|
||||
~combineFn=combinePointwise,
|
||||
~integrateFn=T.Integral.sum,
|
||||
~toMixedFn=toMixed,
|
||||
)
|
||||
|
||||
let pdf = (f: float, t: t) => {
|
||||
let mixedPoint: PointSetTypes.mixedPoint = T.xToY(f, t)
|
||||
mixedPoint.continuous +. mixedPoint.discrete
|
||||
|
|
|
@ -1,46 +1,149 @@
|
|||
module KLDivergence = {
|
||||
type pointSetDist = PointSetTypes.pointSetDist
|
||||
|
||||
type scalar = float
|
||||
type score = float
|
||||
type abstractScoreArgs<'a, 'b> = {estimate: 'a, answer: 'b, prior: option<'a>}
|
||||
type scoreArgs =
|
||||
| DistAnswer(abstractScoreArgs<pointSetDist, pointSetDist>)
|
||||
| ScalarAnswer(abstractScoreArgs<pointSetDist, scalar>)
|
||||
|
||||
let logFn = Js.Math.log // base e
|
||||
let integrand = (predictionElement: float, answerElement: float): result<
|
||||
let minusScaledLogOfQuotient = (~esti, ~answ): result<float, Operation.Error.t> => {
|
||||
let quot = esti /. answ
|
||||
quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answ *. logFn(quot))
|
||||
}
|
||||
|
||||
module WithDistAnswer = {
|
||||
// The Kullback-Leibler divergence
|
||||
let integrand = (estimateElement: float, answerElement: float): result<
|
||||
float,
|
||||
Operation.Error.t,
|
||||
> =>
|
||||
// We decided that negative infinity, not an error at answerElement = 0.0, is a desirable value.
|
||||
// We decided that 0.0, not an error at answerElement = 0.0, is a desirable value.
|
||||
if answerElement == 0.0 {
|
||||
Ok(0.0)
|
||||
} else if predictionElement == 0.0 {
|
||||
} else if estimateElement == 0.0 {
|
||||
Ok(infinity)
|
||||
} else {
|
||||
let quot = predictionElement /. answerElement
|
||||
quot < 0.0 ? Error(Operation.ComplexNumberError) : Ok(-.answerElement *. logFn(quot))
|
||||
minusScaledLogOfQuotient(~esti=estimateElement, ~answ=answerElement)
|
||||
}
|
||||
|
||||
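For orientation, the quantity WithDistAnswer integrates is the standard Kullback-Leibler divergence of the estimate from the answer (a restatement, not something added by this diff):

D_{KL}(\mathrm{answer} \parallel \mathrm{estimate}) = \int \mathrm{answer}(x)\,\log\frac{\mathrm{answer}(x)}{\mathrm{estimate}(x)}\,dx = \int -\,\mathrm{answer}(x)\,\log\frac{\mathrm{estimate}(x)}{\mathrm{answer}(x)}\,dx,

so minusScaledLogOfQuotient(~esti, ~answ) is the integrand with esti = estimate(x) and answ = answer(x).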
let sum = (
|
||||
~estimate: pointSetDist,
|
||||
~answer: pointSetDist,
|
||||
~combineFn,
|
||||
~integrateFn,
|
||||
~toMixedFn,
|
||||
): result<score, Operation.Error.t> => {
|
||||
let combineAndIntegrate = (estimate, answer) =>
|
||||
combineFn(integrand, estimate, answer)->E.R2.fmap(integrateFn)
|
||||
|
||||
let getMixedSums = (estimate: pointSetDist, answer: pointSetDist) => {
|
||||
let esti = estimate->toMixedFn
|
||||
let answ = answer->toMixedFn
|
||||
switch (
|
||||
Mixed.T.toContinuous(esti),
|
||||
Mixed.T.toDiscrete(esti),
|
||||
Mixed.T.toContinuous(answ),
|
||||
Mixed.T.toDiscrete(answ),
|
||||
) {
|
||||
| (
|
||||
Some(estiContinuousPart),
|
||||
Some(estiDiscretePart),
|
||||
Some(answContinuousPart),
|
||||
Some(answDiscretePart),
|
||||
) =>
|
||||
E.R.merge(
|
||||
combineAndIntegrate(
|
||||
PointSetTypes.Discrete(estiDiscretePart),
|
||||
PointSetTypes.Discrete(answDiscretePart),
|
||||
),
|
||||
combineAndIntegrate(Continuous(estiContinuousPart), Continuous(answContinuousPart)),
|
||||
)
|
||||
| (_, _, _, _) => `unreachable state`->Operation.Other->Error
|
||||
}
|
||||
}
|
||||
|
||||
module LogScoreWithPointResolution = {
|
||||
let logFn = Js.Math.log
|
||||
let score = (
|
||||
~priorPdf: option<float => float>,
|
||||
~predictionPdf: float => float,
|
||||
~answer: float,
|
||||
): result<float, Operation.Error.t> => {
|
||||
let numerator = answer->predictionPdf
|
||||
if numerator < 0.0 {
|
||||
switch (estimate, answer) {
|
||||
| (Continuous(_), Continuous(_))
|
||||
| (Discrete(_), Discrete(_)) =>
|
||||
combineAndIntegrate(estimate, answer)
|
||||
| (_, _) =>
|
||||
getMixedSums(estimate, answer)->E.R2.fmap(((discretePart, continuousPart)) =>
|
||||
discretePart +. continuousPart
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
let sumWithPrior = (
|
||||
~estimate: pointSetDist,
|
||||
~answer: pointSetDist,
|
||||
~prior: pointSetDist,
|
||||
~combineFn,
|
||||
~integrateFn,
|
||||
~toMixedFn,
|
||||
): result<score, Operation.Error.t> => {
|
||||
let kl1 = sum(~estimate, ~answer, ~combineFn, ~integrateFn, ~toMixedFn)
|
||||
let kl2 = sum(~estimate=prior, ~answer, ~combineFn, ~integrateFn, ~toMixedFn)
|
||||
E.R.merge(kl1, kl2)->E.R2.fmap(((kl1', kl2')) => kl1' -. kl2')
|
||||
}
|
||||
}
|
||||
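Written out, sumWithPrior is the difference of two such divergences (again a restatement of the code above):

S(\mathrm{estimate}, \mathrm{answer}, \mathrm{prior}) = D_{KL}(\mathrm{answer} \parallel \mathrm{estimate}) - D_{KL}(\mathrm{answer} \parallel \mathrm{prior}),

i.e. the kl1' -. kl2' computed above.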
|
||||
module WithScalarAnswer = {
|
||||
let sum = (mp: PointSetTypes.MixedPoint.t): float => mp.continuous +. mp.discrete
|
||||
let score = (~estimate: pointSetDist, ~answer: scalar): result<score, Operation.Error.t> => {
|
||||
let _score = (~estimatePdf: float => option<float>, ~answer: float): result<
|
||||
score,
|
||||
Operation.Error.t,
|
||||
> => {
|
||||
let density = answer->estimatePdf
|
||||
switch density {
|
||||
| None => Operation.PdfInvalidError->Error
|
||||
| Some(density') =>
|
||||
if density' < 0.0 {
|
||||
Operation.PdfInvalidError->Error
|
||||
} else if numerator == 0.0 {
|
||||
} else if density' == 0.0 {
|
||||
infinity->Ok
|
||||
} else {
|
||||
-.(
|
||||
switch priorPdf {
|
||||
| None => numerator->logFn
|
||||
| Some(f) => {
|
||||
let priorDensityOfAnswer = f(answer)
|
||||
if priorDensityOfAnswer == 0.0 {
|
||||
neg_infinity
|
||||
} else {
|
||||
(numerator /. priorDensityOfAnswer)->logFn
|
||||
density'->logFn->(x => -.x)->Ok
|
||||
}
|
||||
}
|
||||
}
|
||||
)->Ok
|
||||
|
||||
let estimatePdf = x =>
|
||||
switch estimate {
|
||||
| Continuous(esti) => Continuous.T.xToY(x, esti)->sum->Some
|
||||
| Discrete(esti) => Discrete.T.xToY(x, esti)->sum->Some
|
||||
| Mixed(_) => None
|
||||
}
|
||||
_score(~estimatePdf, ~answer)
|
||||
}
|
||||
|
||||
let scoreWithPrior = (~estimate: pointSetDist, ~answer: scalar, ~prior: pointSetDist): result<
|
||||
score,
|
||||
Operation.Error.t,
|
||||
> => {
|
||||
E.R.merge(score(~estimate, ~answer), score(~estimate=prior, ~answer))->E.R2.fmap(((s1, s2)) =>
|
||||
s1 -. s2
|
||||
)
|
||||
}
|
||||
}
|
||||
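In the same spirit, WithScalarAnswer.score is the negative log density of the observed answer under the estimate, and scoreWithPrior compares it against the prior's density (restatement only):

\mathrm{score}(\mathrm{estimate}, x) = -\log \mathrm{estimate}(x), \qquad \mathrm{scoreWithPrior} = \log \mathrm{prior}(x) - \log \mathrm{estimate}(x) = \log\frac{\mathrm{prior}(x)}{\mathrm{estimate}(x)}.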
|
||||
let twoGenericDistsToTwoPointSetDists = (~toPointSetFn, estimate, answer): result<
|
||||
(pointSetDist, pointSetDist),
|
||||
'e,
|
||||
> => E.R.merge(toPointSetFn(estimate, ()), toPointSetFn(answer, ()))
|
||||
|
||||
let logScore = (args: scoreArgs, ~combineFn, ~integrateFn, ~toMixedFn): result<
|
||||
score,
|
||||
Operation.Error.t,
|
||||
> =>
|
||||
switch args {
|
||||
| DistAnswer({estimate, answer, prior: None}) =>
|
||||
WithDistAnswer.sum(~estimate, ~answer, ~integrateFn, ~combineFn, ~toMixedFn)
|
||||
| DistAnswer({estimate, answer, prior: Some(prior)}) =>
|
||||
WithDistAnswer.sumWithPrior(~estimate, ~answer, ~prior, ~integrateFn, ~combineFn, ~toMixedFn)
|
||||
| ScalarAnswer({estimate, answer, prior: None}) => WithScalarAnswer.score(~estimate, ~answer)
|
||||
| ScalarAnswer({estimate, answer, prior: Some(prior)}) =>
|
||||
WithScalarAnswer.scoreWithPrior(~estimate, ~answer, ~prior)
|
||||
}
|
||||
@ -117,6 +117,11 @@ let map3 = (
|
|||
): result<t, sampleSetError> =>
|
||||
E.A.zip3(get(t1), get(t2), get(t3))->E.A2.fmap(E.Tuple3.toFnCall(fn))->_fromSampleResultArray
|
||||
|
||||
let mapN = (~fn: array<float> => result<float, Operation.Error.t>, ~t1: array<t>): result<
|
||||
t,
|
||||
sampleSetError,
|
||||
> => E.A.transpose(E.A.fmap(get, t1))->E.A2.fmap(fn)->_fromSampleResultArray
|
||||
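A usage sketch for the new mapN (illustrative; sumMany is made up here and E.A.Floats.sum is assumed to exist alongside the mean/geomean helpers below):

// Pointwise sum across several sample sets: each row of the transposed samples
// is reduced with a single fn, which is exactly what mapN provides.
let sumMany = (dists: array<t>): result<t, sampleSetError> =>
  mapN(~fn=row => Ok(E.A.Floats.sum(row)), ~t1=dists)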
|
||||
let mean = t => T.get(t)->E.A.Floats.mean
|
||||
let geomean = t => T.get(t)->E.A.Floats.geomean
|
||||
let mode = t => T.get(t)->E.A.Floats.mode
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
type internalExpressionValue = ReducerInterface_InternalExpressionValue.t
|
||||
type internalExpressionValueType = ReducerInterface_InternalExpressionValue.internalExpressionValueType
|
||||
|
||||
/*
|
||||
Function Registry "Type". A type, without any other information.
|
||||
|
@ -8,6 +9,7 @@ type rec frType =
|
|||
| FRTypeNumber
|
||||
| FRTypeNumeric
|
||||
| FRTypeDistOrNumber
|
||||
| FRTypeDist
|
||||
| FRTypeLambda
|
||||
| FRTypeRecord(frTypeRecord)
|
||||
| FRTypeDict(frType)
|
||||
|
@ -41,18 +43,26 @@ and frValueDistOrNumber = FRValueNumber(float) | FRValueDist(DistributionTypes.g
|
|||
type fnDefinition = {
|
||||
name: string,
|
||||
inputs: array<frType>,
|
||||
run: (array<frValue>, DistributionOperation.env) => result<internalExpressionValue, string>,
|
||||
run: (
|
||||
array<internalExpressionValue>,
|
||||
array<frValue>,
|
||||
GenericDist.env,
|
||||
) => result<internalExpressionValue, string>,
|
||||
}
|
||||
|
||||
type function = {
|
||||
name: string,
|
||||
definitions: array<fnDefinition>,
|
||||
examples: option<string>,
|
||||
requiresNamespace: bool,
|
||||
nameSpace: string,
|
||||
output: option<internalExpressionValueType>,
|
||||
examples: array<string>,
|
||||
description: option<string>,
|
||||
isExperimental: bool,
|
||||
}
|
||||
|
||||
type registry = array<function>
|
||||
type fnNameDict = Js.Dict.t<array<function>>
|
||||
type registry = {functions: array<function>, fnNameDict: fnNameDict}
|
||||
|
||||
module FRType = {
|
||||
type t = frType
|
||||
|
@ -60,6 +70,7 @@ module FRType = {
|
|||
switch t {
|
||||
| FRTypeNumber => "number"
|
||||
| FRTypeNumeric => "numeric"
|
||||
| FRTypeDist => "distribution"
|
||||
| FRTypeDistOrNumber => "distribution|number"
|
||||
| FRTypeRecord(r) => {
|
||||
let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
|
||||
|
@ -98,6 +109,7 @@ module FRType = {
|
|||
| (FRTypeDistOrNumber, IEvDistribution(Symbolic(#Float(f)))) =>
|
||||
Some(FRValueDistOrNumber(FRValueNumber(f)))
|
||||
| (FRTypeDistOrNumber, IEvDistribution(f)) => Some(FRValueDistOrNumber(FRValueDist(f)))
|
||||
| (FRTypeDist, IEvDistribution(f)) => Some(FRValueDist(f))
|
||||
| (FRTypeNumeric, IEvNumber(f)) => Some(FRValueNumber(f))
|
||||
| (FRTypeNumeric, IEvDistribution(Symbolic(#Float(f)))) => Some(FRValueNumber(f))
|
||||
| (FRTypeLambda, IEvLambda(f)) => Some(FRValueLambda(f))
|
||||
|
@@ -262,7 +274,7 @@ module Matcher = {

  module Registry = {
    let _findExactMatches = (r: registry, fnName: string, args: array<internalExpressionValue>) => {
      let functionMatchPairs = r->E.A2.fmap(l => (l, Function.match(l, fnName, args)))
      let functionMatchPairs = r.functions->E.A2.fmap(l => (l, Function.match(l, fnName, args)))
      let fullMatch = functionMatchPairs->E.A.getBy(((_, match)) => Match.isFullMatch(match))
      fullMatch->E.O.bind(((fn, match)) =>
        switch match {

@@ -273,7 +285,7 @@ module Matcher = {
    }

    let _findNameMatches = (r: registry, fnName: string, args: array<internalExpressionValue>) => {
      let functionMatchPairs = r->E.A2.fmap(l => (l, Function.match(l, fnName, args)))
      let functionMatchPairs = r.functions->E.A2.fmap(l => (l, Function.match(l, fnName, args)))
      let getNameMatches =
        functionMatchPairs
        ->E.A2.fmap(((fn, match)) => Match.isNameMatchOnly(match) ? Some((fn, match)) : None)

@@ -292,10 +304,13 @@ module Matcher = {
    }

    let findMatches = (r: registry, fnName: string, args: array<internalExpressionValue>) => {
      switch _findExactMatches(r, fnName, args) {
      let fnNameInParts = Js.String.split(".", fnName)
      let fnToSearch = E.A.get(fnNameInParts, 1) |> E.O.default(fnNameInParts[0])

      switch _findExactMatches(r, fnToSearch, args) {
      | Some(r) => Match.FullMatch(r)
      | None =>
        switch _findNameMatches(r, fnName, args) {
        switch _findNameMatches(r, fnToSearch, args) {
        | Some(r) => Match.SameNameDifferentArguments(r)
        | None => Match.DifferentName
        }

@@ -305,7 +320,7 @@ module Matcher = {
    let matchToDef = (registry: registry, {fnName, inputIndex}: RegistryMatch.match): option<
      fnDefinition,
    > =>
      registry
      registry.functions
      ->E.A.getBy(fn => fn.name === fnName)
      ->E.O.bind(fn => E.A.get(fn.definitions, inputIndex))
  }
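A minimal sketch of the new lookup path (not part of the diff), using the `Dist.logScore` name that appears in the scoring library further below:

// findMatches(r, "Dist.logScore", args):
//   Js.String.split(".", "Dist.logScore")  -> ["Dist", "logScore"]
//   fnToSearch                              -> "logScore"
// _findExactMatches then runs against "logScore"; if only the name matches,
// _findNameMatches reports the same-name / different-arguments case instead.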
@@ -319,15 +334,23 @@ module FnDefinition = {
    t.name ++ `(${inputs})`
  }

  let run = (t: t, args: array<internalExpressionValue>, env: DistributionOperation.env) => {
  let isMatch = (t: t, args: array<internalExpressionValue>) => {
    let argValues = FRType.matchWithExpressionValueArray(t.inputs, args)
    switch argValues {
    | Some(values) => t.run(values, env)
    | Some(_) => true
    | None => false
    }
  }

  let run = (t: t, args: array<internalExpressionValue>, env: GenericDist.env) => {
    let argValues = FRType.matchWithExpressionValueArray(t.inputs, args)
    switch argValues {
    | Some(values) => t.run(args, values, env)
    | None => Error("Incorrect Types")
    }
  }

  let make = (~name, ~inputs, ~run): t => {
  let make = (~name, ~inputs, ~run, ()): t => {
    name: name,
    inputs: inputs,
    run: run,
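For orientation, a sketch of a definition built with the updated maker; the trailing unit and the three-argument run are the new parts of this changeset, while the "double" name and body are illustrative only and not taken from it:

// Hypothetical example definition, assuming the FnDefinition.make signature above.
let double = FnDefinition.make(
  ~name="double",
  ~inputs=[FRTypeNumber],
  ~run=(_, inputs, _) =>
    switch inputs {
    | [FRValueNumber(x)] => Ok(Wrappers.evNumber(x *. 2.0))
    | _ => Error("Incorrect Types")
    },
  (),
)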
@@ -340,16 +363,29 @@ module Function = {
  type functionJson = {
    name: string,
    definitions: array<string>,
    examples: option<string>,
    examples: array<string>,
    description: option<string>,
    isExperimental: bool,
  }

  let make = (~name, ~definitions, ~examples=?, ~description=?, ~isExperimental=false, ()): t => {
  let make = (
    ~name,
    ~nameSpace,
    ~requiresNamespace,
    ~definitions,
    ~examples=?,
    ~output=?,
    ~description=?,
    ~isExperimental=false,
    (),
  ): t => {
    name: name,
    nameSpace: nameSpace,
    definitions: definitions,
    examples: examples,
    output: output,
    examples: examples |> E.O.default([]),
    isExperimental: isExperimental,
    requiresNamespace: requiresNamespace,
    description: description,
  }
@@ -362,22 +398,64 @@ module Function = {
    }
  }

module NameSpace = {
  type t = {name: string, functions: array<function>}
  let definitions = (t: t) => t.functions->E.A2.fmap(f => f.definitions)->E.A.concatMany
  let uniqueFnNames = (t: t) => definitions(t)->E.A2.fmap(r => r.name)->E.A.uniq
  let nameToDefinitions = (t: t, name: string) => definitions(t)->E.A2.filter(r => r.name == name)
}

module Registry = {
  let toJson = (r: registry) => r->E.A2.fmap(Function.toJson)
  let toJson = (r: registry) => r.functions->E.A2.fmap(Function.toJson)
  let allExamples = (r: registry) => r.functions->E.A2.fmap(r => r.examples)->E.A.concatMany
  let allExamplesWithFns = (r: registry) =>
    r.functions->E.A2.fmap(fn => fn.examples->E.A2.fmap(example => (fn, example)))->E.A.concatMany

  let _buildFnNameDict = (r: array<function>): fnNameDict => {
    let allDefinitionsWithFns =
      r
      ->E.A2.fmap(fn => fn.definitions->E.A2.fmap(definitions => (fn, definitions)))
      ->E.A.concatMany
    let functionsWithFnNames =
      allDefinitionsWithFns
      ->E.A2.fmap(((fn, def)) => {
        let nameWithNamespace = `${fn.nameSpace}.${def.name}`
        let nameWithoutNamespace = def.name
        fn.requiresNamespace
          ? [(nameWithNamespace, fn)]
          : [(nameWithNamespace, fn), (nameWithoutNamespace, fn)]
      })
      ->E.A.concatMany
    let uniqueNames = functionsWithFnNames->E.A2.fmap(((name, _)) => name)->E.A.uniq
    let cacheAsArray: array<(string, array<function>)> = uniqueNames->E.A2.fmap(uniqueName => {
      let relevantItems =
        E.A2.filter(functionsWithFnNames, ((defName, _)) => defName == uniqueName)->E.A2.fmap(
          E.Tuple2.second,
        )
      (uniqueName, relevantItems)
    })
    cacheAsArray->Js.Dict.fromArray
  }

  let make = (fns: array<function>): registry => {
    let dict = _buildFnNameDict(fns)
    {functions: fns, fnNameDict: dict}
  }

  /*
  There's a (potential+minor) bug here: If a function definition is called outside of the calls
  to the registry, then it's possible that there could be a match after the registry is
  called. However, for now, we could just call the registry last.
  */
  let matchAndRun = (
  let _matchAndRun = (
    ~registry: registry,
    ~fnName: string,
    ~args: array<internalExpressionValue>,
    ~env: DistributionOperation.env,
    ~env: GenericDist.env,
  ) => {
    let relevantFunctions = Js.Dict.get(registry.fnNameDict, fnName) |> E.O.default([])
    let modified = {functions: relevantFunctions, fnNameDict: registry.fnNameDict}
    let matchToDef = m => Matcher.Registry.matchToDef(registry, m)
    //Js.log(toSimple(registry))
    let showNameMatchDefinitions = matches => {
      let defs =
        matches

@@ -388,10 +466,21 @@ module Registry = {
        ->E.A2.joinWith("; ")
      `There are function matches for ${fnName}(), but with different arguments: ${defs}`
    }

    switch Matcher.Registry.findMatches(registry, fnName, args) {
    switch Matcher.Registry.findMatches(modified, fnName, args) {
    | Matcher.Match.FullMatch(match) => match->matchToDef->E.O2.fmap(FnDefinition.run(_, args, env))
    | SameNameDifferentArguments(m) => Some(Error(showNameMatchDefinitions(m)))
    | _ => None
    }
  }

  let dispatch = (
    registry,
    (fnName, args): ReducerInterface_InternalExpressionValue.functionCall,
    env,
  ) => {
    _matchAndRun(~registry, ~fnName, ~args, ~env)->E.O2.fmap(
      E.R2.errMap(_, s => Reducer_ErrorValue.RETodo(s)),
    )
  }
}
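Putting the pieces together, a sketch of the intended call flow; `fnList`, `fnName`, `args`, and `env` are placeholders:

// Build the registry once; _buildFnNameDict caches name -> relevant functions.
let registry = FunctionRegistry_Core.Registry.make(fnList)
// Dispatch a reducer call: look fnName up in fnNameDict, narrow the registry to
// the relevant functions, run the first full match, or surface the
// "same name, different arguments" error produced above.
let result = FunctionRegistry_Core.Registry.dispatch(registry, (fnName, args), env)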
@@ -27,6 +27,12 @@ module Prepare = {
      | _ => Error(impossibleError)
      }

    let threeArgs = (inputs: ts): result<ts, err> =>
      switch inputs {
      | [FRValueRecord([(_, n1), (_, n2), (_, n3)])] => Ok([n1, n2, n3])
      | _ => Error(impossibleError)
      }

    let toArgs = (inputs: ts): result<ts, err> =>
      switch inputs {
      | [FRValueRecord(args)] => args->E.A2.fmap(((_, b)) => b)->Ok

@@ -57,6 +63,16 @@ module Prepare = {
    }
  }

  let twoDist = (values: ts): result<
    (DistributionTypes.genericDist, DistributionTypes.genericDist),
    err,
  > => {
    switch values {
    | [FRValueDist(a1), FRValueDist(a2)] => Ok(a1, a2)
    | _ => Error(impossibleError)
    }
  }

  let twoNumbers = (values: ts): result<(float, float), err> => {
    switch values {
    | [FRValueNumber(a1), FRValueNumber(a2)] => Ok(a1, a2)

@@ -81,6 +97,11 @@ module Prepare = {
  module Record = {
    let twoDistOrNumber = (values: ts): result<(frValueDistOrNumber, frValueDistOrNumber), err> =>
      values->ToValueArray.Record.twoArgs->E.R.bind(twoDistOrNumber)

    let twoDist = (values: ts): result<
      (DistributionTypes.genericDist, DistributionTypes.genericDist),
      err,
    > => values->ToValueArray.Record.twoArgs->E.R.bind(twoDist)
  }
}
@@ -128,8 +149,7 @@ module Prepare = {
module Process = {
  module DistOrNumberToDist = {
    module Helpers = {
      let toSampleSet = (r, env: DistributionOperation.env) =>
        GenericDist.toSampleSetDist(r, env.sampleCount)
      let toSampleSet = (r, env: GenericDist.env) => GenericDist.toSampleSetDist(r, env.sampleCount)

      let mapFnResult = r =>
        switch r {

@@ -166,7 +186,7 @@ module Process = {
    let oneValue = (
      ~fn: float => result<DistributionTypes.genericDist, string>,
      ~value: frValueDistOrNumber,
      ~env: DistributionOperation.env,
      ~env: GenericDist.env,
    ): result<DistributionTypes.genericDist, string> => {
      switch value {
      | FRValueNumber(a1) => fn(a1)

@@ -179,7 +199,7 @@ module Process = {
    let twoValues = (
      ~fn: ((float, float)) => result<DistributionTypes.genericDist, string>,
      ~values: (frValueDistOrNumber, frValueDistOrNumber),
      ~env: DistributionOperation.env,
      ~env: GenericDist.env,
    ): result<DistributionTypes.genericDist, string> => {
      switch values {
      | (FRValueNumber(a1), FRValueNumber(a2)) => fn((a1, a2))
@@ -193,72 +213,3 @@ module Process = {
    twoValues(~fn=Helpers.wrapSymbolic(fn), ~values)
  }
}

module TwoArgDist = {
  let process = (~fn, ~env, r) =>
    r
    ->E.R.bind(Process.DistOrNumberToDist.twoValuesUsingSymbolicDist(~fn, ~values=_, ~env))
    ->E.R2.fmap(Wrappers.evDistribution)

  let make = (name, fn) => {
    FnDefinition.make(~name, ~inputs=[FRTypeDistOrNumber, FRTypeDistOrNumber], ~run=(inputs, env) =>
      inputs->Prepare.ToValueTuple.twoDistOrNumber->process(~fn, ~env)
    )
  }

  let makeRecordP5P95 = (name, fn) => {
    FnDefinition.make(
      ~name,
      ~inputs=[FRTypeRecord([("p5", FRTypeDistOrNumber), ("p95", FRTypeDistOrNumber)])],
      ~run=(inputs, env) => inputs->Prepare.ToValueTuple.Record.twoDistOrNumber->process(~fn, ~env),
    )
  }

  let makeRecordMeanStdev = (name, fn) => {
    FnDefinition.make(
      ~name,
      ~inputs=[FRTypeRecord([("mean", FRTypeDistOrNumber), ("stdev", FRTypeDistOrNumber)])],
      ~run=(inputs, env) => inputs->Prepare.ToValueTuple.Record.twoDistOrNumber->process(~fn, ~env),
    )
  }
}

module OneArgDist = {
  let process = (~fn, ~env, r) =>
    r
    ->E.R.bind(Process.DistOrNumberToDist.oneValueUsingSymbolicDist(~fn, ~value=_, ~env))
    ->E.R2.fmap(Wrappers.evDistribution)

  let make = (name, fn) =>
    FnDefinition.make(~name, ~inputs=[FRTypeDistOrNumber], ~run=(inputs, env) =>
      inputs->Prepare.ToValueTuple.oneDistOrNumber->process(~fn, ~env)
    )
}

module ArrayNumberDist = {
  let make = (name, fn) => {
    FnDefinition.make(~name, ~inputs=[FRTypeArray(FRTypeNumber)], ~run=(inputs, _) =>
      Prepare.ToTypedArray.numbers(inputs)
      ->E.R.bind(r => E.A.length(r) === 0 ? Error("List is empty") : Ok(r))
      ->E.R.bind(fn)
    )
  }
  let make2 = (name, fn) => {
    FnDefinition.make(~name, ~inputs=[FRTypeArray(FRTypeAny)], ~run=(inputs, _) =>
      Prepare.ToTypedArray.numbers(inputs)
      ->E.R.bind(r => E.A.length(r) === 0 ? Error("List is empty") : Ok(r))
      ->E.R.bind(fn)
    )
  }
}

module NumberToNumber = {
  let make = (name, fn) =>
    FnDefinition.make(~name, ~inputs=[FRTypeNumber], ~run=(inputs, _) => {
      inputs
      ->getOrError(0)
      ->E.R.bind(Prepare.oneNumber)
      ->E.R2.fmap(fn)
      ->E.R2.fmap(Wrappers.evNumber)
    })
}
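These wrapper modules (TwoArgDist, OneArgDist, ArrayNumberDist, NumberToNumber) are removed from the shared helpers here and reappear in the per-namespace libraries further down (the FR_Dist and FR_Number files), adapted to the new three-argument run. A typical use, as quoted from the Number library below:

Function.make(
  ~name="floor",
  ~nameSpace,
  ~requiresNamespace,
  ~output=EvtNumber,
  ~examples=[`floor(3.5)`],
  ~definitions=[NumberToNumber.make("floor", Js.Math.floor_float)],
  (),
)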
@@ -1,512 +1,12 @@
open FunctionRegistry_Core
open FunctionRegistry_Helpers

let twoArgs = E.Tuple2.toFnCall

module Declaration = {
  let frType = FRTypeRecord([
    ("fn", FRTypeLambda),
    ("inputs", FRTypeArray(FRTypeRecord([("min", FRTypeNumber), ("max", FRTypeNumber)]))),
let fnList = Belt.Array.concatMany([
  FR_Dict.library,
  FR_Dist.library,
  FR_Fn.library,
  FR_List.library,
  FR_Number.library,
  FR_Pointset.library,
  FR_Scoring.library,
])
let fromExpressionValue = (e: frValue): result<internalExpressionValue, string> => {
|
||||
switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.twoArgs([e]) {
|
||||
| Ok([FRValueLambda(lambda), FRValueArray(inputs)]) => {
|
||||
open FunctionRegistry_Helpers.Prepare
|
||||
let getMinMax = arg =>
|
||||
ToValueArray.Record.toArgs([arg])
|
||||
->E.R.bind(ToValueTuple.twoNumbers)
|
||||
->E.R2.fmap(((min, max)) => Declaration.ContinuousFloatArg.make(min, max))
|
||||
inputs
|
||||
->E.A2.fmap(getMinMax)
|
||||
->E.A.R.firstErrorOrOpen
|
||||
->E.R2.fmap(args => ReducerInterface_InternalExpressionValue.IEvDeclaration(
|
||||
Declaration.make(lambda, args),
|
||||
))
|
||||
}
|
||||
| Error(r) => Error(r)
|
||||
| Ok(_) => Error(FunctionRegistry_Helpers.impossibleError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let inputsTodist = (inputs: array<FunctionRegistry_Core.frValue>, makeDist) => {
|
||||
let array = inputs->getOrError(0)->E.R.bind(Prepare.ToValueArray.Array.openA)
|
||||
let xyCoords =
|
||||
array->E.R.bind(xyCoords =>
|
||||
xyCoords
|
||||
->E.A2.fmap(xyCoord =>
|
||||
[xyCoord]->Prepare.ToValueArray.Record.twoArgs->E.R.bind(Prepare.ToValueTuple.twoNumbers)
|
||||
)
|
||||
->E.A.R.firstErrorOrOpen
|
||||
)
|
||||
let expressionValue =
|
||||
xyCoords
|
||||
->E.R.bind(r => r->XYShape.T.makeFromZipped->E.R2.errMap(XYShape.Error.toString))
|
||||
->E.R2.fmap(r => ReducerInterface_InternalExpressionValue.IEvDistribution(
|
||||
PointSet(makeDist(r)),
|
||||
))
|
||||
expressionValue
|
||||
}
|
||||
|
||||
let registry = [
|
||||
Function.make(
|
||||
~name="toContinuousPointSet",
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="toContinuousPointSet",
|
||||
~inputs=[FRTypeArray(FRTypeRecord([("x", FRTypeNumeric), ("y", FRTypeNumeric)]))],
|
||||
~run=(inputs, _) => inputsTodist(inputs, r => Continuous(Continuous.make(r))),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="toDiscretePointSet",
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="toDiscretePointSet",
|
||||
~inputs=[FRTypeArray(FRTypeRecord([("x", FRTypeNumeric), ("y", FRTypeNumeric)]))],
|
||||
~run=(inputs, _) => inputsTodist(inputs, r => Discrete(Discrete.make(r))),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Declaration",
|
||||
~definitions=[
|
||||
FnDefinition.make(~name="declareFn", ~inputs=[Declaration.frType], ~run=(inputs, _) => {
|
||||
inputs->getOrError(0)->E.R.bind(Declaration.fromExpressionValue)
|
||||
}),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Normal",
|
||||
~examples=`normal(5,1)
|
||||
normal({p5: 4, p95: 10})
|
||||
normal({mean: 5, stdev: 2})`,
|
||||
~definitions=[
|
||||
TwoArgDist.make("normal", twoArgs(SymbolicDist.Normal.make)),
|
||||
TwoArgDist.makeRecordP5P95("normal", r =>
|
||||
twoArgs(SymbolicDist.Normal.from90PercentCI, r)->Ok
|
||||
),
|
||||
TwoArgDist.makeRecordMeanStdev("normal", twoArgs(SymbolicDist.Normal.make)),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Lognormal",
|
||||
~examples=`lognormal(0.5, 0.8)
|
||||
lognormal({p5: 4, p95: 10})
|
||||
lognormal({mean: 5, stdev: 2})`,
|
||||
~definitions=[
|
||||
TwoArgDist.make("lognormal", twoArgs(SymbolicDist.Lognormal.make)),
|
||||
TwoArgDist.makeRecordP5P95("lognormal", r =>
|
||||
twoArgs(SymbolicDist.Lognormal.from90PercentCI, r)->Ok
|
||||
),
|
||||
TwoArgDist.makeRecordMeanStdev("lognormal", twoArgs(SymbolicDist.Lognormal.fromMeanAndStdev)),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Uniform",
|
||||
~examples=`uniform(10, 12)`,
|
||||
~definitions=[TwoArgDist.make("uniform", twoArgs(SymbolicDist.Uniform.make))],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Beta",
|
||||
~examples=`beta(20, 25)
|
||||
beta({mean: 0.39, stdev: 0.1})`,
|
||||
~definitions=[
|
||||
TwoArgDist.make("beta", twoArgs(SymbolicDist.Beta.make)),
|
||||
TwoArgDist.makeRecordMeanStdev("beta", twoArgs(SymbolicDist.Beta.fromMeanAndStdev)),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Cauchy",
|
||||
~examples=`cauchy(5, 1)`,
|
||||
~definitions=[TwoArgDist.make("cauchy", twoArgs(SymbolicDist.Cauchy.make))],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Gamma",
|
||||
~examples=`gamma(5, 1)`,
|
||||
~definitions=[TwoArgDist.make("gamma", twoArgs(SymbolicDist.Gamma.make))],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Logistic",
|
||||
~examples=`gamma(5, 1)`,
|
||||
~definitions=[TwoArgDist.make("logistic", twoArgs(SymbolicDist.Logistic.make))],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="To (Distribution)",
|
||||
~examples=`5 to 10
|
||||
to(5,10)
|
||||
-5 to 5`,
|
||||
~definitions=[
|
||||
TwoArgDist.make("to", twoArgs(SymbolicDist.From90thPercentile.make)),
|
||||
TwoArgDist.make(
|
||||
"credibleIntervalToDistribution",
|
||||
twoArgs(SymbolicDist.From90thPercentile.make),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Exponential",
|
||||
~examples=`exponential(2)`,
|
||||
~definitions=[OneArgDist.make("exponential", SymbolicDist.Exponential.make)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Bernoulli",
|
||||
~examples=`bernoulli(0.5)`,
|
||||
~definitions=[OneArgDist.make("bernoulli", SymbolicDist.Bernoulli.make)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="PointMass",
|
||||
~examples=`pointMass(0.5)`,
|
||||
~definitions=[OneArgDist.make("pointMass", SymbolicDist.Float.makeSafe)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="toContinuousPointSet",
|
||||
~description="Converts a set of points to a continuous distribution",
|
||||
~examples=`toContinuousPointSet([
|
||||
{x: 0, y: 0.1},
|
||||
{x: 1, y: 0.2},
|
||||
{x: 2, y: 0.15},
|
||||
{x: 3, y: 0.1}
|
||||
])`,
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="toContinuousPointSet",
|
||||
~inputs=[FRTypeArray(FRTypeRecord([("x", FRTypeNumeric), ("y", FRTypeNumeric)]))],
|
||||
~run=(inputs, _) => inputsTodist(inputs, r => Continuous(Continuous.make(r))),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="toDiscretePointSet",
|
||||
~description="Converts a set of points to a discrete distribution",
|
||||
~examples=`toDiscretePointSet([
|
||||
{x: 0, y: 0.1},
|
||||
{x: 1, y: 0.2},
|
||||
{x: 2, y: 0.15},
|
||||
{x: 3, y: 0.1}
|
||||
])`,
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="toDiscretePointSet",
|
||||
~inputs=[FRTypeArray(FRTypeRecord([("x", FRTypeNumeric), ("y", FRTypeNumeric)]))],
|
||||
~run=(inputs, _) => inputsTodist(inputs, r => Discrete(Discrete.make(r))),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Declaration (Continuous Function)",
|
||||
~description="Adds metadata to a function of the input ranges. Works now for numeric and date inputs. This is useful when making predictions. It allows you to limit the domain that your prediction will be used and scored within.",
|
||||
~examples=`declareFn({
|
||||
fn: {|a,b| a },
|
||||
inputs: [
|
||||
{min: 0, max: 100},
|
||||
{min: 30, max: 50}
|
||||
]
|
||||
})`,
|
||||
~definitions=[
|
||||
FnDefinition.make(~name="declareFn", ~inputs=[Declaration.frType], ~run=(inputs, _) => {
|
||||
inputs->E.A.unsafe_get(0)->Declaration.fromExpressionValue
|
||||
}),
|
||||
],
|
||||
~isExperimental=true,
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Floor",
|
||||
~definitions=[NumberToNumber.make("floor", Js.Math.floor_float)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Ceiling",
|
||||
~definitions=[NumberToNumber.make("ceil", Js.Math.ceil_float)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Absolute Value",
|
||||
~definitions=[NumberToNumber.make("abs", Js.Math.abs_float)],
|
||||
(),
|
||||
),
|
||||
Function.make(~name="Exponent", ~definitions=[NumberToNumber.make("exp", Js.Math.exp)], ()),
|
||||
Function.make(~name="Log", ~definitions=[NumberToNumber.make("log", Js.Math.log)], ()),
|
||||
Function.make(
|
||||
~name="Log Base 10",
|
||||
~definitions=[NumberToNumber.make("log10", Js.Math.log10)],
|
||||
(),
|
||||
),
|
||||
Function.make(~name="Log Base 2", ~definitions=[NumberToNumber.make("log2", Js.Math.log2)], ()),
|
||||
Function.make(~name="Round", ~definitions=[NumberToNumber.make("round", Js.Math.round)], ()),
|
||||
Function.make(
|
||||
~name="Sum",
|
||||
~definitions=[ArrayNumberDist.make("sum", r => r->E.A.Floats.sum->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Product",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("product", r => r->E.A.Floats.product->Wrappers.evNumber->Ok),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Min",
|
||||
~definitions=[ArrayNumberDist.make("min", r => r->E.A.Floats.min->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Max",
|
||||
~definitions=[ArrayNumberDist.make("max", r => r->E.A.Floats.max->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Mean",
|
||||
~definitions=[ArrayNumberDist.make("mean", r => r->E.A.Floats.mean->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Geometric Mean",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("geomean", r => r->E.A.Floats.geomean->Wrappers.evNumber->Ok),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Standard Deviation",
|
||||
~definitions=[ArrayNumberDist.make("stdev", r => r->E.A.Floats.stdev->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Variance",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("variance", r => r->E.A.Floats.stdev->Wrappers.evNumber->Ok),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="First",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make2("first", r =>
|
||||
r->E.A.first |> E.O.toResult(impossibleError) |> E.R.fmap(Wrappers.evNumber)
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Last",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make2("last", r =>
|
||||
r->E.A.last |> E.O.toResult(impossibleError) |> E.R.fmap(Wrappers.evNumber)
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Sort",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("sort", r =>
|
||||
r->E.A.Floats.sort->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Reverse",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("reverse", r =>
|
||||
r->Belt_Array.reverse->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Cumulative Sum",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("cumsum", r =>
|
||||
r->E.A.Floats.cumsum->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Cumulative Prod",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("cumprod", r =>
|
||||
r->E.A.Floats.cumsum->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Diff",
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("diff", r =>
|
||||
r->E.A.Floats.diff->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Dict.merge",
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="merge",
|
||||
~inputs=[FRTypeDict(FRTypeAny), FRTypeDict(FRTypeAny)],
|
||||
~run=(inputs, _) => {
|
||||
switch inputs {
|
||||
| [FRValueDict(d1), FRValueDict(d2)] => {
|
||||
let newDict =
|
||||
E.Dict.concat(d1, d2) |> Js.Dict.map((. r) =>
|
||||
FunctionRegistry_Core.FRType.matchReverse(r)
|
||||
)
|
||||
newDict->Js.Dict.entries->Belt.Map.String.fromArray->Wrappers.evRecord->Ok
|
||||
}
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
},
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
//TODO: Make sure that two functions can't have the same name. This causes chaos elsewhere.
|
||||
Function.make(
|
||||
~name="Dict.mergeMany",
|
||||
~definitions=[
|
||||
FnDefinition.make(~name="mergeMany", ~inputs=[FRTypeArray(FRTypeDict(FRTypeAny))], ~run=(
|
||||
inputs,
|
||||
_,
|
||||
) =>
|
||||
inputs
|
||||
->Prepare.ToTypedArray.dicts
|
||||
->E.R2.fmap(E.Dict.concatMany)
|
||||
->E.R2.fmap(Js.Dict.map((. r) => FunctionRegistry_Core.FRType.matchReverse(r)))
|
||||
->E.R2.fmap(r => r->Js.Dict.entries->Belt.Map.String.fromArray)
|
||||
->E.R2.fmap(Wrappers.evRecord)
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Dict.keys",
|
||||
~definitions=[
|
||||
FnDefinition.make(~name="keys", ~inputs=[FRTypeDict(FRTypeAny)], ~run=(inputs, _) =>
|
||||
switch inputs {
|
||||
| [FRValueDict(d1)] => Js.Dict.keys(d1)->E.A2.fmap(Wrappers.evString)->Wrappers.evArray->Ok
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Dict.values",
|
||||
~definitions=[
|
||||
FnDefinition.make(~name="values", ~inputs=[FRTypeDict(FRTypeAny)], ~run=(inputs, _) =>
|
||||
switch inputs {
|
||||
| [FRValueDict(d1)] =>
|
||||
Js.Dict.values(d1)
|
||||
->E.A2.fmap(FunctionRegistry_Core.FRType.matchReverse)
|
||||
->Wrappers.evArray
|
||||
->Ok
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Dict.toList",
|
||||
~definitions=[
|
||||
FnDefinition.make(~name="dictToList", ~inputs=[FRTypeDict(FRTypeAny)], ~run=(inputs, _) =>
|
||||
switch inputs {
|
||||
| [FRValueDict(dict)] =>
|
||||
dict
|
||||
->Js.Dict.entries
|
||||
->E.A2.fmap(((key, value)) =>
|
||||
Wrappers.evArray([
|
||||
Wrappers.evString(key),
|
||||
FunctionRegistry_Core.FRType.matchReverse(value),
|
||||
])
|
||||
)
|
||||
->Wrappers.evArray
|
||||
->Ok
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="Dict.fromList",
|
||||
~definitions=[
|
||||
FnDefinition.make(~name="dictFromList", ~inputs=[FRTypeArray(FRTypeArray(FRTypeAny))], ~run=(
|
||||
inputs,
|
||||
_,
|
||||
) => {
|
||||
let convertInternalItems = items =>
|
||||
items
|
||||
->E.A2.fmap(item => {
|
||||
switch item {
|
||||
| [FRValueString(string), value] =>
|
||||
(string, FunctionRegistry_Core.FRType.matchReverse(value))->Ok
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
})
|
||||
->E.A.R.firstErrorOrOpen
|
||||
->E.R2.fmap(Belt.Map.String.fromArray)
|
||||
->E.R2.fmap(Wrappers.evRecord)
|
||||
inputs->getOrError(0)->E.R.bind(Prepare.ToValueArray.Array.arrayOfArrays)
|
||||
|> E.R2.bind(convertInternalItems)
|
||||
}),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="List.make",
|
||||
~definitions=[
|
||||
//Todo: If the second item is a function with no args, it could be nice to run this function and return the result.
|
||||
FnDefinition.make(~name="listMake", ~inputs=[FRTypeNumber, FRTypeAny], ~run=(inputs, _) => {
|
||||
switch inputs {
|
||||
| [FRValueNumber(number), value] =>
|
||||
Belt.Array.make(E.Float.toInt(number), value)
|
||||
->E.A2.fmap(FunctionRegistry_Core.FRType.matchReverse)
|
||||
->Wrappers.evArray
|
||||
->Ok
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
}),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="upTo",
|
||||
~definitions=[
|
||||
FnDefinition.make(~name="upTo", ~inputs=[FRTypeNumber, FRTypeNumber], ~run=(inputs, _) =>
|
||||
inputs
|
||||
->Prepare.ToValueTuple.twoNumbers
|
||||
->E.R2.fmap(((low, high)) =>
|
||||
E.A.Floats.range(low, high, (high -. low +. 1.0)->E.Float.toInt)
|
||||
->E.A2.fmap(Wrappers.evNumber)
|
||||
->Wrappers.evArray
|
||||
)
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
]
|
||||
let registry = FunctionRegistry_Core.Registry.make(fnList)
|
||||
let dispatch = FunctionRegistry_Core.Registry.dispatch(registry)
|
||||
|
|
|
@ -0,0 +1,169 @@
|
|||
open FunctionRegistry_Core
|
||||
open FunctionRegistry_Helpers
|
||||
|
||||
let nameSpace = "Dict"
|
||||
|
||||
module Internals = {
|
||||
type t = ReducerInterface_InternalExpressionValue.map
|
||||
|
||||
let keys = (a: t): internalExpressionValue => IEvArray(
|
||||
Belt.Map.String.keysToArray(a)->E.A2.fmap(Wrappers.evString),
|
||||
)
|
||||
|
||||
let values = (a: t): internalExpressionValue => IEvArray(Belt.Map.String.valuesToArray(a))
|
||||
|
||||
let toList = (a: t): internalExpressionValue =>
|
||||
Belt.Map.String.toArray(a)
|
||||
->E.A2.fmap(((key, value)) => Wrappers.evArray([IEvString(key), value]))
|
||||
->Wrappers.evArray
|
||||
|
||||
let fromList = (items: array<internalExpressionValue>): result<internalExpressionValue, string> =>
|
||||
items
|
||||
->E.A2.fmap(item => {
|
||||
switch (item: internalExpressionValue) {
|
||||
| IEvArray([IEvString(string), value]) => (string, value)->Ok
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
})
|
||||
->E.A.R.firstErrorOrOpen
|
||||
->E.R2.fmap(Belt.Map.String.fromArray)
|
||||
->E.R2.fmap(Wrappers.evRecord)
|
||||
|
||||
let merge = (a: t, b: t): internalExpressionValue => IEvRecord(
|
||||
Belt.Map.String.merge(a, b, (_, _, c) => c),
|
||||
)
|
||||
|
||||
//Belt.Map.String has a function for mergeMany, but I couldn't understand how to use it yet.
|
||||
let mergeMany = (a: array<t>): internalExpressionValue => {
|
||||
let mergedValues =
|
||||
a->E.A2.fmap(Belt.Map.String.toArray)->Belt.Array.concatMany->Belt.Map.String.fromArray
|
||||
IEvRecord(mergedValues)
|
||||
}
|
||||
}
|
||||
|
||||
let library = [
|
||||
Function.make(
|
||||
~name="merge",
|
||||
~nameSpace,
|
||||
~requiresNamespace=true,
|
||||
~output=EvtRecord,
|
||||
~examples=[`Dict.merge({a: 1, b: 2}, {c: 3, d: 4})`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="merge",
|
||||
~inputs=[FRTypeDict(FRTypeAny), FRTypeDict(FRTypeAny)],
|
||||
~run=(inputs, _, _) => {
|
||||
switch inputs {
|
||||
| [IEvRecord(d1), IEvRecord(d2)] => Internals.merge(d1, d2)->Ok
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
//TODO: Change to use new mergeMany() function.
|
||||
Function.make(
|
||||
~name="mergeMany",
|
||||
~nameSpace,
|
||||
~requiresNamespace=true,
|
||||
~output=EvtRecord,
|
||||
~examples=[`Dict.mergeMany([{a: 1, b: 2}, {c: 3, d: 4}])`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="mergeMany",
|
||||
~inputs=[FRTypeArray(FRTypeDict(FRTypeAny))],
|
||||
~run=(_, inputs, _) =>
|
||||
inputs
|
||||
->Prepare.ToTypedArray.dicts
|
||||
->E.R2.fmap(E.Dict.concatMany)
|
||||
->E.R2.fmap(Js.Dict.map((. r) => FunctionRegistry_Core.FRType.matchReverse(r)))
|
||||
->E.R2.fmap(r => r->Js.Dict.entries->Belt.Map.String.fromArray)
|
||||
->E.R2.fmap(Wrappers.evRecord),
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="keys",
|
||||
~nameSpace,
|
||||
~requiresNamespace=true,
|
||||
~output=EvtArray,
|
||||
~examples=[`Dict.keys({a: 1, b: 2})`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="keys",
|
||||
~inputs=[FRTypeDict(FRTypeAny)],
|
||||
~run=(inputs, _, _) =>
|
||||
switch inputs {
|
||||
| [IEvRecord(d1)] => Internals.keys(d1)->Ok
|
||||
| _ => Error(impossibleError)
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="values",
|
||||
~nameSpace,
|
||||
~requiresNamespace=true,
|
||||
~output=EvtArray,
|
||||
~examples=[`Dict.values({a: 1, b: 2})`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="values",
|
||||
~inputs=[FRTypeDict(FRTypeAny)],
|
||||
~run=(inputs, _, _) =>
|
||||
switch inputs {
|
||||
| [IEvRecord(d1)] => Internals.values(d1)->Ok
|
||||
| _ => Error(impossibleError)
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="toList",
|
||||
~nameSpace,
|
||||
~requiresNamespace=true,
|
||||
~output=EvtArray,
|
||||
~examples=[`Dict.toList({a: 1, b: 2})`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="toList",
|
||||
~inputs=[FRTypeDict(FRTypeAny)],
|
||||
~run=(inputs, _, _) =>
|
||||
switch inputs {
|
||||
| [IEvRecord(dict)] => dict->Internals.toList->Ok
|
||||
| _ => Error(impossibleError)
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="fromList",
|
||||
~nameSpace,
|
||||
~requiresNamespace=true,
|
||||
~output=EvtRecord,
|
||||
~examples=[`Dict.fromList([["a", 1], ["b", 2]])`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="fromList",
|
||||
~inputs=[FRTypeArray(FRTypeArray(FRTypeAny))],
|
||||
~run=(inputs, _, _) =>
|
||||
switch inputs {
|
||||
| [IEvArray(items)] => Internals.fromList(items)
|
||||
| _ => Error(impossibleError)
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,152 @@
|
|||
open FunctionRegistry_Core
|
||||
open FunctionRegistry_Helpers
|
||||
let twoArgs = E.Tuple2.toFnCall
|
||||
|
||||
module DistributionCreation = {
|
||||
let nameSpace = "Dist"
|
||||
let output = ReducerInterface_InternalExpressionValue.EvtDistribution
|
||||
let requiresNamespace = false
|
||||
|
||||
let fnMake = (~name, ~examples, ~definitions) => {
|
||||
Function.make(~name, ~nameSpace, ~output, ~examples, ~definitions, ~requiresNamespace, ())
|
||||
}
|
||||
|
||||
module TwoArgDist = {
|
||||
let process = (~fn, ~env, r) =>
|
||||
r
|
||||
->E.R.bind(Process.DistOrNumberToDist.twoValuesUsingSymbolicDist(~fn, ~values=_, ~env))
|
||||
->E.R2.fmap(Wrappers.evDistribution)
|
||||
|
||||
let make = (name, fn) => {
|
||||
FnDefinition.make(
|
||||
~name,
|
||||
~inputs=[FRTypeDistOrNumber, FRTypeDistOrNumber],
|
||||
~run=(_, inputs, env) => inputs->Prepare.ToValueTuple.twoDistOrNumber->process(~fn, ~env),
|
||||
(),
|
||||
)
|
||||
}
|
||||
|
||||
let makeRecordP5P95 = (name, fn) => {
|
||||
FnDefinition.make(
|
||||
~name,
|
||||
~inputs=[FRTypeRecord([("p5", FRTypeDistOrNumber), ("p95", FRTypeDistOrNumber)])],
|
||||
~run=(_, inputs, env) =>
|
||||
inputs->Prepare.ToValueTuple.Record.twoDistOrNumber->process(~fn, ~env),
|
||||
(),
|
||||
)
|
||||
}
|
||||
|
||||
let makeRecordMeanStdev = (name, fn) => {
|
||||
FnDefinition.make(
|
||||
~name,
|
||||
~inputs=[FRTypeRecord([("mean", FRTypeDistOrNumber), ("stdev", FRTypeDistOrNumber)])],
|
||||
~run=(_, inputs, env) =>
|
||||
inputs->Prepare.ToValueTuple.Record.twoDistOrNumber->process(~fn, ~env),
|
||||
(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
module OneArgDist = {
|
||||
let process = (~fn, ~env, r) =>
|
||||
r
|
||||
->E.R.bind(Process.DistOrNumberToDist.oneValueUsingSymbolicDist(~fn, ~value=_, ~env))
|
||||
->E.R2.fmap(Wrappers.evDistribution)
|
||||
|
||||
let make = (name, fn) =>
|
||||
FnDefinition.make(
|
||||
~name,
|
||||
~inputs=[FRTypeDistOrNumber],
|
||||
~run=(_, inputs, env) => inputs->Prepare.ToValueTuple.oneDistOrNumber->process(~fn, ~env),
|
||||
(),
|
||||
)
|
||||
}
|
||||
|
||||
let library = [
|
||||
fnMake(
|
||||
~name="normal",
|
||||
~examples=["normal(5,1)", "normal({p5: 4, p95: 10})", "normal({mean: 5, stdev: 2})"],
|
||||
~definitions=[
|
||||
TwoArgDist.make("normal", twoArgs(SymbolicDist.Normal.make)),
|
||||
TwoArgDist.makeRecordP5P95("normal", r =>
|
||||
twoArgs(SymbolicDist.Normal.from90PercentCI, r)->Ok
|
||||
),
|
||||
TwoArgDist.makeRecordMeanStdev("normal", twoArgs(SymbolicDist.Normal.make)),
|
||||
],
|
||||
),
|
||||
fnMake(
|
||||
~name="lognormal",
|
||||
~examples=[
|
||||
"lognormal(0.5, 0.8)",
|
||||
"lognormal({p5: 4, p95: 10})",
|
||||
"lognormal({mean: 5, stdev: 2})",
|
||||
],
|
||||
~definitions=[
|
||||
TwoArgDist.make("lognormal", twoArgs(SymbolicDist.Lognormal.make)),
|
||||
TwoArgDist.makeRecordP5P95("lognormal", r =>
|
||||
twoArgs(SymbolicDist.Lognormal.from90PercentCI, r)->Ok
|
||||
),
|
||||
TwoArgDist.makeRecordMeanStdev(
|
||||
"lognormal",
|
||||
twoArgs(SymbolicDist.Lognormal.fromMeanAndStdev),
|
||||
),
|
||||
],
|
||||
),
|
||||
fnMake(
|
||||
~name="uniform",
|
||||
~examples=[`uniform(10, 12)`],
|
||||
~definitions=[TwoArgDist.make("uniform", twoArgs(SymbolicDist.Uniform.make))],
|
||||
),
|
||||
fnMake(
|
||||
~name="beta",
|
||||
~examples=[`beta(20, 25)`, `beta({mean: 0.39, stdev: 0.1})`],
|
||||
~definitions=[
|
||||
TwoArgDist.make("beta", twoArgs(SymbolicDist.Beta.make)),
|
||||
TwoArgDist.makeRecordMeanStdev("beta", twoArgs(SymbolicDist.Beta.fromMeanAndStdev)),
|
||||
],
|
||||
),
|
||||
fnMake(
|
||||
~name="cauchy",
|
||||
~examples=[`cauchy(5, 1)`],
|
||||
~definitions=[TwoArgDist.make("cauchy", twoArgs(SymbolicDist.Cauchy.make))],
|
||||
),
|
||||
fnMake(
|
||||
~name="gamma",
|
||||
~examples=[`gamma(5, 1)`],
|
||||
~definitions=[TwoArgDist.make("gamma", twoArgs(SymbolicDist.Gamma.make))],
|
||||
),
|
||||
fnMake(
|
||||
~name="logistic",
|
||||
~examples=[`logistic(5, 1)`],
|
||||
~definitions=[TwoArgDist.make("logistic", twoArgs(SymbolicDist.Logistic.make))],
|
||||
),
|
||||
fnMake(
|
||||
~name="to (distribution)",
|
||||
~examples=[`5 to 10`, `to(5,10)`, `-5 to 5`],
|
||||
~definitions=[
|
||||
TwoArgDist.make("to", twoArgs(SymbolicDist.From90thPercentile.make)),
|
||||
TwoArgDist.make(
|
||||
"credibleIntervalToDistribution",
|
||||
twoArgs(SymbolicDist.From90thPercentile.make),
|
||||
),
|
||||
],
|
||||
),
|
||||
fnMake(
|
||||
~name="exponential",
|
||||
~examples=[`exponential(2)`],
|
||||
~definitions=[OneArgDist.make("exponential", SymbolicDist.Exponential.make)],
|
||||
),
|
||||
fnMake(
|
||||
~name="bernoulli",
|
||||
~examples=[`bernoulli(0.5)`],
|
||||
~definitions=[OneArgDist.make("bernoulli", SymbolicDist.Bernoulli.make)],
|
||||
),
|
||||
fnMake(
|
||||
~name="pointMass",
|
||||
~examples=[`pointMass(0.5)`],
|
||||
~definitions=[OneArgDist.make("pointMass", SymbolicDist.Float.makeSafe)],
|
||||
),
|
||||
]
|
||||
}
|
||||
|
||||
let library = DistributionCreation.library
|
|
@ -0,0 +1,62 @@
|
|||
open FunctionRegistry_Core
|
||||
open FunctionRegistry_Helpers
|
||||
|
||||
module Declaration = {
|
||||
let frType = FRTypeRecord([
|
||||
("fn", FRTypeLambda),
|
||||
("inputs", FRTypeArray(FRTypeRecord([("min", FRTypeNumber), ("max", FRTypeNumber)]))),
|
||||
])
|
||||
|
||||
let fromExpressionValue = (e: frValue): result<internalExpressionValue, string> => {
|
||||
switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.twoArgs([e]) {
|
||||
| Ok([FRValueLambda(lambda), FRValueArray(inputs)]) => {
|
||||
open FunctionRegistry_Helpers.Prepare
|
||||
let getMinMax = arg =>
|
||||
ToValueArray.Record.toArgs([arg])
|
||||
->E.R.bind(ToValueTuple.twoNumbers)
|
||||
->E.R2.fmap(((min, max)) => Declaration.ContinuousFloatArg.make(min, max))
|
||||
inputs
|
||||
->E.A2.fmap(getMinMax)
|
||||
->E.A.R.firstErrorOrOpen
|
||||
->E.R2.fmap(args => ReducerInterface_InternalExpressionValue.IEvDeclaration(
|
||||
Declaration.make(lambda, args),
|
||||
))
|
||||
}
|
||||
| Error(r) => Error(r)
|
||||
| Ok(_) => Error(FunctionRegistry_Helpers.impossibleError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let nameSpace = "Function"
|
||||
|
||||
let library = [
|
||||
Function.make(
|
||||
~name="declare",
|
||||
~nameSpace,
|
||||
~requiresNamespace=true,
|
||||
~output=EvtDeclaration,
|
||||
~description="Adds metadata to a function of the input ranges. Works now for numeric and date inputs. This is useful when making predictions. It allows you to limit the domain that your prediction will be used and scored within.",
|
||||
~examples=[
|
||||
`Function.declare({
|
||||
fn: {|a,b| a },
|
||||
inputs: [
|
||||
{min: 0, max: 100},
|
||||
{min: 30, max: 50}
|
||||
]
|
||||
})`,
|
||||
],
|
||||
~isExperimental=true,
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="declare",
|
||||
~inputs=[Declaration.frType],
|
||||
~run=(_, inputs, _) => {
|
||||
inputs->getOrError(0)->E.R.bind(Declaration.fromExpressionValue)
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,128 @@
|
|||
open FunctionRegistry_Core
|
||||
open FunctionRegistry_Helpers
|
||||
|
||||
let nameSpace = "List"
|
||||
let requiresNamespace = true
|
||||
|
||||
module Internals = {
|
||||
let makeFromNumber = (
|
||||
n: float,
|
||||
value: internalExpressionValue,
|
||||
): internalExpressionValue => IEvArray(Belt.Array.make(E.Float.toInt(n), value))
|
||||
|
||||
let upTo = (low: float, high: float): internalExpressionValue => IEvArray(
|
||||
E.A.Floats.range(low, high, (high -. low +. 1.0)->E.Float.toInt)->E.A2.fmap(Wrappers.evNumber),
|
||||
)
|
||||
|
||||
let first = (v: array<internalExpressionValue>): result<internalExpressionValue, string> =>
|
||||
v->E.A.first |> E.O.toResult("No first element")
|
||||
|
||||
let last = (v: array<internalExpressionValue>): result<internalExpressionValue, string> =>
|
||||
v->E.A.last |> E.O.toResult("No last element")
|
||||
|
||||
let reverse = (array: array<internalExpressionValue>): internalExpressionValue => IEvArray(
|
||||
Belt.Array.reverse(array),
|
||||
)
|
||||
}
|
||||
|
||||
let library = [
|
||||
Function.make(
|
||||
~name="make",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtArray,
|
||||
~examples=[`List.make(2, "testValue")`],
|
||||
~definitions=[
|
||||
//Todo: If the second item is a function with no args, it could be nice to run this function and return the result.
|
||||
FnDefinition.make(
|
||||
~name="make",
|
||||
~inputs=[FRTypeNumber, FRTypeAny],
|
||||
~run=(inputs, _, _) => {
|
||||
switch inputs {
|
||||
| [IEvNumber(number), value] => Internals.makeFromNumber(number, value)->Ok
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="upTo",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtArray,
|
||||
~examples=[`List.upTo(1,4)`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="upTo",
|
||||
~inputs=[FRTypeNumber, FRTypeNumber],
|
||||
~run=(_, inputs, _) =>
|
||||
inputs
|
||||
->Prepare.ToValueTuple.twoNumbers
|
||||
->E.R2.fmap(((low, high)) => Internals.upTo(low, high)),
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="first",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~examples=[`List.first([1,4,5])`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="first",
|
||||
~inputs=[FRTypeArray(FRTypeAny)],
|
||||
~run=(inputs, _, _) =>
|
||||
switch inputs {
|
||||
| [IEvArray(array)] => Internals.first(array)
|
||||
| _ => Error(impossibleError)
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="last",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~examples=[`List.last([1,4,5])`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="last",
|
||||
~inputs=[FRTypeArray(FRTypeAny)],
|
||||
~run=(inputs, _, _) =>
|
||||
switch inputs {
|
||||
| [IEvArray(array)] => Internals.last(array)
|
||||
| _ => Error(impossibleError)
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="reverse",
|
||||
~nameSpace,
|
||||
~output=EvtArray,
|
||||
~requiresNamespace=false,
|
||||
~examples=[`List.reverse([1,4,5])`],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="reverse",
|
||||
~inputs=[FRTypeArray(FRTypeAny)],
|
||||
~run=(inputs, _, _) =>
|
||||
switch inputs {
|
||||
| [IEvArray(array)] => Internals.reverse(array)->Ok
|
||||
| _ => Error(impossibleError)
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,251 @@
|
|||
open FunctionRegistry_Core
|
||||
open FunctionRegistry_Helpers
|
||||
|
||||
let nameSpace = "Number"
|
||||
let requiresNamespace = false
|
||||
|
||||
module NumberToNumber = {
|
||||
let make = (name, fn) =>
|
||||
FnDefinition.make(
|
||||
~name,
|
||||
~inputs=[FRTypeNumber],
|
||||
~run=(_, inputs, _) => {
|
||||
inputs
|
||||
->getOrError(0)
|
||||
->E.R.bind(Prepare.oneNumber)
|
||||
->E.R2.fmap(fn)
|
||||
->E.R2.fmap(Wrappers.evNumber)
|
||||
},
|
||||
(),
|
||||
)
|
||||
}
|
||||
|
||||
module ArrayNumberDist = {
|
||||
let make = (name, fn) => {
|
||||
FnDefinition.make(
|
||||
~name,
|
||||
~inputs=[FRTypeArray(FRTypeNumber)],
|
||||
~run=(_, inputs, _) =>
|
||||
Prepare.ToTypedArray.numbers(inputs)
|
||||
->E.R.bind(r => E.A.length(r) === 0 ? Error("List is empty") : Ok(r))
|
||||
->E.R.bind(fn),
|
||||
(),
|
||||
)
|
||||
}
|
||||
let make2 = (name, fn) => {
|
||||
FnDefinition.make(
|
||||
~name,
|
||||
~inputs=[FRTypeArray(FRTypeAny)],
|
||||
~run=(_, inputs, _) =>
|
||||
Prepare.ToTypedArray.numbers(inputs)
|
||||
->E.R.bind(r => E.A.length(r) === 0 ? Error("List is empty") : Ok(r))
|
||||
->E.R.bind(fn),
|
||||
(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
let library = [
|
||||
Function.make(
|
||||
~name="floor",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`floor(3.5)`],
|
||||
~definitions=[NumberToNumber.make("floor", Js.Math.floor_float)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="ceiling",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`ceil(3.5)`],
|
||||
~definitions=[NumberToNumber.make("ceil", Js.Math.ceil_float)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="absolute value",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`abs(3.5)`],
|
||||
~definitions=[NumberToNumber.make("abs", Js.Math.abs_float)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="exponent",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`exp(3.5)`],
|
||||
~definitions=[NumberToNumber.make("exp", Js.Math.exp)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="log",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`log(3.5)`],
|
||||
~definitions=[NumberToNumber.make("log", Js.Math.log)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="log base 10",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`log10(3.5)`],
|
||||
~definitions=[NumberToNumber.make("log10", Js.Math.log10)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="log base 2",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`log2(3.5)`],
|
||||
~definitions=[NumberToNumber.make("log2", Js.Math.log2)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="round",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`round(3.5)`],
|
||||
~definitions=[NumberToNumber.make("round", Js.Math.round)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="sum",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`sum([3,5,2])`],
|
||||
~definitions=[ArrayNumberDist.make("sum", r => r->E.A.Floats.sum->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="product",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`product([3,5,2])`],
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("product", r => r->E.A.Floats.product->Wrappers.evNumber->Ok),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="min",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`min([3,5,2])`],
|
||||
~definitions=[ArrayNumberDist.make("min", r => r->E.A.Floats.min->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="max",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`max([3,5,2])`],
|
||||
~definitions=[ArrayNumberDist.make("max", r => r->E.A.Floats.max->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="mean",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`mean([3,5,2])`],
|
||||
~definitions=[ArrayNumberDist.make("mean", r => r->E.A.Floats.mean->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="geometric mean",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`geomean([3,5,2])`],
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("geomean", r => r->E.A.Floats.geomean->Wrappers.evNumber->Ok),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="standard deviation",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`stdev([3,5,2,3,5])`],
|
||||
~definitions=[ArrayNumberDist.make("stdev", r => r->E.A.Floats.stdev->Wrappers.evNumber->Ok)],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="variance",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[`variance([3,5,2,3,5])`],
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("variance", r => r->E.A.Floats.variance->Wrappers.evNumber->Ok),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="sort",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtArray,
|
||||
~examples=[`sort([3,5,2,3,5])`],
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("sort", r =>
|
||||
r->E.A.Floats.sort->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="cumulative sum",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtArray,
|
||||
~examples=[`cumsum([3,5,2,3,5])`],
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("cumsum", r =>
|
||||
r->E.A.Floats.cumSum->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="cumulative prod",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtArray,
|
||||
~examples=[`cumprod([3,5,2,3,5])`],
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("cumprod", r =>
|
||||
r->E.A.Floats.cumProd->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="diff",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtArray,
|
||||
~examples=[`diff([3,5,2,3,5])`],
|
||||
~definitions=[
|
||||
ArrayNumberDist.make("diff", r =>
|
||||
r->E.A.Floats.diff->E.A2.fmap(Wrappers.evNumber)->Wrappers.evArray->Ok
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,73 @@
|
|||
open FunctionRegistry_Core
|
||||
open FunctionRegistry_Helpers
|
||||
|
||||
let nameSpace = "Pointset"
|
||||
let requiresNamespace = true
|
||||
|
||||
let inputsTodist = (inputs: array<FunctionRegistry_Core.frValue>, makeDist) => {
|
||||
let array = inputs->getOrError(0)->E.R.bind(Prepare.ToValueArray.Array.openA)
|
||||
let xyCoords =
|
||||
array->E.R.bind(xyCoords =>
|
||||
xyCoords
|
||||
->E.A2.fmap(xyCoord =>
|
||||
[xyCoord]->Prepare.ToValueArray.Record.twoArgs->E.R.bind(Prepare.ToValueTuple.twoNumbers)
|
||||
)
|
||||
->E.A.R.firstErrorOrOpen
|
||||
)
|
||||
let expressionValue =
|
||||
xyCoords
|
||||
->E.R.bind(r => r->XYShape.T.makeFromZipped->E.R2.errMap(XYShape.Error.toString))
|
||||
->E.R2.fmap(r => ReducerInterface_InternalExpressionValue.IEvDistribution(
|
||||
PointSet(makeDist(r)),
|
||||
))
|
||||
expressionValue
|
||||
}
|
||||
|
||||
let library = [
|
||||
Function.make(
|
||||
~name="makeContinuous",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~examples=[
|
||||
`Pointset.makeContinuous([
|
||||
{x: 0, y: 0.2},
|
||||
{x: 1, y: 0.7},
|
||||
{x: 2, y: 0.8},
|
||||
{x: 3, y: 0.2}
|
||||
])`,
|
||||
],
|
||||
~output=ReducerInterface_InternalExpressionValue.EvtDistribution,
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="makeContinuous",
|
||||
~inputs=[FRTypeArray(FRTypeRecord([("x", FRTypeNumeric), ("y", FRTypeNumeric)]))],
|
||||
~run=(_, inputs, _) => inputsTodist(inputs, r => Continuous(Continuous.make(r))),
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="makeDiscrete",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~examples=[
|
||||
`Pointset.makeDiscrete([
|
||||
{x: 0, y: 0.2},
|
||||
{x: 1, y: 0.7},
|
||||
{x: 2, y: 0.8},
|
||||
{x: 3, y: 0.2}
|
||||
])`,
|
||||
],
|
||||
~output=ReducerInterface_InternalExpressionValue.EvtDistribution,
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="makeDiscrete",
|
||||
~inputs=[FRTypeArray(FRTypeRecord([("x", FRTypeNumeric), ("y", FRTypeNumeric)]))],
|
||||
~run=(_, inputs, _) => inputsTodist(inputs, r => Discrete(Discrete.make(r))),
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,89 @@
|
|||
open FunctionRegistry_Core
|
||||
|
||||
let nameSpace = "Dist"
|
||||
let requiresNamespace = true
|
||||
|
||||
let runScoring = (estimate, answer, prior, env) => {
|
||||
GenericDist.Score.logScore(~estimate, ~answer, ~prior, ~env)
|
||||
->E.R2.fmap(FunctionRegistry_Helpers.Wrappers.evNumber)
|
||||
->E.R2.errMap(DistributionTypes.Error.toString)
|
||||
}
|
||||
|
||||
let library = [
|
||||
Function.make(
|
||||
~name="logScore",
|
||||
~nameSpace,
|
||||
~requiresNamespace,
|
||||
~output=EvtNumber,
|
||||
~examples=[
|
||||
"Dist.logScore({estimate: normal(5,2), answer: normal(5.2,1), prior: normal(5.5,3)})",
|
||||
"Dist.logScore({estimate: normal(5,2), answer: normal(5.2,1)})",
|
||||
"Dist.logScore({estimate: normal(5,2), answer: 4.5})",
|
||||
],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="logScore",
|
||||
~inputs=[
|
||||
FRTypeRecord([
|
||||
("estimate", FRTypeDist),
|
||||
("answer", FRTypeDistOrNumber),
|
||||
("prior", FRTypeDist),
|
||||
]),
|
||||
],
|
||||
~run=(_, inputs, env) => {
|
||||
switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.threeArgs(inputs) {
|
||||
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueDist(d)), FRValueDist(prior)]) =>
|
||||
runScoring(estimate, Score_Dist(d), Some(prior), env)
|
||||
| Ok([
|
||||
FRValueDist(estimate),
|
||||
FRValueDistOrNumber(FRValueNumber(d)),
|
||||
FRValueDist(prior),
|
||||
]) =>
|
||||
runScoring(estimate, Score_Scalar(d), Some(prior), env)
|
||||
| Error(e) => Error(e)
|
||||
| _ => Error(FunctionRegistry_Helpers.impossibleError)
|
||||
}
|
||||
},
|
||||
(),
|
||||
),
|
||||
FnDefinition.make(
|
||||
~name="logScore",
|
||||
~inputs=[FRTypeRecord([("estimate", FRTypeDist), ("answer", FRTypeDistOrNumber)])],
|
||||
~run=(_, inputs, env) => {
|
||||
switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.twoArgs(inputs) {
|
||||
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueDist(d))]) =>
|
||||
runScoring(estimate, Score_Dist(d), None, env)
|
||||
| Ok([FRValueDist(estimate), FRValueDistOrNumber(FRValueNumber(d))]) =>
|
||||
runScoring(estimate, Score_Scalar(d), None, env)
|
||||
| Error(e) => Error(e)
|
||||
| _ => Error(FunctionRegistry_Helpers.impossibleError)
|
||||
}
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
Function.make(
|
||||
~name="klDivergence",
|
||||
~nameSpace,
|
||||
~output=EvtNumber,
|
||||
~requiresNamespace,
|
||||
~examples=["Dist.klDivergence(normal(5,2), normal(5,1.5))"],
|
||||
~definitions=[
|
||||
FnDefinition.make(
|
||||
~name="klDivergence",
|
||||
~inputs=[FRTypeDist, FRTypeDist],
|
||||
~run=(_, inputs, env) => {
|
||||
switch inputs {
|
||||
| [FRValueDist(estimate), FRValueDist(d)] =>
|
||||
runScoring(estimate, Score_Dist(d), None, env)
|
||||
| _ => Error(FunctionRegistry_Helpers.impossibleError)
|
||||
}
|
||||
},
|
||||
(),
|
||||
),
|
||||
],
|
||||
(),
|
||||
),
|
||||
]
|
|
@@ -1,3 +1,6 @@
// Only Bindings as the global module is supported
// Other module operations such as import and export will be preprocessed jobs

module ExpressionT = Reducer_Expression_T
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
open Reducer_ErrorValue
@@ -75,10 +78,10 @@ let emptyBindings = emptyModule
let fromTypeScriptBindings = ReducerInterface_InternalExpressionValue.nameSpaceFromTypeScriptBindings
let toTypeScriptBindings = ReducerInterface_InternalExpressionValue.nameSpaceToTypeScriptBindings

let toExpressionValue = (nameSpace: t): internalExpressionValue => IEvModule(nameSpace)
let toExpressionValue = (nameSpace: t): internalExpressionValue => IEvBindings(nameSpace)
let fromExpressionValue = (aValue: internalExpressionValue): t =>
  switch aValue {
  | IEvModule(nameSpace) => nameSpace
  | IEvBindings(nameSpace) => nameSpace
  | _ => emptyModule
  }
@ -126,6 +129,17 @@ let functionNotFoundErrorFFIFn = (functionName: string): ExpressionT.ffiFn => {
|
|||
}
|
||||
}
|
||||
|
||||
let convertOptionToFfiFnReturningResult = (
|
||||
myFunctionName: string,
|
||||
myFunction: ExpressionT.optionFfiFnReturningResult,
|
||||
): ExpressionT.ffiFn => {
|
||||
(args: array<InternalExpressionValue.t>, environment) => {
|
||||
myFunction(args, environment)->Belt.Option.getWithDefault(
|
||||
functionNotFoundErrorFFIFn(myFunctionName)(args, environment),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
let convertOptionToFfiFn = (
|
||||
myFunctionName: string,
|
||||
myFunction: ExpressionT.optionFfiFn,
|
||||
|
@ -159,4 +173,15 @@ let defineFunction = (nameSpace: t, identifier: string, value: ExpressionT.optio
|
|||
nameSpace->define(identifier, convertOptionToFfiFn(identifier, value)->eLambdaFFIValue)
|
||||
}
|
||||
|
||||
let emptyStdLib: t = emptyModule->defineBool("stdlib", true)
|
||||
let defineFunctionReturningResult = (
|
||||
nameSpace: t,
|
||||
identifier: string,
|
||||
value: ExpressionT.optionFfiFnReturningResult,
|
||||
): t => {
|
||||
nameSpace->define(
|
||||
identifier,
|
||||
convertOptionToFfiFnReturningResult(identifier, value)->eLambdaFFIValue,
|
||||
)
|
||||
}
|
||||
|
||||
let emptyStdLib: t = emptyModule->defineBool("_standardLibrary", true)
|
|
@ -3,15 +3,15 @@ module ExpressionT = Reducer_Expression_T
|
|||
module ExternalLibrary = ReducerInterface.ExternalLibrary
|
||||
module Lambda = Reducer_Expression_Lambda
|
||||
module MathJs = Reducer_MathJs
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
module Result = Belt.Result
|
||||
module TypeBuilder = Reducer_Type_TypeBuilder
|
||||
open ReducerInterface_InternalExpressionValue
|
||||
open Reducer_ErrorValue
|
||||
|
||||
/*
|
||||
MathJs provides default implementations for builtins
|
||||
This is where all the expected builtins like + = * / sin cos log ln etc are handled
|
||||
MathJs provides default implementations for built-ins
|
||||
This is where all the expected built-ins like + = * / sin cos log ln etc are handled
|
||||
DO NOT try to add external function mapping here!
|
||||
*/
|
||||
|
||||
|
@ -49,9 +49,9 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
|
|||
}
|
||||
|
||||
let moduleAtIndex = (nameSpace: nameSpace, sIndex) =>
|
||||
switch Module.get(nameSpace, sIndex) {
|
||||
switch Bindings.get(nameSpace, sIndex) {
|
||||
| Some(value) => value->Ok
|
||||
| None => RERecordPropertyNotFound("Module property not found", sIndex)->Error
|
||||
| None => RERecordPropertyNotFound("Bindings property not found", sIndex)->Error
|
||||
}
|
||||
|
||||
let recordAtIndex = (dict: Belt.Map.String.t<internalExpressionValue>, sIndex) =>
|
||||
|
@ -81,19 +81,19 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
|
|||
}
|
||||
|
||||
let doSetBindings = (bindings: nameSpace, symbol: string, value: internalExpressionValue) => {
|
||||
Module.set(bindings, symbol, value)->IEvModule->Ok
|
||||
Bindings.set(bindings, symbol, value)->IEvBindings->Ok
|
||||
}
|
||||
|
||||
let doSetTypeAliasBindings = (
|
||||
bindings: nameSpace,
|
||||
symbol: string,
|
||||
value: internalExpressionValue,
|
||||
) => Module.setTypeAlias(bindings, symbol, value)->IEvModule->Ok
|
||||
) => Bindings.setTypeAlias(bindings, symbol, value)->IEvBindings->Ok
|
||||
|
||||
let doSetTypeOfBindings = (bindings: nameSpace, symbol: string, value: internalExpressionValue) =>
|
||||
Module.setTypeOf(bindings, symbol, value)->IEvModule->Ok
|
||||
Bindings.setTypeOf(bindings, symbol, value)->IEvBindings->Ok
|
||||
|
||||
let doExportBindings = (bindings: nameSpace) => bindings->Module.toExpressionValue->Ok
|
||||
let doExportBindings = (bindings: nameSpace) => bindings->Bindings.toExpressionValue->Ok
|
||||
|
||||
let doKeepArray = (aValueArray, aLambdaValue) => {
|
||||
let rMappedList = aValueArray->Belt.Array.reduceReverse(Ok(list{}), (rAcc, elem) =>
|
||||
|
@ -149,6 +149,27 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
|
|||
doLambdaCall(aLambdaValue, list{IEvNumber(a), IEvNumber(b), IEvNumber(c)})
|
||||
SampleSetDist.map3(~fn, ~t1, ~t2, ~t3)->toType
|
||||
}
|
||||
|
||||
let parseSampleSetArray = (arr: array<internalExpressionValue>): option<
|
||||
array<SampleSetDist.t>,
|
||||
> => {
|
||||
let parseSampleSet = (value: internalExpressionValue): option<SampleSetDist.t> =>
|
||||
switch value {
|
||||
| IEvDistribution(SampleSet(dist)) => Some(dist)
|
||||
| _ => None
|
||||
}
|
||||
E.A.O.openIfAllSome(E.A.fmap(parseSampleSet, arr))
|
||||
}
|
||||
|
||||
let mapN = (aValueArray: array<internalExpressionValue>, aLambdaValue) => {
|
||||
switch parseSampleSetArray(aValueArray) {
|
||||
| Some(t1) =>
|
||||
let fn = a => doLambdaCall(aLambdaValue, list{IEvArray(E.A.fmap(x => IEvNumber(x), a))})
|
||||
SampleSetDist.mapN(~fn, ~t1)->toType
|
||||
| None =>
|
||||
Error(REFunctionNotFound(call->functionCallToCallSignature->functionCallSignatureToString))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let doReduceArray = (aValueArray, initialValue, aLambdaValue) => {
|
||||
|
@ -169,16 +190,16 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
|
|||
|
||||
switch call {
|
||||
| ("$_atIndex_$", [IEvArray(aValueArray), IEvNumber(fIndex)]) => arrayAtIndex(aValueArray, fIndex)
|
||||
| ("$_atIndex_$", [IEvModule(dict), IEvString(sIndex)]) => moduleAtIndex(dict, sIndex)
|
||||
| ("$_atIndex_$", [IEvBindings(dict), IEvString(sIndex)]) => moduleAtIndex(dict, sIndex)
|
||||
| ("$_atIndex_$", [IEvRecord(dict), IEvString(sIndex)]) => recordAtIndex(dict, sIndex)
|
||||
| ("$_constructArray_$", [IEvArray(aValueArray)]) => IEvArray(aValueArray)->Ok
|
||||
| ("$_constructRecord_$", [IEvArray(arrayOfPairs)]) => constructRecord(arrayOfPairs)
|
||||
| ("$_exportBindings_$", [IEvModule(nameSpace)]) => doExportBindings(nameSpace)
|
||||
| ("$_setBindings_$", [IEvModule(nameSpace), IEvSymbol(symbol), value]) =>
|
||||
| ("$_exportBindings_$", [IEvBindings(nameSpace)]) => doExportBindings(nameSpace)
|
||||
| ("$_setBindings_$", [IEvBindings(nameSpace), IEvSymbol(symbol), value]) =>
|
||||
doSetBindings(nameSpace, symbol, value)
|
||||
| ("$_setTypeAliasBindings_$", [IEvModule(nameSpace), IEvTypeIdentifier(symbol), value]) =>
|
||||
| ("$_setTypeAliasBindings_$", [IEvBindings(nameSpace), IEvTypeIdentifier(symbol), value]) =>
|
||||
doSetTypeAliasBindings(nameSpace, symbol, value)
|
||||
| ("$_setTypeOfBindings_$", [IEvModule(nameSpace), IEvSymbol(symbol), value]) =>
|
||||
| ("$_setTypeOfBindings_$", [IEvBindings(nameSpace), IEvSymbol(symbol), value]) =>
|
||||
doSetTypeOfBindings(nameSpace, symbol, value)
|
||||
| ("$_typeModifier_memberOf_$", [IEvTypeIdentifier(typeIdentifier), IEvArray(arr)]) =>
|
||||
TypeBuilder.typeModifier_memberOf(IEvTypeIdentifier(typeIdentifier), IEvArray(arr))
|
||||
|
@ -198,7 +219,7 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
|
|||
| ("$_typeFunction_$", [IEvArray(arr)]) => TypeBuilder.typeFunction(arr)
|
||||
| ("$_typeTuple_$", [IEvArray(elems)]) => TypeBuilder.typeTuple(elems)
|
||||
| ("$_typeArray_$", [elem]) => TypeBuilder.typeArray(elem)
|
||||
| ("$_typeRecord_$", [IEvArray(arrayOfPairs)]) => TypeBuilder.typeRecord(arrayOfPairs)
|
||||
| ("$_typeRecord_$", [IEvRecord(propertyMap)]) => TypeBuilder.typeRecord(propertyMap)
|
||||
| ("concat", [IEvArray(aValueArray), IEvArray(bValueArray)]) =>
|
||||
doAddArray(aValueArray, bValueArray)
|
||||
| ("concat", [IEvString(aValueString), IEvString(bValueString)]) =>
|
||||
|
@ -230,6 +251,8 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
|
|||
],
|
||||
) =>
|
||||
SampleMap.map3(dist1, dist2, dist3, aLambdaValue)
|
||||
| ("mapSamplesN", [IEvArray(aValueArray), IEvLambda(aLambdaValue)]) =>
|
||||
SampleMap.mapN(aValueArray, aLambdaValue)
|
||||
| ("reduce", [IEvArray(aValueArray), initialValue, IEvLambda(aLambdaValue)]) =>
|
||||
doReduceArray(aValueArray, initialValue, aLambdaValue)
|
||||
| ("reduceReverse", [IEvArray(aValueArray), initialValue, IEvLambda(aLambdaValue)]) =>
|
||||
|
@ -246,7 +269,6 @@ let callInternal = (call: functionCall, environment, reducer: ExpressionT.reduce
|
|||
Error(REFunctionNotFound(call->functionCallToCallSignature->functionCallSignatureToString)) // Report full type signature as error
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Reducer uses Result monad while reducing expressions
|
||||
*/
|
||||
|
@ -255,11 +277,10 @@ let dispatch = (call: functionCall, environment, reducer: ExpressionT.reducerFn)
|
|||
errorValue,
|
||||
> =>
|
||||
try {
|
||||
let callInternalWithReducer = (call, environment) => callInternal(call, environment, reducer)
|
||||
let (fn, args) = call
|
||||
// There is a bug that prevents string match in patterns
|
||||
// So we have to recreate a copy of the string
|
||||
ExternalLibrary.dispatch((Js.String.make(fn), args), environment, callInternalWithReducer)
|
||||
ExternalLibrary.dispatch((Js.String.make(fn), args), environment, reducer, callInternal)
|
||||
} catch {
|
||||
| Js.Exn.Error(obj) => REJavaScriptExn(Js.Exn.message(obj), Js.Exn.name(obj))->Error
|
||||
| _ => RETodo("unhandled rescript exception")->Error
|
||||
|
|
|
@ -9,7 +9,7 @@ module ExpressionBuilder = Reducer_Expression_ExpressionBuilder
|
|||
module ExpressionT = Reducer_Expression_T
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module ExpressionWithContext = Reducer_ExpressionWithContext
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
module Result = Belt.Result
|
||||
open Reducer_Expression_ExpressionBuilder
|
||||
|
||||
|
@ -28,7 +28,7 @@ let dispatchMacroCall = (
|
|||
let rExternalBindingsValue = reduceExpression(bindingExpr, bindings, environment)
|
||||
|
||||
rExternalBindingsValue->Result.flatMap(nameSpaceValue => {
|
||||
let newBindings = Module.fromExpressionValue(nameSpaceValue)
|
||||
let newBindings = Bindings.fromExpressionValue(nameSpaceValue)
|
||||
|
||||
let rNewStatement = BindingsReplacer.replaceSymbols(newBindings, statement)
|
||||
rNewStatement->Result.map(boundStatement =>
|
||||
|
@ -144,7 +144,7 @@ let dispatchMacroCall = (
|
|||
let ifTrueBlock = eBlock(list{ifTrue})
|
||||
ExpressionWithContext.withContext(ifTrueBlock, bindings)->Ok
|
||||
}
|
||||
| _ => REExpectedType("Boolean")->Error
|
||||
| _ => REExpectedType("Boolean", "")->Error
|
||||
}
|
||||
)
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ type errorValue =
|
|||
| REArrayIndexNotFound(string, int)
|
||||
| REAssignmentExpected
|
||||
| REDistributionError(DistributionTypes.error)
|
||||
| REExpectedType(string)
|
||||
| REExpectedType(string, string)
|
||||
| REExpressionExpected
|
||||
| REFunctionExpected(string)
|
||||
| REFunctionNotFound(string)
|
||||
|
@ -55,6 +55,6 @@ let errorToString = err =>
|
|||
| RESymbolNotFound(symbolName) => `${symbolName} is not defined`
|
||||
| RESyntaxError(desc, _) => `Syntax Error: ${desc}`
|
||||
| RETodo(msg) => `TODO: ${msg}`
|
||||
| REExpectedType(typeName) => `Expected type: ${typeName}`
|
||||
| REExpectedType(typeName, valueString) => `Expected type: ${typeName} but got: ${valueString}`
|
||||
| REUnitNotFound(unitName) => `Unit not found: ${unitName}`
|
||||
}
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
// There are switch stament cases in the code which are impossible to reach by design.
|
||||
// There are switch statement cases in the code which are impossible to reach by design.
|
||||
// ImpossibleException is a sign of programming error.
|
||||
exception ImpossibleException
|
||||
exception ImpossibleException(string)
|
||||
|
|
|
@ -6,7 +6,7 @@ module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
|||
module Lambda = Reducer_Expression_Lambda
|
||||
module Macro = Reducer_Expression_Macro
|
||||
module MathJs = Reducer_MathJs
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
module Result = Belt.Result
|
||||
module T = Reducer_Expression_T
|
||||
|
||||
|
@ -121,10 +121,10 @@ let evaluateUsingOptions = (
|
|||
ReducerInterface_ExternalExpressionValue.defaultEnvironment,
|
||||
)
|
||||
|
||||
let mergedBindings: InternalExpressionValue.nameSpace = Module.merge(
|
||||
let mergedBindings: InternalExpressionValue.nameSpace = Bindings.merge(
|
||||
ReducerInterface_StdLib.internalStdLib,
|
||||
Belt.Option.map(externalBindings, Module.fromTypeScriptBindings)->Belt.Option.getWithDefault(
|
||||
Module.emptyModule,
|
||||
Belt.Option.map(externalBindings, Bindings.fromTypeScriptBindings)->Belt.Option.getWithDefault(
|
||||
Bindings.emptyModule,
|
||||
),
|
||||
)
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ module ErrorValue = Reducer_ErrorValue
|
|||
module ExpressionT = Reducer_Expression_T
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module Result = Belt.Result
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
|
||||
type bindings = ExpressionT.bindings
|
||||
type context = bindings
|
||||
|
@ -41,7 +41,7 @@ let toString = expressionWithContext =>
|
|||
| ExpressionNoContext(expr) => ExpressionT.toString(expr)
|
||||
| ExpressionWithContext(expr, context) =>
|
||||
`${ExpressionT.toString(expr)} context: ${context
|
||||
->Module.toExpressionValue
|
||||
->Bindings.toExpressionValue
|
||||
->InternalExpressionValue.toString}`
|
||||
}
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@ module ErrorValue = Reducer_ErrorValue
|
|||
module ExpressionT = Reducer_Expression_T
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module Result = Belt.Result
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
|
||||
type errorValue = Reducer_ErrorValue.errorValue
|
||||
type expression = ExpressionT.expression
|
||||
|
@ -42,8 +42,8 @@ and replaceSymbolsOnExpressionList = (bindings, list) => {
|
|||
}
|
||||
and replaceSymbolOnValue = (bindings, evValue: internalExpressionValue) =>
|
||||
switch evValue {
|
||||
| IEvSymbol(symbol) => Module.getWithDefault(bindings, symbol, evValue)->Ok
|
||||
| IEvCall(symbol) => Module.getWithDefault(bindings, symbol, evValue)->checkIfCallable
|
||||
| IEvSymbol(symbol) => Bindings.getWithDefault(bindings, symbol, evValue)->Ok
|
||||
| IEvCall(symbol) => Bindings.getWithDefault(bindings, symbol, evValue)->checkIfCallable
|
||||
| _ => evValue->Ok
|
||||
}
|
||||
and checkIfCallable = (evValue: internalExpressionValue) =>
|
||||
|
|
|
@ -2,7 +2,7 @@ module BBindingsReplacer = Reducer_Expression_BindingsReplacer
|
|||
module BErrorValue = Reducer_ErrorValue
|
||||
module BExpressionT = Reducer_Expression_T
|
||||
module BInternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module BModule = Reducer_Module
|
||||
module BBindings = Reducer_Bindings
|
||||
|
||||
type errorValue = BErrorValue.errorValue
|
||||
type expression = BExpressionT.expression
|
||||
|
@ -15,7 +15,7 @@ let eArray = anArray => anArray->BInternalExpressionValue.IEvArray->BExpressionT
|
|||
let eArrayString = anArray => anArray->BInternalExpressionValue.IEvArrayString->BExpressionT.EValue
|
||||
|
||||
let eBindings = (anArray: array<(string, BInternalExpressionValue.t)>) =>
|
||||
anArray->BModule.fromArray->BModule.toExpressionValue->BExpressionT.EValue
|
||||
anArray->BBindings.fromArray->BBindings.toExpressionValue->BExpressionT.EValue
|
||||
|
||||
let eBool = aBool => aBool->BInternalExpressionValue.IEvBool->BExpressionT.EValue
|
||||
|
||||
|
@ -35,12 +35,12 @@ let eLambda = (
|
|||
BInternalExpressionValue.IEvLambda({
|
||||
parameters: parameters,
|
||||
context: context,
|
||||
body: NotFFI(expr)->BModule.castExpressionToInternalCode,
|
||||
body: NotFFI(expr)->BBindings.castExpressionToInternalCode,
|
||||
})->BExpressionT.EValue
|
||||
}
|
||||
|
||||
let eLambdaFFI = (ffiFn: ffiFn) => {
|
||||
ffiFn->BModule.eLambdaFFIValue->BExpressionT.EValue
|
||||
ffiFn->BBindings.eLambdaFFIValue->BExpressionT.EValue
|
||||
}
|
||||
|
||||
let eNumber = aNumber => aNumber->BInternalExpressionValue.IEvNumber->BExpressionT.EValue
|
||||
|
@ -57,7 +57,7 @@ let eList = (list: list<expression>): expression => list->BExpressionT.EList
|
|||
let eBlock = (exprs: list<expression>): expression => eFunction("$$_block_$$", exprs)
|
||||
|
||||
let eModule = (nameSpace: BInternalExpressionValue.nameSpace): expression =>
|
||||
nameSpace->BInternalExpressionValue.IEvModule->BExpressionT.EValue
|
||||
nameSpace->BInternalExpressionValue.IEvBindings->BExpressionT.EValue
|
||||
|
||||
let eLetStatement = (symbol: string, valueExpression: expression): expression =>
|
||||
eFunction("$_let_$", list{eSymbol(symbol), valueExpression})
|
||||
|
|
|
@ -3,7 +3,7 @@ module ErrorValue = Reducer_ErrorValue
|
|||
module ExpressionBuilder = Reducer_Expression_ExpressionBuilder
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
module ExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
module Result = Belt.Result
|
||||
|
||||
type environment = ReducerInterface_InternalExpressionValue.environment
|
||||
|
@ -50,7 +50,7 @@ let caseNotFFI = (lambdaValue: ExpressionValue.lambdaValue, expr, args, environm
|
|||
let bindings = Belt.List.reduce(zippedParameterList, lambdaValue.context, (
|
||||
acc,
|
||||
(variable, variableValue),
|
||||
) => acc->Module.set(variable, variableValue))
|
||||
) => acc->Bindings.set(variable, variableValue))
|
||||
let newExpression = ExpressionBuilder.eBlock(list{expr})
|
||||
reducer(newExpression, bindings, environment)
|
||||
}
|
||||
|
|
|
@ -30,12 +30,12 @@ let rec toString = expression =>
|
|||
switch expression {
|
||||
| EList(list{EValue(IEvCall("$$_block_$$")), ...statements}) =>
|
||||
`{${Belt.List.map(statements, aValue => toString(aValue))
|
||||
->Extra.List.interperse("; ")
|
||||
->Extra.List.intersperse("; ")
|
||||
->Belt.List.toArray
|
||||
->Js.String.concatMany("")}}`
|
||||
| EList(aList) =>
|
||||
`(${Belt.List.map(aList, aValue => toString(aValue))
|
||||
->Extra.List.interperse(" ")
|
||||
->Extra.List.intersperse(" ")
|
||||
->Belt.List.toArray
|
||||
->Js.String.concatMany("")})`
|
||||
| EValue(aValue) => InternalExpressionValue.toString(aValue)
|
||||
|
@ -72,6 +72,10 @@ type ffiFn = (
|
|||
) => result<internalExpressionValue, Reducer_ErrorValue.errorValue>
|
||||
|
||||
type optionFfiFn = (array<internalExpressionValue>, environment) => option<internalExpressionValue>
|
||||
type optionFfiFnReturningResult = (
|
||||
array<internalExpressionValue>,
|
||||
environment,
|
||||
) => option<result<internalExpressionValue, Reducer_ErrorValue.errorValue>>
|
||||
|
||||
type expressionOrFFI =
|
||||
| NotFFI(expression)
|
||||
|
|
|
@ -3,5 +3,5 @@
|
|||
*/
|
||||
module ExtraList = Reducer_Extra_List
|
||||
|
||||
let interperse = (anArray, seperator) =>
|
||||
anArray->Belt.List.fromArray->ExtraList.interperse(seperator)->Belt.List.toArray
|
||||
let intersperse = (anArray, seperator) =>
|
||||
anArray->Belt.List.fromArray->ExtraList.intersperse(seperator)->Belt.List.toArray
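As a quick sanity check of the rename, a sketch of what the array wrapper produces; the qualified name `Extra.Array.intersperse` is taken from how it is called later in this diff, and the resulting values are hand-computed assumptions.

```rescript
// Hypothetical sketch: intersperse puts the separator between consecutive elements.
let parts = Extra.Array.intersperse(["a", "b", "c"], ", ")
// parts == ["a", ", ", "b", ", ", "c"]
let joined = parts->Js.String.concatMany("") // "a, b, c"
```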
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/*
|
||||
  Insert a separator between the elements of a list
|
||||
*/
|
||||
let rec interperse = (aList, seperator) =>
|
||||
let rec intersperse = (aList, seperator) =>
|
||||
switch aList {
|
||||
| list{} => list{}
|
||||
| list{a} => list{a}
|
||||
| list{a, ...rest} => list{a, seperator, ...interperse(rest, seperator)}
|
||||
| list{a, ...rest} => list{a, seperator, ...intersperse(rest, seperator)}
|
||||
}
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
module ExpressionBuilder = Reducer_Expression_ExpressionBuilder
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
|
||||
type expression = ExpressionT.expression
|
||||
|
||||
let defaultCaseFFI = (functionName: string): expression => {
|
||||
ExpressionBuilder.eLambdaFFI(Reducer_Module.functionNotFoundErrorFFIFn(functionName))
|
||||
}
|
||||
|
||||
let addGuard = (
|
||||
guard: expression,
|
||||
expression: expression,
|
||||
previousExpression: expression,
|
||||
): expression => ExpressionBuilder.eTernary(guard, expression, previousExpression)
|
|
@ -187,13 +187,16 @@ basicLiteral
|
|||
/ dollarIdentifier
|
||||
|
||||
dollarIdentifierWithModule 'identifier'
|
||||
= head:moduleIdentifier
|
||||
= head:$moduleIdentifier
|
||||
tail:('.' _nl @$moduleIdentifier)* '.' _nl
|
||||
final:$dollarIdentifier
|
||||
{ tail.push(final);
|
||||
return tail.reduce(function(result, element) {
|
||||
return h.makeFunctionCall(h.postOperatorToFunction['[]'], [result, h.nodeString(element)])
|
||||
}, head)}
|
||||
{
|
||||
let modifiers = [...tail]
|
||||
modifiers.unshift(head)
|
||||
modifiers.push(final)
|
||||
let modifiedIdentifier = modifiers.join('.')
|
||||
return h.nodeIdentifier(modifiedIdentifier)
|
||||
}
|
||||
|
||||
identifier 'identifier'
|
||||
= ([_a-z]+[_a-z0-9]i*) {return h.nodeIdentifier(text(), location())}
|
||||
|
|
|
@ -91,7 +91,7 @@ let rec pgToString = (peggyNode: peggyNode): string => {
|
|||
args->Js.Array2.map(arg => PgNodeIdentifier(arg)->pgToString)->Js.Array2.toString
|
||||
|
||||
let nodesToStringUsingSeparator = (nodes: array<node>, separator: string): string =>
|
||||
nodes->Js.Array2.map(toString)->Extra.Array.interperse(separator)->Js.String.concatMany("")
|
||||
nodes->Js.Array2.map(toString)->Extra.Array.intersperse(separator)->Js.String.concatMany("")
|
||||
|
||||
switch peggyNode {
|
||||
| PgNodeBlock(node) => "{" ++ node["statements"]->nodesToStringUsingSeparator("; ") ++ "}"
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
module ErrorValue = Reducer_ErrorValue
|
||||
module ExpressionT = Reducer_Expression_T
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module Bindings = Reducer_Bindings
|
||||
module T = Reducer_Type_T
|
||||
|
||||
let ievFromTypeExpression = (
|
||||
typeExpressionSourceCode: string,
|
||||
reducerFn: ExpressionT.reducerFn,
|
||||
): result<InternalExpressionValue.t, ErrorValue.t> => {
|
||||
let sIndex = "compiled"
|
||||
let sourceCode = `type ${sIndex}=${typeExpressionSourceCode}`
|
||||
Reducer_Expression.parse(sourceCode)->Belt.Result.flatMap(expr => {
|
||||
let rContext = reducerFn(
|
||||
expr,
|
||||
Bindings.emptyBindings,
|
||||
InternalExpressionValue.defaultEnvironment,
|
||||
)
|
||||
Belt.Result.map(rContext, context =>
|
||||
switch context {
|
||||
| IEvBindings(nameSpace) =>
|
||||
switch Bindings.getType(nameSpace, sIndex) {
|
||||
| Some(value) => value
|
||||
| None => raise(Reducer_Exception.ImpossibleException("Reducer_Type_Compile-none"))
|
||||
}
|
||||
| _ => raise(Reducer_Exception.ImpossibleException("Reducer_Type_Compile-raise"))
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
let fromTypeExpression = (
|
||||
typeExpressionSourceCode: string,
|
||||
reducerFn: ExpressionT.reducerFn,
|
||||
): result<T.t, ErrorValue.t> => {
|
||||
ievFromTypeExpression(
|
||||
(typeExpressionSourceCode: string),
|
||||
(reducerFn: ExpressionT.reducerFn),
|
||||
)->Belt.Result.map(T.fromIEvValue)
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module T = Reducer_Type_T
|
||||
|
||||
let isMin = (modifierArg: InternalExpressionValue.t, aValue: InternalExpressionValue.t): bool => {
|
||||
let pair = (modifierArg, aValue)
|
||||
switch pair {
|
||||
| (IEvNumber(a), IEvNumber(b)) => a <= b
|
||||
| _ => false
|
||||
}
|
||||
}
|
||||
|
||||
let isMax = (modifierArg: InternalExpressionValue.t, aValue: InternalExpressionValue.t): bool => {
|
||||
let pair = (modifierArg, aValue)
|
||||
switch pair {
|
||||
| (IEvNumber(a), IEvNumber(b)) => a >= b
|
||||
| _ => false
|
||||
}
|
||||
}
|
||||
|
||||
let isMemberOf = (
|
||||
modifierArg: InternalExpressionValue.t,
|
||||
aValue: InternalExpressionValue.t,
|
||||
): bool => {
|
||||
let pair = (modifierArg, aValue)
|
||||
switch pair {
|
||||
| (ievA, IEvArray(b)) => Js.Array2.includes(b, ievA)
|
||||
| _ => false
|
||||
}
|
||||
}
|
||||
|
||||
let checkModifier = (
|
||||
key: string,
|
||||
modifierArg: InternalExpressionValue.t,
|
||||
aValue: InternalExpressionValue.t,
|
||||
): bool =>
|
||||
switch key {
|
||||
| "min" => isMin(modifierArg, aValue)
|
||||
| "max" => isMax(modifierArg, aValue)
|
||||
| "isMemberOf" => isMemberOf(modifierArg, aValue)
|
||||
| _ => false
|
||||
}
|
||||
|
||||
let checkModifiers = (
|
||||
contracts: Belt.Map.String.t<InternalExpressionValue.t>,
|
||||
aValue: InternalExpressionValue.t,
|
||||
): bool => {
|
||||
contracts->Belt.Map.String.reduce(true, (acc, key, modifierArg) =>
|
||||
switch acc {
|
||||
| true => checkModifier(key, modifierArg, aValue)
|
||||
| _ => acc
|
||||
}
|
||||
)
|
||||
}
|
|
@ -3,13 +3,42 @@ open InternalExpressionValue
|
|||
|
||||
type rec iType =
|
||||
| ItTypeIdentifier(string)
|
||||
| ItModifiedType({modifiedType: iType})
|
||||
| ItModifiedType({modifiedType: iType, contracts: Belt.Map.String.t<InternalExpressionValue.t>})
|
||||
| ItTypeOr({typeOr: array<iType>})
|
||||
| ItTypeFunction({inputs: array<iType>, output: iType})
|
||||
| ItTypeArray({element: iType})
|
||||
| ItTypeTuple({elements: array<iType>})
|
||||
| ItTypeRecord({properties: Belt.Map.String.t<iType>})
|
||||
|
||||
type t = iType
|
||||
type typeErrorValue = TypeMismatch(t, InternalExpressionValue.t)
|
||||
|
||||
let rec toString = (t: t): string => {
|
||||
switch t {
|
||||
| ItTypeIdentifier(s) => s
|
||||
| ItModifiedType({modifiedType, contracts}) =>
|
||||
`${toString(modifiedType)}${contracts->Belt.Map.String.reduce("", (acc, k, v) =>
|
||||
Js.String2.concatMany(acc, ["<-", k, "(", InternalExpressionValue.toString(v), ")"])
|
||||
)}`
|
||||
| ItTypeOr({typeOr}) => `(${Js.Array2.map(typeOr, toString)->Js.Array2.joinWith(" | ")})`
|
||||
| ItTypeFunction({inputs, output}) =>
|
||||
`(${inputs->Js.Array2.map(toString)->Js.Array2.joinWith(" => ")} => ${toString(output)})`
|
||||
| ItTypeArray({element}) => `[${toString(element)}]`
|
||||
| ItTypeTuple({elements}) => `[${Js.Array2.map(elements, toString)->Js.Array2.joinWith(", ")}]`
|
||||
| ItTypeRecord({properties}) =>
|
||||
`{${properties
|
||||
->Belt.Map.String.toArray
|
||||
->Js.Array2.map(((k, v)) => Js.String2.concatMany(k, [": ", toString(v)]))
|
||||
->Js.Array2.joinWith(", ")}}`
|
||||
}
|
||||
}
|
||||
|
||||
let toStringResult = (rt: result<t, ErrorValue.t>) =>
|
||||
switch rt {
|
||||
| Ok(t) => toString(t)
|
||||
| Error(e) => ErrorValue.errorToString(e)
|
||||
}
|
||||
|
||||
let rec fromTypeMap = typeMap => {
|
||||
let default = IEvString("")
|
||||
let evTypeTag: InternalExpressionValue.t = Belt.Map.String.getWithDefault(
|
||||
|
@ -52,31 +81,39 @@ let rec fromTypeMap = typeMap => {
|
|||
"properties",
|
||||
default,
|
||||
)
|
||||
//TODO: map type modifiers
|
||||
switch evTypeTag {
|
||||
| IEvString("typeIdentifier") => ItModifiedType({modifiedType: fromIEvValue(evTypeIdentifier)})
|
||||
|
||||
let contracts =
|
||||
typeMap->Belt.Map.String.keep((k, _v) => ["min", "max", "memberOf"]->Js.Array2.includes(k))
|
||||
|
||||
let makeIt = switch evTypeTag {
|
||||
| IEvString("typeIdentifier") => fromIEvValue(evTypeIdentifier)
|
||||
| IEvString("typeOr") => ItTypeOr({typeOr: fromIEvArray(evTypeOr)})
|
||||
| IEvString("typeFunction") =>
|
||||
ItTypeFunction({inputs: fromIEvArray(evInputs), output: fromIEvValue(evOutput)})
|
||||
| IEvString("typeArray") => ItTypeArray({element: fromIEvValue(evElement)})
|
||||
| IEvString("typeTuple") => ItTypeTuple({elements: fromIEvArray(evElements)})
|
||||
| IEvString("typeRecord") => ItTypeRecord({properties: fromIEvRecord(evProperties)})
|
||||
| _ => raise(Reducer_Exception.ImpossibleException)
|
||||
| _ => raise(Reducer_Exception.ImpossibleException("Reducer_Type_T-evTypeTag"))
|
||||
}
|
||||
|
||||
Belt.Map.String.isEmpty(contracts)
|
||||
? makeIt
|
||||
: ItModifiedType({modifiedType: makeIt, contracts: contracts})
|
||||
}
|
||||
and fromIEvValue = (ievValue: InternalExpressionValue.t) =>
|
||||
|
||||
and fromIEvValue = (ievValue: InternalExpressionValue.t): iType =>
|
||||
switch ievValue {
|
||||
| IEvTypeIdentifier(typeIdentifier) => ItTypeIdentifier({typeIdentifier})
|
||||
| IEvType(typeMap) => fromTypeMap(typeMap)
|
||||
| _ => raise(Reducer_Exception.ImpossibleException)
|
||||
| _ => raise(Reducer_Exception.ImpossibleException("Reducer_Type_T-ievValue"))
|
||||
}
|
||||
and fromIEvArray = (ievArray: InternalExpressionValue.t) =>
|
||||
switch ievArray {
|
||||
| IEvArray(array) => array->Belt.Array.map(fromIEvValue)
|
||||
| _ => raise(Reducer_Exception.ImpossibleException)
|
||||
| _ => raise(Reducer_Exception.ImpossibleException("Reducer_Type_T-ievArray"))
|
||||
}
|
||||
and fromIEvRecord = (ievRecord: InternalExpressionValue.t) =>
|
||||
switch ievRecord {
|
||||
| IEvRecord(record) => record->Belt.Map.String.map(fromIEvValue)
|
||||
| _ => raise(Reducer_Exception.ImpossibleException)
|
||||
| _ => raise(Reducer_Exception.ImpossibleException("Reducer_Type_T-ievRecord"))
|
||||
}
|
||||
|
|
|
@ -56,7 +56,7 @@ let typeFunction = anArray => {
|
|||
|
||||
let typeArray = element => {
|
||||
let newRecord = Belt.Map.String.fromArray([
|
||||
("typeTag", IEvString("typeTuple")),
|
||||
("typeTag", IEvString("typeArray")),
|
||||
("element", element),
|
||||
])
|
||||
newRecord->IEvType->Ok
|
||||
|
@ -64,22 +64,14 @@ let typeArray = element => {
|
|||
|
||||
let typeTuple = anArray => {
|
||||
let newRecord = Belt.Map.String.fromArray([
|
||||
("typeTag", IEvString("typeArray")),
|
||||
("typeTag", IEvString("typeTuple")),
|
||||
("elements", IEvArray(anArray)),
|
||||
])
|
||||
newRecord->IEvType->Ok
|
||||
}
|
||||
|
||||
let typeRecord = arrayOfPairs => {
|
||||
let newProperties =
|
||||
Belt.Array.map(arrayOfPairs, pairValue =>
|
||||
switch pairValue {
|
||||
| IEvArray([IEvString(key), valueValue]) => (key, valueValue)
|
||||
| _ => ("wrong key type", pairValue->toStringWithType->IEvString)
|
||||
}
|
||||
)
|
||||
->Belt.Map.String.fromArray
|
||||
->IEvRecord
|
||||
let typeRecord = propertyMap => {
|
||||
let newProperties = propertyMap->IEvRecord
|
||||
let newRecord = Belt.Map.String.fromArray([
|
||||
("typeTag", IEvString("typeRecord")),
|
||||
("properties", newProperties),
|
||||
|
|
|
@ -1,81 +1,168 @@
|
|||
module ExpressionT = Reducer_Expression_T
|
||||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
module T = Reducer_Type_T
|
||||
module TypeBuilder = Reducer_Type_TypeBuilder
|
||||
module TypeContracts = Reducer_Type_Contracts
|
||||
open InternalExpressionValue
|
||||
|
||||
type typeErrorValue =
|
||||
| TypeError(T.iType, InternalExpressionValue.t)
|
||||
| TypeErrorWithPosition(T.iType, InternalExpressionValue.t, int)
|
||||
| TypeErrorWithProperty(T.iType, InternalExpressionValue.t, string)
|
||||
|
||||
let rec isOfResolvedIType = (anIType: T.iType, aValue): result<bool, typeErrorValue> => {
|
||||
let rec isITypeOf = (anIType: T.iType, aValue): result<bool, T.typeErrorValue> => {
|
||||
let caseTypeIdentifier = (anUpperTypeName, aValue) => {
|
||||
let aTypeName = anUpperTypeName->Js.String2.toLowerCase
|
||||
let valueTypeName = aValue->valueToValueType->valueTypeToString->Js.String2.toLowerCase
|
||||
switch aTypeName === valueTypeName {
|
||||
switch aTypeName == valueTypeName {
|
||||
| true => Ok(true)
|
||||
| false => TypeError(anIType, aValue)->Error
|
||||
| false => T.TypeMismatch(anIType, aValue)->Error
|
||||
}
|
||||
}
|
||||
|
||||
let _caseRecord = (anIType, evValue, propertyMap, map) => {
|
||||
let caseRecord = (anIType, propertyMap: Belt.Map.String.t<T.iType>, evValue) =>
|
||||
switch evValue {
|
||||
| IEvRecord(aRecord) =>
|
||||
if (
|
||||
Js.Array2.length(propertyMap->Belt.Map.String.keysToArray) ==
|
||||
Js.Array2.length(aRecord->Belt.Map.String.keysToArray)
|
||||
) {
|
||||
Belt.Map.String.reduce(propertyMap, Ok(true), (acc, property, propertyType) => {
|
||||
Belt.Result.flatMap(acc, _ =>
|
||||
switch Belt.Map.String.get(map, property) {
|
||||
| Some(propertyValue) => isOfResolvedIType(propertyType, propertyValue)
|
||||
| None => TypeErrorWithProperty(anIType, evValue, property)->Error
|
||||
switch Belt.Map.String.get(aRecord, property) {
|
||||
| Some(propertyValue) => isITypeOf(propertyType, propertyValue)
|
||||
| None => T.TypeMismatch(anIType, evValue)->Error
|
||||
}
|
||||
)
|
||||
})
|
||||
} else {
|
||||
T.TypeMismatch(anIType, evValue)->Error
|
||||
}
|
||||
let _caseArray = (anIType, evValue, elementType, anArray) => {
|
||||
Belt.Array.reduceWithIndex(anArray, Ok(true), (acc, element, index) => {
|
||||
switch isOfResolvedIType(elementType, element) {
|
||||
|
||||
| _ => T.TypeMismatch(anIType, evValue)->Error
|
||||
}
|
||||
|
||||
let caseArray = (anIType, elementType, evValue) =>
|
||||
switch evValue {
|
||||
| IEvArray(anArray) =>
|
||||
Belt.Array.reduce(anArray, Ok(true), (acc, element) =>
|
||||
Belt.Result.flatMap(acc, _ =>
|
||||
switch isITypeOf(elementType, element) {
|
||||
| Ok(_) => Ok(true)
|
||||
| Error(error) => error->Error
|
||||
}
|
||||
)
|
||||
)
|
||||
| _ => T.TypeMismatch(anIType, evValue)->Error
|
||||
}
|
||||
|
||||
let caseTuple = (anIType, elementTypes, evValue) =>
|
||||
switch evValue {
|
||||
| IEvArray(anArray) =>
|
||||
if Js.Array2.length(elementTypes) == Js.Array2.length(anArray) {
|
||||
let zipped = Belt.Array.zip(elementTypes, anArray)
|
||||
Belt.Array.reduce(zipped, Ok(true), (acc, (elementType, element)) =>
|
||||
switch acc {
|
||||
| Ok(_) =>
|
||||
switch isITypeOf(elementType, element) {
|
||||
| Ok(_) => acc
|
||||
| Error(_) => TypeErrorWithPosition(anIType, evValue, index)->Error
|
||||
| Error(error) => Error(error)
|
||||
}
|
||||
| _ => acc
|
||||
}
|
||||
)
|
||||
} else {
|
||||
T.TypeMismatch(anIType, evValue)->Error
|
||||
}
|
||||
| _ => T.TypeMismatch(anIType, evValue)->Error
|
||||
}
|
||||
|
||||
let caseOr = (anIType, anITypeArray, evValue) =>
|
||||
switch Belt.Array.reduce(anITypeArray, Ok(false), (acc, anIType) =>
|
||||
Belt.Result.flatMap(acc, _ =>
|
||||
switch acc {
|
||||
| Ok(false) =>
|
||||
switch isITypeOf(anIType, evValue) {
|
||||
| Ok(_) => Ok(true)
|
||||
| Error(_) => acc
|
||||
}
|
||||
| _ => acc
|
||||
}
|
||||
)
|
||||
) {
|
||||
| Ok(true) => Ok(true)
|
||||
| Ok(false) => T.TypeMismatch(anIType, evValue)->Error
|
||||
| Error(error) => Error(error)
|
||||
}
|
||||
|
||||
let caseModifiedType = (
|
||||
anIType: T.iType,
|
||||
modifiedType: T.iType,
|
||||
contracts: Belt.Map.String.t<InternalExpressionValue.t>,
|
||||
aValue: InternalExpressionValue.t,
|
||||
) => {
|
||||
isITypeOf(modifiedType, aValue)->Belt.Result.flatMap(_result => {
|
||||
if TypeContracts.checkModifiers(contracts, aValue) {
|
||||
Ok(true)
|
||||
} else {
|
||||
T.TypeMismatch(anIType, aValue)->Error
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
switch anIType {
|
||||
| ItTypeIdentifier(name) => caseTypeIdentifier(name, aValue)
|
||||
// TODO: Work in progress. Code is commented out to allow a release of other features
|
||||
// | ItModifiedType({modifiedType: anIType}) => raise(Reducer_Exception.ImpossibleException)
|
||||
// | ItTypeOr({typeOr: anITypeArray}) => raise(Reducer_Exception.ImpossibleException)
|
||||
// | ItTypeFunction({inputs: anITypeArray, output: anIType}) =>
|
||||
// raise(Reducer_Exception.ImpossibleException)
|
||||
// | ItTypeArray({element: anIType}) => raise(Reducer_Exception.ImpossibleException)
|
||||
// | ItTypeTuple({elements: anITypeArray}) => raise(Reducer_Exception.ImpossibleException)
|
||||
// | ItTypeRecord({properties: anITypeMap}) => raise(Reducer_Exception.ImpossibleException)
|
||||
| _ => raise(Reducer_Exception.ImpossibleException)
|
||||
| ItModifiedType({modifiedType, contracts}) =>
|
||||
caseModifiedType(anIType, modifiedType, contracts, aValue) //{modifiedType: iType, contracts: Belt.Map.String.t<InternalExpressionValue.t>}
|
||||
| ItTypeOr({typeOr}) => caseOr(anIType, typeOr, aValue)
|
||||
| ItTypeFunction(_) =>
|
||||
raise(
|
||||
Reducer_Exception.ImpossibleException(
|
||||
"Reducer_TypeChecker-functions are without a type at the moment",
|
||||
),
|
||||
)
|
||||
| ItTypeArray({element}) => caseArray(anIType, element, aValue)
|
||||
| ItTypeTuple({elements}) => caseTuple(anIType, elements, aValue)
|
||||
| ItTypeRecord({properties}) => caseRecord(anIType, properties, aValue)
|
||||
}
|
||||
}
|
||||
|
||||
let isOfResolvedType = (aType: InternalExpressionValue.t, aValue): result<bool, typeErrorValue> =>
|
||||
aType->T.fromIEvValue->isOfResolvedIType(aValue)
|
||||
let isTypeOf = (
|
||||
typeExpressionSourceCode: string,
|
||||
aValue: InternalExpressionValue.t,
|
||||
reducerFn: ExpressionT.reducerFn,
|
||||
): result<InternalExpressionValue.t, ErrorValue.t> => {
|
||||
switch typeExpressionSourceCode->Reducer_Type_Compile.fromTypeExpression(reducerFn) {
|
||||
| Ok(anIType) =>
|
||||
switch isITypeOf(anIType, aValue) {
|
||||
| Ok(_) => Ok(aValue)
|
||||
| Error(T.TypeMismatch(anIType, evValue)) =>
|
||||
Error(
|
||||
ErrorValue.REExpectedType(anIType->T.toString, evValue->InternalExpressionValue.toString),
|
||||
)
|
||||
}
|
||||
| Error(error) => Error(error) // Directly propagating - err => err - causes type mismatch
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Work in progress. Code is commented out to allow a release of other features
|
||||
// let checkArguments = (
|
||||
// evFunctionType: InternalExpressionValue.t,
|
||||
// args: array<InternalExpressionValue.t>,
|
||||
// ) => {
|
||||
// let functionType = switch evFunctionType {
|
||||
// | IEvRecord(functionType) => functionType
|
||||
// | _ => raise(Reducer_Exception.ImpossibleException)
|
||||
// }
|
||||
// let evInputs = functionType->Belt.Map.String.getWithDefault("inputs", []->IEvArray)
|
||||
// let inputs = switch evInputs {
|
||||
// | IEvArray(inputs) => inputs
|
||||
// | _ => raise(Reducer_Exception.ImpossibleException)
|
||||
// }
|
||||
// let rTupleType = TypeBuilder.typeTuple(inputs)
|
||||
// Belt.Result.flatMap(rTupleType, tuppleType => isOfResolvedType(tuppleType, args->IEvArray))
|
||||
// }
|
||||
let checkITypeArguments = (anIType: T.iType, args: array<InternalExpressionValue.t>): result<
|
||||
bool,
|
||||
T.typeErrorValue,
|
||||
> => {
|
||||
switch anIType {
|
||||
| T.ItTypeFunction({inputs}) => isITypeOf(T.ItTypeTuple({elements: inputs}), args->IEvArray)
|
||||
| _ => T.TypeMismatch(anIType, args->IEvArray)->Error
|
||||
}
|
||||
}
|
||||
|
||||
// let compileTypeExpression = (typeExpression: string, bindings: ExpressionT.bindings, reducerFn: ExpressionT.reducerFn) => {
|
||||
// statement = `type compiled=${typeExpression}`
|
||||
|
||||
// }
|
||||
|
||||
//TODO: asGuard
|
||||
let checkArguments = (
|
||||
typeExpressionSourceCode: string,
|
||||
args: array<InternalExpressionValue.t>,
|
||||
reducerFn: ExpressionT.reducerFn,
|
||||
): result<InternalExpressionValue.t, ErrorValue.t> => {
|
||||
switch typeExpressionSourceCode->Reducer_Type_Compile.fromTypeExpression(reducerFn) {
|
||||
| Ok(anIType) =>
|
||||
switch checkITypeArguments(anIType, args) {
|
||||
| Ok(_) => Ok(args->IEvArray)
|
||||
| Error(T.TypeMismatch(anIType, evValue)) =>
|
||||
Error(
|
||||
ErrorValue.REExpectedType(anIType->T.toString, evValue->InternalExpressionValue.toString),
|
||||
)
|
||||
}
|
||||
| Error(error) => Error(error) // Directly propagating - err => err - causes type mismatch
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
module IEV = ReducerInterface_InternalExpressionValue
|
||||
type internalExpressionValue = IEV.t
|
||||
|
||||
let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
|
||||
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
|
||||
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
||||
> => {
|
||||
switch call {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
module IEV = ReducerInterface_InternalExpressionValue
|
||||
type internalExpressionValue = IEV.t
|
||||
|
||||
let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
|
||||
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
|
||||
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
||||
> => {
|
||||
switch call {
|
||||
|
|
|
@ -86,7 +86,7 @@ let toStringResult = x =>
|
|||
}
|
||||
|
||||
@genType
|
||||
type environment = DistributionOperation.env
|
||||
type environment = GenericDist.env
|
||||
|
||||
@genType
|
||||
let defaultEnvironment: environment = DistributionOperation.defaultEnv
|
||||
|
|
|
@ -1,29 +1,10 @@
|
|||
module InternalExpressionValue = ReducerInterface_InternalExpressionValue
|
||||
|
||||
type internalExpressionValue = InternalExpressionValue.t
|
||||
|
||||
// module Sample = {
|
||||
// // In real life real libraries should be somewhere else
|
||||
// /*
|
||||
// For an example of mapping polymorphic custom functions. To be deleted after real integration
|
||||
// */
|
||||
// let customAdd = (a: float, b: float): float => {a +. b}
|
||||
// }
|
||||
|
||||
/*
|
||||
Map external calls of Reducer
|
||||
*/
|
||||
|
||||
// I expect that it's important to build this first, so it doesn't get recalculated for each tryRegistry() call.
|
||||
let registry = FunctionRegistry_Library.registry
|
||||
|
||||
let tryRegistry = ((fnName, args): InternalExpressionValue.functionCall, env) => {
|
||||
FunctionRegistry_Core.Registry.matchAndRun(~registry, ~fnName, ~args, ~env)->E.O2.fmap(
|
||||
E.R2.errMap(_, s => Reducer_ErrorValue.RETodo(s)),
|
||||
)
|
||||
}
|
||||
|
||||
let dispatch = (call: InternalExpressionValue.functionCall, environment, chain): result<
|
||||
let dispatch = (call: InternalExpressionValue.functionCall, environment, reducer, chain): result<
|
||||
internalExpressionValue,
|
||||
'e,
|
||||
> => {
|
||||
|
@ -32,9 +13,10 @@ let dispatch = (call: InternalExpressionValue.functionCall, environment, chain):
|
|||
() => ReducerInterface_Date.dispatch(call, environment),
|
||||
() => ReducerInterface_Duration.dispatch(call, environment),
|
||||
() => ReducerInterface_Number.dispatch(call, environment),
|
||||
() => tryRegistry(call, environment),
|
||||
])->E.O2.default(chain(call, environment))
|
||||
() => FunctionRegistry_Library.dispatch(call, environment),
|
||||
])->E.O2.default(chain(call, environment, reducer))
|
||||
}
|
||||
|
||||
/*
|
||||
If your dispatch is too big you can divide it into smaller dispatches and pass the call along so that it is eventually handled.
|
||||
|
||||
|
|
|
@ -32,50 +32,38 @@ module Helpers = {
|
|||
let toFloatFn = (
|
||||
fnCall: DistributionTypes.DistributionOperation.toFloat,
|
||||
dist: DistributionTypes.genericDist,
|
||||
~env: DistributionOperation.env,
|
||||
~env: GenericDist.env,
|
||||
) => {
|
||||
FromDist(DistributionTypes.DistributionOperation.ToFloat(fnCall), dist)
|
||||
->DistributionOperation.run(~env)
|
||||
->Some
|
||||
FromDist(#ToFloat(fnCall), dist)->DistributionOperation.run(~env)->Some
|
||||
}
|
||||
|
||||
let toStringFn = (
|
||||
fnCall: DistributionTypes.DistributionOperation.toString,
|
||||
dist: DistributionTypes.genericDist,
|
||||
~env: DistributionOperation.env,
|
||||
~env: GenericDist.env,
|
||||
) => {
|
||||
FromDist(DistributionTypes.DistributionOperation.ToString(fnCall), dist)
|
||||
->DistributionOperation.run(~env)
|
||||
->Some
|
||||
FromDist(#ToString(fnCall), dist)->DistributionOperation.run(~env)->Some
|
||||
}
|
||||
|
||||
let toBoolFn = (
|
||||
fnCall: DistributionTypes.DistributionOperation.toBool,
|
||||
dist: DistributionTypes.genericDist,
|
||||
~env: DistributionOperation.env,
|
||||
~env: GenericDist.env,
|
||||
) => {
|
||||
FromDist(DistributionTypes.DistributionOperation.ToBool(fnCall), dist)
|
||||
->DistributionOperation.run(~env)
|
||||
->Some
|
||||
FromDist(#ToBool(fnCall), dist)->DistributionOperation.run(~env)->Some
|
||||
}
|
||||
|
||||
let toDistFn = (
|
||||
fnCall: DistributionTypes.DistributionOperation.toDist,
|
||||
dist,
|
||||
~env: DistributionOperation.env,
|
||||
~env: GenericDist.env,
|
||||
) => {
|
||||
FromDist(DistributionTypes.DistributionOperation.ToDist(fnCall), dist)
|
||||
->DistributionOperation.run(~env)
|
||||
->Some
|
||||
FromDist(#ToDist(fnCall), dist)->DistributionOperation.run(~env)->Some
|
||||
}
|
||||
|
||||
let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: DistributionOperation.env) => {
|
||||
let twoDiststoDistFn = (direction, arithmetic, dist1, dist2, ~env: GenericDist.env) => {
|
||||
FromDist(
|
||||
DistributionTypes.DistributionOperation.ToDistCombination(
|
||||
direction,
|
||||
arithmeticMap(arithmetic),
|
||||
#Dist(dist2),
|
||||
),
|
||||
#ToDistCombination(direction, arithmeticMap(arithmetic), #Dist(dist2)),
|
||||
dist1,
|
||||
)->DistributionOperation.run(~env)
|
||||
}
|
||||
|
@ -109,7 +97,7 @@ module Helpers = {
|
|||
let mixtureWithGivenWeights = (
|
||||
distributions: array<DistributionTypes.genericDist>,
|
||||
weights: array<float>,
|
||||
~env: DistributionOperation.env,
|
||||
~env: GenericDist.env,
|
||||
): DistributionOperation.outputType =>
|
||||
E.A.length(distributions) == E.A.length(weights)
|
||||
? Mixture(Belt.Array.zip(distributions, weights))->DistributionOperation.run(~env)
|
||||
|
@ -119,7 +107,7 @@ module Helpers = {
|
|||
|
||||
let mixtureWithDefaultWeights = (
|
||||
distributions: array<DistributionTypes.genericDist>,
|
||||
~env: DistributionOperation.env,
|
||||
~env: GenericDist.env,
|
||||
): DistributionOperation.outputType => {
|
||||
let length = E.A.length(distributions)
|
||||
let weights = Belt.Array.make(length, 1.0 /. Belt.Int.toFloat(length))
|
||||
|
@ -128,7 +116,7 @@ module Helpers = {
|
|||
|
||||
let mixture = (
|
||||
args: array<internalExpressionValue>,
|
||||
~env: DistributionOperation.env,
|
||||
~env: GenericDist.env,
|
||||
): DistributionOperation.outputType => {
|
||||
let error = (err: string): DistributionOperation.outputType =>
|
||||
err->DistributionTypes.ArgumentError->GenDistError
|
||||
|
@ -167,20 +155,6 @@ module Helpers = {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
let klDivergenceWithPrior = (
|
||||
prediction: DistributionTypes.genericDist,
|
||||
answer: DistributionTypes.genericDist,
|
||||
prior: DistributionTypes.genericDist,
|
||||
env: DistributionOperation.env,
|
||||
) => {
|
||||
let term1 = DistributionOperation.Constructors.klDivergence(~env, prediction, answer)
|
||||
let term2 = DistributionOperation.Constructors.klDivergence(~env, prior, answer)
|
||||
switch E.R.merge(term1, term2)->E.R2.fmap(((a, b)) => a -. b) {
|
||||
| Ok(x) => x->DistributionOperation.Float->Some
|
||||
| Error(_) => None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module SymbolicConstructors = {
|
||||
|
@ -199,7 +173,7 @@ module SymbolicConstructors = {
|
|||
}
|
||||
}
|
||||
|
||||
let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperation.env): option<
|
||||
let dispatchToGenericOutput = (call: IEV.functionCall, env: GenericDist.env): option<
|
||||
DistributionOperation.outputType,
|
||||
> => {
|
||||
let (fnName, args) = call
|
||||
|
@ -239,35 +213,6 @@ let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperatio
|
|||
~env,
|
||||
)->Some
|
||||
| ("normalize", [IEvDistribution(dist)]) => Helpers.toDistFn(Normalize, dist, ~env)
|
||||
| ("klDivergence", [IEvDistribution(prediction), IEvDistribution(answer)]) =>
|
||||
Some(DistributionOperation.run(FromDist(ToScore(KLDivergence(answer)), prediction), ~env))
|
||||
| (
|
||||
"klDivergence",
|
||||
[IEvDistribution(prediction), IEvDistribution(answer), IEvDistribution(prior)],
|
||||
) =>
|
||||
Helpers.klDivergenceWithPrior(prediction, answer, prior, env)
|
||||
| (
|
||||
"logScoreWithPointAnswer",
|
||||
[IEvDistribution(prediction), IEvNumber(answer), IEvDistribution(prior)],
|
||||
)
|
||||
| (
|
||||
"logScoreWithPointAnswer",
|
||||
[
|
||||
IEvDistribution(prediction),
|
||||
IEvDistribution(Symbolic(#Float(answer))),
|
||||
IEvDistribution(prior),
|
||||
],
|
||||
) =>
|
||||
DistributionOperation.run(
|
||||
FromDist(ToScore(LogScore(answer, prior->Some)), prediction),
|
||||
~env,
|
||||
)->Some
|
||||
| ("logScoreWithPointAnswer", [IEvDistribution(prediction), IEvNumber(answer)])
|
||||
| (
|
||||
"logScoreWithPointAnswer",
|
||||
[IEvDistribution(prediction), IEvDistribution(Symbolic(#Float(answer)))],
|
||||
) =>
|
||||
DistributionOperation.run(FromDist(ToScore(LogScore(answer, None)), prediction), ~env)->Some
|
||||
| ("isNormalized", [IEvDistribution(dist)]) => Helpers.toBoolFn(IsNormalized, dist, ~env)
|
||||
| ("toPointSet", [IEvDistribution(dist)]) => Helpers.toDistFn(ToPointSet, dist, ~env)
|
||||
| ("scaleLog", [IEvDistribution(dist)]) =>
|
||||
|
|
|
@ -15,7 +15,7 @@ type rec t =
|
|||
| IEvDeclaration(lambdaDeclaration)
|
||||
| IEvDistribution(DistributionTypes.genericDist)
|
||||
| IEvLambda(lambdaValue)
|
||||
| IEvModule(nameSpace)
|
||||
| IEvBindings(nameSpace)
|
||||
| IEvNumber(float)
|
||||
| IEvRecord(map)
|
||||
| IEvString(string)
|
||||
|
@ -52,7 +52,7 @@ let rec toString = aValue =>
|
|||
| IEvDeclaration(d) => Declaration.toString(d, r => toString(IEvLambda(r)))
|
||||
| IEvDistribution(dist) => GenericDist.toString(dist)
|
||||
| IEvLambda(lambdaValue) => `lambda(${Js.Array2.toString(lambdaValue.parameters)}=>internal code)`
|
||||
| IEvModule(m) => `@${m->toStringNameSpace}`
|
||||
| IEvBindings(m) => `@${m->toStringNameSpace}`
|
||||
| IEvNumber(aNumber) => Js.String.make(aNumber)
|
||||
| IEvRecord(aMap) => aMap->toStringMap
|
||||
| IEvString(aString) => `'${aString}'`
|
||||
|
@ -84,7 +84,7 @@ let toStringWithType = aValue =>
|
|||
| IEvDeclaration(_) => `Declaration::${toString(aValue)}`
|
||||
| IEvDistribution(_) => `Distribution::${toString(aValue)}`
|
||||
| IEvLambda(_) => `Lambda::${toString(aValue)}`
|
||||
| IEvModule(_) => `Module::${toString(aValue)}`
|
||||
| IEvBindings(_) => `Bindings::${toString(aValue)}`
|
||||
| IEvNumber(_) => `Number::${toString(aValue)}`
|
||||
| IEvRecord(_) => `Record::${toString(aValue)}`
|
||||
| IEvString(_) => `String::${toString(aValue)}`
|
||||
|
@ -150,7 +150,7 @@ let valueToValueType = value =>
|
|||
| IEvDeclaration(_) => EvtDeclaration
|
||||
| IEvDistribution(_) => EvtDistribution
|
||||
| IEvLambda(_) => EvtLambda
|
||||
| IEvModule(_) => EvtModule
|
||||
| IEvBindings(_) => EvtModule
|
||||
| IEvNumber(_) => EvtNumber
|
||||
| IEvRecord(_) => EvtRecord
|
||||
| IEvString(_) => EvtString
|
||||
|
@ -160,6 +160,26 @@ let valueToValueType = value =>
|
|||
| IEvTypeIdentifier(_) => EvtTypeIdentifier
|
||||
}
|
||||
|
||||
let externalValueToValueType = (value: ExternalExpressionValue.t) =>
|
||||
switch value {
|
||||
| EvArray(_) => EvtArray
|
||||
| EvArrayString(_) => EvtArrayString
|
||||
| EvBool(_) => EvtBool
|
||||
| EvCall(_) => EvtCall
|
||||
| EvDate(_) => EvtDate
|
||||
| EvDeclaration(_) => EvtDeclaration
|
||||
| EvDistribution(_) => EvtDistribution
|
||||
| EvLambda(_) => EvtLambda
|
||||
| EvModule(_) => EvtModule
|
||||
| EvNumber(_) => EvtNumber
|
||||
| EvRecord(_) => EvtRecord
|
||||
| EvString(_) => EvtString
|
||||
| EvSymbol(_) => EvtSymbol
|
||||
| EvTimeDuration(_) => EvtTimeDuration
|
||||
| EvType(_) => EvtType
|
||||
| EvTypeIdentifier(_) => EvtTypeIdentifier
|
||||
}
|
||||
|
||||
let functionCallToCallSignature = (functionCall: functionCall): functionCallSignature => {
|
||||
let (fn, args) = functionCall
|
||||
CallSignature(fn, args->Js.Array2.map(valueToValueType))
|
||||
|
@ -211,7 +231,7 @@ let rec toExternal = (iev: t): ExternalExpressionValue.t => {
|
|||
| IEvTimeDuration(v) => EvTimeDuration(v)
|
||||
| IEvType(v) => v->mapToExternal->EvType
|
||||
| IEvTypeIdentifier(v) => EvTypeIdentifier(v)
|
||||
| IEvModule(v) => v->nameSpaceToTypeScriptBindings->EvModule
|
||||
| IEvBindings(v) => v->nameSpaceToTypeScriptBindings->EvModule
|
||||
}
|
||||
}
|
||||
and mapToExternal = v =>
|
||||
|
@ -243,7 +263,7 @@ let rec toInternal = (ev: ExternalExpressionValue.t): t => {
|
|||
}
|
||||
| EvDistribution(v) => IEvDistribution(v)
|
||||
| EvLambda(v) => IEvLambda(lambdaValueToInternal(v))
|
||||
| EvModule(v) => v->nameSpaceFromTypeScriptBindings->IEvModule
|
||||
| EvModule(v) => v->nameSpaceFromTypeScriptBindings->IEvBindings
|
||||
| EvNumber(v) => IEvNumber(v)
|
||||
| EvRecord(v) => v->recordToInternal->IEvRecord
|
||||
| EvString(v) => IEvString(v)
|
||||
|
|
|
@ -24,7 +24,7 @@ module ScientificUnit = {
|
|||
}
|
||||
}
|
||||
|
||||
let dispatch = (call: IEV.functionCall, _: DistributionOperation.env): option<
|
||||
let dispatch = (call: IEV.functionCall, _: GenericDist.env): option<
|
||||
result<internalExpressionValue, QuriSquiggleLang.Reducer_ErrorValue.errorValue>,
|
||||
> => {
|
||||
switch call {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
|
||||
let internalStdLib = Module.emptyModule->SquiggleLibrary_Math.makeBindings
|
||||
let internalStdLib = Bindings.emptyBindings->SquiggleLibrary_Math.makeBindings
|
||||
|
||||
@genType
|
||||
let externalStdLib = internalStdLib->Module.toTypeScriptBindings
|
||||
let externalStdLib = internalStdLib->Bindings.toTypeScriptBindings
|
||||
|
|
|
@ -1,17 +1,16 @@
|
|||
module Bindings = Reducer_Module
|
||||
module Module = Reducer_Module
|
||||
module Bindings = Reducer_Bindings
|
||||
|
||||
let availableNumbers: array<(string, float)> = [
|
||||
("pi", Js.Math._PI),
|
||||
("e", Js.Math._E),
|
||||
("ln2", Js.Math._LN2),
|
||||
("ln10", Js.Math._LN10),
|
||||
("log2e", Js.Math._LOG2E),
|
||||
("log10e", Js.Math._LOG10E),
|
||||
("sqrt2", Js.Math._SQRT2),
|
||||
("sqrt1_2", Js.Math._SQRT1_2),
|
||||
("phi", 1.618033988749895),
|
||||
("tau", 6.283185307179586),
|
||||
("Math.pi", Js.Math._PI),
|
||||
("Math.e", Js.Math._E),
|
||||
("Math.ln2", Js.Math._LN2),
|
||||
("Math.ln10", Js.Math._LN10),
|
||||
("Math.log2e", Js.Math._LOG2E),
|
||||
("Math.log10e", Js.Math._LOG10E),
|
||||
("Math.sqrt2", Js.Math._SQRT2),
|
||||
("Math.sqrt1_2", Js.Math._SQRT1_2),
|
||||
("Math.phi", 1.618033988749895),
|
||||
("Math.tau", 6.283185307179586),
|
||||
]
|
||||
|
||||
let mathBindings: Bindings.t =
|
||||
|
@ -20,4 +19,4 @@ let mathBindings: Bindings.t =
|
|||
->Bindings.fromArray
|
||||
|
||||
let makeBindings = (previousBindings: Bindings.t): Bindings.t =>
|
||||
previousBindings->Bindings.defineModule("Math", mathBindings)
|
||||
previousBindings->Bindings.merge(mathBindings)
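A small sketch of the user-visible effect of merging instead of defining a nested module: the constants now sit directly in the namespace under dotted keys, so a plain lookup finds them. `Bindings.get` is used here the same way it is used elsewhere in this diff; the surrounding code is an assumption for illustration only.

```rescript
// Hypothetical sketch: after makeBindings, "Math.pi" is an ordinary key in the namespace.
let stdLib = Reducer_Bindings.emptyBindings->SquiggleLibrary_Math.makeBindings
switch Reducer_Bindings.get(stdLib, "Math.pi") {
| Some(value) => Js.log(value) // IEvNumber(3.14159...)
| None => Js.log("not found")
}
```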
|
||||
|
|
|
@ -8,7 +8,7 @@ The below few seem to work fine. In the future there's definitely more work to d
|
|||
*/
|
||||
|
||||
@genType
|
||||
type samplingParams = DistributionOperation.env
|
||||
type samplingParams = GenericDist.env
|
||||
|
||||
@genType
|
||||
type genericDist = DistributionTypes.genericDist
|
||||
|
|
|
@ -547,6 +547,7 @@ module A = {
|
|||
let init = Array.init
|
||||
let reduce = Belt.Array.reduce
|
||||
let reducei = Belt.Array.reduceWithIndex
|
||||
let some = Belt.Array.some
|
||||
let isEmpty = r => length(r) < 1
|
||||
let stableSortBy = Belt.SortArray.stableSortBy
|
||||
let toNoneIfEmpty = r => isEmpty(r) ? None : Some(r)
|
||||
|
@ -630,6 +631,19 @@ module A = {
|
|||
)
|
||||
let filter = Js.Array.filter
|
||||
let joinWith = Js.Array.joinWith
|
||||
let transpose = (xs: array<array<'a>>): array<array<'a>> => {
|
||||
let arr: array<array<'a>> = []
|
||||
for i in 0 to length(xs) - 1 {
|
||||
for j in 0 to length(xs[i]) - 1 {
|
||||
if Js.Array.length(arr) <= j {
|
||||
ignore(Js.Array.push([xs[i][j]], arr))
|
||||
} else {
|
||||
ignore(Js.Array.push(xs[i][j], arr[j]))
|
||||
}
|
||||
}
|
||||
}
|
||||
arr
|
||||
}
|
||||
|
||||
let all = (p: 'a => bool, xs: array<'a>): bool => length(filter(p, xs)) == length(xs)
|
||||
let any = (p: 'a => bool, xs: array<'a>): bool => length(filter(p, xs)) > 0
|
||||
|
@ -751,7 +765,7 @@ module A = {
|
|||
let diff = (t: t): array<float> =>
|
||||
Belt.Array.zipBy(t, Belt.Array.sliceToEnd(t, 1), (left, right) => right -. left)
|
||||
|
||||
let cumsum = (t: t): array<float> => accumulate((a, b) => a +. b, t)
|
||||
let cumSum = (t: t): array<float> => accumulate((a, b) => a +. b, t)
|
||||
let cumProd = (t: t): array<float> => accumulate((a, b) => a *. b, t)
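For orientation, hand-computed expectations for these accumulate-based helpers; a sketch only, assuming it runs inside this module where `cumSum`, `cumProd`, and `diff` are in scope.

```rescript
// Hypothetical sketch with hand-computed results.
let xs = [1., 2., 3.]
let sums = cumSum(xs) // [1., 3., 6.]
let prods = cumProd(xs) // [1., 2., 6.]
let deltas = diff(xs) // [1., 1.]
```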
|
||||
|
||||
exception RangeError(string)
|
||||
|
|
|
@ -327,8 +327,8 @@ module Zipped = {
|
|||
module PointwiseCombination = {
|
||||
// t1Interpolator and t2Interpolator are functions from XYShape.XtoY, e.g. linearBetweenPointsExtrapolateFlat.
|
||||
let combine: (
|
||||
(float, float) => result<float, Operation.Error.t>,
|
||||
interpolator,
|
||||
(float, float) => result<float, Operation.Error.t>,
|
||||
T.t,
|
||||
T.t,
|
||||
) => result<T.t, Operation.Error.t> = %raw(`
|
||||
|
@ -337,7 +337,7 @@ module PointwiseCombination = {
  // and interpolates the value on the other side, thus accumulating xs and ys.
  // This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.

    function(fn, interpolator, t1, t2) {
    function(interpolator, fn, t1, t2) {
      let t1n = t1.xs.length;
      let t2n = t2.xs.length;
      let outX = [];
@ -399,11 +399,11 @@ module PointwiseCombination = {
  This is from an approach to kl divergence that was ultimately rejected. Leaving it in for now because it may help us factor `combine` out of raw javascript soon.
  */
  let combineAlongSupportOfSecondArgument0: (
    (float, float) => result<float, Operation.Error.t>,
    interpolator,
    (float, float) => result<float, Operation.Error.t>,
    T.t,
    T.t,
  ) => result<T.t, Operation.Error.t> = (fn, interpolator, t1, t2) => {
  ) => result<T.t, Operation.Error.t> = (interpolator, fn, t1, t2) => {
    let newYs = []
    let newXs = []
    let (l1, l2) = (E.A.length(t1.xs), E.A.length(t2.xs))
@ -496,29 +496,9 @@ module PointwiseCombination = {
    let newYs = E.A.fmap(x => XtoY.linear(x, t), newXs)
    {xs: newXs, ys: newYs}
  }
  // This function is used for klDivergence
  let combineAlongSupportOfSecondArgument: (
    (float, float) => result<float, Operation.Error.t>,
    T.t,
    T.t,
  ) => result<T.t, Operation.Error.t> = (fn, prediction, answer) => {
    let combineWithFn = (answerX: float, i: int) => {
      let answerY = answer.ys[i]
      let predictionY = XtoY.linear(answerX, prediction)
      fn(predictionY, answerY)
    }
    let newYsWithError = Js.Array.mapi((x, i) => combineWithFn(x, i), answer.xs)
    let newYsOrError = E.A.R.firstErrorOrOpen(newYsWithError)
    let result = switch newYsOrError {
    | Ok(a) => Ok({xs: answer.xs, ys: a})
    | Error(b) => Error(b)
    }

    result
  }

  let addCombine = (interpolator: interpolator, t1: T.t, t2: T.t): T.t =>
    combine((a, b) => Ok(a +. b), interpolator, t1, t2)->E.R.toExn(
    combine(interpolator, (a, b) => Ok(a +. b), t1, t2)->E.R.toExn(
      "Add operation should never fail",
      _,
    )
@ -1,21 +1,35 @@
# Squiggle For VS Code

_[marketplace](https://marketplace.visualstudio.com/items?itemName=QURI.vscode-squiggle)_

## About

This extension provides support for [Squiggle](https://www.squiggle-language.com/) in VS Code.
This extension provides support for [Squiggle](https://www.squiggle-language.com/) in VS Code. It can be found in the VS Code _[marketplace](https://marketplace.visualstudio.com/items?itemName=QURI.vscode-squiggle)_.

Features:

- Preview `.squiggle` files in a preview pane
- Syntax highlighting for `.squiggle` and `.squiggleU` files

# Configuration
## Installation

Some preview settings, e.g. whether to show the summary table or the types of outputs, can be configured in the VS Code settings and persist between preview sessions.
You can install this extension by going to the "Extensions" tab, searching for "Squiggle", and installing it.

![](./images/vs-code-install.png)

## Usage

After loading a `.squiggle` file, an "Open Preview" button will appear. If you click it, the Squiggle model will be shown and updated as you edit and save your file.

![](./images/extension-screenshot.png)

### Configuration (optional)

Some preview settings, e.g. whether to show the summary table or the types of outputs, can be configured in the VS Code settings and persist between preview sessions. The VS Code settings can be opened with the shortcut `Ctrl+,`, with `Ctrl+Shift+P` and searching for "Open Settings", or by editing a file such as `$HOME/.config/Code/User/settings.json` on Linux (see [here](https://stackoverflow.com/questions/65908987/how-can-i-open-visual-studio-codes-settings-json-file) for other operating systems).

![](./images/vs-code-settings.png)
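As a rough sketch, an entry in `settings.json` might look something like the following. The keys shown here are illustrative placeholders rather than the extension's exact setting names; check the Squiggle section of the VS Code settings for the real ones.

```jsonc
{
  // Hypothetical keys, for illustration only; the actual setting names
  // are listed under the Squiggle section of the VS Code settings.
  "squiggle.preview.showSummary": true,
  "squiggle.preview.showTypes": false
}
```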
Check out the full list of Squiggle settings in the main VS Code settings.

# Build locally
## Build locally

We assume you ran `yarn` at the monorepo level for all dependencies.
BIN packages/vscode-ext/images/extension-screenshot.png (new file, binary not shown, 233 KiB)
BIN packages/vscode-ext/images/vs-code-install.png (new file, binary not shown, 187 KiB)
BIN packages/vscode-ext/images/vs-code-settings.png (new file, binary not shown, 77 KiB)
@ -133,18 +133,18 @@
"devDependencies": {
"@types/glob": "^7.2.0",
"@types/node": "18.x",
"@types/vscode": "^1.68.0",
"@typescript-eslint/eslint-plugin": "^5.30.4",
"@typescript-eslint/parser": "^5.30.4",
"eslint": "^8.18.0",
"@types/vscode": "^1.69.0",
"@typescript-eslint/eslint-plugin": "^5.30.6",
"@typescript-eslint/parser": "^5.30.6",
"eslint": "^8.20.0",
"glob": "^8.0.3",
"js-yaml": "^4.1.0",
"typescript": "^4.7.4",
"vsce-yarn-patch": "^1.66.2"
},
"dependencies": {
"vscode-languageclient": "^8.0.1",
"vscode-languageserver": "^8.0.1",
"vscode-languageclient": "^8.0.2",
"vscode-languageserver": "^8.0.2",
"vscode-languageserver-textdocument": "^1.0.5",
"@quri/squiggle-lang": "^0.2.11"
}
@ -12,11 +12,11 @@
"format": "prettier --write ."
},
"dependencies": {
"@docusaurus/core": "2.0.0-beta.21",
"@docusaurus/preset-classic": "2.0.0-beta.21",
"@docusaurus/core": "2.0.0-rc.1",
"@docusaurus/preset-classic": "2.0.0-rc.1",
"@quri/squiggle-components": "^0.2.20",
"base64-js": "^1.5.1",
"clsx": "^1.2.0",
"clsx": "^1.2.1",
"hast-util-is-element": "2.1.2",
"pako": "^2.0.4",
"prism-react-renderer": "^1.3.5",