Compare commits: project-co...develop (133 commits)

Commits (SHA1):

9e2eace05e a0000cd179 56771820aa 33f0647be8 878c6f3d4b 4cd045b9c8
a617ec0436 80cc20ac72 666524a36a 2ed3633fe5 deb88c60fb 838d13086a
11e80941bc e11cca658e 2d0949c3f6 8dac70082c d7e548d84e 2cd22fa8e5
21e4908fbf 07802ff151 f079c1f6ef 91a3a066c2 8608faa79b e076bd935f
fdcdf2fa3d 2d9fea4f21 f6322fad13 699960d220 ac0cc01852 c65a9d3ae6
113be7d8cb 2596d1a695 41153988d8 8c7340185c bd4250c793 c204ce37ad
344ee0ce17 71db023f0c bd94e51b31 964e95f598 98454a87b5 5efb66c3e5
2389dff69a 1ea3c975d5 1e13dc71f1 a515d9560c 9582be7331 827bb43354
27c4c9acf2 28e944dd04 9992d7da1a de532c72d6 fd6c4ae1de 5621567af2
000b7a6248 13a54f3e3d 4abdb69f23 ed86fd9f77 bcc5b5846f da8db5db9c
d7c6996284 b7d56a3f1f d037913e7c 2c01c77646 7493f2e2e5 07fcbeb568
0f8e7ce6b6 cc846aa74c 214a54e57e eaa7d38428 96e0418f60 2bb9622edd
76ea024342 d5f0a6bcf8 b2c10924cd 7bfe52c2d3 d6a48d9cb9 4efd2c9e05
0ab03eca96 9107b241f5 c1e67066aa ccdcb1a72b 871254dff9 ff5e0dd14c
f6e69dad38 7e4139acb3 af885ef58f 061d785996 355ff199c1 a48efc23b0
2c5511efc1 f39c69a2d4 b4c649d03b 8184396d3e 1a131828e6 92d3c761fa
bac954d949 1474630af8 ffc6cd3840 b5c5b81db4 77dbb223f9 19a44eb12f
8cbbdf5489 2d0e6432cd 0137b44689 6055320aa2 6463c4db5a a45d6c6c57
9f1c5affc4 234ebe2103 298492b3b8 26dbd29ec8 c11fbda8b0 d60792aa93
39cc4a32ca 1bb9e75ed3 9caee0fecd 54cbcea5d6 54d52e4d91 b1639bf62c
a764f3075c 184584c9f3 4c56b2fd07 845d38e375 69b32d0b93 111dd5535c
41574e08c9 36f7f00fc3 45dd199925 4ecb692e80 36c3a93d08 f67abe55a8
07af79adc8

Files changed:

.github/CODEOWNERS (vendored, 2 changes)

@@ -24,7 +24,7 @@
 *.json @quinn-dougherty @Hazelfire @berekuk @OAGr
 *.y*ml @quinn-dougherty @berekuk @OAGr
 *.config.js @Hazelfire @berekuk @OAGr
-netlify.toml @quinn-dougherty @OAGr @berekuk @Hazelfire
+vercel.json @OAGr @berekuk @Hazelfire

 # Documentation
 *.md @quinn-dougherty @OAGr @Hazelfire

.github/workflows/ci.yml (vendored, 227 changes)

@@ -1,4 +1,4 @@
-name: Squiggle packages check
+name: Squiggle packages checks

 on:
   push:
@@ -9,217 +9,40 @@ on:
     branches:
      - master
      - develop
-      - reducer-dev
-      - epic-reducer-project
-      - epic-0.5.0
+
+env:
+  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
+  TURBO_TEAM: quantified-uncertainty

 jobs:
-  pre_check:
-    name: Precheck for skipping redundant jobs
+  build-test-lint:
+    name: Build, test, lint
     runs-on: ubuntu-latest
-    outputs:
-      should_skip_lang: ${{ steps.skip_lang_check.outputs.should_skip }}
-      should_skip_components: ${{ steps.skip_components_check.outputs.should_skip }}
-      should_skip_website: ${{ steps.skip_website_check.outputs.should_skip }}
-      should_skip_vscodeext: ${{ steps.skip_vscodeext_check.outputs.should_skip }}
-      should_skip_cli: ${{ steps.skip_cli_check.outputs.should_skip }}
-    steps:
-      - id: skip_lang_check
-        name: Check if the changes are about squiggle-lang src files
-        uses: fkirc/skip-duplicate-actions@v5.2.0
-        with:
-          paths: '["packages/squiggle-lang/**"]'
-      - id: skip_components_check
-        name: Check if the changes are about components src files
-        uses: fkirc/skip-duplicate-actions@v5.2.0
-        with:
-          paths: '["packages/components/**"]'
-      - id: skip_website_check
-        name: Check if the changes are about website src files
-        uses: fkirc/skip-duplicate-actions@v5.2.0
-        with:
-          paths: '["packages/website/**"]'
-      - id: skip_vscodeext_check
-        name: Check if the changes are about vscode extension src files
-        uses: fkirc/skip-duplicate-actions@v5.2.0
-        with:
-          paths: '["packages/vscode-ext/**"]'
-      - id: skip_cli_check
-        name: Check if the changes are about cli src files
-        uses: fkirc/skip-duplicate-actions@v5.2.0
-        with:
-          paths: '["packages/cli/**"]'
-
-  lang-lint:
-    name: Language lint
-    runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ needs.pre_check.outputs.should_skip_lang != 'true' }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/squiggle-lang
     steps:
       - uses: actions/checkout@v3
-      - name: Install Dependencies
-        run: cd ../../ && yarn
-      - name: Check rescript lint
-        run: yarn lint:rescript
-      - name: Check javascript, typescript, and markdown lint
-        uses: creyD/prettier_action@v4.2
-        with:
-          dry: true
-          prettier_options: --check packages/squiggle-lang
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v3
+        with:
+          node-version: 16
+          cache: 'yarn'
+      - name: Install dependencies
+        run: yarn --frozen-lockfile
+      - name: Turbo run
+        run: npx turbo run build test lint bundle

-  lang-build-test-bundle:
-    name: Language build, test, and bundle
+  coverage:
+    name: Coverage
     runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ needs.pre_check.outputs.should_skip_lang != 'true' }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/squiggle-lang
     steps:
       - uses: actions/checkout@v3
        with:
          fetch-depth: 2
-      - name: Install dependencies from monorepo level
-        run: cd ../../ && yarn
-      - name: Build rescript codebase
-        run: yarn build
-      - name: Run rescript tests
-        run: yarn test:rescript
-      - name: Run typescript tests
-        run: yarn test:ts
-      - name: Run webpack
-        run: yarn bundle
-      - name: Upload rescript coverage report
-        run: yarn coverage:rescript:ci
-      - name: Upload typescript coverage report
-        run: yarn coverage:ts:ci
-
-  components-lint:
-    name: Components lint
-    runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ needs.pre_check.outputs.should_skip_components != 'true' }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/components
-    steps:
-      - uses: actions/checkout@v3
-      - name: Check javascript, typescript, and markdown lint
-        uses: creyD/prettier_action@v4.2
-        with:
-          dry: true
-          prettier_options: --check packages/components --ignore-path packages/components/.prettierignore
-
-  components-bundle-build-test:
-    name: Components bundle, build and test
-    runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ (needs.pre_check.outputs.should_skip_components != 'true') || (needs.pre_check.outputs.should_skip_lang != 'true') }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/components
-    steps:
-      - uses: actions/checkout@v3
-      - name: Install dependencies from monorepo level
-        run: cd ../../ && yarn
-      - name: Build rescript codebase in squiggle-lang
-        run: cd ../squiggle-lang && yarn build
-      - name: Run webpack
-        run: yarn bundle
-      - name: Build storybook
-        run: yarn build
-      - name: Test components
-        run: yarn test
-
-  website-lint:
-    name: Website lint
-    runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ needs.pre_check.outputs.should_skip_website != 'true' }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/website
-    steps:
-      - uses: actions/checkout@v3
-      - name: Check javascript, typescript, and markdown lint
-        uses: creyD/prettier_action@v4.2
-        with:
-          dry: true
-          prettier_options: --check packages/website
-
-  website-build:
-    name: Website build
-    runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ (needs.pre_check.outputs.should_skip_website != 'true') || (needs.pre_check.outputs.should_skip_lang != 'true') || (needs.pre_check.outputs.should_skip_components != 'true') }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/website
-    steps:
-      - uses: actions/checkout@v3
-      - name: Install dependencies from monorepo level
-        run: cd ../../ && yarn
-      - name: Build rescript in squiggle-lang
-        run: cd ../squiggle-lang && yarn build
-      - name: Build components
-        run: cd ../components && yarn build
-      - name: Build website assets
-        run: yarn build
-
-  vscode-ext-lint:
-    name: VS Code extension lint
-    runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ needs.pre_check.outputs.should_skip_vscodeext != 'true' }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/vscode-ext
-    steps:
-      - uses: actions/checkout@v3
-      - name: Check javascript, typescript, and markdown lint
-        uses: creyD/prettier_action@v4.2
-        with:
-          dry: true
-          prettier_options: --check packages/vscode-ext
-
-  vscode-ext-build:
-    name: VS Code extension build
-    runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ (needs.pre_check.outputs.should_skip_components != 'true') || (needs.pre_check.outputs.should_skip_lang != 'true') }} || (needs.pre_check.outputs.should_skip_vscodeext != 'true') }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/vscode-ext
-    steps:
-      - uses: actions/checkout@v3
-      - name: Install dependencies from monorepo level
-        run: cd ../../ && yarn
-      - name: Build
-        run: yarn compile
-
-  cli-lint:
-    name: CLI lint
-    runs-on: ubuntu-latest
-    needs: pre_check
-    if: ${{ needs.pre_check.outputs.should_skip_cli != 'true' }}
-    defaults:
-      run:
-        shell: bash
-        working-directory: packages/cli
-    steps:
-      - uses: actions/checkout@v3
-      - name: Check javascript, typescript, and markdown lint
-        uses: creyD/prettier_action@v4.2
-        with:
-          dry: true
-          prettier_options: --check packages/cli
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: 16
+          cache: 'yarn'
+      - name: Install dependencies
+        run: yarn
+      - name: Coverage
+        run: npx turbo run coverage

.gitignore (vendored, 1 change)

@@ -12,3 +12,4 @@ yarn-error.log
 todo.txt
 result
 shell.nix
+.turbo

CHANGELOG.md (new file, 1 change)

@@ -0,0 +1 @@
+See the [Changelog.mdx page](./packages/website/docs/Changelog.mdx) for the changelog.

Contributor documentation:

@@ -16,7 +16,7 @@ Squiggle is currently pre-alpha.

 # Bug reports

-Anyone (with a github account) can file an issue at any time. Please allow Quinn, Sam, and Ozzie to triage, but otherwise just follow the suggestions in the issue templates.
+Anyone (with a github account) can file an issue at any time. Please allow Slava, Sam, and Ozzie to triage, but otherwise just follow the suggestions in the issue templates.

 # Project structure

@@ -28,7 +28,7 @@ Squiggle is a **monorepo** with three **packages**.

 # Deployment ops

-We use netlify, and it should only concern Quinn, Sam, and Ozzie.
+We use Vercel, and it should only concern Slava, Sam, and Ozzie.

 # Development environment, building, testing, dev server

@@ -56,9 +56,9 @@ If you absolutely must, please prefix your commit message with `hotfix: `.

 Please work against `develop` branch. **Do not** work against `master`.

-- For rescript code: Quinn and Ozzie are reviewers
+- For rescript code: Slava and Ozzie are reviewers
 - For js or typescript code: Sam and Ozzie are reviewers
-- For ops code (i.e. yaml, package.json): Quinn and Sam are reviewers
+- For ops code (i.e. yaml, package.json): Slava and Sam are reviewers

 Autopings are set up: if you are not autopinged, you are welcome to comment, but please do not use the formal review feature, send approvals, rejections, or merges.

README.md (28 changes)

@@ -21,10 +21,10 @@ _An estimation language_.

 ## Our deployments

-- **website/docs prod**: https://squiggle-language.com [![Netlify Status](https://api.netlify.com/api/v1/badges/2139af5c-671d-473d-a9f6-66c96077d8a1/deploy-status)](https://app.netlify.com/sites/squiggle-documentation/deploys)
-- **website/docs staging**: https://develop--squiggle-documentation.netlify.app/
-- **components storybook prod**: https://squiggle-components.netlify.app/ [![Netlify Status](https://api.netlify.com/api/v1/badges/b7f724aa-6b20-4d0e-bf86-3fcd1a3e9a70/deploy-status)](https://app.netlify.com/sites/squiggle-components/deploys)
-- **components storybook staging**: https://develop--squiggle-components.netlify.app/
+- **website/docs prod**: https://squiggle-language.com
+- **website/docs staging**: https://preview.squiggle-language.com
+- **components storybook prod**: https://components.squiggle-language.com
+- **components storybook staging**: https://preview-components.squiggle-language.com
 - **legacy (2020) playground**: https://playground.squiggle-language.com

 ## Packages

@@ -51,7 +51,25 @@ For any project in the repo, begin by running `yarn` in the top level
 yarn
 ```

-See `packages/*/README.md` to work with whatever project you're interested in.
+Then use `turbo` to build the specific packages or the entire monorepo:
+
+```sh
+turbo run build
+```
+
+Or:
+
+```sh
+turbo run build --filter=@quri/squiggle-components
+```
+
+You can also run specific npm scripts for the package you're working on. See `packages/*/README.md` for the details.
+
+# NixOS users
+
+This repository requires the use of bundled binaries from node_modules, which
+are not linked statically. The easiest way to get them working is to enable
+[nix-ld](https://github.com/Mic92/nix-ld).

 # Contributing

Nix build configuration:

@@ -30,16 +30,6 @@
       patchelf --replace-needed libstdc++.so.6 $THE_SO linux/ninja.exe && echo "- replaced needed for linux/ninja.exe"
     '';
   };
-  bisect_ppx = {
-    buildInputs = common.which;
-    postInstall = ''
-      echo "PATCHELF'ING BISECT_PPX EXECUTABLE"
-      THE_LD=$(patchelf --print-interpreter $(which mkdir))
-      patchelf --set-interpreter $THE_LD bin/linux/ppx
-      patchelf --set-interpreter $THE_LD bin/linux/bisect-ppx-report
-      cp bin/linux/ppx ppx
-    '';
-  };
   gentype = {
     postInstall = ''
       mv gentype.exe ELFLESS-gentype.exe

nixos.sh (deleted, 18 changes)

@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-# This script is only relevant if you're rolling nixos.
-
-# Esy (a bisect_ppx dependency/build tool) is borked on nixos without using an FHS shell. https://github.com/esy/esy/issues/858
-# We need to patchelf rescript executables. https://github.com/NixOS/nixpkgs/issues/107375
-set -x
-
-fhsShellName="squiggle-fhs-development"
-fhsShellDotNix="{pkgs ? import <nixpkgs> {} }: (pkgs.buildFHSUserEnv { name = \"${fhsShellName}\"; targetPkgs = pkgs: [pkgs.yarn pkgs.glibc]; runScript = \"yarn\"; }).env"
-nix-shell - <<<"$fhsShellDotNix"
-
-theLd=$(patchelf --print-interpreter $(which mkdir))
-patchelf --set-interpreter $theLd ./node_modules/gentype/gentype.exe
-patchelf --set-interpreter $theLd ./node_modules/rescript/linux/*.exe
-patchelf --set-interpreter $theLd ./node_modules/bisect_ppx/ppx
-patchelf --set-interpreter $theLd ./node_modules/bisect_ppx/bisect-ppx-report
-theSo=$(find /nix/store/*$fhsShellName*/lib64 -name libstdc++.so.6 | head -n 1)
-patchelf --replace-needed libstdc++.so.6 $theSo ./node_modules/rescript/linux/ninja.exe

Root workspace package.json:

@@ -2,12 +2,11 @@
   "private": true,
   "name": "squiggle",
   "scripts": {
-    "nodeclean": "rm -r node_modules && rm -r packages/*/node_modules",
-    "format:all": "prettier --write . && cd packages/squiggle-lang && yarn format",
-    "lint:all": "prettier --check . && cd packages/squiggle-lang && yarn lint:rescript"
+    "nodeclean": "rm -r node_modules && rm -r packages/*/node_modules"
   },
   "devDependencies": {
-    "prettier": "^2.7.1"
+    "prettier": "^2.7.1",
+    "turbo": "^1.5.5"
   },
   "workspaces": [
     "packages/*"

CLI (squiggle-cli-experimental) README:

@@ -20,3 +20,30 @@ Runs compilation in the current directory and all of its subdirectories.
 ### `npx squiggle-cli-experimental watch`

 Watches `.squiggleU` files in the current directory (and subdirectories) and rebuilds them when they are saved. Note that this will _not_ rebuild files when their dependencies are changed, just when they are changed directly.
+
+## Further instructions
+
+The above requires having node, npm and npx. To install the first two, see [here](https://nodejs.org/en/), to install npx, run:
+
+```
+npm install -g npx
+```
+
+Alternatively, you can run the following without the need for npx:
+
+```
+npm install squiggle-cli-experimental
+node node_modules/squiggle-cli-experimental/index.js compile
+```
+
+or you can add a script to your `package.json`, like:
+
+```
+...
+scripts: {
+  "compile": "squiggle-cli-experimental compile"
+}
+...
+```
+
+This can be run with `npm run compile`. `npm` knows how to reach into the node_modules directly, so it's not necessary to specify that.

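The `watch` command documented above rebuilds `.squiggleU` files when they are saved, and the CLI's dependency list below includes `chokidar`. The CLI's actual implementation is not shown in this diff; the following is only a rough sketch, under those assumptions, of how a chokidar-based watcher of this kind can be wired up. The `compileSquiggleFile` helper is hypothetical.

```ts
// Rough sketch only; not the code that ships in squiggle-cli-experimental.
// chokidar is a real dependency of the CLI (see its package.json below);
// compileSquiggleFile is a hypothetical stand-in for the CLI's compile step.
import chokidar from "chokidar";

async function compileSquiggleFile(path: string): Promise<void> {
  // Hypothetical: read the .squiggleU file, resolve its includes, write output.
  console.log(`recompiling ${path}`);
}

export function watchSquiggleFiles(dir: string = process.cwd()): void {
  // Rebuild a file only when it changes directly; changed dependencies do not
  // trigger rebuilds, matching the behaviour documented above.
  chokidar
    .watch("**/*.squiggleU", { cwd: dir, ignoreInitial: false })
    .on("add", (path) => void compileSquiggleFile(path))
    .on("change", (path) => void compileSquiggleFile(path));
}
```
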
CLI package.json:

@@ -7,11 +7,13 @@
   "bin": "index.js",
   "type": "module",
   "scripts": {
-    "start": "node ."
+    "start": "node .",
+    "lint": "prettier --check .",
+    "format": "prettier --write ."
   },
   "license": "MIT",
   "dependencies": {
-    "chalk": "^5.0.1",
+    "chalk": "^5.1.0",
     "chokidar": "^3.5.3",
     "commander": "^9.4.1",
     "fs": "^0.0.1-security",

Netlify build configuration (removed):

@@ -1,8 +0,0 @@
-[build]
-  base = "packages/components/"
-  command = "cd ../squiggle-lang && yarn build && cd ../components && yarn build"
-  publish = "storybook-static/"
-  ignore = "node -e 'process.exitCode = process.env.BRANCH.includes(\"dependabot\") ? 0 : 1' && git diff --quiet $CACHED_COMMIT_REF $COMMIT_REF . ../squiggle-lang"
-
-[build.environment]
-  NETLIFY_USE_YARN = "true"

Components package.json (dependency bumps):

@@ -12,11 +12,11 @@
     "@react-hook/size": "^2.1.2",
     "@types/uuid": "^8.3.4",
     "clsx": "^1.2.1",
-    "framer-motion": "^7.5.1",
+    "framer-motion": "^7.5.3",
     "lodash": "^4.17.21",
     "react": "^18.1.0",
     "react-ace": "^10.1.0",
-    "react-hook-form": "^7.36.1",
+    "react-hook-form": "^7.37.0",
     "react-use": "^17.4.0",
     "react-vega": "^7.6.0",
     "uuid": "^9.0.0",
@@ -41,7 +41,7 @@
     "@testing-library/user-event": "^14.4.3",
     "@types/jest": "^27.5.0",
     "@types/lodash": "^4.14.186",
-    "@types/node": "^18.8.0",
+    "@types/node": "^18.8.3",
     "@types/react": "^18.0.21",
     "@types/styled-components": "^5.1.26",
     "@types/uuid": "^8.3.4",
@@ -49,8 +49,8 @@
     "canvas": "^2.10.1",
     "cross-env": "^7.0.3",
     "jest": "^29.0.3",
-    "jest-environment-jsdom": "^29.0.3",
-    "jsdom": "^20.0.0",
+    "jest-environment-jsdom": "^29.1.2",
+    "jsdom": "^20.0.1",
     "mini-css-extract-plugin": "^2.6.1",
     "postcss-cli": "^10.0.0",
     "postcss-import": "^15.0.0",
@@ -60,11 +60,11 @@
     "react-scripts": "^5.0.1",
     "style-loader": "^3.3.1",
     "tailwindcss": "^3.1.8",
-    "ts-jest": "^29.0.2",
+    "ts-jest": "^29.0.3",
     "ts-loader": "^9.4.1",
     "tsconfig-paths-webpack-plugin": "^4.0.0",
     "typescript": "^4.8.4",
-    "web-vitals": "^3.0.2",
+    "web-vitals": "^3.0.3",
     "webpack": "^5.74.0",
     "webpack-cli": "^4.10.0",
     "webpack-dev-server": "^4.11.1"

Alert component:

@@ -24,13 +24,13 @@ export const Alert: React.FC<{
   children,
 }) => {
   return (
-    <div className={clsx("rounded-md p-4", backgroundColor)}>
+    <div className={clsx("rounded-md p-4", backgroundColor)} role="status">
       <div className="flex">
         <Icon
           className={clsx("h-5 w-5 flex-shrink-0", iconColor)}
           aria-hidden="true"
         />
-        <div className="ml-3">
+        <div className="ml-3 grow">
           <header className={clsx("text-sm font-medium", headingColor)}>
             {heading}
           </header>

CodeEditor component:

@@ -5,6 +5,8 @@ import AceEditor from "react-ace";
 import "ace-builds/src-noconflict/mode-golang";
 import "ace-builds/src-noconflict/theme-github";

+import { SqLocation } from "@quri/squiggle-lang";

 interface CodeEditorProps {
   value: string;
   onChange: (value: string) => void;
@@ -13,15 +15,17 @@ interface CodeEditorProps {
   width?: number;
   height: number;
   showGutter?: boolean;
+  errorLocations?: SqLocation[];
 }

 export const CodeEditor: FC<CodeEditorProps> = ({
   value,
   onChange,
   onSubmit,
+  height,
   oneLine = false,
   showGutter = false,
-  height,
+  errorLocations = [],
 }) => {
   const lineCount = value.split("\n").length;
   const id = useMemo(() => _.uniqueId(), []);
@@ -30,8 +34,11 @@ export const CodeEditor: FC<CodeEditorProps> = ({
   const onSubmitRef = useRef<typeof onSubmit | null>(null);
   onSubmitRef.current = onSubmit;

+  const editorEl = useRef<AceEditor | null>(null);
+
   return (
     <AceEditor
+      ref={editorEl}
       value={value}
       mode="golang"
       theme="github"
@@ -48,10 +55,7 @@ export const CodeEditor: FC<CodeEditorProps> = ({
       editorProps={{
         $blockScrolling: true,
       }}
-      setOptions={{
-        enableBasicAutocompletion: false,
-        enableLiveAutocompletion: false,
-      }}
+      setOptions={{}}
       commands={[
         {
           name: "submit",
@@ -59,6 +63,14 @@ export const CodeEditor: FC<CodeEditorProps> = ({
           exec: () => onSubmitRef.current?.(),
         },
       ]}
+      markers={errorLocations?.map((location) => ({
+        startRow: location.start.line - 1,
+        startCol: location.start.column - 1,
+        endRow: location.end.line - 1,
+        endCol: location.end.column - 1,
+        className: "ace-error-marker",
+        type: "text",
+      }))}
     />
   );
 };

FunctionChart component:

@@ -1,9 +1,15 @@
 import * as React from "react";
-import { SqLambda, environment, SqValueTag } from "@quri/squiggle-lang";
+import {
+  SqLambda,
+  environment,
+  SqValueTag,
+  SqError,
+} from "@quri/squiggle-lang";
 import { FunctionChart1Dist } from "./FunctionChart1Dist";
 import { FunctionChart1Number } from "./FunctionChart1Number";
 import { DistributionPlottingSettings } from "./DistributionChart";
-import { ErrorAlert, MessageAlert } from "./Alert";
+import { MessageAlert } from "./Alert";
+import { SquiggleErrorAlert } from "./SquiggleErrorAlert";

 export type FunctionChartSettings = {
   start: number;
@@ -19,6 +25,25 @@ interface FunctionChartProps {
   height: number;
 }

+const FunctionCallErrorAlert = ({ error }: { error: SqError }) => {
+  const [expanded, setExpanded] = React.useState(false);
+  if (expanded) {
+  }
+  return (
+    <MessageAlert heading="Function Display Failed">
+      <div className="space-y-2">
+        <span
+          className="underline decoration-dashed cursor-pointer"
+          onClick={() => setExpanded(!expanded)}
+        >
+          {expanded ? "Hide" : "Show"} error details
+        </span>
+        {expanded ? <SquiggleErrorAlert error={error} /> : null}
+      </div>
+    </MessageAlert>
+  );
+};
+
 export const FunctionChart: React.FC<FunctionChartProps> = ({
   fn,
   chartSettings,
@@ -26,7 +51,8 @@ export const FunctionChart: React.FC<FunctionChartProps> = ({
   distributionPlotSettings,
   height,
 }) => {
-  if (fn.parameters.length > 1) {
+  console.log(fn.parameters().length);
+  if (fn.parameters().length !== 1) {
     return (
       <MessageAlert heading="Function Display Not Supported">
         Only functions with one parameter are displayed.
@@ -47,9 +73,7 @@ export const FunctionChart: React.FC<FunctionChartProps> = ({
   const validResult = getValidResult();

   if (validResult.tag === "Error") {
-    return (
-      <ErrorAlert heading="Error">{validResult.value.toString()}</ErrorAlert>
-    );
+    return <FunctionCallErrorAlert error={validResult.value} />;
   }

   switch (validResult.value.tag) {

SquiggleChart component:

@@ -2,13 +2,13 @@ import * as React from "react";
 import {
   SqValue,
   environment,
-  resultMap,
-  SqValueTag,
   SqProject,
+  defaultEnvironment,
 } from "@quri/squiggle-lang";
 import { useSquiggle } from "../lib/hooks";
 import { SquiggleViewer } from "./SquiggleViewer";
 import { JsImports } from "../lib/jsImports";
+import { getValueToRender } from "../lib/utility";

 export type SquiggleChartProps = {
   /** The input string for squiggle */
@@ -72,15 +72,9 @@ type ProjectExecutionProps = {
 const defaultOnChange = () => {};
 const defaultImports: JsImports = {};

-export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
-  (props: SquiggleChartProps) => {
-    const {
-      executionId = 0,
-      onChange = defaultOnChange, // defaultOnChange must be constant, don't move its definition here
-      height = 200,
-      jsImports = defaultImports,
-      showSummary = false,
-      width,
+export const splitSquiggleChartSettings = (props: SquiggleChartProps) => {
+  const {
+    showSummary = false,
     logX = false,
     expY = false,
     diagramStart = 0,
@@ -93,32 +87,8 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
     title,
     xAxisType = "number",
     distributionChartActions,
-    enableLocalSettings = false,
-    code,
-    continues = [],
   } = props;

-  const p = React.useMemo(() => {
-    if (props.project) {
-      return props.project;
-    } else {
-      const p = SqProject.create();
-      if (props.environment) {
-        p.setEnvironment(props.environment);
-      }
-      return p;
-    }
-  }, [props.project, props.environment]);
-
-  const { result, bindings } = useSquiggle({
-    continues,
-    project: p,
-    code,
-    jsImports,
-    onChange,
-    executionId,
-  });
-
   const distributionPlotSettings = {
     showSummary,
     logX,
@@ -138,18 +108,49 @@ export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
     count: diagramCount,
   };

-  const resultToRender = resultMap(result, (value) =>
-    value.tag === SqValueTag.Void ? bindings.asValue() : value
-  );
+  return { distributionPlotSettings, chartSettings };
+};
+
+export const SquiggleChart: React.FC<SquiggleChartProps> = React.memo(
+  (props) => {
+    const { distributionPlotSettings, chartSettings } =
+      splitSquiggleChartSettings(props);
+
+    const {
+      code,
+      jsImports = defaultImports,
+      onChange = defaultOnChange, // defaultOnChange must be constant, don't move its definition here
+      executionId = 0,
+      width,
+      height = 200,
+      enableLocalSettings = false,
+      continues,
+      project,
+      environment,
+    } = props;
+
+    const resultAndBindings = useSquiggle({
+      environment,
+      continues,
+      project,
+      code,
+      jsImports,
+      onChange,
+      executionId,
+    });
+
+    const valueToRender = getValueToRender(resultAndBindings);

     return (
       <SquiggleViewer
-        result={resultToRender}
+        result={valueToRender}
         width={width}
         height={height}
         distributionPlotSettings={distributionPlotSettings}
         chartSettings={chartSettings}
-        environment={p.getEnvironment()}
+        environment={
+          project ? project.getEnvironment() : environment ?? defaultEnvironment
+        }
         enableLocalSettings={enableLocalSettings}
       />
     );

SquiggleEditor component:

@@ -1,20 +1,29 @@
 import React from "react";
 import { CodeEditor } from "./CodeEditor";
 import { SquiggleContainer } from "./SquiggleContainer";
-import { SquiggleChart, SquiggleChartProps } from "./SquiggleChart";
-import { useMaybeControlledValue } from "../lib/hooks";
+import {
+  splitSquiggleChartSettings,
+  SquiggleChartProps,
+} from "./SquiggleChart";
+import { useMaybeControlledValue, useSquiggle } from "../lib/hooks";
+import { JsImports } from "../lib/jsImports";
+import { defaultEnvironment, SqLocation, SqProject } from "@quri/squiggle-lang";
+import { SquiggleViewer } from "./SquiggleViewer";
+import { getErrorLocations, getValueToRender } from "../lib/utility";

 const WrappedCodeEditor: React.FC<{
   code: string;
   setCode: (code: string) => void;
-}> = ({ code, setCode }) => (
-  <div className="border border-grey-200 p-2 m-4">
+  errorLocations?: SqLocation[];
+}> = ({ code, setCode, errorLocations }) => (
+  <div className="border border-grey-200 p-2 m-4" data-testid="squiggle-editor">
     <CodeEditor
       value={code}
       onChange={setCode}
       oneLine={true}
       showGutter={false}
       height={20}
+      errorLocations={errorLocations}
     />
   </div>
 );
@@ -24,6 +33,9 @@ export type SquiggleEditorProps = SquiggleChartProps & {
   onCodeChange?: (code: string) => void;
 };

+const defaultOnChange = () => {};
+const defaultImports: JsImports = {};
+
 export const SquiggleEditor: React.FC<SquiggleEditorProps> = (props) => {
   const [code, setCode] = useMaybeControlledValue({
     value: props.code,
@@ -31,11 +43,50 @@ export const SquiggleEditor: React.FC<SquiggleEditorProps> = (props) => {
     onChange: props.onCodeChange,
   });

-  let chartProps = { ...props, code };
+  const { distributionPlotSettings, chartSettings } =
+    splitSquiggleChartSettings(props);
+
+  const {
+    environment,
+    jsImports = defaultImports,
+    onChange = defaultOnChange, // defaultOnChange must be constant, don't move its definition here
+    executionId = 0,
+    width,
+    height = 200,
+    enableLocalSettings = false,
+    continues,
+    project,
+  } = props;
+
+  const resultAndBindings = useSquiggle({
+    environment,
+    continues,
+    code,
+    project,
+    jsImports,
+    onChange,
+    executionId,
+  });
+
+  const valueToRender = getValueToRender(resultAndBindings);
+  const errorLocations = getErrorLocations(resultAndBindings.result);
+
   return (
     <SquiggleContainer>
-      <WrappedCodeEditor code={code} setCode={setCode} />
-      <SquiggleChart {...chartProps} />
+      <WrappedCodeEditor
+        code={code}
+        setCode={setCode}
+        errorLocations={errorLocations}
+      />
+      <SquiggleViewer
+        result={valueToRender}
+        width={width}
+        height={height}
+        distributionPlotSettings={distributionPlotSettings}
+        chartSettings={chartSettings}
+        environment={environment ?? defaultEnvironment}
+        enableLocalSettings={enableLocalSettings}
+      />
     </SquiggleContainer>
   );
 };

SquiggleErrorAlert component:

@@ -1,4 +1,4 @@
-import { SqError } from "@quri/squiggle-lang";
+import { SqError, SqFrame } from "@quri/squiggle-lang";
 import React from "react";
 import { ErrorAlert } from "./Alert";

@@ -6,6 +6,39 @@ type Props = {
   error: SqError;
 };

-export const SquiggleErrorAlert: React.FC<Props> = ({ error }) => {
-  return <ErrorAlert heading="Error">{error.toString()}</ErrorAlert>;
+const StackTraceFrame: React.FC<{ frame: SqFrame }> = ({ frame }) => {
+  const location = frame.location();
+  return (
+    <div>
+      {frame.name()}
+      {location
+        ? ` at line ${location.start.line}, column ${location.start.column}`
+        : ""}
+    </div>
+  );
+};
+
+const StackTrace: React.FC<Props> = ({ error }) => {
+  const frames = error.getFrameArray();
+  return frames.length ? (
+    <div>
+      <div className="font-medium">Stack trace:</div>
+      <div className="ml-4">
+        {frames.map((frame, i) => (
+          <StackTraceFrame frame={frame} key={i} />
+        ))}
+      </div>
+    </div>
+  ) : null;
+};
+
+export const SquiggleErrorAlert: React.FC<Props> = ({ error }) => {
+  return (
+    <ErrorAlert heading="Error">
+      <div className="space-y-4">
+        <div>{error.toString()}</div>
+        <StackTrace error={error} />
+      </div>
+    </ErrorAlert>
+  );
 };

SquigglePlayground component:

@@ -8,7 +8,11 @@ import React, {
 } from "react";
 import { useForm, UseFormRegister, useWatch } from "react-hook-form";
 import * as yup from "yup";
-import { useMaybeControlledValue, useRunnerState } from "../lib/hooks";
+import {
+  useMaybeControlledValue,
+  useRunnerState,
+  useSquiggle,
+} from "../lib/hooks";
 import { yupResolver } from "@hookform/resolvers/yup";
 import {
   ChartSquareBarIcon,
@@ -24,9 +28,9 @@
 } from "@heroicons/react/solid";
 import clsx from "clsx";

-import { environment } from "@quri/squiggle-lang";
+import { environment, SqProject } from "@quri/squiggle-lang";

-import { SquiggleChart, SquiggleChartProps } from "./SquiggleChart";
+import { SquiggleChartProps } from "./SquiggleChart";
 import { CodeEditor } from "./CodeEditor";
 import { JsonEditor } from "./JsonEditor";
 import { ErrorAlert, SuccessAlert } from "./Alert";
@@ -40,6 +44,8 @@ import { HeadedSection } from "./ui/HeadedSection";
 import { defaultTickFormat } from "../lib/distributionSpecBuilder";
 import { Button } from "./ui/Button";
 import { JsImports } from "../lib/jsImports";
+import { getErrorLocations, getValueToRender } from "../lib/utility";
+import { SquiggleViewer } from "./SquiggleViewer";

 type PlaygroundProps = SquiggleChartProps & {
   /** The initial squiggle string to put in the playground */
@@ -176,7 +182,7 @@ const RunControls: React.FC<{
   const CurrentPlayIcon = isRunning ? RefreshIcon : PlayIcon;

   return (
-    <div className="flex space-x-1 items-center">
+    <div className="flex space-x-1 items-center" data-testid="autorun-controls">
       {autorunMode ? null : (
         <button onClick={run}>
           <CurrentPlayIcon
@@ -245,6 +251,8 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
   onSettingsChange,
   showEditor = true,
   showShareButton = false,
+  continues,
+  project,
 }) => {
   const [code, setCode] = useMaybeControlledValue({
     value: controlledCode,
@@ -282,7 +290,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
     onSettingsChange?.(vars);
   }, [vars, onSettingsChange]);

-  const env: environment = useMemo(
+  const environment: environment = useMemo(
     () => ({
       sampleCount: Number(vars.sampleCount),
       xyPointLength: Number(vars.xyPointLength),
@@ -299,26 +307,53 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
     executionId,
   } = useRunnerState(code);

+  const resultAndBindings = useSquiggle({
+    environment,
+    continues,
+    code: renderedCode,
+    project,
+    jsImports: imports,
+    executionId,
+  });
+
+  const valueToRender = getValueToRender(resultAndBindings);
+
   const squiggleChart =
     renderedCode === "" ? null : (
       <div className="relative">
         {isRunning ? (
           <div className="absolute inset-0 bg-white opacity-0 animate-semi-appear" />
         ) : null}
-        <SquiggleChart
-          code={renderedCode}
-          executionId={executionId}
-          environment={env}
-          {...vars}
-          jsImports={imports}
+        <SquiggleViewer
+          result={valueToRender}
+          environment={environment}
+          height={vars.chartHeight || 150}
+          distributionPlotSettings={{
+            showSummary: vars.showSummary ?? false,
+            logX: vars.logX ?? false,
+            expY: vars.expY ?? false,
+            format: vars.tickFormat,
+            minX: vars.minX,
+            maxX: vars.maxX,
+            title: vars.title,
+            actions: vars.distributionChartActions,
+          }}
+          chartSettings={{
+            start: vars.diagramStart ?? 0,
+            stop: vars.diagramStop ?? 10,
+            count: vars.diagramCount ?? 20,
+          }}
           enableLocalSettings={true}
         />
       </div>
     );

+  const errorLocations = getErrorLocations(resultAndBindings.result);
+
   const firstTab = vars.showEditor ? (
-    <div className="border border-slate-200">
+    <div className="border border-slate-200" data-testid="squiggle-editor">
       <CodeEditor
+        errorLocations={errorLocations}
         value={code}
         onChange={setCode}
         onSubmit={run}
@@ -368,7 +403,9 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
       >
         {tabs}
       </div>
-      <div className="w-1/2 p-2 pl-4">{squiggleChart}</div>
+      <div className="w-1/2 p-2 pl-4" data-testid="playground-result">
+        {squiggleChart}
+      </div>
     </div>
   );

VariableBox component (SquiggleViewer):

@@ -1,4 +1,4 @@
-import { SqValue, SqValueLocation } from "@quri/squiggle-lang";
+import { SqValue } from "@quri/squiggle-lang";
 import React, { useContext, useReducer } from "react";
 import { Tooltip } from "../ui/Tooltip";
 import { LocalItemSettings, MergedItemSettings } from "./utils";
@@ -45,7 +45,7 @@ export const VariableBox: React.FC<VariableBoxProps> = ({
     : location.path.items[location.path.items.length - 1];

   return (
-    <div>
+    <div role={isTopLevel ? "status" : undefined}>
       <header className="inline-flex space-x-1">
         <Tooltip text={heading}>
           <span
@@ -70,7 +70,7 @@ export const VariableBox: React.FC<VariableBoxProps> = ({
       <div className="flex w-full">
         {location.path.items.length ? (
           <div
-            className="border-l-2 border-slate-200 hover:border-indigo-600 w-4 cursor-pointer"
+            className="shrink-0 border-l-2 border-slate-200 hover:border-indigo-600 w-4 cursor-pointer"
             onClick={toggleCollapsed}
           ></div>
         ) : null}

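The `role="status"` attributes added to Alert and to the top-level VariableBox, like the `data-testid` hooks added to the playground, appear to exist so that rendered output can be located from tests; the new autorun tests later in this diff query by test id. The example below is only an illustration of the kind of role-based query these attributes also enable; it is not a test from this PR.

```ts
// Illustration only; not a test from this PR. Uses the same tooling the
// components package already depends on (@testing-library/react, jest-dom).
import { render, screen } from "@testing-library/react";
import * as React from "react";
import "@testing-library/jest-dom";
import { SquiggleChart } from "../src/index";

test("top-level output is reachable via role=status", async () => {
  render(React.createElement(SquiggleChart, { code: "2 + 2" }));
  // role="status" on the top-level VariableBox (and on Alert) lets tests use
  // an accessible query instead of reaching into CSS class names.
  const statusNodes = await screen.findAllByRole("status");
  expect(statusNodes.length).toBeGreaterThan(0);
});
```
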
useSquiggle hook:

@@ -1,36 +1,61 @@
-import { SqProject, SqValue } from "@quri/squiggle-lang";
+import {
+  result,
+  SqError,
+  SqProject,
+  SqRecord,
+  SqValue,
+  environment,
+} from "@quri/squiggle-lang";
 import { useEffect, useMemo } from "react";
 import { JsImports, jsImportsToSquiggleCode } from "../jsImports";
 import * as uuid from "uuid";

 type SquiggleArgs = {
+  environment?: environment;
   code: string;
   executionId?: number;
   jsImports?: JsImports;
-  project: SqProject;
-  continues: string[];
+  project?: SqProject;
+  continues?: string[];
   onChange?: (expr: SqValue | undefined, sourceName: string) => void;
 };

-const importSourceName = (sourceName: string) => "imports-" + sourceName;
+export type ResultAndBindings = {
+  result: result<SqValue, SqError>;
+  bindings: SqRecord;
+};
+
+const importSourceName = (sourceName: string) => "imports-" + sourceName;
+const defaultContinues = [];
+
+export const useSquiggle = (args: SquiggleArgs): ResultAndBindings => {
+  const project = useMemo(() => {
+    if (args.project) {
+      return args.project;
+    } else {
+      const p = SqProject.create();
+      if (args.environment) {
+        p.setEnvironment(args.environment);
+      }
+      return p;
+    }
+  }, [args.project, args.environment]);

-export const useSquiggle = (args: SquiggleArgs) => {
   const sourceName = useMemo(() => uuid.v4(), []);

-  const env = args.project.getEnvironment();
+  const env = project.getEnvironment();
+  const continues = args.continues || defaultContinues;

   const result = useMemo(
     () => {
-      const project = args.project;
-
       project.setSource(sourceName, args.code);
-      let continues = args.continues;
+      let fullContinues = continues;
       if (args.jsImports && Object.keys(args.jsImports).length) {
         const importsSource = jsImportsToSquiggleCode(args.jsImports);
         project.setSource(importSourceName(sourceName), importsSource);
-        continues = args.continues.concat(importSourceName(sourceName));
+        fullContinues = continues.concat(importSourceName(sourceName));
       }
-      project.setContinues(sourceName, continues);
+      project.setContinues(sourceName, fullContinues);
       project.run(sourceName);
       const result = project.getResult(sourceName);
       const bindings = project.getBindings(sourceName);
@@ -45,8 +70,8 @@ export const useSquiggle = (args: SquiggleArgs) => {
       args.jsImports,
       args.executionId,
       sourceName,
-      args.continues,
-      args.project,
+      continues,
+      project,
       env,
     ]
   );
@@ -62,11 +87,11 @@ export const useSquiggle = (args: SquiggleArgs) => {

   useEffect(() => {
     return () => {
-      args.project.removeSource(sourceName);
-      if (args.project.getSource(importSourceName(sourceName)))
-        args.project.removeSource(importSourceName(sourceName));
+      project.removeSource(sourceName);
+      if (project.getSource(importSourceName(sourceName)))
+        project.removeSource(importSourceName(sourceName));
     };
-  }, [args.project, sourceName]);
+  }, [project, sourceName]);

   return result;
 };

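Taken together, the hook changes above make `project`, `continues`, and `environment` optional and move `SqProject` creation into the hook itself, which now returns a `ResultAndBindings` pair. The sketch below condenses how the components in this diff consume the new shape; the `useSquiggleRenderData` wrapper is a hypothetical helper, not code from the PR.

```ts
// Condensed illustration of the new hook shape, pieced together from the
// SquiggleEditor and utility changes in this diff; useSquiggleRenderData is
// a hypothetical helper, not shipped code.
import { useSquiggle } from "../lib/hooks";
import { getErrorLocations, getValueToRender } from "../lib/utility";

export function useSquiggleRenderData(code: string) {
  // No `project` passed: useSquiggle creates its own SqProject and applies
  // the optional `environment` argument when one is given.
  const resultAndBindings = useSquiggle({ code });

  return {
    // Void results fall back to rendering the bindings record.
    valueToRender: getValueToRender(resultAndBindings),
    // Error locations feed CodeEditor's `errorLocations` prop (Ace markers).
    errorLocations: getErrorLocations(resultAndBindings.result),
  };
}
```
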
Utility helpers (lib/utility):

@@ -1,4 +1,5 @@
-import { result } from "@quri/squiggle-lang";
+import { result, resultMap, SqValueTag } from "@quri/squiggle-lang";
+import { ResultAndBindings } from "./hooks/useSquiggle";

 export function flattenResult<a, b>(x: result<a, b>[]): result<a[], b> {
   if (x.length === 0) {
@@ -35,3 +36,18 @@ export function all(arr: boolean[]): boolean {
 export function some(arr: boolean[]): boolean {
   return arr.reduce((x, y) => x || y, false);
 }
+
+export function getValueToRender({ result, bindings }: ResultAndBindings) {
+  return resultMap(result, (value) =>
+    value.tag === SqValueTag.Void ? bindings.asValue() : value
+  );
+}
+
+export function getErrorLocations(result: ResultAndBindings["result"]) {
+  if (result.tag === "Error") {
+    const location = result.value.location();
+    return location ? [location] : [];
+  } else {
+    return [];
+  }
+}

Editor stylesheet:

@@ -22,3 +22,8 @@ but this line is still necessary for proper initialization of `--tw-*` variables
 .ace_cursor {
   border-left: 2px solid !important;
 }
+
+.ace-error-marker {
+  position: absolute;
+  border-bottom: 1px solid red;
+}

55 packages/components/test/autorun.test.tsx Normal file
@@ -0,0 +1,55 @@
+import { render, screen, waitFor, within } from "@testing-library/react";
+import userEvent from "@testing-library/user-event";
+import * as React from "react";
+import "@testing-library/jest-dom";
+import { SquigglePlayground } from "../src/index";
+
+test("Autorun is default", async () => {
+  render(<SquigglePlayground code="70*30" />);
+  await waitFor(() =>
+    expect(screen.getByTestId("playground-result")).toHaveTextContent("2100")
+  );
+});
+
+test("Autorun can be switched off", async () => {
+  const user = userEvent.setup();
+  render(<SquigglePlayground code="70*30" />);
+
+  expect(screen.getByTestId("autorun-controls")).toHaveTextContent("Autorun");
+
+  await waitFor(() =>
+    expect(screen.getByTestId("playground-result")).toHaveTextContent("2100")
+  );
+
+  await user.click(screen.getByText("Autorun")); // disable
+  expect(screen.getByTestId("autorun-controls")).toHaveTextContent("Paused");
+  expect(screen.getByTestId("autorun-controls")).not.toHaveTextContent(
+    "Autorun"
+  );
+
+  await user.click(screen.getByText("Paused")); // enable autorun again
+  expect(screen.getByTestId("autorun-controls")).toHaveTextContent("Autorun");
+
+  // we should replace the code here, but it's hard to update react-ace state via user events: https://github.com/securingsincity/react-ace/issues/923
+  // ...or replace react-ace with something else
+
+  // TODO:
+
+  /*
+  const editor = screen
+    .getByTestId("squiggle-editor")
+    .querySelector(".ace_editor") as HTMLElement;
+  editor.focus();
+  // await user.clear(editor);
+  await userEvent.paste("40*40"); // https://github.com/securingsincity/react-ace/issues/923#issuecomment-755502696
+  screen.debug(editor);
+
+  // this makes the tests slower, but it's hard to test otherwise that the code _didn't_ execute
+  await new Promise((r) => setTimeout(r, 300));
+  expect(screen.getByTestId("playground-result")).toHaveTextContent("2100"); // still the old value
+
+  await waitFor(() =>
+    expect(screen.getByTestId("playground-result")).toHaveTextContent("1600")
+  );
+  */
+});
@@ -1,9 +1,14 @@
-import { render } from "@testing-library/react";
+import { render, screen } from "@testing-library/react";
 import React from "react";
 import "@testing-library/jest-dom";
-import { SquiggleChart } from "../src/index";
+import {
+  SquiggleChart,
+  SquiggleEditor,
+  SquigglePlayground,
+} from "../src/index";
+import { SqProject } from "@quri/squiggle-lang";

-test("Logs nothing on render", async () => {
+test("Chart logs nothing on render", async () => {
   const { unmount } = render(<SquiggleChart code={"normal(0, 1)"} />);
   unmount();

@@ -11,3 +16,38 @@ test("Logs nothing on render", async () => {
   expect(console.warn).not.toBeCalled();
   expect(console.error).not.toBeCalled();
 });

+test("Editor logs nothing on render", async () => {
+  const { unmount } = render(<SquiggleEditor code={"normal(0, 1)"} />);
+  unmount();
+
+  expect(console.log).not.toBeCalled();
+  expect(console.warn).not.toBeCalled();
+  expect(console.error).not.toBeCalled();
+});
+
+test("Project dependencies work in editors", async () => {
+  const project = SqProject.create();
+
+  render(<SquiggleEditor code={"x = 1"} project={project} />);
+  const source = project.getSourceIds()[0];
+  const { container } = render(
+    <SquiggleEditor code={"x + 1"} project={project} continues={[source]} />
+  );
+  expect(container).toHaveTextContent("2");
+});
+
+test("Project dependencies work in playgrounds", async () => {
+  const project = SqProject.create();
+  project.setSource("depend", "x = 1");
+
+  render(
+    <SquigglePlayground
+      code={"x + 1"}
+      project={project}
+      continues={["depend"]}
+    />
+  );
+  // We must await here because SquigglePlayground loads results asynchronously
+  expect(await screen.findByRole("status")).toHaveTextContent("2");
+});
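The two dependency tests above share one `SqProject` between components through the `continues` prop. The same chain can be written directly against the project API, without React; a small sketch (the method names come from this diff, the printed shape of `getResult` is an assumption):

import { SqProject } from "@quri/squiggle-lang";

const project = SqProject.create();
project.setSource("depend", "x = 1");        // the source that other code builds on
project.setSource("main", "x + 1");          // depends on "depend"
project.setContinues("main", ["depend"]);    // same role as the continues prop
project.run("main");
console.log(project.getResult("main"));      // expected to describe the value 2 on success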
4 packages/components/vercel.json Normal file
@@ -0,0 +1,4 @@
+{
+  "buildCommand": "cd ../.. && npx turbo run build --filter=@quri/squiggle-components",
+  "outputDirectory": "storybook-static"
+}
@@ -3,6 +3,8 @@ lib
 *.bs.js
 *.gen.tsx
 .nyc_output/
-_coverage/
+coverage/
 .cache/
 Reducer_Peggy_GeneratedParser.js
+ReducerProject_IncludeParser.js
+src/rescript/Reducer/Reducer_Peggy/helpers.js
@@ -32,7 +32,10 @@ describe("dotSubtract", () => {
 */
 Skip.test("mean of normal minus exponential (property)", () => {
   assert_(
-    property2(float_(), floatRange(1e-5, 1e5), (mean, rate) => {
+    property2(
+      float_(),
+      floatRange(1e-5, 1e5),
+      (mean, rate) => {
       // We limit ourselves to stdev=1 so that the integral is trivial
       let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
         ~env,
@@ -50,7 +53,8 @@ describe("dotSubtract", () => {
       | Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
       | Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
       }
-    }),
+      },
+    ),
   )
   pass
 })
@@ -40,7 +40,9 @@ let algebraicPower = algebraicPower(~env)

 describe("(Algebraic) addition of distributions", () => {
   describe("mean", () => {
-    test("normal(mean=5) + normal(mean=20)", () => {
+    test(
+      "normal(mean=5) + normal(mean=20)",
+      () => {
       normalDist5
       ->algebraicAdd(normalDist20)
       ->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
@@ -49,9 +51,12 @@ describe("(Algebraic) addition of distributions", () => {
       ->E.R.toExn("Expected float", _)
       ->expect
       ->toBe(Some(2.5e1))
-    })
+      },
+    )

-    test("uniform(low=9, high=10) + beta(alpha=2, beta=5)", () => {
+    test(
+      "uniform(low=9, high=10) + beta(alpha=2, beta=5)",
+      () => {
       // let uniformMean = (9.0 +. 10.0) /. 2.0
       // let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
       let received =
@@ -67,8 +72,11 @@ describe("(Algebraic) addition of distributions", () => {
       // sometimes it works with ~digits=2.
       | Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean)
       }
-    })
-    test("beta(alpha=2, beta=5) + uniform(low=9, high=10)", () => {
+      },
+    )
+    test(
+      "beta(alpha=2, beta=5) + uniform(low=9, high=10)",
+      () => {
       // let uniformMean = (9.0 +. 10.0) /. 2.0
       // let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
       let received =
@@ -84,7 +92,8 @@ describe("(Algebraic) addition of distributions", () => {
       // sometimes it works with ~digits=2.
       | Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean)
       }
-    })
+      },
+    )
   })
   describe("pdf", () => {
     // TEST IS WRONG. SEE STDEV ADDITION EXPRESSION.
@@ -122,7 +131,9 @@ describe("(Algebraic) addition of distributions", () => {
       }
       },
     )
-    test("(normal(mean=10) + normal(mean=10)).pdf(1.9e1)", () => {
+    test(
+      "(normal(mean=10) + normal(mean=10)).pdf(1.9e1)",
+      () => {
       let received =
         normalDist20
         ->Ok
@@ -150,8 +161,11 @@ describe("(Algebraic) addition of distributions", () => {
       | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
       }
       }
-    })
-    test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)", () => {
+      },
+    )
+    test(
+      "(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)",
+      () => {
       let received =
         uniformDist
         ->algebraicAdd(betaDist)
@@ -166,8 +180,11 @@ describe("(Algebraic) addition of distributions", () => {
       // This value was calculated by a python script
       | Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
       }
-    })
-    test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)", () => {
+      },
+    )
+    test(
+      "(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)",
+      () => {
       let received =
         betaDist
         ->algebraicAdd(uniformDist)
@@ -180,10 +197,14 @@ describe("(Algebraic) addition of distributions", () => {
       // This is nondeterministic.
       | Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
       }
-    })
+      },
+    )
   })
   describe("cdf", () => {
-    testAll("(normal(mean=5) + normal(mean=5)).cdf (imprecise)", list{6e0, 8e0, 1e1, 1.2e1}, x => {
+    testAll(
+      "(normal(mean=5) + normal(mean=5)).cdf (imprecise)",
+      list{6e0, 8e0, 1e1, 1.2e1},
+      x => {
       let received =
         normalDist10
         ->Ok
@@ -212,8 +233,11 @@ describe("(Algebraic) addition of distributions", () => {
       | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
       }
       }
-    })
-    test("(normal(mean=10) + normal(mean=10)).cdf(1.25e1)", () => {
+      },
+    )
+    test(
+      "(normal(mean=10) + normal(mean=10)).cdf(1.25e1)",
+      () => {
       let received =
         normalDist20
         ->Ok
@@ -241,8 +265,11 @@ describe("(Algebraic) addition of distributions", () => {
       | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
       }
       }
-    })
-    test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)", () => {
+      },
+    )
+    test(
+      "(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)",
+      () => {
       let received =
         uniformDist
         ->algebraicAdd(betaDist)
@@ -256,8 +283,11 @@ describe("(Algebraic) addition of distributions", () => {
       // The value was calculated externally using a python script
       | Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
       }
-    })
-    test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)", () => {
+      },
+    )
+    test(
+      "(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)",
+      () => {
       let received =
         betaDist
         ->algebraicAdd(uniformDist)
@@ -271,11 +301,15 @@ describe("(Algebraic) addition of distributions", () => {
       // The value was calculated externally using a python script
       | Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
       }
-    })
+      },
+    )
   })

   describe("inv", () => {
-    testAll("(normal(mean=5) + normal(mean=5)).inv (imprecise)", list{5e-2, 4.2e-3, 9e-3}, x => {
+    testAll(
+      "(normal(mean=5) + normal(mean=5)).inv (imprecise)",
+      list{5e-2, 4.2e-3, 9e-3},
+      x => {
       let received =
         normalDist10
         ->Ok
@@ -304,8 +338,11 @@ describe("(Algebraic) addition of distributions", () => {
       | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
       }
       }
-    })
-    test("(normal(mean=10) + normal(mean=10)).inv(1e-1)", () => {
+      },
+    )
+    test(
+      "(normal(mean=10) + normal(mean=10)).inv(1e-1)",
+      () => {
       let received =
         normalDist20
         ->Ok
@@ -333,8 +370,11 @@ describe("(Algebraic) addition of distributions", () => {
       | Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
       }
       }
-    })
-    test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)", () => {
+      },
+    )
+    test(
+      "(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)",
+      () => {
       let received =
         uniformDist
         ->algebraicAdd(betaDist)
@@ -348,8 +388,11 @@ describe("(Algebraic) addition of distributions", () => {
       // sometimes it works with ~digits=2.
       | Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0)
       }
-    })
-    test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)", () => {
+      },
+    )
+    test(
+      "(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)",
+      () => {
       let received =
         betaDist
         ->algebraicAdd(uniformDist)
@@ -363,6 +406,7 @@ describe("(Algebraic) addition of distributions", () => {
       // sometimes it works with ~digits=2.
       | Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0)
       }
-    })
+      },
+    )
   })
 })
@@ -3,7 +3,7 @@ This is the most basic file in our invariants family of tests.

 Validate that the addition of means equals the mean of the addition, similar for subtraction and multiplication.

-Details in https://develop--squiggle-documentation.netlify.app/docs/internal/invariants/
+Details in https://squiggle-language.com/docs/internal/invariants/

 Note: epsilon of 1e3 means the invariants are, in general, not being satisfied.
 */
@@ -87,14 +87,22 @@ describe("Means are invariant", () => {
   let testAddInvariant = (t1, t2) =>
     E.R.liftM2(testAdditionMean, t1, t2)->E.R.toExn("Means were not invariant", _)

-  testAll("with two of the same distribution", distributions, dist => {
+  testAll(
+    "with two of the same distribution",
+    distributions,
+    dist => {
     testAddInvariant(dist, dist)
-  })
+    },
+  )

-  testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
+  testAll(
+    "with two different distributions",
+    pairsOfDifferentDistributions,
+    dists => {
     let (dist1, dist2) = dists
     testAddInvariant(dist1, dist2)
-  })
+    },
+  )

   testAll(
     "with two different distributions in swapped order",
@@ -116,14 +124,22 @@ describe("Means are invariant", () => {
   let testSubtractInvariant = (t1, t2) =>
     E.R.liftM2(testSubtractionMean, t1, t2)->E.R.toExn("Means were not invariant", _)

-  testAll("with two of the same distribution", distributions, dist => {
+  testAll(
+    "with two of the same distribution",
+    distributions,
+    dist => {
     testSubtractInvariant(dist, dist)
-  })
+    },
+  )

-  testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
+  testAll(
+    "with two different distributions",
+    pairsOfDifferentDistributions,
+    dists => {
     let (dist1, dist2) = dists
     testSubtractInvariant(dist1, dist2)
-  })
+    },
+  )

   testAll(
     "with two different distributions in swapped order",
@@ -145,14 +161,22 @@ describe("Means are invariant", () => {
   let testMultiplicationInvariant = (t1, t2) =>
     E.R.liftM2(testMultiplicationMean, t1, t2)->E.R.toExn("Means were not invariant", _)

-  testAll("with two of the same distribution", distributions, dist => {
+  testAll(
+    "with two of the same distribution",
+    distributions,
+    dist => {
     testMultiplicationInvariant(dist, dist)
-  })
+    },
+  )

-  testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
+  testAll(
+    "with two different distributions",
+    pairsOfDifferentDistributions,
+    dists => {
     let (dist1, dist2) = dists
     testMultiplicationInvariant(dist1, dist2)
-  })
+    },
+  )

   testAll(
     "with two different distributions in swapped order",
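Spelled out, the invariant these tests exercise is linearity of expectation plus the product rule for independent draws (the algebraic combination samples its two operands independently):

\mathbb{E}[X + Y] = \mathbb{E}[X] + \mathbb{E}[Y], \qquad \mathbb{E}[X - Y] = \mathbb{E}[X] - \mathbb{E}[Y], \qquad \mathbb{E}[X \cdot Y] = \mathbb{E}[X]\,\mathbb{E}[Y] \quad (X, Y \text{ independent})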
@@ -17,10 +17,9 @@ describe("klDivergence: continuous -> continuous -> float", () => {
   let answer =
     uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
   let prediction =
-    uniformMakeR(
-      lowPrediction,
-      highPrediction,
-    )->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
+    uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(
+      s => DistributionTypes.ArgumentError(s),
+    )
   // integral along the support of the answer of answer.pdf(x) times log of prediction.pdf(x) divided by answer.pdf(x) dx
   let analyticalKl = Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))
   let kl = E.R.liftJoin2(klDivergence, prediction, answer)
@@ -183,9 +182,9 @@ describe("combineAlongSupportOfSecondArgument0", () => {
   let answer =
     uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
   let prediction =
-    uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
-      s,
-    ))
+    uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(
+      s => DistributionTypes.ArgumentError(s),
+    )
   let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
   let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)
@@ -3,7 +3,7 @@ open Expect
 open TestHelpers

 // TODO: use Normal.make (but preferably after teh new validation dispatch is in)
-let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
+let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean, stdev}))

 describe("(Symbolic) normalize", () => {
   testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
@@ -47,10 +47,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (low, medium, high) = tup
       let meanValue = run(
-        FromDist(
-          #ToFloat(#Mean),
-          DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
-        ),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Triangular({low, medium, high}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
     },
@@ -63,7 +60,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (alpha, beta) = tup
       let meanValue = run(
-        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha, beta}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
     },
@@ -84,8 +81,8 @@ describe("(Symbolic) mean", () => {
       let (mean, stdev) = tup
       let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
       let meanValue =
-        betaDistribution->E.R2.fmap(d =>
-          run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic))
+        betaDistribution->E.R2.fmap(
+          d => run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic)),
         )
       switch meanValue {
       | Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)
@@ -100,7 +97,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (mu, sigma) = tup
       let meanValue = run(
-        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu, sigma}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
     },
@@ -112,7 +109,7 @@ describe("(Symbolic) mean", () => {
     tup => {
       let (low, high) = tup
       let meanValue = run(
-        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
+        FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low, high}))),
       )
       meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
     },
@@ -33,12 +33,18 @@ describe("Bindings", () => {
   let value2 = Reducer_T.IEvNumber(5.)
   let extendedBindings = bindings->Bindings.extend->Bindings.set("value", value2)

-  test("get on extended", () => {
+  test(
+    "get on extended",
+    () => {
     expect(extendedBindings->Bindings.get("value")) == Some(value2)
-  })
+    },
+  )

-  test("get on original", () => {
+  test(
+    "get on original",
+    () => {
     expect(bindings->Bindings.get("value")) == Some(value)
-  })
+    },
+  )
  })
 })
@@ -40,14 +40,23 @@ describe("Namespace", () => {

   let nsMerged = Namespace.mergeMany([ns, ns1, ns2])

-  test("merge many 1", () => {
+  test(
+    "merge many 1",
+    () => {
     expect(nsMerged->Namespace.get("x1")) == Some(x1)
-  })
-  test("merge many 2", () => {
+    },
+  )
+  test(
+    "merge many 2",
+    () => {
     expect(nsMerged->Namespace.get("x4")) == Some(x4)
-  })
-  test("merge many 3", () => {
+    },
+  )
+  test(
+    "merge many 3",
+    () => {
     expect(nsMerged->Namespace.get("value")) == Some(value)
-  })
+    },
+  )
  })
 })
@@ -182,7 +182,7 @@ describe("Peggy parse", () => {
       "a.p1 to a.p2",
       "{(:credibleIntervalToDistribution (:$_atIndex_$ :a 'p1') (:$_atIndex_$ :a 'p2'))}",
     ) // lower than post
-    testParse("1 to 2 + 3", "{(:add (:credibleIntervalToDistribution 1 2) 3)}") // higher than binary operators
+    testParse("1 to 2 + 3", "{(:credibleIntervalToDistribution 1 (:add 2 3))}")
     testParse(
       "1->add(2) to 3->add(4) -> add(4)",
       "{(:credibleIntervalToDistribution (:add 1 2) (:add (:add 3 4) 4))}",
@@ -197,7 +197,7 @@ describe("Peggy parse", () => {

   describe("lambda", () => {
     testParse("{|x| x}", "{{|:x| :x}}")
-    testParse("f={|x| x}", "{:f = {{|:x| :x}}}")
+    testParse("f={|x| x}", "{:f = {|:x| :x}}")
     testParse("f(x)=x", "{:f = {|:x| {:x}}}") // Function definitions are lambda assignments
     testParse("f(x)=x ? 1 : 0", "{:f = {|:x| {(::$$_ternary_$$ :x 1 0)}}}") // Function definitions are lambda assignments
   })
@@ -9,12 +9,12 @@ open Jest
 open Expect

 let expectParseToBe = (expr, answer) =>
-  Parse.parse(expr)->Parse.toStringResult->expect->toBe(answer)
+  Parse.parse(expr, "test")->Parse.toStringResult->expect->toBe(answer)

 let testParse = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))

-let expectToExpressionToBe = (expr, answer, ~v="_", ()) => {
-  let rExpr = Parse.parse(expr)->Result.map(ToExpression.fromNode)
+let expectExpressionToBe = (expr, answer, ~v="_", ()) => {
+  let rExpr = Parse.parse(expr, "test")->Result.map(ToExpression.fromNode)
   let a1 = rExpr->ExpressionT.toStringResultOkless

   if v == "_" {
@@ -22,6 +22,7 @@ let expectToExpressionToBe = (expr, answer, ~v="_", ()) => {
   } else {
     let a2 =
       rExpr
+      ->E.R2.errMap(e => e->SqError.fromParseError)
       ->Result.flatMap(expr => Expression.BackCompatible.evaluate(expr))
       ->Reducer_Value.toStringResultOkless
     (a1, a2)->expect->toEqual((answer, v))
@@ -29,16 +30,16 @@ let expectToExpressionToBe = (expr, answer, ~v="_", ()) => {
 }

 let testToExpression = (expr, answer, ~v="_", ()) =>
-  test(expr, () => expectToExpressionToBe(expr, answer, ~v, ()))
+  test(expr, () => expectExpressionToBe(expr, answer, ~v, ()))

 module MyOnly = {
   let testParse = (expr, answer) => Only.test(expr, () => expectParseToBe(expr, answer))
   let testToExpression = (expr, answer, ~v="_", ()) =>
-    Only.test(expr, () => expectToExpressionToBe(expr, answer, ~v, ()))
+    Only.test(expr, () => expectExpressionToBe(expr, answer, ~v, ()))
 }

 module MySkip = {
   let testParse = (expr, answer) => Skip.test(expr, () => expectParseToBe(expr, answer))
   let testToExpression = (expr, answer, ~v="_", ()) =>
-    Skip.test(expr, () => expectToExpressionToBe(expr, answer, ~v, ()))
+    Skip.test(expr, () => expectExpressionToBe(expr, answer, ~v, ()))
 }
@@ -75,7 +75,9 @@ describe("Peggy to Expression", () => {
     testToExpression("false ? 1 : 0", "false ? (1) : (0)", ~v="0", ())
     testToExpression("true ? 1 : false ? 2 : 0", "true ? (1) : (false ? (2) : (0))", ~v="1", ()) // nested ternary
     testToExpression("false ? 1 : false ? 2 : 0", "false ? (1) : (false ? (2) : (0))", ~v="0", ()) // nested ternary
-    describe("ternary bindings", () => {
+    describe(
+      "ternary bindings",
+      () => {
       testToExpression(
         // expression binding
         "f(a) = a > 5 ? 1 : 0; f(6)",
@@ -97,7 +99,8 @@ describe("Peggy to Expression", () => {
         ~v="6",
         (),
       )
-    })
+      },
+    )
   })

   describe("if then else", () => {
@@ -135,7 +138,7 @@ describe("Peggy to Expression", () => {

   describe("lambda", () => {
     testToExpression("{|x| x}", "{|x| x}", ~v="lambda(x=>internal code)", ())
-    testToExpression("f={|x| x}", "f = {{|x| x}}", ())
+    testToExpression("f={|x| x}", "f = {|x| x}", ())
     testToExpression("f(x)=x", "f = {|x| {x}}", ()) // Function definitions are lambda assignments
     testToExpression("f(x)=x ? 1 : 0", "f = {|x| {x ? (1) : (0)}}", ())
   })
@@ -1,4 +1,3 @@
-module ErrorValue = Reducer_ErrorValue
 module Expression = Reducer_Expression
 module ExpressionT = Reducer_Expression_T

@@ -9,7 +8,7 @@ let unwrapRecord = rValue =>
   rValue->Belt.Result.flatMap(value =>
     switch value {
     | Reducer_T.IEvRecord(aRecord) => Ok(aRecord)
-    | _ => ErrorValue.RETodo("TODO: Internal bindings must be returned")->Error
+    | _ => SqError.Message.RETodo("TODO: Internal bindings must be returned")->Error
     }
   )

@@ -23,7 +22,7 @@ let expectEvalError = (code: string) =>
   Expression.BackCompatible.evaluateString(code)
   ->Reducer_Value.toStringResult
   ->expect
-  ->toMatch("Error\(")
+  ->toMatch("Error\\(")

 let testParseToBe = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))
 let testDescriptionParseToBe = (desc, expr, answer) =>

@@ -37,14 +37,16 @@ describe("eval", () => {
   test("index", () => expectEvalToBe("r = {a: 1}; r.a", "Ok(1)"))
   test("index", () => expectEvalToBe("r = {a: 1}; r.b", "Error(Record property not found: b)"))
   testEvalError("{a: 1}.b") // invalid syntax
-  test("always the same property ending", () =>
+  test(
+    "always the same property ending",
+    () =>
     expectEvalToBe(
       `{
       a: 1,
       b: 2,
      }`,
       "Ok({a: 1,b: 2})",
-    )
+    ),
   )
 })

@@ -70,7 +72,7 @@ describe("test exceptions", () => {
   testDescriptionEvalToBe(
     "javascript exception",
     "javascriptraise('div by 0')",
-    "Error(Error: 'div by 0')",
+    "Error(JS Exception: Error: 'div by 0')",
   )
   // testDescriptionEvalToBe(
   //   "rescript exception",
@@ -78,3 +80,33 @@ describe("test exceptions", () => {
   //   "Error(TODO: unhandled rescript exception)",
   // )
 })

+describe("stacktraces", () => {
+  test("nested calls", () => {
+    open Expect
+
+    let error =
+      Expression.BackCompatible.evaluateString(`
+  f(x) = {
+    y = "a"
+    x + y
+  }
+  g = {|x| f(x)}
+  h(x) = g(x)
+  h(5)
+`)
+      ->E.R.getError
+      ->E.O2.toExn("oops")
+      ->SqError.toStringWithStackTrace
+
+    expect(
+      error,
+    )->toBe(`Error: There are function matches for add(), but with different arguments: [add(number, number)]; [add(distribution, number)]; [add(number, distribution)]; [add(distribution, distribution)]; [add(date, duration)]; [add(duration, duration)]
+Stack trace:
+  f at line 4, column 5
+  g at line 6, column 12
+  h at line 7, column 10
+  <top> at line 8, column 3
+`)
+  })
+})
|
||||||
let mainIncludes = Project.getIncludes(project, "main")
|
let mainIncludes = Project.getIncludes(project, "main")
|
||||||
switch mainIncludes {
|
switch mainIncludes {
|
||||||
| Ok(includes) => expect(includes) == ["common"]
|
| Ok(includes) => expect(includes) == ["common"]
|
||||||
| Error(error) => fail(error->Reducer_ErrorValue.errorToString)
|
| Error(error) => fail(error->SqError.toString)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
test("past chain", () => {
|
test("past chain", () => {
|
||||||
|
@ -60,7 +60,7 @@ x=1`,
|
||||||
let mainIncludes = Project.getIncludes(project, "main")
|
let mainIncludes = Project.getIncludes(project, "main")
|
||||||
switch mainIncludes {
|
switch mainIncludes {
|
||||||
| Ok(includes) => expect(includes) == ["common", "myModule"]
|
| Ok(includes) => expect(includes) == ["common", "myModule"]
|
||||||
| Error(error) => fail(error->Reducer_ErrorValue.errorToString)
|
| Error(error) => fail(error->SqError.toString)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -99,7 +99,7 @@ x=1`,
|
||||||
let mainIncludes = Project.getIncludes(project, "main")
|
let mainIncludes = Project.getIncludes(project, "main")
|
||||||
switch mainIncludes {
|
switch mainIncludes {
|
||||||
| Ok(includes) => expect(includes) == ["common", "common2", "myModule"]
|
| Ok(includes) => expect(includes) == ["common", "common2", "myModule"]
|
||||||
| Error(error) => fail(error->Reducer_ErrorValue.errorToString)
|
| Error(error) => fail(error->SqError.toString)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
test("direct past chain", () => {
|
test("direct past chain", () => {
|
||||||
|
|
|
@@ -11,7 +11,9 @@ describe("ReducerProject Tutorial", () => {
     /*
 Case "Running a single source".
 */
-    test("run", () => {
+    test(
+      "run",
+      () => {
       /* Let's start with running a single source and getting Result as well as the Bindings
 First you need to create a project. A project is a collection of sources.
 Project takes care of the dependencies between the sources, correct compilation and run order.
@@ -51,9 +53,12 @@ Case "Running a single source".
       (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
         ("Ok(3)", "{}")
       /* You've got 3 with empty bindings. */
-    })
+      },
+    )

-    test("run summary", () => {
+    test(
+      "run summary",
+      () => {
       let project = Project.createProject()
       project->Project.setSource("main", "1 + 2")
       project->Project.runAll
@@ -64,9 +69,12 @@ Case "Running a single source".
         result->Reducer_Value.toStringResult,
         bindings->Reducer_T.IEvRecord->Reducer_Value.toString,
       )->expect == ("Ok(3)", "{}")
-    })
+      },
+    )

-    test("run with an environment", () => {
+    test(
+      "run with an environment",
+      () => {
       /* Running the source code like above allows you to set a custom environment */
       let project = Project.createProject()

@@ -78,15 +86,19 @@ Case "Running a single source".
       let result = project->Project.getResult("main")
       let _bindings = project->Project.getBindings("main")
       result->Reducer_Value.toStringResult->expect == "Ok(3)"
-    })
+      },
+    )

-    test("shortcut", () => {
+    test(
+      "shortcut",
+      () => {
       /* If you are running single source without includes and you don't need a custom environment, you can use the shortcut. */
       /* Examples above was to prepare you for the multi source tutorial. */
       let (result, bindings) = Project.evaluate("1+2")
       (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
         ("Ok(3)", "{}")
-    })
+      },
+    )
   })
 })
@@ -10,7 +10,9 @@ describe("ReducerProject Tutorial", () => {
   describe("Multi source", () => {
     /*
 Case "Running multiple sources" */
-    test("Chaining", () => {
+    test(
+      "Chaining",
+      () => {
       let project = Project.createProject()
       /* This time let's add 3 sources and chain them together */
       project->Project.setSource("source1", "x=1")
@@ -32,9 +34,12 @@ describe("ReducerProject Tutorial", () => {

       (result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
         ("Ok(())", "{z: 3}")
-    })
+      },
+    )

-    test("Depending", () => {
+    test(
+      "Depending",
+      () => {
       /* Instead of chaining the sources, we could have a dependency tree */
       /* The point here is that any source can depend on multiple sources */
       let project = Project.createProject()
@@ -56,9 +61,12 @@ describe("ReducerProject Tutorial", () => {

       (result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
         ("Ok(())", "{z: 3}")
-    })
+      },
+    )

-    test("Intro to including", () => {
+    test(
+      "Intro to including",
+      () => {
       /* Though it would not be practical for a storybook,
 let's write the same project above with includes.
 You will see that parsing includes is setting the dependencies the same way as before. */
@@ -99,6 +107,7 @@ describe("ReducerProject Tutorial", () => {
 - And the depended source1 and source2 is not already there in the project
 - If you knew the includes before hand there would not be point of the include directive.
 More on those on the next section. */
-    })
+      },
+    )
   })
 })
@@ -24,35 +24,46 @@ Here we will finally proceed to a real life scenario. */
     )
     /* We need to parse includes after changing the source */
     project->Project.parseIncludes("main")
-    test("getDependencies", () => {
+    test(
+      "getDependencies",
+      () => {
       /* Parse includes has set the dependencies */
       project->Project.getDependencies("main")->expect == ["common"]
       /* If there were no includes than there would be no dependencies */
       /* However if there was a syntax error at includes then would be no dependencies also */
       /* Therefore looking at dependencies is not the right way to load includes */
       /* getDependencies does not distinguish between setContinues or parseIncludes */
-    })
-    test("getIncludes", () => {
+      },
+    )
+    test(
+      "getIncludes",
+      () => {
       /* Parse includes has set the includes */
       switch project->Project.getIncludes("main") {
       | Ok(includes) => includes->expect == ["common"]
-      | Error(err) => err->Reducer_ErrorValue.errorToString->fail
+      | Error(err) => err->SqError.toString->fail
       }
       /* If the includes cannot be parsed then you get a syntax error.
 Otherwise you get the includes.
 If there is no syntax error then you can load that file and use setSource to add it to the project.
 And so on recursively... */
-    })
-    test("getDependents", () => {
+      },
+    )
+    test(
+      "getDependents",
+      () => {
       /* For any reason, you are able to query what other sources
 include or depend on the current source.
 But you don't need to use this to execute the projects.
 It is provided for completeness of information. */
       project->Project.getDependents("main")->expect == []
       /* Nothing is depending on or including main */
-    })
+      },
+    )

-    describe("Real Like", () => {
+    describe(
+      "Real Like",
+      () => {
       /* Now let's look at recursive and possibly cyclic includes */
       /* There is no function provided to load the include files.
 Because we have no idea if will it be an ordinary function or will it use promises.
@@ -85,10 +96,11 @@ Here we will finally proceed to a real life scenario. */
         let rIncludes = project->Project.getIncludes(sourceName)
         switch rIncludes {
         /* Maybe there is an include syntax error */
-        | Error(err) => err->Reducer_ErrorValue.errorToString->Js.Exn.raiseError
+        | Error(err) => err->SqError.toString->Js.Exn.raiseError

         | Ok(includes) =>
-          includes->Belt.Array.forEach(newIncludeName => {
+          includes->Belt.Array.forEach(
+            newIncludeName => {
             /* We have got one of the new includes.
 Let's load it and add it to the project */
             let newSource = loadSource(newIncludeName)
@@ -97,7 +109,8 @@ Here we will finally proceed to a real life scenario. */
             /* Of course the new source might have includes too. */
             /* Let's recursively load them */
             project->loadIncludesRecursively(newIncludeName, newVisited)
-          })
+            },
+          )
         }
       }
     }
@@ -143,12 +156,18 @@ Here we will finally proceed to a real life scenario. */
       let result = project->Project.getResult("main")
       let bindings = project->Project.getBindings("main")
       /* And see the result and bindings.. */
-      test("recursive includes", () => {
-        (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
-          ("Ok(6)", "{a: 6,b: 2}")
+      test(
+        "recursive includes",
+        () => {
+          (
+            result->Reducer_Value.toStringResult,
+            bindings->Reducer_Value.toStringRecord,
+          )->expect == ("Ok(6)", "{a: 6,b: 2}")
         /* Everything as expected */
-      })
-    })
+        },
+      )
+      },
+    )
   })

   describe("Includes myFile as myVariable", () => {
@@ -163,14 +182,20 @@ Here we will finally proceed to a real life scenario. */
 `,
     )
     Project.parseIncludes(project, "main")
-    test("getDependencies", () => {
+    test(
+      "getDependencies",
+      () => {
       Project.getDependencies(project, "main")->expect == ["common"]
-    })
-    test("getIncludes", () => {
+      },
+    )
+    test(
+      "getIncludes",
+      () => {
       switch Project.getIncludes(project, "main") {
       | Ok(includes) => includes->expect == ["common"]
-      | Error(err) => err->Reducer_ErrorValue.errorToString->fail
+      | Error(err) => err->SqError.toString->fail
       }
-    })
+      },
+    )
   })
 })
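The "Real Like" tutorial above leaves loading of include files to the host application. A rough TypeScript equivalent of that loop, assuming the TypeScript binding mirrors the Project functions used in this diff (parseIncludes, getIncludes, getSource, setSource); the two declared helpers are placeholders for the sketch, not published API:

import { SqProject } from "@quri/squiggle-lang";

// Assumed for the sketch: host-side file access and unwrapping of whatever
// result type getIncludes exposes on the JS side.
declare function loadSource(name: string): string;
declare function getIncludesOrThrow(project: SqProject, sourceName: string): string[];

export function loadIncludesRecursively(
  project: SqProject,
  sourceName: string,
  visited: string[] = []
): void {
  if (visited.includes(sourceName)) return; // cycle guard, mirrors the newVisited bookkeeping
  const newVisited = [...visited, sourceName];

  for (const includeName of getIncludesOrThrow(project, sourceName)) {
    if (!project.getSource(includeName)) {
      project.setSource(includeName, loadSource(includeName)); // add the missing dependency
      project.parseIncludes(includeName); // the new source may have includes of its own
    }
    loadIncludesRecursively(project, includeName, newVisited);
  }
}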
@@ -30,8 +30,9 @@ describe("ReducerProject Tutorial", () => {
   })

   test("userResults", () => {
-    let userResultsAsString = Belt.Array.map(userResults, aResult =>
-      aResult->Reducer_Value.toStringResult
+    let userResultsAsString = Belt.Array.map(
+      userResults,
+      aResult => aResult->Reducer_Value.toStringResult,
     )
     userResultsAsString->expect == ["Ok(2)", "Ok(4)", "Ok(6)", "Ok(8)", "Ok(10)"]
   })

packages/squiggle-lang/__tests__/SqError_test.res (new file, 41 lines)
@@ -0,0 +1,41 @@
+open Jest
+open Expect
+
+describe("SqError.Message", () => {
+  test("toString", () =>
+    expect(SqError.Message.REOther("test error")->SqError.Message.toString)->toBe(
+      "Error: test error",
+    )
+  )
+})
+
+describe("SqError", () => {
+  test("fromMessage", () =>
+    expect(SqError.Message.REOther("test error")->SqError.fromMessage->SqError.toString)->toBe(
+      "Error: test error",
+    )
+  )
+
+  test("toStringWithStackTrace with empty stacktrace", () =>
+    expect(
+      SqError.Message.REOther("test error")->SqError.fromMessage->SqError.toStringWithStackTrace,
+    )->toBe("Error: test error")
+  )
+
+  test("toStringWithStackTrace", () => {
+    let frameStack =
+      Reducer_FrameStack.make()
+      ->Reducer_FrameStack.extend("frame1", None)
+      ->Reducer_FrameStack.extend("frame2", None)
+
+    expect(
+      SqError.Message.REOther("test error")
+      ->SqError.fromMessageWithFrameStack(frameStack)
+      ->SqError.toStringWithStackTrace,
+    )->toBe(`Error: test error
+Stack trace:
+frame2
+frame1
+`)
+  })
+})
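The new ReScript test pins down the string forms of `SqError`. A minimal TypeScript-side analogue using only methods that appear in this diff (`createOtherError`, `toString`, `toStringWithStackTrace`) is sketched below; the package import path and the exact output of `createOtherError` are assumptions.

// Hedged sketch: the same expectations exercised through the TS wrapper.
import { SqError } from "@quri/squiggle-lang"; // import path assumed

const err = SqError.createOtherError("test error");
console.log(err.toString()); // expected to print "Error: test error"
console.log(err.toStringWithStackTrace()); // with no frames, should print the same message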

@@ -10,6 +10,8 @@ let examples = E.A.to_list(FunctionRegistry_Core.Registry.allExamples(registry))

 describe("FunctionRegistry Library", () => {
   describe("Regular tests", () => {
+    testEvalToBe("List.length([3,5,8])", "Ok(3)")
+    testEvalToBe("List.length([])", "Ok(0)")
     testEvalToBe("List.make(3, 'HI')", "Ok(['HI','HI','HI'])")
     testEvalToBe("make(3, 'HI')", "Error(make is not defined)")
     testEvalToBe("List.upTo(1,3)", "Ok([1,2,3])")

@@ -80,6 +82,10 @@ describe("FunctionRegistry Library", () => {
     "SampleSet.toList(SampleSet.mapN([SampleSet.fromList([1,2,3,4,5,6]), SampleSet.fromList([6, 5, 4, 3, 2, 1])], {|x| x[0] > x[1] ? x[0] : x[1]}))",
     "Ok([6,5,4,4,5,6])",
   )
+  testEvalToBe(
+    "SampleSet.fromList([1, 2, 3])",
+    "Error(Error: Too few samples when constructing sample set)",
+  )

   testEvalToBe("Dict.merge({a: 1, b: 2}, {b: 3, c: 4, d: 5})", "Ok({a: 1,b: 3,c: 4,d: 5})")
   testEvalToBe(

@@ -93,15 +99,19 @@ describe("FunctionRegistry Library", () => {
 })

 describe("Fn auto-testing", () => {
-  testAll("tests of validity", examples, r => {
+  testAll(
+    "tests of validity",
+    examples,
+    r => {
     expectEvalToBeOk(r)
-  })
+    },
+  )

   testAll(
     "tests of type",
     E.A.to_list(
-      FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(((fn, _)) =>
-        E.O.isSome(fn.output)
+      FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(
+        ((fn, _)) => E.O.isSome(fn.output),
       ),
     ),
     ((fn, example)) => {

@@ -45,12 +45,12 @@ let toExtDist: option<DistributionTypes.genericDist> => DistributionTypes.generi
 let unpackFloat = x => x->toFloat->toExtFloat
 let unpackDist = y => y->toDist->toExtDist

-let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
+let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean, stdev}))
-let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))
+let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha, beta}))
 let mkExponential = rate => DistributionTypes.Symbolic(#Exponential({rate: rate}))
-let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low: low, high: high}))
+let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low, high}))
-let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local: local, scale: scale}))
+let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local, scale}))
-let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
+let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu, sigma}))
 let mkDelta = x => DistributionTypes.Symbolic(#Float(x))

 let normalMake = SymbolicDist.Normal.make

@@ -25,7 +25,6 @@
   ],
   "suffix": ".bs.js",
   "namespace": true,
-  "bs-dependencies": ["bisect_ppx"],
   "bs-dev-dependencies": [
     "@glennsl/rescript-jest",
     "rescript-fast-check",

@@ -45,8 +44,5 @@
   "refmt": 3,
   "warnings": {
     "number": "+A-42-48-9-30-4"
-  },
-  "ppx-flags": [
-    ["../../node_modules/bisect_ppx/ppx", "--exclude-files", ".*_test\\.res$$"]
-  ]
+  }
 }

@@ -2,9 +2,6 @@
 module.exports = {
   preset: "ts-jest",
   testEnvironment: "node",
-  setupFilesAfterEnv: [
-    "<rootdir>/../../node_modules/bisect_ppx/src/runtime/js/jest.bs.js",
-  ],
   testPathIgnorePatterns: [
     ".*Fixtures.bs.js",
     "/node_modules/",

@@ -22,10 +22,8 @@
   "test:rescript": "jest --modulePathIgnorePatterns=__tests__/TS/*",
   "test:watch": "jest --watchAll",
   "test:fnRegistry": "jest __tests__/SquiggleLibrary/SquiggleLibrary_FunctionRegistryLibrary_test.bs.js",
-  "coverage:rescript": "rm -f *.coverage && yarn clean && BISECT_ENABLE=yes yarn build && yarn test:rescript && bisect-ppx-report html",
-  "coverage:ts": "yarn clean && yarn build && nyc --reporter=lcov yarn test:ts",
-  "coverage:rescript:ci": "yarn clean && BISECT_ENABLE=yes yarn build:rescript && yarn test:rescript && bisect-ppx-report send-to Codecov",
-  "coverage:ts:ci": "yarn coverage:ts && codecov",
+  "coverage:local": "jest --coverage && echo && echo 'Open ./coverage/lcov-report/index.html to see the detailed report.'",
+  "coverage": "jest --coverage && codecov",
   "lint:rescript": "./lint.sh",
   "lint:prettier": "prettier --check .",
   "lint": "yarn lint:rescript && yarn lint:prettier",

@@ -41,7 +39,7 @@
   ],
   "author": "Quantified Uncertainty Research Institute",
   "dependencies": {
-    "@rescript/std": "^9.1.4",
+    "@rescript/std": "^10.0.0",
     "@stdlib/stats": "^0.0.13",
     "jstat": "^1.9.5",
     "lodash": "^4.17.21",

@@ -50,24 +48,20 @@
   },
   "devDependencies": {
     "@glennsl/rescript-jest": "^0.9.2",
-    "@istanbuljs/nyc-config-typescript": "^1.0.2",
     "@types/jest": "^27.5.0",
-    "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
-    "bisect_ppx": "^2.7.1",
-    "chalk": "^5.0.1",
+    "chalk": "^5.1.0",
     "codecov": "^3.8.3",
     "fast-check": "^3.1.4",
     "gentype": "^4.5.0",
     "jest": "^27.5.1",
     "moduleserve": "^0.9.1",
-    "nyc": "^15.1.0",
     "peggy": "^2.0.1",
     "prettier": "^2.7.1",
     "reanalyze": "^2.23.0",
-    "rescript": "^9.1.4",
+    "rescript": "^10.0.0",
     "rescript-fast-check": "^1.1.1",
     "rescript-js-map": "^1.1.0",
-    "ts-jest": "^27.1.4",
+    "ts-jest": "^29.0.3",
     "ts-loader": "^9.4.1",
     "ts-node": "^10.9.1",
     "typescript": "^4.8.4",

@@ -36,6 +36,6 @@ export const run = (src, { output, sampleCount } = {}) => {
     "Time:",
     String(time),
     result.tag === "Error" ? red(result.tag) : green(result.tag),
-    result.tag === "Error" ? result.value.toString() : ""
+    result.tag === "Error" ? result.value.toStringWithFrameStack() : ""
   );
 };

@@ -1,11 +1,18 @@
 #!/usr/bin/env node
 import { run } from "./lib.mjs";

-const src = process.argv[2];
+import { Command } from "commander";
+
+const program = new Command();
+
+program.arguments("<string>");
+
+const options = program.parse(process.argv);
+
+const src = program.args[0];
 if (!src) {
   throw new Error("Expected src");
 }
-console.log(`Running ${src}`);

 const sampleCount = process.env.SAMPLE_COUNT;
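The CLI entry point now parses its positional argument with commander instead of reading process.argv directly. As a sketch only (not part of this diff), the same setup could also take the sample count as a flag rather than the SAMPLE_COUNT environment variable; the option name and coercion below are illustrative.

// Hedged sketch: extending the commander setup with an optional flag.
import { Command } from "commander";

const program = new Command();
program
  .arguments("<string>")
  .option("--sample-count <n>", "number of samples", (v) => parseInt(v, 10));

program.parse(process.argv);

const src = program.args[0];
if (!src) {
  throw new Error("Expected src");
}
// Fall back to the existing environment variable if the flag is absent.
const sampleCount =
  program.opts().sampleCount ?? Number(process.env.SAMPLE_COUNT ?? NaN);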

@@ -1,17 +1,48 @@
-import * as RSErrorValue from "../rescript/ForTS/ForTS_Reducer_ErrorValue.gen";
+import * as RSError from "../rescript/SqError.gen";
+import * as RSReducerT from "../rescript/Reducer/Reducer_T.gen";
+import * as RSFrameStack from "../rescript/Reducer/Reducer_FrameStack.gen";
+
+export { location as SqLocation } from "../rescript/Reducer/Reducer_Peggy/Reducer_Peggy_Parse.gen";

 export class SqError {
-  constructor(private _value: RSErrorValue.reducerErrorValue) {}
+  constructor(private _value: RSError.t) {}

   toString() {
-    return RSErrorValue.toString(this._value);
+    return RSError.toString(this._value);
   }

-  static createTodoError(v: string) {
-    return new SqError(RSErrorValue.createTodoError(v));
+  toStringWithStackTrace() {
+    return RSError.toStringWithStackTrace(this._value);
   }

   static createOtherError(v: string) {
-    return new SqError(RSErrorValue.createOtherError(v));
+    return new SqError(RSError.createOtherError(v));
+  }
+
+  getTopFrame(): SqFrame | undefined {
+    const frame = RSFrameStack.getTopFrame(RSError.getFrameStack(this._value));
+    return frame ? new SqFrame(frame) : undefined;
+  }
+
+  getFrameArray(): SqFrame[] {
+    const frames = RSError.getFrameArray(this._value);
+    return frames.map((frame) => new SqFrame(frame));
+  }
+
+  location() {
+    return this.getTopFrame()?.location();
+  }
+}
+
+export class SqFrame {
+  constructor(private _value: RSReducerT.frame) {}
+
+  name(): string {
+    return RSFrameStack.Frame.getName(this._value);
+  }
+
+  location() {
+    return RSFrameStack.Frame.getLocation(this._value);
   }
 }
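The rewritten wrapper exposes the error's frame stack to JavaScript consumers. A hedged sketch of using that surface for custom rendering is below; only methods that appear in the diff above are used, and the package import path is an assumption.

// Hedged sketch: rendering an SqError with its frames.
import { SqError, SqFrame } from "@quri/squiggle-lang"; // import path assumed

function describeError(err: SqError): string {
  const frames = err
    .getFrameArray()
    .map((frame: SqFrame) => `  at ${frame.name()}`)
    .join("\n");
  // toStringWithStackTrace() already includes the frame stack;
  // this variant shows the per-frame API for custom formatting.
  return `${err.toString()}\n${frames}`;
}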

@@ -1,5 +1,5 @@
 import * as RSProject from "../rescript/ForTS/ForTS_ReducerProject.gen";
-import { reducerErrorValue } from "../rescript/ForTS/ForTS_Reducer_ErrorValue.gen";
+import * as RSError from "../rescript/SqError.gen";
 import { environment } from "../rescript/ForTS/ForTS_Distribution/ForTS_Distribution_Environment.gen";
 import { SqError } from "./SqError";
 import { SqRecord } from "./SqRecord";

@@ -54,7 +54,7 @@ export class SqProject {
     return resultMap2(
       RSProject.getIncludes(this._value, sourceId),
       (a) => a,
-      (v: reducerErrorValue) => new SqError(v)
+      (v: RSError.t) => new SqError(v)
     );
   }

@@ -108,7 +108,7 @@ export class SqProject {
         items: [],
       })
     ),
-    (v: reducerErrorValue) => new SqError(v)
+    (v: RSError.t) => new SqError(v)
   );
 }
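Both hunks route the ReScript error side of a result through new SqError(v) via resultMap2. The helper's definition is not shown in this diff; a plausible TypeScript shape, assuming a tagged { tag, value } result like the one the CLI inspects, would be:

// Assumed shape only: resultMap2 is not shown in this diff.
// This sketch maps both sides of a tagged result, matching how it is called above.
type Result<T, E> = { tag: "Ok"; value: T } | { tag: "Error"; value: E };

function resultMap2<T, E, T2, E2>(
  r: Result<T, E>,
  mapOk: (v: T) => T2,
  mapError: (e: E) => E2
): Result<T2, E2> {
  return r.tag === "Ok"
    ? { tag: "Ok", value: mapOk(r.value) }
    : { tag: "Error", value: mapError(r.value) };
}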

@@ -1,4 +1,3 @@
-import { isParenthesisNode } from "mathjs";
 import { SqProject } from "./SqProject";

 type PathItem = string | number;

@@ -13,7 +13,7 @@ export {
   environment,
   defaultEnvironment,
 } from "../rescript/ForTS/ForTS_Distribution/ForTS_Distribution.gen";
-export { SqError } from "./SqError";
+export { SqError, SqFrame, SqLocation } from "./SqError";
 export { SqShape } from "./SqPointSetDist";

 export { resultMap } from "./types";

Formatting-only hunks (a blank line inserted between match branches; surrounding code unchanged):
@@ -141,6 +141,7 @@ let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
@@ -99,6 +99,7 @@ let toFloatOperation = (
@@ -427,6 +428,7 @@ module AlgebraicCombination = {
@@ -443,6 +445,7 @@ module AlgebraicCombination = {

@@ -129,7 +130,7 @@ let toPointSet = (
   SampleSetDist.toPointSetDist(
     ~samples=r,
     ~samplingInputs={
-      sampleCount: sampleCount,
+      sampleCount,
       outputXYPoints: xyPointLength,
       pointSetDistLength: xyPointLength,
       kernelWidth: None,

@@ -69,7 +69,7 @@ let toDiscretePointMassesFromTriangulars = (
     ()
   }

-  {n: n - 2, masses: masses, means: means, variances: variances}
+  {n: n - 2, masses, means, variances}
 } else {
   for i in 1 to n - 2 {
     // area of triangle = width * height / 2

@@ -91,7 +91,7 @@ let toDiscretePointMassesFromTriangulars = (
     ) |> ignore
     ()
   }
-  {n: n - 2, masses: masses, means: means, variances: variances}
+  {n: n - 2, masses, means, variances}
 }
 }

@@ -184,7 +184,7 @@ let toDiscretePointMassesFromDiscrete = (s: PointSetTypes.xyShape): pointMassesW
   let means: array<float> = Belt.Array.makeBy(n, i => xs[i])
   let variances: array<float> = Belt.Array.makeBy(n, _ => 0.0)

-  {n: n, masses: masses, means: means, variances: variances}
+  {n, masses, means, variances}
 }

 type argumentPosition = First | Second

@@ -45,16 +45,16 @@ module Analysis = {
 let getShape = (t: t) => t.xyShape
 let interpolation = (t: t) => t.interpolation
 let make = (~interpolation=#Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
-  xyShape: xyShape,
-  interpolation: interpolation,
-  integralSumCache: integralSumCache,
-  integralCache: integralCache,
+  xyShape,
+  interpolation,
+  integralSumCache,
+  integralCache,
 }
 let shapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): t => {
   xyShape: fn(xyShape),
-  interpolation: interpolation,
-  integralSumCache: integralSumCache,
-  integralCache: integralCache,
+  interpolation,
+  integralSumCache,
+  integralCache,
 }
 let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
 let oShapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): option<

@@ -135,10 +135,10 @@ let shapeFn = (fn, t: t) => t |> getShape |> fn

 let updateIntegralSumCache = (integralSumCache, t: t): t => {
   ...t,
-  integralSumCache: integralSumCache,
+  integralSumCache,
 }

-let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache: integralCache}
+let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache}

 let sum = (
   ~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,

@@ -4,14 +4,14 @@ open Distributions
 type t = PointSetTypes.discreteShape

 let make = (~integralSumCache=None, ~integralCache=None, xyShape): t => {
-  xyShape: xyShape,
-  integralSumCache: integralSumCache,
-  integralCache: integralCache,
+  xyShape,
+  integralSumCache,
+  integralCache,
 }
 let shapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): t => {
   xyShape: fn(xyShape),
-  integralSumCache: integralSumCache,
-  integralCache: integralCache,
+  integralSumCache,
+  integralCache,
 }
 let getShape = (t: t) => t.xyShape
 let oShapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): option<t> =>

@@ -63,12 +63,12 @@ let reduce = (

 let updateIntegralSumCache = (integralSumCache, t: t): t => {
   ...t,
-  integralSumCache: integralSumCache,
+  integralSumCache,
 }

 let updateIntegralCache = (integralCache, t: t): t => {
   ...t,
-  integralCache: integralCache,
+  integralCache,
 }

 /* This multiples all of the data points together and creates a new discrete distribution from the results.

@@ -4,10 +4,10 @@ open Distributions

 type t = PointSetTypes.mixedShape
 let make = (~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete): t => {
-  continuous: continuous,
-  discrete: discrete,
-  integralSumCache: integralSumCache,
-  integralCache: integralCache,
+  continuous,
+  discrete,
+  integralSumCache,
+  integralCache,
 }

 let totalLength = (t: t): int => {

@@ -35,7 +35,7 @@ let toDiscrete = ({discrete}: t) => Some(discrete)

 let updateIntegralCache = (integralCache, t: t): t => {
   ...t,
-  integralCache: integralCache,
+  integralCache,
 }

 let combinePointwise = (

@@ -79,8 +79,8 @@ module MixedPoint = {
 type t = mixedPoint
 let toContinuousValue = (t: t) => t.continuous
 let toDiscreteValue = (t: t) => t.discrete
-let makeContinuous = (continuous: float): t => {continuous: continuous, discrete: 0.0}
-let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete: discrete}
+let makeContinuous = (continuous: float): t => {continuous, discrete: 0.0}
+let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete}

 let fmap = (fn: float => float, t: t) => {
   continuous: fn(t.continuous),

@@ -4,13 +4,6 @@ module Error = {
   type sampleSetError =
     TooFewSamples | NonNumericInput(string) | OperationError(Operation.operationError)

-  let sampleSetErrorToString = (err: sampleSetError): string =>
-    switch err {
-    | TooFewSamples => "Too few samples when constructing sample set"
-    | NonNumericInput(err) => `Found a non-number in input: ${err}`
-    | OperationError(err) => Operation.Error.toString(err)
-    }
-
   @genType
   type pointsetConversionError = TooFewSamplesForConversionToPointSet

@@ -7,7 +7,7 @@ module Normal = {
   let make = (mean: float, stdev: float): result<symbolicDist, string> =>
     stdev > 0.0
-      ? Ok(#Normal({mean: mean, stdev: stdev}))
+      ? Ok(#Normal({mean, stdev}))
       : Error("Standard deviation of normal distribution must be larger than 0")

@@ -15,7 +15,7 @@ module Normal = {
   let from90PercentCI = (low, high) => {
     let mean = E.A.Floats.mean([low, high])
     let stdev = (high -. low) /. (2. *. normal95confidencePoint)
-    #Normal({mean: mean, stdev: stdev})
+    #Normal({mean, stdev})
   }

@@ -25,12 +25,12 @@ module Normal = {
   let add = (n1: t, n2: t) => {
     let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
-    #Normal({mean: mean, stdev: stdev})
+    #Normal({mean, stdev})
   }
   let subtract = (n1: t, n2: t) => {
     let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
-    #Normal({mean: mean, stdev: stdev})
+    #Normal({mean, stdev})
   }

@@ -38,7 +38,7 @@ module Normal = {
     let stdev = 1. /. (1. /. n1.stdev ** 2. +. 1. /. n2.stdev ** 2.)
-    #Normal({mean: mean, stdev: stdev})
+    #Normal({mean, stdev})
   }

@@ -88,7 +88,7 @@ module Cauchy = {
   let make = (local, scale): result<symbolicDist, string> =>
     scale > 0.0
-      ? Ok(#Cauchy({local: local, scale: scale}))
+      ? Ok(#Cauchy({local, scale}))
       : Error("Cauchy distribution scale parameter must larger than 0.")

@@ -102,7 +102,7 @@ module Triangular = {
   let make = (low, medium, high): result<symbolicDist, string> =>
     low < medium && medium < high
-      ? Ok(#Triangular({low: low, medium: medium, high: high}))
+      ? Ok(#Triangular({low, medium, high}))
       : Error("Triangular values must be increasing order.")

@@ -116,7 +116,7 @@ module Beta = {
   let make = (alpha, beta) =>
     alpha > 0.0 && beta > 0.0
-      ? Ok(#Beta({alpha: alpha, beta: beta}))
+      ? Ok(#Beta({alpha, beta}))
       : Error("Beta distribution parameters must be positive")

@@ -150,7 +150,7 @@ module Lognormal = {
   let make = (mu, sigma) =>
     sigma > 0.0
-      ? Ok(#Lognormal({mu: mu, sigma: sigma}))
+      ? Ok(#Lognormal({mu, sigma}))
       : Error("Lognormal standard deviation must be larger than 0")

@@ -164,7 +164,7 @@ module Lognormal = {
     let mu = E.A.Floats.mean([logLow, logHigh])
     let sigma = (logHigh -. logLow) /. (2.0 *. normal95confidencePoint)
-    #Lognormal({mu: mu, sigma: sigma})
+    #Lognormal({mu, sigma})
   }

@@ -174,7 +174,7 @@ module Lognormal = {
     let sigma = Js.Math.sqrt(Js.Math.log(variance /. meanSquared +. 1.))
-    Ok(#Lognormal({mu: mu, sigma: sigma}))
+    Ok(#Lognormal({mu, sigma}))
   } else {
     Error("Lognormal standard deviation must be larger than 0")
   }

@@ -184,14 +184,14 @@ module Lognormal = {
     let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
-    #Lognormal({mu: mu, sigma: sigma})
+    #Lognormal({mu, sigma})
   }
   let divide = (l1, l2) => {
     let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
-    #Lognormal({mu: mu, sigma: sigma})
+    #Lognormal({mu, sigma})
   }

@@ -220,7 +220,7 @@ module Lognormal = {
 module Uniform = {
   let make = (low, high) =>
-    high > low ? Ok(#Uniform({low: low, high: high})) : Error("High must be larger than low")
+    high > low ? Ok(#Uniform({low, high})) : Error("High must be larger than low")

@@ -239,9 +239,7 @@ module Uniform = {
 module Logistic = {
   let make = (location, scale) =>
-    scale > 0.0
-      ? Ok(#Logistic({location: location, scale: scale}))
-      : Error("Scale must be positive")
+    scale > 0.0 ? Ok(#Logistic({location, scale})) : Error("Scale must be positive")

@@ -285,7 +283,7 @@ module Gamma = {
   let make = (shape: float, scale: float) => {
     if shape > 0. {
       if scale > 0. {
-        Ok(#Gamma({shape: shape, scale: scale}))
+        Ok(#Gamma({shape, scale}))
       } else {
         Error("scale must be larger than 0")
       }

@@ -543,6 +541,6 @@ module T = {
   | _ =>
     let xs = interpolateXs(~xSelection, d, sampleCount)
     let ys = xs |> E.A.fmap(x => pdf(x, d))
-    Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs: xs, ys: ys}))
+    Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs, ys}))
   }
 }

@@ -5,7 +5,7 @@ let nameSpace = "" // no namespaced versions

 type simpleDefinition = {
   inputs: array<frType>,
-  fn: array<Reducer_T.value> => result<Reducer_T.value, errorValue>,
+  fn: array<Reducer_T.value> => result<Reducer_T.value, errorMessage>,
 }

 let makeFnMany = (name: string, definitions: array<simpleDefinition>) =>

@@ -22,8 +22,8 @@ let makeFnMany = (name: string, definitions: array<simpleDefinition>) =>
 let makeFn = (
   name: string,
   inputs: array<frType>,
-  fn: array<Reducer_T.value> => result<Reducer_T.value, errorValue>,
-) => makeFnMany(name, [{inputs: inputs, fn: fn}])
+  fn: array<Reducer_T.value> => result<Reducer_T.value, errorMessage>,
+) => makeFnMany(name, [{inputs, fn}])

 let library = [
   Make.ff2f(~name="add", ~fn=(x, y) => x +. y, ()), // infix + (see Reducer/Reducer_Peggy/helpers.ts)

Formatting-only hunks (a blank line inserted before the `| _ => Error(impossibleError)` fallback; surrounding code unchanged):
@@ -62,6 +62,7 @@ let library = [
@@ -72,6 +73,7 @@
@@ -81,6 +83,7 @@
@@ -90,6 +93,7 @@

@@ -67,7 +67,7 @@ module Integration = {
   let applyFunctionAtFloatToFloatOption = (point: float) => {
     // Defined here so that it has access to environment, reducer
     let pointAsInternalExpression = FunctionRegistry_Helpers.Wrappers.evNumber(point)
-    let resultAsInternalExpression = Reducer_Expression_Lambda.doLambdaCall(
+    let resultAsInternalExpression = Reducer_Lambda.doLambdaCall(
       aLambda,
       [pointAsInternalExpression],
       environment,

@@ -246,7 +248,7 @@ module DiminishingReturns = {
   type diminishingReturnsAccumulatorInner = {
     optimalAllocations: array<float>,
-    currentMarginalReturns: result<array<float>, errorValue>,
+    currentMarginalReturns: result<array<float>, errorMessage>,
   }

@@ -255,7 +257,7 @@ module DiminishingReturns = {
-  type diminishingReturnsAccumulator = result<diminishingReturnsAccumulatorInner, errorValue>
+  type diminishingReturnsAccumulator = result<diminishingReturnsAccumulatorInner, errorMessage>

@@ -290,25 +292,25 @@ module DiminishingReturns = {
     let applyFunctionAtPoint = (lambda, point: float) => {
       // Defined here so that it has access to environment, reducer
       let pointAsInternalExpression = FunctionRegistry_Helpers.Wrappers.evNumber(point)
-      let resultAsInternalExpression = Reducer_Expression_Lambda.doLambdaCall(
+      let resultAsInternalExpression = Reducer_Lambda.doLambdaCall(
         lambda,
         [pointAsInternalExpression],
         environment,

The remaining hunks in this module (@@ -77,7 +77,7 @@, @@ -135,16 +135,18 @@, @@ -169,7 +171,7 @@,
@@ -187,7 +189,7 @@, @@ -213,7 +215,7 @@, @@ -225,11 +227,11 @@, @@ -318,7 +320,7 @@,
@@ -362,6 +364,7 @@, @@ -371,10 +374,12 @@, @@ -411,7 +416,7 @@, @@ -427,13 +432,15 @@) carry the same
mechanical migration: every `Reducer_ErrorValue.REOther` becomes `SqError.Message.REOther`,
every `Reducer_ErrorValue.errorToString` becomes `SqError.Message.toString`, and a few blank
lines are inserted before `| Error(b) => Error(b)` fallback branches. The Danger.integrate and
Danger.optimalAllocationGivenDiminishingMarginalReturnsForManyFunctions error-message strings
themselves are unchanged.
|
@ -4,7 +4,7 @@ open FunctionRegistry_Helpers
|
||||||
let makeFn = (
|
let makeFn = (
|
||||||
name: string,
|
name: string,
|
||||||
inputs: array<frType>,
|
inputs: array<frType>,
|
||||||
fn: array<Reducer_T.value> => result<Reducer_T.value, errorValue>,
|
fn: array<Reducer_T.value> => result<Reducer_T.value, errorMessage>,
|
||||||
) =>
|
) =>
|
||||||
Function.make(
|
Function.make(
|
||||||
~name,
|
~name,
|
||||||
|
@ -66,7 +66,7 @@ let library = [
|
||||||
| [IEvNumber(year)] =>
|
| [IEvNumber(year)] =>
|
||||||
switch DateTime.Date.makeFromYear(year) {
|
switch DateTime.Date.makeFromYear(year) {
|
||||||
| Ok(t) => IEvDate(t)->Ok
|
| Ok(t) => IEvDate(t)->Ok
|
||||||
| Error(e) => Reducer_ErrorValue.RETodo(e)->Error
|
| Error(e) => SqError.Message.RETodo(e)->Error
|
||||||
}
|
}
|
||||||
| _ => Error(impossibleError)
|
| _ => Error(impossibleError)
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,7 +17,7 @@ module Internals = {
|
||||||
->E.A2.fmap(((key, value)) => Wrappers.evArray([IEvString(key), value]))
|
->E.A2.fmap(((key, value)) => Wrappers.evArray([IEvString(key), value]))
|
||||||
->Wrappers.evArray
|
->Wrappers.evArray
|
||||||
|
|
||||||
let fromList = (items: array<Reducer_T.value>): result<Reducer_T.value, errorValue> =>
|
let fromList = (items: array<Reducer_T.value>): result<Reducer_T.value, errorMessage> =>
|
||||||
items
|
items
|
||||||
->E.A2.fmap(item => {
|
->E.A2.fmap(item => {
|
||||||
switch (item: Reducer_T.value) {
|
switch (item: Reducer_T.value) {
|
||||||
|
@ -76,7 +76,7 @@ let library = [
|
||||||
->Belt.Array.map(dictValue =>
|
->Belt.Array.map(dictValue =>
|
||||||
switch dictValue {
|
switch dictValue {
|
||||||
| IEvRecord(dict) => dict
|
| IEvRecord(dict) => dict
|
||||||
| _ => impossibleError->Reducer_ErrorValue.toException
|
| _ => impossibleError->SqError.Message.throw
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
->Internals.mergeMany
|
->Internals.mergeMany
|
||||||
|
|
|

@@ -16,13 +16,14 @@ module DistributionCreation = {
     r
     ->E.R.bind(Process.DistOrNumberToDist.twoValuesUsingSymbolicDist(~fn, ~values=_, ~env))
     ->E.R2.fmap(Wrappers.evDistribution)
-    ->E.R2.errMap(e => Reducer_ErrorValue.REOther(e))
+    ->E.R2.errMap(e => SqError.Message.REOther(e))

   let make = (name, fn) => {
     FnDefinition.make(
       ~name,
       ~inputs=[FRTypeDistOrNumber, FRTypeDistOrNumber],
-      ~run=(inputs, env, _) => inputs->Prepare.ToValueTuple.twoDistOrNumber->process(~fn, ~env),
+      ~run=(inputs, context, _) =>
+        inputs->Prepare.ToValueTuple.twoDistOrNumber->process(~fn, ~env=context.environment),
       (),
     )
   }

@@ -31,8 +32,10 @@ module DistributionCreation = {
     FnDefinition.make(
       ~name,
       ~inputs=[FRTypeRecord([("p5", FRTypeDistOrNumber), ("p95", FRTypeDistOrNumber)])],
-      ~run=(inputs, env, _) =>
-        inputs->Prepare.ToValueTuple.Record.twoDistOrNumber(("p5", "p95"))->process(~fn, ~env),
+      ~run=(inputs, context, _) =>
+        inputs
+        ->Prepare.ToValueTuple.Record.twoDistOrNumber(("p5", "p95"))
+        ->process(~fn, ~env=context.environment),
       (),
     )
   }

@@ -41,10 +44,10 @@ module DistributionCreation = {
     FnDefinition.make(
       ~name,
       ~inputs=[FRTypeRecord([("mean", FRTypeDistOrNumber), ("stdev", FRTypeDistOrNumber)])],
-      ~run=(inputs, env, _) =>
+      ~run=(inputs, context, _) =>
         inputs
         ->Prepare.ToValueTuple.Record.twoDistOrNumber(("mean", "stdev"))
-        ->process(~fn, ~env),
+        ->process(~fn, ~env=context.environment),
       (),
     )
   }

@@ -55,13 +58,14 @@ module DistributionCreation = {
     r
     ->E.R.bind(Process.DistOrNumberToDist.oneValueUsingSymbolicDist(~fn, ~value=_, ~env))
     ->E.R2.fmap(Wrappers.evDistribution)
-    ->E.R2.errMap(e => Reducer_ErrorValue.REOther(e))
+    ->E.R2.errMap(e => SqError.Message.REOther(e))

   let make = (name, fn) =>
     FnDefinition.make(
       ~name,
       ~inputs=[FRTypeDistOrNumber],
-      ~run=(inputs, env, _) => inputs->Prepare.ToValueTuple.oneDistOrNumber->process(~fn, ~env),
+      ~run=(inputs, context, _) =>
+        inputs->Prepare.ToValueTuple.oneDistOrNumber->process(~fn, ~env=context.environment),
       (),
     )
 }

@@ -20,6 +20,7 @@ module Declaration = {
     ->E.A.R.firstErrorOrOpen
     ->E.R2.fmap(args => Reducer_T.IEvDeclaration(Declaration.make(lambda, args)))
   }
+
   | Error(r) => Error(r)
   | Ok(_) => Error(impossibleErrorString)
   }

@@ -140,6 +140,7 @@ module Old = {
     | Error(err) => error(err)
     }
   }
+
   | Some(IEvNumber(_))
   | Some(IEvDistribution(_)) =>
     switch parseDistributionArray(args) {

@@ -192,6 +193,7 @@ module Old = {
     }
     Helpers.toFloatFn(fn, dist, ~env)
   }
+
   | ("integralSum", [IEvDistribution(dist)]) => Helpers.toFloatFn(#IntegralSum, dist, ~env)
   | ("toString", [IEvDistribution(dist)]) => Helpers.toStringFn(ToString, dist, ~env)
   | ("sparkline", [IEvDistribution(dist)]) =>

@@ -296,7 +298,7 @@ module Old = {
   let genericOutputToReducerValue = (o: DistributionOperation.outputType): result<
     Reducer_T.value,
-    Reducer_ErrorValue.errorValue,
+    SqError.Message.t,
   > =>
     switch o {
     | Dist(d) => Ok(Reducer_T.IEvDistribution(d))

@@ -311,9 +313,9 @@ module Old = {
     switch dispatchToGenericOutput(call, environment) {
     | Some(o) => genericOutputToReducerValue(o)
     | None =>
-      Reducer_ErrorValue.REOther("Internal error in FR_GenericDist implementation")
-      ->Reducer_ErrorValue.ErrorException
-      ->raise
+      SqError.Message.REOther(
+        "Internal error in FR_GenericDist implementation",
+      )->SqError.Message.throw
     }
   }

@@ -326,7 +328,7 @@ let makeProxyFn = (name: string, inputs: array<frType>) => {
   FnDefinition.make(
     ~name,
     ~inputs,
-    ~run=(inputs, env, _) => Old.dispatch((name, inputs), env),
+    ~run=(inputs, context, _) => Old.dispatch((name, inputs), context.environment),
     (),
   ),
 ],

@@ -402,9 +404,9 @@ let library = E.A.concatMany([
 ])

 // FIXME - impossible to implement with FR due to arbitrary parameters length;
-let mxLambda = Reducer_Expression_Lambda.makeFFILambda((inputs, env, _) => {
-  switch Old.dispatch(("mx", inputs), env) {
+let mxLambda = Reducer_Lambda.makeFFILambda("mx", (inputs, context, _) => {
+  switch Old.dispatch(("mx", inputs), context.environment) {
   | Ok(value) => value
-  | Error(e) => e->Reducer_ErrorValue.ErrorException->raise
+  | Error(e) => e->SqError.Message.throw
   }
 })
@@ -5,6 +5,10 @@ let nameSpace = "List"
  let requiresNamespace = true

  module Internals = {
+ let length = (v: array<Reducer_T.value>): Reducer_T.value => IEvNumber(
+ Belt.Int.toFloat(Array.length(v)),
+ )
+
  let makeFromNumber = (n: float, value: Reducer_T.value): Reducer_T.value => IEvArray(
  Belt.Array.make(E.Float.toInt(n), value),
  )

@@ -26,11 +30,11 @@ module Internals = {
  let map = (
  array: array<Reducer_T.value>,
  eLambdaValue,
- env: Reducer_T.environment,
+ context: Reducer_T.context,
  reducer: Reducer_T.reducerFn,
  ): Reducer_T.value => {
  Belt.Array.map(array, elem =>
- Reducer_Expression_Lambda.doLambdaCall(eLambdaValue, [elem], env, reducer)
+ Reducer_Lambda.doLambdaCall(eLambdaValue, [elem], context, reducer)
  )->Wrappers.evArray
  }

@@ -38,11 +42,11 @@ module Internals = {
  aValueArray,
  initialValue,
  aLambdaValue,
- env: Reducer_T.environment,
+ context: Reducer_T.context,
  reducer: Reducer_T.reducerFn,
  ) => {
  aValueArray->E.A.reduce(initialValue, (acc, elem) =>
- Reducer_Expression_Lambda.doLambdaCall(aLambdaValue, [acc, elem], env, reducer)
+ Reducer_Lambda.doLambdaCall(aLambdaValue, [acc, elem], context, reducer)
  )
  }

@@ -50,22 +54,22 @@ module Internals = {
  aValueArray,
  initialValue,
  aLambdaValue,
- env: Reducer_T.environment,
+ context: Reducer_T.context,
  reducer: Reducer_T.reducerFn,
  ) => {
  aValueArray->Belt.Array.reduceReverse(initialValue, (acc, elem) =>
- Reducer_Expression_Lambda.doLambdaCall(aLambdaValue, [acc, elem], env, reducer)
+ Reducer_Lambda.doLambdaCall(aLambdaValue, [acc, elem], context, reducer)
  )
  }

  let filter = (
  aValueArray,
  aLambdaValue,
- env: Reducer_T.environment,
+ context: Reducer_T.context,
  reducer: Reducer_T.reducerFn,
  ) => {
  Js.Array2.filter(aValueArray, elem => {
- let result = Reducer_Expression_Lambda.doLambdaCall(aLambdaValue, [elem], env, reducer)
+ let result = Reducer_Lambda.doLambdaCall(aLambdaValue, [elem], context, reducer)
  switch result {
  | IEvBool(true) => true
  | _ => false

@@ -75,6 +79,26 @@ module Internals = {
  }

  let library = [
+ Function.make(
+ ~name="length",
+ ~nameSpace,
+ ~output=EvtNumber,
+ ~requiresNamespace=true,
+ ~examples=[`List.length([1,4,5])`],
+ ~definitions=[
+ FnDefinition.make(
+ ~name="length",
+ ~inputs=[FRTypeArray(FRTypeAny)],
+ ~run=(inputs, _, _) =>
+ switch inputs {
+ | [IEvArray(array)] => Internals.length(array)->Ok
+ | _ => Error(impossibleError)
+ },
+ (),
+ ),
+ ],
+ (),
+ ),
  Function.make(
  ~name="make",
  ~nameSpace,
@@ -16,30 +16,31 @@ let inputsToDist = (inputs: array<Reducer_T.value>, xyShapeToPointSetDist) => {
  let yValue = map->Belt.Map.String.get("y")
  switch (xValue, yValue) {
  | (Some(IEvNumber(x)), Some(IEvNumber(y))) => (x, y)
- | _ => impossibleError->Reducer_ErrorValue.toException
+ | _ => impossibleError->SqError.Message.throw
  }
  }
- | _ => impossibleError->Reducer_ErrorValue.toException
+ | _ => impossibleError->SqError.Message.throw
  }
  )
  ->Ok
  ->E.R.bind(r => r->XYShape.T.makeFromZipped->E.R2.errMap(XYShape.Error.toString))
  ->E.R2.fmap(r => Reducer_T.IEvDistribution(PointSet(r->xyShapeToPointSetDist)))
- | _ => impossibleError->Reducer_ErrorValue.toException
+ | _ => impossibleError->SqError.Message.throw
  }
  }

  module Internal = {
  type t = PointSetDist.t

- let toType = (r): result<Reducer_T.value, Reducer_ErrorValue.errorValue> =>
+ let toType = (r): result<Reducer_T.value, SqError.Message.t> =>
  switch r {
  | Ok(r) => Ok(Wrappers.evDistribution(PointSet(r)))
  | Error(err) => Error(REOperationError(err))
  }

  let doLambdaCall = (aLambdaValue, list, env, reducer) =>
- switch Reducer_Expression_Lambda.doLambdaCall(aLambdaValue, list, env, reducer) {
+ switch Reducer_Lambda.doLambdaCall(aLambdaValue, list, env, reducer) {
  | Reducer_T.IEvNumber(f) => Ok(f)
  | _ => Error(Operation.SampleMapNeedsNtoNFunction)
  }

@@ -61,18 +62,18 @@ let library = [
  FnDefinition.make(
  ~name="fromDist",
  ~inputs=[FRTypeDist],
- ~run=(inputs, env, _) =>
+ ~run=(inputs, context, _) =>
  switch inputs {
  | [IEvDistribution(dist)] =>
  GenericDist.toPointSet(
  dist,
- ~xyPointLength=env.xyPointLength,
+ ~xyPointLength=context.environment.xyPointLength,
- ~sampleCount=env.sampleCount,
+ ~sampleCount=context.environment.sampleCount,
  (),
  )
  ->E.R2.fmap(Wrappers.pointSet)
  ->E.R2.fmap(Wrappers.evDistribution)
- ->E.R2.errMap(e => Reducer_ErrorValue.REDistributionError(e))
+ ->E.R2.errMap(e => SqError.Message.REDistributionError(e))
  | _ => Error(impossibleError)
  },
  (),
@@ -10,41 +10,40 @@ module Internal = {
  let doLambdaCall = (
  aLambdaValue,
  list,
- env: Reducer_T.environment,
+ context: Reducer_T.context,
  reducer: Reducer_T.reducerFn,
  ) =>
- switch Reducer_Expression_Lambda.doLambdaCall(aLambdaValue, list, env, reducer) {
+ switch Reducer_Lambda.doLambdaCall(aLambdaValue, list, context, reducer) {
  | IEvNumber(f) => Ok(f)
  | _ => Error(Operation.SampleMapNeedsNtoNFunction)
  }

- let toType = (r): result<Reducer_T.value, Reducer_ErrorValue.errorValue> =>
+ let toType = (r): result<Reducer_T.value, SqError.Message.t> =>
  switch r {
  | Ok(r) => Ok(Wrappers.evDistribution(SampleSet(r)))
  | Error(r) => Error(REDistributionError(SampleSetError(r)))
  }

  //TODO: I don't know why this seems to need at least one input
- let fromFn = (aLambdaValue, environment: Reducer_T.environment, reducer: Reducer_T.reducerFn) => {
+ let fromFn = (aLambdaValue, context: Reducer_T.context, reducer: Reducer_T.reducerFn) => {
- let sampleCount = environment.sampleCount
+ let sampleCount = context.environment.sampleCount
- let fn = r => doLambdaCall(aLambdaValue, [IEvNumber(r)], environment, reducer)
+ let fn = r => doLambdaCall(aLambdaValue, [IEvNumber(r)], context, reducer)
  Belt_Array.makeBy(sampleCount, r => fn(r->Js.Int.toFloat))->E.A.R.firstErrorOrOpen
  }

- let map1 = (sampleSetDist: t, aLambdaValue, environment: Reducer_T.environment, reducer) => {
+ let map1 = (sampleSetDist: t, aLambdaValue, context: Reducer_T.context, reducer) => {
- let fn = r => doLambdaCall(aLambdaValue, [IEvNumber(r)], environment, reducer)
+ let fn = r => doLambdaCall(aLambdaValue, [IEvNumber(r)], context, reducer)
  SampleSetDist.samplesMap(~fn, sampleSetDist)->toType
  }

- let map2 = (t1: t, t2: t, aLambdaValue, environment: Reducer_T.environment, reducer) => {
+ let map2 = (t1: t, t2: t, aLambdaValue, context: Reducer_T.context, reducer) => {
- let fn = (a, b) =>
- doLambdaCall(aLambdaValue, [IEvNumber(a), IEvNumber(b)], environment, reducer)
+ let fn = (a, b) => doLambdaCall(aLambdaValue, [IEvNumber(a), IEvNumber(b)], context, reducer)
  SampleSetDist.map2(~fn, ~t1, ~t2)->toType
  }

- let map3 = (t1: t, t2: t, t3: t, aLambdaValue, environment: Reducer_T.environment, reducer) => {
+ let map3 = (t1: t, t2: t, t3: t, aLambdaValue, context: Reducer_T.context, reducer) => {
  let fn = (a, b, c) =>
- doLambdaCall(aLambdaValue, [IEvNumber(a), IEvNumber(b), IEvNumber(c)], environment, reducer)
+ doLambdaCall(aLambdaValue, [IEvNumber(a), IEvNumber(b), IEvNumber(c)], context, reducer)
  SampleSetDist.map3(~fn, ~t1, ~t2, ~t3)->toType
  }

@@ -60,7 +59,7 @@ module Internal = {
  let mapN = (
  aValueArray: array<Reducer_T.value>,
  aLambdaValue,
- environment: Reducer_T.environment,
+ context: Reducer_T.context,
  reducer,
  ) => {
  switch parseSampleSetArray(aValueArray) {

@@ -69,7 +68,7 @@ module Internal = {
  doLambdaCall(
  aLambdaValue,
  [IEvArray(E.A.fmap(x => Wrappers.evNumber(x), a))],
- environment,
+ context,
  reducer,
  )
  SampleSetDist.mapN(~fn, ~t1)->toType

@@ -89,13 +88,13 @@ let libaryBase = [
  FnDefinition.make(
  ~name="fromDist",
  ~inputs=[FRTypeDist],
- ~run=(inputs, environment, _) =>
+ ~run=(inputs, context, _) =>
  switch inputs {
  | [IEvDistribution(dist)] =>
- GenericDist.toSampleSetDist(dist, environment.sampleCount)
+ GenericDist.toSampleSetDist(dist, context.environment.sampleCount)
  ->E.R2.fmap(Wrappers.sampleSet)
  ->E.R2.fmap(Wrappers.evDistribution)
- ->E.R2.errMap(e => Reducer_ErrorValue.REDistributionError(e))
+ ->E.R2.errMap(e => SqError.Message.REDistributionError(e))
  | _ => Error(impossibleError)
  },
  (),

@@ -116,7 +115,7 @@ let libaryBase = [
  ~run=(inputs, _, _) => {
  let sampleSet =
  inputs->Prepare.ToTypedArray.numbers
- |> E.R2.bind(r => SampleSetDist.make(r)->E.R2.errMap(_ => "AM I HERE? WHYERE AMI??"))
+ |> E.R2.bind(r => SampleSetDist.make(r)->E.R2.errMap(SampleSetDist.Error.toString))
  sampleSet
  ->E.R2.fmap(Wrappers.sampleSet)
  ->E.R2.fmap(Wrappers.evDistribution)

@@ -163,7 +162,7 @@ let libaryBase = [
  | [IEvLambda(lambda)] =>
  switch Internal.fromFn(lambda, environment, reducer) {
  | Ok(r) => Ok(r->Wrappers.sampleSet->Wrappers.evDistribution)
- | Error(e) => e->Reducer_ErrorValue.REOperationError->Error
+ | Error(e) => e->SqError.Message.REOperationError->Error
  }
  | _ => Error(impossibleError)
  },

@@ -290,7 +289,7 @@ module Comparison = {
  r
  ->E.R2.fmap(r => r->Wrappers.sampleSet->Wrappers.evDistribution)
  ->E.R2.errMap(e =>
- e->DistributionTypes.Error.sampleErrorToDistErr->Reducer_ErrorValue.REDistributionError
+ e->DistributionTypes.Error.sampleErrorToDistErr->SqError.Message.REDistributionError
  )

  let mkBig = (name, withDist, withFloat) =>
@@ -6,7 +6,7 @@ let requiresNamespace = true
  let runScoring = (estimate, answer, prior, env) => {
  GenericDist.Score.logScore(~estimate, ~answer, ~prior, ~env)
  ->E.R2.fmap(FunctionRegistry_Helpers.Wrappers.evNumber)
- ->E.R2.errMap(e => Reducer_ErrorValue.REDistributionError(e))
+ ->E.R2.errMap(e => SqError.Message.REDistributionError(e))
  }

  let library = [

@@ -30,15 +30,15 @@ let library = [
  ("prior", FRTypeDist),
  ]),
  ],
- ~run=(inputs, environment, _) => {
+ ~run=(inputs, context, _) => {
  switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.threeArgs(
  inputs,
  ("estimate", "answer", "prior"),
  ) {
  | Ok([IEvDistribution(estimate), IEvDistribution(d), IEvDistribution(prior)]) =>
- runScoring(estimate, Score_Dist(d), Some(prior), environment)
+ runScoring(estimate, Score_Dist(d), Some(prior), context.environment)
  | Ok([IEvDistribution(estimate), IEvNumber(d), IEvDistribution(prior)]) =>
- runScoring(estimate, Score_Scalar(d), Some(prior), environment)
+ runScoring(estimate, Score_Scalar(d), Some(prior), context.environment)
  | Error(e) => Error(e->FunctionRegistry_Helpers.wrapError)
  | _ => Error(FunctionRegistry_Helpers.impossibleError)
  }

@@ -48,15 +48,15 @@ let library = [
  FnDefinition.make(
  ~name="logScore",
  ~inputs=[FRTypeRecord([("estimate", FRTypeDist), ("answer", FRTypeDistOrNumber)])],
- ~run=(inputs, environment, _) => {
+ ~run=(inputs, context, _) => {
  switch FunctionRegistry_Helpers.Prepare.ToValueArray.Record.twoArgs(
  inputs,
  ("estimate", "answer"),
  ) {
  | Ok([IEvDistribution(estimate), IEvDistribution(d)]) =>
- runScoring(estimate, Score_Dist(d), None, environment)
+ runScoring(estimate, Score_Dist(d), None, context.environment)
  | Ok([IEvDistribution(estimate), IEvNumber(d)]) =>
- runScoring(estimate, Score_Scalar(d), None, environment)
+ runScoring(estimate, Score_Scalar(d), None, context.environment)
  | Error(e) => Error(e->FunctionRegistry_Helpers.wrapError)
  | _ => Error(FunctionRegistry_Helpers.impossibleError)
  }

@@ -76,10 +76,10 @@ let library = [
  FnDefinition.make(
  ~name="klDivergence",
  ~inputs=[FRTypeDist, FRTypeDist],
- ~run=(inputs, environment, _) => {
+ ~run=(inputs, context, _) => {
  switch inputs {
  | [IEvDistribution(estimate), IEvDistribution(d)] =>
- runScoring(estimate, Score_Dist(d), None, environment)
+ runScoring(estimate, Score_Dist(d), None, context.environment)
  | _ => Error(FunctionRegistry_Helpers.impossibleError)
  }
  },
@@ -1,6 +1,7 @@
  @genType type reducerProject = ReducerProject_T.project //re-export

- type reducerErrorValue = ForTS_Reducer_ErrorValue.reducerErrorValue //use
+ type error = SqError.t //use
+ type errorMessage = SqError.Message.t //use

  type squiggleValue = ForTS_SquiggleValue.squiggleValue //use
  type squiggleValue_Record = ForTS_SquiggleValue.squiggleValue_Record //use

@@ -103,10 +104,8 @@ let cleanAllResults = (project: reducerProject): unit => project->Private.cleanA
  To set the includes one first has to call "parseIncludes". The parsed includes or the parser error is returned.
  */
  @genType
- let getIncludes = (project: reducerProject, sourceId: string): result<
- array<string>,
- reducerErrorValue,
- > => project->Private.getIncludes(sourceId)
+ let getIncludes = (project: reducerProject, sourceId: string): result<array<string>, error> =>
+ project->Private.getIncludes(sourceId)

  /* Other sources contributing to the global namespace of this source. */
  @genType

@@ -198,10 +197,8 @@ let getBindings = (project: reducerProject, sourceId: string): squiggleValue_Rec
  Get the result after running this source file or the project
  */
  @genType
- let getResult = (project: reducerProject, sourceId: string): result<
- squiggleValue,
- reducerErrorValue,
- > => project->Private.getResult(sourceId)
+ let getResult = (project: reducerProject, sourceId: string): result<squiggleValue, error> =>
+ project->Private.getResult(sourceId)

  /*
  This is a convenience function to get the result of a single source without creating a project.

@@ -209,10 +206,8 @@ However, without a project, you cannot handle include directives.
  The source has to be include free
  */
  @genType
- let evaluate = (sourceCode: string): (
- result<squiggleValue, reducerErrorValue>,
- squiggleValue_Record,
- ) => Private.evaluate(sourceCode)
+ let evaluate = (sourceCode: string): (result<squiggleValue, error>, squiggleValue_Record) =>
+ Private.evaluate(sourceCode)

  @genType
  let setEnvironment = (project: reducerProject, environment: environment): unit =>

@@ -220,24 +215,3 @@ let setEnvironment = (project: reducerProject, environment: environment): unit =

  @genType
  let getEnvironment = (project: reducerProject): environment => project->Private.getEnvironment
-
- /*
- Foreign function interface is intentionally demolished.
- There is another way to do that: Umur.
- Also there is no more conversion from javascript to squiggle values currently.
- If the conversion to the new project is too difficult, I can add it later.
- */
-
- // let foreignFunctionInterface = (
- // lambdaValue: squiggleValue_Lambda,
- // argArray: array<squiggleValue>,
- // environment: environment,
- // ): result<squiggleValue, reducerErrorValue> => {
- // let accessors = ReducerProject_ProjectAccessors_T.identityAccessorsWithEnvironment(environment)
- // Reducer_Expression_Lambda.foreignFunctionInterface(
- // lambdaValue,
- // argArray,
- // accessors,
- // Reducer_Expression.reduceExpressionInProject,
- // )
- // }
@@ -1,18 +0,0 @@
- @genType type reducerErrorValue = Reducer_ErrorValue.errorValue //alias
- @genType type syntaxErrorLocation = Reducer_ErrorValue.syntaxErrorLocation //alias
-
- @genType
- let toString = (e: reducerErrorValue): string => Reducer_ErrorValue.errorToString(e)
-
- @genType
- let getLocation = (e: reducerErrorValue): option<syntaxErrorLocation> =>
- switch e {
- | RESyntaxError(_, optionalLocation) => optionalLocation
- | _ => None
- }
-
- @genType
- let createTodoError = (v: string) => Reducer_ErrorValue.RETodo(v)
-
- @genType
- let createOtherError = (v: string) => Reducer_ErrorValue.REOther(v)
@@ -1,5 +1,5 @@
  @genType type squiggleValue = Reducer_T.value //re-export
- type reducerErrorValue = ForTS_Reducer_ErrorValue.reducerErrorValue //use
+ type error = SqError.t //use

  @genType type squiggleValue_Array = Reducer_T.arrayValue //re-export recursive type
  @genType type squiggleValue_Record = Reducer_T.map //re-export recursive type

@@ -69,7 +69,7 @@ let toString = (variant: squiggleValue) => Reducer_Value.toString(variant)
  // This is a useful method for unit tests.
  // Convert the result along with the error message to a string.
  @genType
- let toStringResult = (variantResult: result<squiggleValue, reducerErrorValue>) =>
+ let toStringResult = (variantResult: result<squiggleValue, error>) =>
  Reducer_Value.toStringResult(variantResult)

  @genType

@@ -1,9 +1,7 @@
  @genType type squiggleValue_Lambda = Reducer_T.lambdaValue //re-export

  @genType
- let toString = (v: squiggleValue_Lambda): string => Reducer_Value.toStringFunction(v)
+ let toString = (v: squiggleValue_Lambda): string => Reducer_Value.toStringLambda(v)

  @genType
- let parameters = (v: squiggleValue_Lambda): array<string> => {
- v.parameters
- }
+ let parameters = (v: squiggleValue_Lambda): array<string> => Reducer_Lambda.parameters(v)

@@ -1,6 +1,3 @@
- @genType type reducerErrorValue = ForTS_Reducer_ErrorValue.reducerErrorValue //re-export
- @genType type syntaxErrorLocation = ForTS_Reducer_ErrorValue.syntaxErrorLocation //re-export
-
  @genType type reducerProject = ForTS_ReducerProject.reducerProject //re-export
  @genType type squiggleValue = ForTS_SquiggleValue.squiggleValue //re-export
  @genType type squiggleValue_Array = ForTS_SquiggleValue_Array.squiggleValue_Array //re-export
@@ -1,5 +1,5 @@
  type internalExpressionValueType = Reducer_Value.internalExpressionValueType
- type errorValue = Reducer_ErrorValue.errorValue
+ type errorMessage = SqError.Message.t

  /*
  Function Registry "Type". A type, without any other information.

@@ -30,9 +30,9 @@ type fnDefinition = {
  inputs: array<frType>,
  run: (
  array<Reducer_T.value>,
- Reducer_T.environment,
+ Reducer_T.context,
  Reducer_T.reducerFn,
- ) => result<Reducer_T.value, errorValue>,
+ ) => result<Reducer_T.value, errorMessage>,
  }

  type function = {

@@ -61,6 +61,7 @@ module FRType = {
  let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
  `{${r->E.A2.fmap(input)->E.A2.joinWith(", ")}}`
  }

  | FRTypeArray(r) => `list(${toString(r)})`
  | FRTypeLambda => `lambda`
  | FRTypeString => `string`

@@ -122,19 +123,19 @@ module FnDefinition = {
  let run = (
  t: t,
  args: array<Reducer_T.value>,
- env: Reducer_T.environment,
+ context: Reducer_T.context,
  reducer: Reducer_T.reducerFn,
  ) => {
  switch t->isMatch(args) {
- | true => t.run(args, env, reducer)
+ | true => t.run(args, context, reducer)
  | false => REOther("Incorrect Types")->Error
  }
  }

  let make = (~name, ~inputs, ~run, ()): t => {
- name: name,
+ name,
- inputs: inputs,
+ inputs,
- run: run,
+ run,
  }
  }

@@ -160,14 +161,14 @@ module Function = {
  ~isExperimental=false,
  (),
  ): t => {
- name: name,
+ name,
- nameSpace: nameSpace,
+ nameSpace,
- definitions: definitions,
+ definitions,
- output: output,
+ output,
- examples: examples |> E.O.default([]),
+ examples: examples->E.O2.default([]),
- isExperimental: isExperimental,
+ isExperimental,
- requiresNamespace: requiresNamespace,
+ requiresNamespace,
- description: description,
+ description,
  }

  let toJson = (t: t): functionJson => {

@@ -203,15 +204,19 @@ module Registry = {
  fn.requiresNamespace ? [] : [def.name],
  ]->E.A.concatMany

- names->Belt.Array.reduce(acc, (acc, name) => {
+ names->Belt.Array.reduce(
+ acc,
+ (acc, name) => {
  switch acc->Belt.Map.String.get(name) {
  | Some(fns) => {
  let _ = fns->Js.Array2.push(def) // mutates the array, no need to update acc
  acc
  }

  | None => acc->Belt.Map.String.set(name, [def])
  }
- })
+ },
+ )
  })
  )
  }

@@ -225,9 +230,9 @@ module Registry = {
  registry,
  fnName: string,
  args: array<Reducer_T.value>,
- env: Reducer_T.environment,
+ context: Reducer_T.context,
  reducer: Reducer_T.reducerFn,
- ): result<Reducer_T.value, errorValue> => {
+ ): result<Reducer_T.value, errorMessage> => {
  switch Belt.Map.String.get(registry.fnNameDict, fnName) {
  | Some(definitions) => {
  let showNameMatchDefinitions = () => {

@@ -241,10 +246,11 @@ module Registry = {

  let match = definitions->Js.Array2.find(def => def->FnDefinition.isMatch(args))
  switch match {
- | Some(def) => def->FnDefinition.run(args, env, reducer)
+ | Some(def) => def->FnDefinition.run(args, context, reducer)
  | None => REOther(showNameMatchDefinitions())->Error
  }
  }

  | None => RESymbolNotFound(fnName)->Error
  }
  }
@@ -2,8 +2,8 @@ open FunctionRegistry_Core
  open Reducer_T

  let impossibleErrorString = "Wrong inputs / Logically impossible"
- let impossibleError: errorValue = impossibleErrorString->Reducer_ErrorValue.REOther
+ let impossibleError: errorMessage = impossibleErrorString->SqError.Message.REOther
- let wrapError = e => Reducer_ErrorValue.REOther(e)
+ let wrapError = e => SqError.Message.REOther(e)

  module Wrappers = {
  let symbolic = r => DistributionTypes.Symbolic(r)

@@ -34,6 +34,7 @@ module Prepare = {
  let n2 = map->Belt.Map.String.getExn(arg2)
  Ok([n1, n2])
  }

  | _ => Error(impossibleErrorString)
  }

@@ -45,6 +46,7 @@ module Prepare = {
  let n3 = map->Belt.Map.String.getExn(arg3)
  Ok([n1, n2, n3])
  }

  | _ => Error(impossibleErrorString)
  }
  }
@@ -44,4 +44,4 @@ let removeResult = ({namespace} as bindings: t): t => {

  let locals = ({namespace}: t): Reducer_T.namespace => namespace

- let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace: namespace, parent: None}
+ let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace, parent: None}
@@ -4,9 +4,13 @@ let defaultEnvironment: Reducer_T.environment = DistributionOperation.defaultEnv

  let createContext = (stdLib: Reducer_Namespace.t, environment: Reducer_T.environment): t => {
  {
+ frameStack: list{},
  bindings: stdLib->Reducer_Bindings.fromNamespace->Reducer_Bindings.extend,
- environment: environment,
+ environment,
+ inFunction: None,
  }
  }

- let createDefaultContext = (): t => createContext(SquiggleLibrary_StdLib.stdLib, defaultEnvironment)
+ let currentFunctionName = (t: t): string => {
+ t.inFunction->E.O2.fmap(Reducer_Lambda_T.name)->E.O2.default(Reducer_T.topFrameName)
+ }
@@ -1,27 +0,0 @@
- // types are disabled until review and rewrite for 0.5 interpreter compatibility
- /*
- module ProjectAccessorsT = ReducerProject_ProjectAccessors_T
- module T = Reducer_Dispatch_T
- module TypeChecker = Reducer_Type_TypeChecker
- open Reducer_Value
-
- type errorValue = Reducer_ErrorValue.errorValue
-
- let makeFromTypes = jumpTable => {
- let dispatchChainPiece: T.dispatchChainPiece = (
- (fnName, fnArgs): functionCall,
- accessors: ProjectAccessorsT.t,
- ) => {
- let jumpTableEntry = jumpTable->Js.Array2.find(elem => {
- let (candidName, candidType, _) = elem
- candidName == fnName && TypeChecker.checkITypeArgumentsBool(candidType, fnArgs)
- })
- switch jumpTableEntry {
- | Some((_, _, bridgeFn)) => bridgeFn(fnArgs, accessors)->Some
- | _ => None
- }
- }
- dispatchChainPiece
- }
-
- */
@@ -1,21 +0,0 @@
- // module ExpressionT = Reducer_Expression_T
- // module ProjectAccessorsT = ReducerProject_ProjectAccessors_T
-
- // // Each piece of the dispatch chain computes the result or returns None so that the chain can continue
- // type dispatchChainPiece = (
- // Reducer_Value.functionCall,
- // ProjectAccessorsT.t,
- // ) => option<result<Reducer_T.value, Reducer_ErrorValue.errorValue>>
-
- // type dispatchChainPieceWithReducer = (
- // Reducer_Value.functionCall,
- // ProjectAccessorsT.t,
- // Reducer_T.reducerFn,
- // ) => option<result<Reducer_T.value, Reducer_ErrorValue.errorValue>>
-
- // // This is a switch statement case implementation: get the arguments and compute the result
- // type genericIEvFunction = (
- // array<Reducer_T.value>,
- // ProjectAccessorsT.t,
- // ) => result<Reducer_T.value, Reducer_ErrorValue.errorValue>
@@ -1,83 +0,0 @@
- //TODO: Do not export here but in ForTS__Types
- @gentype.import("peggy") @genType.as("LocationRange")
- type syntaxErrorLocation
-
- @genType.opaque
- type errorValue =
- | REArityError(option<string>, int, int)
- | REArrayIndexNotFound(string, int)
- | REAssignmentExpected
- | REDistributionError(DistributionTypes.error)
- | REExpectedType(string, string)
- | REExpressionExpected
- | REFunctionExpected(string)
- | REFunctionNotFound(string)
- | REJavaScriptExn(option<string>, option<string>) // Javascript Exception
- | REMacroNotFound(string)
- | RENotAFunction(string)
- | REOperationError(Operation.operationError)
- | RERecordPropertyNotFound(string, string)
- | RESymbolNotFound(string)
- | RESyntaxError(string, option<syntaxErrorLocation>)
- | RETodo(string) // To do
- | REUnitNotFound(string)
- | RENeedToRun
- | REOther(string)
-
- type t = errorValue
-
- exception ErrorException(errorValue)
-
- let errorToString = err =>
- switch err {
- | REArityError(_oFnName, arity, usedArity) =>
- `${Js.String.make(arity)} arguments expected. Instead ${Js.String.make(
- usedArity,
- )} argument(s) were passed.`
- | REArrayIndexNotFound(msg, index) => `${msg}: ${Js.String.make(index)}`
- | REAssignmentExpected => "Assignment expected"
- | REExpressionExpected => "Expression expected"
- | REFunctionExpected(msg) => `Function expected: ${msg}`
- | REFunctionNotFound(msg) => `Function not found: ${msg}`
- | REDistributionError(err) => `Distribution Math Error: ${DistributionTypes.Error.toString(err)}`
- | REOperationError(err) => `Math Error: ${Operation.Error.toString(err)}`
- | REJavaScriptExn(omsg, oname) => {
- let answer = "JS Exception:"
- let answer = switch oname {
- | Some(name) => `${answer} ${name}`
- | _ => answer
- }
- let answer = switch omsg {
- | Some(msg) => `${answer}: ${msg}`
- | _ => answer
- }
- answer
- }
- | REMacroNotFound(macro) => `Macro not found: ${macro}`
- | RENotAFunction(valueString) => `${valueString} is not a function`
- | RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
- | RESymbolNotFound(symbolName) => `${symbolName} is not defined`
- | RESyntaxError(desc, _) => `Syntax Error: ${desc}`
- | RETodo(msg) => `TODO: ${msg}`
- | REExpectedType(typeName, valueString) => `Expected type: ${typeName} but got: ${valueString}`
- | REUnitNotFound(unitName) => `Unit not found: ${unitName}`
- | RENeedToRun => "Need to run"
- | REOther(msg) => `Error: ${msg}`
- }
-
- let fromException = exn =>
- switch exn {
- | ErrorException(e) => e
- | Js.Exn.Error(e) =>
- switch Js.Exn.message(e) {
- | Some(message) => REOther(message)
- | None =>
- switch Js.Exn.name(e) {
- | Some(name) => REOther(name)
- | None => REOther("Unknown error")
- }
- }
- | _e => REOther("Unknown error")
- }
-
- let toException = (errorValue: t) => raise(ErrorException(errorValue))
@@ -1,3 +0,0 @@
- // There are switch statement cases in the code which are impossible to reach by design.
- // ImpossibleException is a sign of programming error.
- exception ImpossibleException(string)
@@ -1,16 +1,25 @@
  module Bindings = Reducer_Bindings
- module Lambda = Reducer_Expression_Lambda
  module Result = Belt.Result
  module T = Reducer_T

- type errorValue = Reducer_ErrorValue.errorValue
+ let toLocation = (expression: T.expression): Reducer_Peggy_Parse.location => {
+ expression.ast.location
+ }
+
+ let throwFrom = (error: SqError.Message.t, expression: T.expression, context: T.context) =>
+ error->SqError.throwMessageWithFrameStack(
+ context.frameStack->Reducer_FrameStack.extend(
+ context->Reducer_Context.currentFunctionName,
+ Some(expression->toLocation),
+ ),
+ )

  /*
  Recursively evaluate the expression
  */
  let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
  // Js.log(`reduce: ${expression->Reducer_Expression_T.toString}`)
- switch expression {
+ switch expression.content {
  | T.EBlock(statements) => {
  let innerContext = {...context, bindings: context.bindings->Bindings.extend}
  let (value, _) =

@@ -49,7 +58,7 @@ let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
  let (key, _) = eKey->evaluate(context)
  let keyString = switch key {
  | IEvString(s) => s
- | _ => REOther("Record keys must be strings")->Reducer_ErrorValue.ErrorException->raise
+ | _ => REOther("Record keys must be strings")->throwFrom(expression, context)
  }
  let (value, _) = eValue->evaluate(context)
  (keyString, value)

@@ -73,7 +82,7 @@ let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
  | T.ESymbol(name) =>
  switch context.bindings->Bindings.get(name) {
  | Some(v) => (v, context)
- | None => Reducer_ErrorValue.RESymbolNotFound(name)->Reducer_ErrorValue.ErrorException->raise
+ | None => RESymbolNotFound(name)->throwFrom(expression, context)
  }

  | T.EValue(value) => (value, context)

@@ -82,28 +91,40 @@ let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
  let (predicateResult, _) = predicate->evaluate(context)
  switch predicateResult {
  | T.IEvBool(value) => (value ? trueCase : falseCase)->evaluate(context)
- | _ => REExpectedType("Boolean", "")->Reducer_ErrorValue.ErrorException->raise
+ | _ => REExpectedType("Boolean", "")->throwFrom(expression, context)
  }
  }

- | T.ELambda(parameters, body) => (
+ | T.ELambda(parameters, body, name) => (
- Lambda.makeLambda(parameters, context.bindings, body)->T.IEvLambda,
+ Reducer_Lambda.makeLambda(
+ name,
+ parameters,
+ context.bindings,
+ body,
+ expression->toLocation,
+ )->T.IEvLambda,
  context,
  )

  | T.ECall(fn, args) => {
  let (lambda, _) = fn->evaluate(context)
- let argValues = Js.Array2.map(args, arg => {
+ let argValues = Belt.Array.map(args, arg => {
  let (argValue, _) = arg->evaluate(context)
  argValue
  })
  switch lambda {
- | T.IEvLambda(lambda) => (
+ | T.IEvLambda(lambda) => {
- Lambda.doLambdaCall(lambda, argValues, context.environment, evaluate),
+ let result = Reducer_Lambda.doLambdaCallFrom(
+ lambda,
+ argValues,
  context,
+ evaluate,
+ Some(expression->toLocation), // we have to pass the location of a current expression here, to put it on frameStack
  )
- | _ =>
- RENotAFunction(lambda->Reducer_Value.toString)->Reducer_ErrorValue.ErrorException->raise
+ (result, context)
+ }
+
+ | _ => RENotAFunction(lambda->Reducer_Value.toString)->throwFrom(expression, context)
  }
  }
  }

@@ -112,19 +133,22 @@ let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
  module BackCompatible = {
  // Those methods are used to support the existing tests
  // If they are used outside limited testing context, error location reporting will fail
- let parse = (peggyCode: string): result<T.expression, errorValue> =>
+ let parse = (peggyCode: string): result<T.expression, Reducer_Peggy_Parse.parseError> =>
- peggyCode->Reducer_Peggy_Parse.parse->Result.map(Reducer_Peggy_ToExpression.fromNode)
+ peggyCode->Reducer_Peggy_Parse.parse("main")->Result.map(Reducer_Peggy_ToExpression.fromNode)

- let evaluate = (expression: T.expression): result<T.value, errorValue> => {
+ let createDefaultContext = () =>
- let context = Reducer_Context.createDefaultContext()
+ Reducer_Context.createContext(SquiggleLibrary_StdLib.stdLib, Reducer_Context.defaultEnvironment)

+ let evaluate = (expression: T.expression): result<T.value, SqError.t> => {
+ let context = createDefaultContext()
  try {
  let (value, _) = expression->evaluate(context)
  value->Ok
  } catch {
- | exn => Reducer_ErrorValue.fromException(exn)->Error
+ | exn => exn->SqError.fromException->Error
  }
  }

- let evaluateString = (peggyCode: string): result<T.value, errorValue> =>
+ let evaluateString = (peggyCode: string): result<T.value, SqError.t> =>
- parse(peggyCode)->Result.flatMap(evaluate)
+ parse(peggyCode)->E.R2.errMap(e => e->SqError.fromParseError)->Result.flatMap(evaluate)
  }
@@ -1,16 +1,19 @@
- module BErrorValue = Reducer_ErrorValue
  module T = Reducer_T

- type errorValue = BErrorValue.errorValue
  type expression = Reducer_T.expression
+ type expressionContent = Reducer_T.expressionContent

- let eArray = (anArray: array<T.expression>) => anArray->T.EArray
+ let eArray = (anArray: array<T.expression>): expressionContent => anArray->T.EArray

  let eBool = aBool => aBool->T.IEvBool->T.EValue

- let eCall = (fn: expression, args: array<expression>): expression => T.ECall(fn, args)
+ let eCall = (fn: expression, args: array<expression>): expressionContent => T.ECall(fn, args)

- let eLambda = (parameters: array<string>, expr: expression) => T.ELambda(parameters, expr)
+ let eLambda = (
+ parameters: array<string>,
+ expr: expression,
+ name: option<string>,
+ ): expressionContent => T.ELambda(parameters, expr, name)

  let eNumber = aNumber => aNumber->T.IEvNumber->T.EValue

@@ -18,13 +21,13 @@ let eRecord = (aMap: array<(T.expression, T.expression)>) => aMap->T.ERecord

  let eString = aString => aString->T.IEvString->T.EValue

- let eSymbol = (name: string): expression => T.ESymbol(name)
+ let eSymbol = (name: string): expressionContent => T.ESymbol(name)

- let eBlock = (exprs: array<expression>): expression => T.EBlock(exprs)
+ let eBlock = (exprs: array<expression>): expressionContent => T.EBlock(exprs)

- let eProgram = (exprs: array<expression>): expression => T.EProgram(exprs)
+ let eProgram = (exprs: array<expression>): expressionContent => T.EProgram(exprs)

- let eLetStatement = (symbol: string, valueExpression: expression): expression => T.EAssign(
+ let eLetStatement = (symbol: string, valueExpression: expression): expressionContent => T.EAssign(
  symbol,
  valueExpression,
  )

@@ -33,11 +36,8 @@ let eTernary = (
  predicate: expression,
  trueCase: expression,
  falseCase: expression,
- ): expression => T.ETernary(predicate, trueCase, falseCase)
+ ): expressionContent => T.ETernary(predicate, trueCase, falseCase)

- let eIdentifier = (name: string): expression => name->T.ESymbol
+ let eIdentifier = (name: string): expressionContent => name->T.ESymbol

- // let eTypeIdentifier = (name: string): expression =>
- // name->T.IEvTypeIdentifier->T.EValue
-
- let eVoid: expression = T.IEvVoid->T.EValue
+ let eVoid: expressionContent = T.IEvVoid->T.EValue
@@ -1,60 +0,0 @@
- module ErrorValue = Reducer_ErrorValue
-
- let doLambdaCall = (
- lambdaValue: Reducer_T.lambdaValue,
- args,
- environment: Reducer_T.environment,
- reducer: Reducer_T.reducerFn,
- ): Reducer_T.value => {
- lambdaValue.body(args, environment, reducer)
- }
-
- let makeLambda = (
- parameters: array<string>,
- bindings: Reducer_T.bindings,
- body: Reducer_T.expression,
- ): Reducer_T.lambdaValue => {
- // TODO - clone bindings to avoid later redefinitions affecting lambdas?
-
- // Note: with this implementation, FFI lambdas (created by other methods than calling `makeLambda`) are allowed to violate the rules, pollute the bindings, etc.
- // Not sure yet if that's a bug or a feature.
- // FunctionRegistry functions are unaffected by this, their API is too limited.
-
- let lambda = (
- arguments: array<Reducer_T.value>,
- environment: Reducer_T.environment,
- reducer: Reducer_T.reducerFn,
- ) => {
- let argsLength = arguments->Js.Array2.length
- let parametersLength = parameters->Js.Array2.length
- if argsLength !== parametersLength {
- ErrorValue.REArityError(None, parametersLength, argsLength)->ErrorValue.ErrorException->raise
- }
-
- let localBindings = bindings->Reducer_Bindings.extend
- let localBindingsWithParameters = parameters->Belt.Array.reduceWithIndex(localBindings, (
- currentBindings,
- parameter,
- index,
- ) => {
- currentBindings->Reducer_Bindings.set(parameter, arguments[index])
- })
-
- let (value, _) = reducer(
- body,
- {bindings: localBindingsWithParameters, environment: environment},
- )
- value
- }
-
- {
- // context: bindings,
- body: lambda,
- parameters: parameters,
- }
- }
-
- let makeFFILambda = (body: Reducer_T.lambdaBody): Reducer_T.lambdaValue => {
- body: body,
- parameters: ["..."],
- }
@@ -12,7 +12,7 @@ let semicolonJoin = values =>
  Converts the expression to String
  */
  let rec toString = (expression: t) =>
- switch expression {
+ switch expression.content {
  | EBlock(statements) =>
  `{${Js.Array2.map(statements, aValue => toString(aValue))->semicolonJoin}}`
  | EProgram(statements) => Js.Array2.map(statements, aValue => toString(aValue))->semicolonJoin

@@ -24,37 +24,23 @@ let rec toString = (expression: t) =>
  `${predicate->toString} ? (${trueCase->toString}) : (${falseCase->toString})`
  | EAssign(name, value) => `${name} = ${value->toString}`
  | ECall(fn, args) => `(${fn->toString})(${args->Js.Array2.map(toString)->commaJoin})`
- | ELambda(parameters, body) => `{|${parameters->commaJoin}| ${body->toString}}`
+ | ELambda(parameters, body, _) => `{|${parameters->commaJoin}| ${body->toString}}`
  | EValue(aValue) => Reducer_Value.toString(aValue)
  }

  let toStringResult = codeResult =>
  switch codeResult {
  | Ok(a) => `Ok(${toString(a)})`
- | Error(m) => `Error(${Reducer_ErrorValue.errorToString(m)})`
+ | Error(m) => `Error(${Reducer_Peggy_Parse.toStringError(m)})`
  }

  let toStringResultOkless = codeResult =>
  switch codeResult {
  | Ok(a) => toString(a)
- | Error(m) => `Error(${Reducer_ErrorValue.errorToString(m)})`
+ | Error(m) => `Error(${Reducer_Peggy_Parse.toStringError(m)})`
  }

  let inspect = (expr: t): t => {
  Js.log(toString(expr))
  expr
  }

- let inspectResult = (r: result<t, Reducer_ErrorValue.errorValue>): result<
- t,
- Reducer_ErrorValue.errorValue,
- > => {
- Js.log(toStringResult(r))
- r
- }
-
- let resultToValue = (rExpression: result<t, Reducer_ErrorValue.t>): t =>
- switch rExpression {
- | Ok(expression) => expression
- | Error(errorValue) => Reducer_ErrorValue.toException(errorValue)
- }
@@ -0,0 +1,51 @@
+// This is called "frameStack" and not "callStack", because the last frame in errors is often not a function call.
+// A "frame" is a pair of a scope (function or top-level scope, currently stored as a string) and a location inside it.
+// See this comment to deconfuse about what a frame is: https://github.com/quantified-uncertainty/squiggle/pull/1172#issuecomment-1264115038
+type t = Reducer_T.frameStack
+
+module Frame = {
+  let toString = ({name, location}: Reducer_T.frame) =>
+    name ++
+    switch location {
+    | Some(location) =>
+      ` at line ${location.start.line->Js.Int.toString}, column ${location.start.column->Js.Int.toString}` // TODO - source id?
+    | None => ""
+    }
+
+  @genType
+  let getLocation = (t: Reducer_T.frame): option<Reducer_Peggy_Parse.location> => t.location
+
+  @genType
+  let getName = (t: Reducer_T.frame): string => t.name
+}
+
+let make = (): t => list{}
+
+let extend = (t: t, name: string, location: option<Reducer_Peggy_Parse.location>) =>
+  t->Belt.List.add({
+    name,
+    location,
+  })
+
+// this is useful for SyntaxErrors
+let makeSingleFrameStack = (location: Reducer_Peggy_Parse.location): t =>
+  make()->extend(Reducer_T.topFrameName, Some(location))
+
+// this includes the left offset because it's mostly used in SqError.toStringWithStackTrace
+let toString = (t: t) =>
+  t
+  ->Belt.List.map(s => "  " ++ s->Frame.toString ++ "\n")
+  ->Belt.List.toArray
+  ->Js.Array2.joinWith("")
+
+@genType
+let toFrameArray = (t: t): array<Reducer_T.frame> => t->Belt.List.toArray
+
+@genType
+let getTopFrame = (t: t): option<Reducer_T.frame> => t->Belt.List.head
+
+let isEmpty = (t: t): bool =>
+  switch t->Belt.List.head {
+  | Some(_) => true
+  | None => false
+  }

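For orientation: the new frame-stack module above keeps a list of frames, each pairing a scope name with an optional parse location, and `Frame.toString`/`toString` render them as an indented trace for error messages. A rough Squiggle-level sketch of when frames accumulate (an illustrative reading of the code above, not captured interpreter output; exact names and formatting may differ):

```squiggle
// Hypothetical example: each call in the chain contributes one frame
// (a name plus the call-site line/column recorded via `extend` above),
// so an error raised inside `inner` carries frames for the inner and
// outer call sites in addition to the top-level frame.
inner(x) = x + someUndefinedVariable
outer(x) = inner(x)
outer(1)
```
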
@@ -0,0 +1,95 @@
+type t = Reducer_T.lambdaValue
+
+// user-defined functions, i.e. `add2 = {|x, y| x + y}`, are built by this method
+let makeLambda = (
+  name: option<string>,
+  parameters: array<string>,
+  bindings: Reducer_T.bindings,
+  body: Reducer_T.expression,
+  location: Reducer_Peggy_Parse.location,
+): t => {
+  let lambda = (
+    arguments: array<Reducer_T.value>,
+    context: Reducer_T.context,
+    reducer: Reducer_T.reducerFn,
+  ) => {
+    let argsLength = arguments->E.A.length
+    let parametersLength = parameters->E.A.length
+    if argsLength !== parametersLength {
+      SqError.Message.REArityError(None, parametersLength, argsLength)->SqError.Message.throw
+    }
+
+    // create new bindings scope - technically not necessary, since bindings are immutable, but might help with debugging/new features in the future
+    let localBindings = bindings->Reducer_Bindings.extend
+
+    let localBindingsWithParameters = parameters->Belt.Array.reduceWithIndex(localBindings, (
+      currentBindings,
+      parameter,
+      index,
+    ) => {
+      currentBindings->Reducer_Bindings.set(parameter, arguments[index])
+    })
+
+    let lambdaContext: Reducer_T.context = {
+      bindings: localBindingsWithParameters, // based on bindings at the moment of lambda creation
+      environment: context.environment, // environment at the moment when lambda is called
+      frameStack: context.frameStack, // already extended in `doLambdaCall`
+      inFunction: context.inFunction, // already updated in `doLambdaCall`
+    }
+
+    let (value, _) = reducer(body, lambdaContext)
+    value
+  }
+
+  FnLambda({
+    // context: bindings,
+    name,
+    body: lambda,
+    parameters,
+    location,
+  })
+}
+
+// stdlib functions (everything in FunctionRegistry) are built by this method. Body is generated in SquiggleLibrary_StdLib.res
+let makeFFILambda = (name: string, body: Reducer_T.lambdaBody): t => FnBuiltin({
+  // Note: current bindings could be accidentally exposed here through context (compare with native lambda implementation above, where we override them with local bindings).
+  // But FunctionRegistry API is too limited for that to matter. Please take care not to violate that in the future by accident.
+  body,
+  name,
+})
+
+// this function doesn't scale to FunctionRegistry's polymorphic functions
+let parameters = (t: t): array<string> => {
+  switch t {
+  | FnLambda({parameters}) => parameters
+  | FnBuiltin(_) => ["..."]
+  }
+}
+
+let doLambdaCallFrom = (
+  t: t,
+  args: array<Reducer_T.value>,
+  context: Reducer_T.context,
+  reducer,
+  location: option<Reducer_Peggy_Parse.location>,
+) => {
+  let newContext = {
+    ...context,
+    frameStack: context.frameStack->Reducer_FrameStack.extend(
+      context->Reducer_Context.currentFunctionName,
+      location,
+    ),
+    inFunction: Some(t),
+  }
+
+  SqError.rethrowWithFrameStack(() => {
+    switch t {
+    | FnLambda({body}) => body(args, newContext, reducer)
+    | FnBuiltin({body}) => body(args, newContext, reducer)
+    }
+  }, newContext.frameStack)
+}
+
+let doLambdaCall = (t: t, args, context, reducer) => {
+  doLambdaCallFrom(t, args, context, reducer, None)
+}

@@ -0,0 +1,8 @@
+type t = Reducer_T.lambdaValue
+
+let name = (t: t): string => {
+  switch t {
+  | FnLambda({name}) => name->E.O2.default("<anonymous>")
+  | FnBuiltin({name}) => name
+  }
+}

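Taken together, the two new lambda modules above give every user-defined function an optional name, its captured bindings, and a definition location, and they check arity before evaluating the body. A hedged Squiggle sketch of what that implies (my reading of the code above, not interpreter output):

```squiggle
f(x) = x + 1      // a definition statement supplies `f` as the lambda's name (see the grammar change below)
g = {|x| x + 1}   // a bare lambda literal is built with name = None and prints as "<anonymous>"
f(1)              // argument count matches, so the body runs in a fresh child scope of the captured bindings
// f(1, 2)        // would trip the arity check: REArityError(None, 1, 2) is raised before the body is evaluated
```
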
@@ -7,26 +7,26 @@
 start
   = _nl start:outerBlock _nl finalComment? {return start}

-zeroOMoreArgumentsBlockOrExpression = innerBlockOrExpression / lambda
+zeroOMoreArgumentsBlockOrExpression = lambda / innerBlockOrExpression

 outerBlock
   = statements:array_statements finalExpression: (statementSeparator @expression)?
     { if (finalExpression) statements.push(finalExpression)
-      return h.nodeProgram(statements) }
+      return h.nodeProgram(statements, location()) }
   / finalExpression: expression
-    { return h.nodeProgram([finalExpression]) }
+    { return h.nodeProgram([finalExpression], location()) }

 innerBlockOrExpression
   = quotedInnerBlock
   / finalExpression: expression
-    { return h.nodeBlock([finalExpression])}
+    { return h.nodeBlock([finalExpression], location())}

 quotedInnerBlock
   = '{' _nl statements:array_statements finalExpression: (statementSeparator @expression) _nl '}'
     { if (finalExpression) statements.push(finalExpression)
-      return h.nodeBlock(statements) }
+      return h.nodeBlock(statements, location()) }
   / '{' _nl finalExpression: expression _nl '}'
-    { return h.nodeBlock([finalExpression]) }
+    { return h.nodeBlock([finalExpression], location()) }

 array_statements
   = head:statement tail:(statementSeparator @array_statements )
@@ -42,16 +42,16 @@ statement
 voidStatement
   = "call" _nl value:zeroOMoreArgumentsBlockOrExpression
     { var variable = h.nodeIdentifier("_", location());
-      return h.nodeLetStatement(variable, value); }
+      return h.nodeLetStatement(variable, value, location()); }

 letStatement
   = variable:variable _ assignmentOp _nl value:zeroOMoreArgumentsBlockOrExpression
-    { return h.nodeLetStatement(variable, value) }
+    { return h.nodeLetStatement(variable, value, location()) }

 defunStatement
   = variable:variable '(' _nl args:array_parameters _nl ')' _ assignmentOp _nl body:innerBlockOrExpression
-    { var value = h.nodeLambda(args, body)
-      return h.nodeLetStatement(variable, value) }
+    { var value = h.nodeLambda(args, body, location(), variable)
+      return h.nodeLetStatement(variable, value, location()) }

 assignmentOp "assignment" = '='

@@ -67,16 +67,16 @@ ifthenelse
   = 'if' __nl condition:logicalAdditive
     __nl 'then' __nl trueExpression:innerBlockOrExpression
     __nl 'else' __nl falseExpression:(ifthenelse/innerBlockOrExpression)
-    { return h.nodeTernary(condition, trueExpression, falseExpression) }
+    { return h.nodeTernary(condition, trueExpression, falseExpression, location()) }

 ternary
   = condition:logicalAdditive _ '?' _nl trueExpression:logicalAdditive _ ':' _nl falseExpression:(ternary/logicalAdditive)
-    { return h.nodeTernary(condition, trueExpression, falseExpression) }
+    { return h.nodeTernary(condition, trueExpression, falseExpression, location()) }

 logicalAdditive
   = head:logicalMultiplicative tail:(_ operator:logicalAdditiveOp _nl arg:logicalMultiplicative {return {operator: operator, right: arg}})*
   { return tail.reduce(function(result, element) {
-    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right])
+    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right], location())
   }, head)}

 logicalAdditiveOp "operator" = '||'
@@ -85,29 +85,37 @@ logicalAdditive
 logicalMultiplicative
   = head:equality tail:(_ operator:logicalMultiplicativeOp _nl arg:equality {return {operator: operator, right: arg}})*
   { return tail.reduce(function(result, element) {
-    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right])
+    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right], location())
   }, head)}

 logicalMultiplicativeOp "operator" = '&&'

 equality
   = left:relational _ operator:equalityOp _nl right:relational
-  { return h.makeFunctionCall(h.toFunction[operator], [left, right])}
+  { return h.makeFunctionCall(h.toFunction[operator], [left, right], location())}
   / relational

 equalityOp "operator" = '=='/'!='

 relational
-  = left:additive _ operator:relationalOp _nl right:additive
-  { return h.makeFunctionCall(h.toFunction[operator], [left, right])}
-  / additive
+  = left:credibleInterval _ operator:relationalOp _nl right:credibleInterval
+  { return h.makeFunctionCall(h.toFunction[operator], [left, right], location())}
+  / credibleInterval

 relationalOp "operator" = '<='/'<'/'>='/'>'

+credibleInterval
+  = head:additive tail:(__ operator:credibleIntervalOp __nl arg:additive {return {operator: operator, right: arg}})*
+  { return tail.reduce(function(result, element) {
+    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right], location())
+  }, head)}
+
+credibleIntervalOp "operator" = 'to'
+
 additive
   = head:multiplicative tail:(_ operator:additiveOp _nl arg:multiplicative {return {operator: operator, right: arg}})*
   { return tail.reduce(function(result, element) {
-    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right])
+    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right], location())
   }, head)}

 additiveOp "operator" = '+' / '-' / '.+' / '.-'
@@ -115,31 +123,23 @@ additive
 multiplicative
   = head:power tail:(_ operator:multiplicativeOp _nl arg:power {return {operator: operator, right: arg}})*
   { return tail.reduce(function(result, element) {
-    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right])
+    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right], location())
   }, head)}

 multiplicativeOp "operator" = '*' / '/' / '.*' / './'

 power
-  = head:credibleInterval tail:(_ operator:powerOp _nl arg:credibleInterval {return {operator: operator, right: arg}})*
+  = head:chainFunctionCall tail:(_ operator:powerOp _nl arg:chainFunctionCall {return {operator: operator, right: arg}})*
   { return tail.reduce(function(result, element) {
-    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right])
+    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right], location())
   }, head)}

 powerOp "operator" = '^' / '.^'

-credibleInterval
-  = head:chainFunctionCall tail:(__ operator:credibleIntervalOp __nl arg:chainFunctionCall {return {operator: operator, right: arg}})*
-  { return tail.reduce(function(result, element) {
-    return h.makeFunctionCall(h.toFunction[element.operator], [result, element.right])
-  }, head)}
-
-credibleIntervalOp "operator" = 'to'
-
 chainFunctionCall
   = head:unary tail:(_ ('->'/'|>') _nl chained:chainedFunction {return chained})*
   { return tail.reduce(function(result, element) {
-    return h.makeFunctionCall(element.fnName, [result, ...element.args])
+    return h.makeFunctionCall(element.fnName, [result, ...element.args], location())
   }, head)}

 chainedFunction
@@ -154,7 +154,7 @@ chainFunctionCall

 unary
   = unaryOperator:unaryOperator _nl right:(unary/postOperator)
-  { return h.makeFunctionCall(h.unaryToFunction[unaryOperator], [right])}
+  { return h.makeFunctionCall(h.unaryToFunction[unaryOperator], [right], location())}
   / postOperator

 unaryOperator "unary operator"
@@ -169,17 +169,17 @@ collectionElement
   tail:(
     _ '[' _nl arg:expression _nl ']' {return {fn: h.postOperatorToFunction['[]'], args: [arg]}}
   / _ '(' _nl args:array_functionArguments _nl ')' {return {fn: h.postOperatorToFunction['()'], args: args}}
-  / '.' arg:$dollarIdentifier {return {fn: h.postOperatorToFunction['[]'], args: [h.nodeString(arg)]}}
+  / '.' arg:$dollarIdentifier {return {fn: h.postOperatorToFunction['[]'], args: [h.nodeString(arg, location())]}}
   )*
   { return tail.reduce(function(result, element) {
-    return h.makeFunctionCall(element.fn, [result, ...element.args])
+    return h.makeFunctionCall(element.fn, [result, ...element.args], location())
   }, head)}

 array_functionArguments
   = head:expression tail:(_ ',' _nl @expression)*
   { return [head, ...tail]; }
   / ""
-  {return [h.nodeVoid()];}
+  {return [h.nodeVoid(location())];}

 atom
   = '(' _nl expression:expression _nl ')' {return expression}
@@ -195,7 +195,7 @@ basicLiteral
   / voidLiteral

 voidLiteral 'void'
-  = "()" {return h.nodeVoid();}
+  = "()" {return h.nodeVoid(location());}

 variable = dollarIdentifierWithModule / dollarIdentifier

@@ -221,36 +221,36 @@ dollarIdentifier '$identifier'
   = ([\$_a-z]+[\$_a-z0-9]i*) {return h.nodeIdentifier(text(), location())}

 moduleIdentifier 'identifier'
-  = ([A-Z]+[_a-z0-9]i*) {return h.nodeModuleIdentifier(text())}
+  = ([A-Z]+[_a-z0-9]i*) {return h.nodeModuleIdentifier(text(), location())}


 string 'string'
-  = characters:("'" @([^'])* "'") {return h.nodeString(characters.join(''))}
-  / characters:('"' @([^"])* '"') {return h.nodeString(characters.join(''))}
+  = characters:("'" @([^'])* "'") {return h.nodeString(characters.join(''), location())}
+  / characters:('"' @([^"])* '"') {return h.nodeString(characters.join(''), location())}

 number = number:(float / integer) unit:unitIdentifier?
   {
     if (unit === null)
       { return number }
     else
-      { return h.makeFunctionCall('fromUnit_'+unit.value, [number])
+      { return h.makeFunctionCall('fromUnit_'+unit.value, [number], location())
       }
   }

 integer 'integer'
   = d+ !"\." ![e]i
-  { return h.nodeInteger(parseInt(text()))}
+  { return h.nodeInteger(parseInt(text()), location())}

 float 'float'
   = $(((d+ "\." d*) / ("\." d+)) floatExponent? / d+ floatExponent)
-  { return h.nodeFloat(parseFloat(text()))}
+  { return h.nodeFloat(parseFloat(text()), location())}

 floatExponent = [e]i '-'? d+
 d = [0-9]

 boolean 'boolean'
   = ('true'/'false') ! [a-z]i ! [_$]
-  { return h.nodeBoolean(text() === 'true')}
+  { return h.nodeBoolean(text() === 'true', location())}

 valueConstructor
   = recordConstructor
@@ -261,15 +261,15 @@ valueConstructor
 lambda
   = '{' _nl '|' _nl args:array_parameters _nl '|' _nl statements:array_statements finalExpression: (statementSeparator @expression) _nl '}'
   { statements.push(finalExpression)
-    return h.nodeLambda(args, h.nodeBlock(statements)) }
+    return h.nodeLambda(args, h.nodeBlock(statements, location()), location(), undefined) }
   / '{' _nl '|' _nl args:array_parameters _nl '|' _nl finalExpression: expression _nl '}'
-  { return h.nodeLambda(args, finalExpression) }
+  { return h.nodeLambda(args, finalExpression, location(), undefined) }

 arrayConstructor 'array'
   = '[' _nl ']'
-  { return h.constructArray([]); }
+  { return h.constructArray([], location()); }
   / '[' _nl args:array_elements _nl ']'
-  { return h.constructArray(args); }
+  { return h.constructArray(args, location()); }

 array_elements
   = head:expression tail:(_ ',' _nl @expression)*
@@ -277,7 +277,7 @@ arrayConstructor 'array'

 recordConstructor 'record'
   = '{' _nl args:array_recordArguments _nl end_of_record
-  { return h.constructRecord(args); }
+  { return h.constructRecord(args, location()); }

 end_of_record
   = '}'
@@ -289,7 +289,7 @@ recordConstructor 'record'

 keyValuePair
   = key:expression _ ':' _nl value:expression
-  { return h.nodeKeyValue(key, value)}
+  { return h.nodeKeyValue(key, value, location())}

 // Separators

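Besides threading `location()` through every node constructor, the operator hunks above move the `credibleInterval` rule from below `power` up to just below `relational`, so `to` now binds more loosely than the arithmetic operators. A small Squiggle sketch of the precedence these rules imply (my reading of the grammar, not parser output):

```squiggle
a = 1 to 2 + 3      // with credibleInterval sitting above additive, this groups as 1 to (2 + 3)
b = 2 ^ 3 to 4 * 5  // arithmetic binds tighter than `to`: (2 ^ 3) to (4 * 5)
```
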
@@ -1,22 +1,37 @@
 module Extra = Reducer_Extra
-open Reducer_ErrorValue

-type node = {"type": string}
+@genType
+type locationPoint = {
+  line: int,
+  column: int,
+}
+@genType
+type location = {
+  source: string,
+  start: locationPoint,
+  end: locationPoint,
+}

-@module("./Reducer_Peggy_GeneratedParser.js") external parse__: string => node = "parse"
+type node = {"type": string, "location": location}

-type withLocation = {"location": Reducer_ErrorValue.syntaxErrorLocation}
+type parseError = SyntaxError(string, location)
+
+type parseResult = result<node, parseError>
+
+@module("./Reducer_Peggy_GeneratedParser.js")
+external parse__: (string, {"grammarSource": string}) => node = "parse"
+
+type withLocation = {"location": location}
 external castWithLocation: Js.Exn.t => withLocation = "%identity"

-let syntaxErrorToLocation = (error: Js.Exn.t): Reducer_ErrorValue.syntaxErrorLocation =>
-  castWithLocation(error)["location"]
+let syntaxErrorToLocation = (error: Js.Exn.t): location => castWithLocation(error)["location"]

-let parse = (expr: string): result<node, errorValue> =>
+let parse = (expr: string, source: string): parseResult =>
   try {
-    Ok(parse__(expr))
+    Ok(parse__(expr, {"grammarSource": source}))
   } catch {
   | Js.Exn.Error(obj) =>
-    RESyntaxError(Belt.Option.getExn(Js.Exn.message(obj)), syntaxErrorToLocation(obj)->Some)->Error
+    SyntaxError(Belt.Option.getExn(Js.Exn.message(obj)), syntaxErrorToLocation(obj))->Error
   }

 type nodeBlock = {...node, "statements": array<node>}
@@ -29,32 +44,35 @@ type nodeIdentifier = {...node, "value": string}
 type nodeInteger = {...node, "value": int}
 type nodeKeyValue = {...node, "key": node, "value": node}
 type nodeRecord = {...node, "elements": array<nodeKeyValue>}
-type nodeLambda = {...node, "args": array<nodeIdentifier>, "body": node}
+type nodeLambda = {...node, "args": array<nodeIdentifier>, "body": node, "name": option<string>}
 type nodeLetStatement = {...node, "variable": nodeIdentifier, "value": node}
 type nodeModuleIdentifier = {...node, "value": string}
 type nodeString = {...node, "value": string}
 type nodeTernary = {...node, "condition": node, "trueExpression": node, "falseExpression": node}
-// type nodeTypeIdentifier = {...node, "value": string}
 type nodeVoid = node

-type peggyNode =
-  | PgNodeBlock(nodeBlock)
-  | PgNodeProgram(nodeProgram)
-  | PgNodeArray(nodeArray)
-  | PgNodeRecord(nodeRecord)
-  | PgNodeBoolean(nodeBoolean)
-  | PgNodeFloat(nodeFloat)
-  | PgNodeCall(nodeCall)
-  | PgNodeIdentifier(nodeIdentifier)
-  | PgNodeInteger(nodeInteger)
-  | PgNodeKeyValue(nodeKeyValue)
-  | PgNodeLambda(nodeLambda)
-  | PgNodeLetStatement(nodeLetStatement)
-  | PgNodeModuleIdentifier(nodeModuleIdentifier)
-  | PgNodeString(nodeString)
-  | PgNodeTernary(nodeTernary)
-  // | PgNodeTypeIdentifier(nodeTypeIdentifier)
-  | PgNodeVoid(nodeVoid)
+type astContent =
+  | ASTBlock(nodeBlock)
+  | ASTProgram(nodeProgram)
+  | ASTArray(nodeArray)
+  | ASTRecord(nodeRecord)
+  | ASTBoolean(nodeBoolean)
+  | ASTFloat(nodeFloat)
+  | ASTCall(nodeCall)
+  | ASTIdentifier(nodeIdentifier)
+  | ASTInteger(nodeInteger)
+  | ASTKeyValue(nodeKeyValue)
+  | ASTLambda(nodeLambda)
+  | ASTLetStatement(nodeLetStatement)
+  | ASTModuleIdentifier(nodeModuleIdentifier)
+  | ASTString(nodeString)
+  | ASTTernary(nodeTernary)
+  | ASTVoid(nodeVoid)
+
+type ast = {
+  location: location,
+  content: astContent,
+}

 external castNodeBlock: node => nodeBlock = "%identity"
 external castNodeProgram: node => nodeProgram = "%identity"
@@ -71,80 +89,92 @@ external castNodeLetStatement: node => nodeLetStatement = "%identity"
 external castNodeModuleIdentifier: node => nodeModuleIdentifier = "%identity"
 external castNodeString: node => nodeString = "%identity"
 external castNodeTernary: node => nodeTernary = "%identity"
-// external castNodeTypeIdentifier: node => nodeTypeIdentifier = "%identity"
 external castNodeVoid: node => nodeVoid = "%identity"

 exception UnsupportedPeggyNodeType(string) // This should never happen; programming error
-let castNodeType = (node: node) =>
-  switch node["type"] {
-  | "Block" => node->castNodeBlock->PgNodeBlock
-  | "Program" => node->castNodeBlock->PgNodeProgram
-  | "Array" => node->castNodeArray->PgNodeArray
-  | "Record" => node->castNodeRecord->PgNodeRecord
-  | "Boolean" => node->castNodeBoolean->PgNodeBoolean
-  | "Call" => node->castNodeCall->PgNodeCall
-  | "Float" => node->castNodeFloat->PgNodeFloat
-  | "Identifier" => node->castNodeIdentifier->PgNodeIdentifier
-  | "Integer" => node->castNodeInteger->PgNodeInteger
-  | "KeyValue" => node->castNodeKeyValue->PgNodeKeyValue
-  | "Lambda" => node->castNodeLambda->PgNodeLambda
-  | "LetStatement" => node->castNodeLetStatement->PgNodeLetStatement
-  | "ModuleIdentifier" => node->castNodeModuleIdentifier->PgNodeModuleIdentifier
-  | "String" => node->castNodeString->PgNodeString
-  | "Ternary" => node->castNodeTernary->PgNodeTernary
-  // | "TypeIdentifier" => node->castNodeTypeIdentifier->PgNodeTypeIdentifier
-  | "Void" => node->castNodeVoid->PgNodeVoid
+let nodeToAST = (node: node) => {
+  let content = switch node["type"] {
+  | "Block" => node->castNodeBlock->ASTBlock
+  | "Program" => node->castNodeBlock->ASTProgram
+  | "Array" => node->castNodeArray->ASTArray
+  | "Record" => node->castNodeRecord->ASTRecord
+  | "Boolean" => node->castNodeBoolean->ASTBoolean
+  | "Call" => node->castNodeCall->ASTCall
+  | "Float" => node->castNodeFloat->ASTFloat
+  | "Identifier" => node->castNodeIdentifier->ASTIdentifier
+  | "Integer" => node->castNodeInteger->ASTInteger
+  | "KeyValue" => node->castNodeKeyValue->ASTKeyValue
+  | "Lambda" => node->castNodeLambda->ASTLambda
+  | "LetStatement" => node->castNodeLetStatement->ASTLetStatement
+  | "ModuleIdentifier" => node->castNodeModuleIdentifier->ASTModuleIdentifier
+  | "String" => node->castNodeString->ASTString
+  | "Ternary" => node->castNodeTernary->ASTTernary
+  | "Void" => node->castNodeVoid->ASTVoid
   | _ => raise(UnsupportedPeggyNodeType(node["type"]))
   }

-let rec pgToString = (peggyNode: peggyNode): string => {
+  {location: node["location"], content}
+}
+
+let nodeIdentifierToAST = (node: nodeIdentifier) => {
+  {location: node["location"], content: node->ASTIdentifier}
+}
+
+let nodeKeyValueToAST = (node: nodeKeyValue) => {
+  {location: node["location"], content: node->ASTKeyValue}
+}
+
+let rec pgToString = (ast: ast): string => {
   let argsToString = (args: array<nodeIdentifier>): string =>
-    args->Js.Array2.map(arg => PgNodeIdentifier(arg)->pgToString)->Js.Array2.toString
+    args->Belt.Array.map(arg => arg->nodeIdentifierToAST->pgToString)->Js.Array2.toString

   let nodesToStringUsingSeparator = (nodes: array<node>, separator: string): string =>
-    nodes->Js.Array2.map(toString)->Extra.Array.intersperse(separator)->Js.String.concatMany("")
+    nodes->Belt.Array.map(toString)->Extra.Array.intersperse(separator)->Js.String.concatMany("")

-  let pgNodesToStringUsingSeparator = (nodes: array<peggyNode>, separator: string): string =>
-    nodes->Js.Array2.map(pgToString)->Extra.Array.intersperse(separator)->Js.String.concatMany("")
+  let pgNodesToStringUsingSeparator = (nodes: array<ast>, separator: string): string =>
+    nodes->Belt.Array.map(pgToString)->Extra.Array.intersperse(separator)->Js.String.concatMany("")

-  switch peggyNode {
-  | PgNodeBlock(node)
-  | PgNodeProgram(node) =>
+  switch ast.content {
+  | ASTBlock(node)
+  | ASTProgram(node) =>
     "{" ++ node["statements"]->nodesToStringUsingSeparator("; ") ++ "}"
-  | PgNodeArray(node) => "[" ++ node["elements"]->nodesToStringUsingSeparator("; ") ++ "]"
-  | PgNodeRecord(node) =>
+  | ASTArray(node) => "[" ++ node["elements"]->nodesToStringUsingSeparator("; ") ++ "]"
+  | ASTRecord(node) =>
     "{" ++
     node["elements"]
-    ->Js.Array2.map(element => PgNodeKeyValue(element))
+    ->Belt.Array.map(element => element->nodeKeyValueToAST)
     ->pgNodesToStringUsingSeparator(", ") ++ "}"
-  | PgNodeBoolean(node) => node["value"]->Js.String.make
-  | PgNodeCall(node) =>
+  | ASTBoolean(node) => node["value"]->Js.String.make
+  | ASTCall(node) =>
     "(" ++ node["fn"]->toString ++ " " ++ node["args"]->nodesToStringUsingSeparator(" ") ++ ")"
-  | PgNodeFloat(node) => node["value"]->Js.String.make
-  | PgNodeIdentifier(node) => `:${node["value"]}`
-  | PgNodeInteger(node) => node["value"]->Js.String.make
-  | PgNodeKeyValue(node) => toString(node["key"]) ++ ": " ++ toString(node["value"])
-  | PgNodeLambda(node) =>
-    "{|" ++ node["args"]->argsToString ++ "| " ++ node["body"]->toString ++ "}"
-  | PgNodeLetStatement(node) =>
-    pgToString(PgNodeIdentifier(node["variable"])) ++ " = " ++ toString(node["value"])
-  | PgNodeModuleIdentifier(node) => `@${node["value"]}`
-  | PgNodeString(node) => `'${node["value"]->Js.String.make}'`
-  | PgNodeTernary(node) =>
+  | ASTFloat(node) => node["value"]->Js.String.make
+  | ASTIdentifier(node) => `:${node["value"]}`
+  | ASTInteger(node) => node["value"]->Js.String.make
+  | ASTKeyValue(node) => toString(node["key"]) ++ ": " ++ toString(node["value"])
+  | ASTLambda(node) => "{|" ++ node["args"]->argsToString ++ "| " ++ node["body"]->toString ++ "}"
+  | ASTLetStatement(node) =>
+    pgToString(node["variable"]->nodeIdentifierToAST) ++ " = " ++ toString(node["value"])
+  | ASTModuleIdentifier(node) => `@${node["value"]}`
+  | ASTString(node) => `'${node["value"]->Js.String.make}'`
+  | ASTTernary(node) =>
     "(::$$_ternary_$$ " ++
     toString(node["condition"]) ++
     " " ++
     toString(node["trueExpression"]) ++
     " " ++
     toString(node["falseExpression"]) ++ ")"
-  // | PgNodeTypeIdentifier(node) => `#${node["value"]}`
-  | PgNodeVoid(_node) => "()"
+  | ASTVoid(_node) => "()"
   }
 }
-and toString = (node: node): string => node->castNodeType->pgToString
+and toString = (node: node): string => node->nodeToAST->pgToString

-let toStringResult = (rNode: result<node, errorValue>): string =>
+let toStringError = (error: parseError): string => {
+  let SyntaxError(message, _) = error
+  `Syntax Error: ${message}}`
+}
+
+let toStringResult = (rNode: parseResult): string =>
   switch rNode {
-  | Ok(node) => toString(node)
-  | Error(error) => `Error(${errorToString(error)})`
+  | Ok(node) => node->toString
+  | Error(error) => `Error(${error->toStringError})`
   }

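The parser wrapper above now takes a `grammarSource` name, returns a dedicated `parseResult`, and attaches a `location` (start/end line and column) to every node as well as to `SyntaxError`. A hedged Squiggle-level illustration (assumed behaviour based on the types above, not captured output):

```squiggle
// Every parsed node now carries a location, so an unfinished program such as
//   x = (5
// would come back as Error(SyntaxError(message, location)) with the line and
// column of the failure, while valid nodes keep their positions for later stack traces.
x = 5
y = x + 1
```
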
@@ -3,23 +3,27 @@ module ExpressionT = Reducer_Expression_T
 module Parse = Reducer_Peggy_Parse

 type expression = Reducer_T.expression
+type expressionContent = Reducer_T.expressionContent

 let rec fromNode = (node: Parse.node): expression => {
+  let ast = Parse.nodeToAST(node)
+
+  let content: expressionContent = {
   let caseBlock = nodeBlock =>
     ExpressionBuilder.eBlock(nodeBlock["statements"]->Js.Array2.map(fromNode))

   let caseProgram = nodeProgram =>
     ExpressionBuilder.eProgram(nodeProgram["statements"]->Js.Array2.map(fromNode))

-  let caseLambda = (nodeLambda: Parse.nodeLambda): expression => {
+  let caseLambda = (nodeLambda: Parse.nodeLambda): expressionContent => {
     let args =
       nodeLambda["args"]->Js.Array2.map((argNode: Parse.nodeIdentifier) => argNode["value"])
     let body = nodeLambda["body"]->fromNode

-    ExpressionBuilder.eLambda(args, body)
+    ExpressionBuilder.eLambda(args, body, nodeLambda["name"])
   }

-  let caseRecord = (nodeRecord): expression => {
+  let caseRecord = (nodeRecord): expressionContent => {
     nodeRecord["elements"]
     ->Js.Array2.map(keyValueNode => (
       keyValueNode["key"]->fromNode,
@@ -28,30 +32,30 @@ let rec fromNode = (node: Parse.node): expression => {
     ->ExpressionBuilder.eRecord
   }

-  switch Parse.castNodeType(node) {
-  | PgNodeBlock(nodeBlock) => caseBlock(nodeBlock)
-  | PgNodeProgram(nodeProgram) => caseProgram(nodeProgram)
-  | PgNodeArray(nodeArray) =>
+  switch ast.content {
+  | ASTBlock(nodeBlock) => caseBlock(nodeBlock)
+  | ASTProgram(nodeProgram) => caseProgram(nodeProgram)
+  | ASTArray(nodeArray) =>
     ExpressionBuilder.eArray(nodeArray["elements"]->Js.Array2.map(fromNode))
-  | PgNodeRecord(nodeRecord) => caseRecord(nodeRecord)
-  | PgNodeBoolean(nodeBoolean) => ExpressionBuilder.eBool(nodeBoolean["value"])
-  | PgNodeCall(nodeCall) =>
+  | ASTRecord(nodeRecord) => caseRecord(nodeRecord)
+  | ASTBoolean(nodeBoolean) => ExpressionBuilder.eBool(nodeBoolean["value"])
+  | ASTCall(nodeCall) =>
     ExpressionBuilder.eCall(fromNode(nodeCall["fn"]), nodeCall["args"]->Js.Array2.map(fromNode))
-  | PgNodeFloat(nodeFloat) => ExpressionBuilder.eNumber(nodeFloat["value"])
-  | PgNodeIdentifier(nodeIdentifier) => ExpressionBuilder.eSymbol(nodeIdentifier["value"])
-  | PgNodeInteger(nodeInteger) => ExpressionBuilder.eNumber(Belt.Int.toFloat(nodeInteger["value"]))
-  | PgNodeKeyValue(nodeKeyValue) =>
+  | ASTFloat(nodeFloat) => ExpressionBuilder.eNumber(nodeFloat["value"])
+  | ASTIdentifier(nodeIdentifier) => ExpressionBuilder.eSymbol(nodeIdentifier["value"])
+  | ASTInteger(nodeInteger) => ExpressionBuilder.eNumber(Belt.Int.toFloat(nodeInteger["value"]))
+  | ASTKeyValue(nodeKeyValue) =>
     ExpressionBuilder.eArray([fromNode(nodeKeyValue["key"]), fromNode(nodeKeyValue["value"])])
-  | PgNodeLambda(nodeLambda) => caseLambda(nodeLambda)
-  | PgNodeLetStatement(nodeLetStatement) =>
+  | ASTLambda(nodeLambda) => caseLambda(nodeLambda)
+  | ASTLetStatement(nodeLetStatement) =>
     ExpressionBuilder.eLetStatement(
       nodeLetStatement["variable"]["value"],
       fromNode(nodeLetStatement["value"]),
     )
-  | PgNodeModuleIdentifier(nodeModuleIdentifier) =>
+  | ASTModuleIdentifier(nodeModuleIdentifier) =>
     ExpressionBuilder.eIdentifier(nodeModuleIdentifier["value"])
-  | PgNodeString(nodeString) => ExpressionBuilder.eString(nodeString["value"])
-  | PgNodeTernary(nodeTernary) =>
+  | ASTString(nodeString) => ExpressionBuilder.eString(nodeString["value"])
+  | ASTTernary(nodeTernary) =>
     ExpressionBuilder.eTernary(
       fromNode(nodeTernary["condition"]),
       fromNode(nodeTernary["trueExpression"]),
@@ -59,6 +63,12 @@ let rec fromNode = (node: Parse.node): expression => {
     )
   // | PgNodeTypeIdentifier(nodeTypeIdentifier) =>
   //   ExpressionBuilder.eTypeIdentifier(nodeTypeIdentifier["value"])
-  | PgNodeVoid(_) => ExpressionBuilder.eVoid
+  | ASTVoid(_) => ExpressionBuilder.eVoid
+  }
+  }
+
+  {
+    ast,
+    content,
   }
 }

Some files were not shown because too many files have changed in this diff.