Merge pull request #296 from quantified-uncertainty/develop
Develop -> Master, April 15
Commit: 09080f8d73
.github/CODEOWNERS (vendored): 25 changed lines

@@ -8,15 +8,24 @@
 # IMPORTANT NOTE: in order to actually get pinged, commit access is required.
 # This also holds true for GitHub teams.
 
-# This file
-/.github/CODEOWNERS @quinn-dougherty
+# Rescript
+*.res @OAGr @quinn-dougherty
+*.resi @OAGr @quinn-dougherty
 
-# Any rescript code
-*.res @Hazelfire @OAGr @quinn-dougherty
+# Typescript
+*.tsx @Hazelfire @OAGr
+*.ts @Hazelfire @OAGr
 
-# Any typescript code
-*.tsx @Hazelfire @OAGr
+# Javascript
+*.js @Hazelfire @OAGr
 
 # Any opsy files
-*.json @quinn-dougherty @Hazelfire
-*.y*ml @quinn-dougherty
+.github/** @quinn-dougherty @OAGr
+*.json @quinn-dougherty @Hazelfire @OAGr
+*.y*ml @quinn-dougherty @OAGr
+*.config.js @Hazelfire @OAGr
+netlify.toml @quinn-dougherty @OAGr @Hazelfire
+
+# Documentation
+*.md @quinn-dougherty @OAGr @Hazelfire
+*.mdx @quinn-dougherty @OAGr @Hazelfire
.github/ISSUE_TEMPLATE/config.yml (vendored, new file): 5 added lines

@@ -0,0 +1,5 @@
blank_issues_enabled: true
contact_links:
  - name: Ideas and feature requests - Squiggle Discussions on GitHub
    url: https://github.com/quantified-uncertainty/squiggle/discussions
    about: Please propose and discuss new features here. Remember to search for your idea before posting a new topic! Where would you like to see Squiggle go over the next few months, several months, or few years?
.github/ISSUE_TEMPLATE/developer-bug.md (vendored, deleted): 14 removed lines

@@ -1,14 +0,0 @@
---
name: Developer friction when contributing to Squiggle
about: Did your yarn scripts fail? Did the CI diverge from a README? Have a testing-related task? Etc.
labels: 'ops & testing'
---
# Description:


# The OS and version, yarn version, etc. in which this came up
_delete this section if testing task_

# Desired behavior

.github/ISSUE_TEMPLATE/future.md (vendored, deleted): 7 removed lines

@@ -1,7 +0,0 @@
---
name: Idea or feature request
about: Where would you like to see Squiggle go over the next few months, several months, or few years?
---
# Description

.github/ISSUE_TEMPLATE/ops-testing.md (vendored, new file): 13 added lines

@@ -0,0 +1,13 @@
---
name: Operations and testing
about: Have a testing-related task? Developer friction when contributing to squiggle? Etc.
labels: "ops & testing"
---

# Description:

# The OS and version, yarn version, etc. in which this came up

<!-- delete this section if testing task or otherwise not applicable -->

# Desired behavior
.github/ISSUE_TEMPLATE/pl.md (vendored): 19 changed lines

@@ -1,14 +1,13 @@
 ---
-name: Regarding the programming language
-about: Interpreter, parser, syntax, semantics, and including distributions
-labels: 'programming language'
+name: Regarding the programming language (the `squiggle-lang` package)
+about: Anything concerning distributions/numerics, as well as the interpreter, parser, syntax, semantics
+labels: "programming language"
 ---
-<!-- mark one with an x -->
-- _ Is refactor
-- _ Is new feature
-- _ Concerns documentation
+
+<!-- mark one with an x -->
+
+- \_ Is refactor
+- \_ Is new feature
+- \_ Concerns documentation
 
 # Description of suggestion or shortcoming:
 
.github/ISSUE_TEMPLATE/user-bug.md (vendored): 9 changed lines

@@ -1,18 +1,17 @@
 ---
 name: Bug reports for Squiggle users
-about: Rendering oddly, trouble with the playground, something like this?
-labels: 'bug'
+about: Rendering oddly? Is there a mathematical correctness problem?
+labels: "bug"
 ---
 
 # Description:
 
 
 # Steps to reproduce:
 
 1.
 2.
 3.
 
 # Expected behavior:
 
 
 # What I got instead:
 
.github/dependabot.yml (vendored): 16 changed lines

@@ -9,19 +9,3 @@ updates:
     directory: "/" # Location of package manifests
     schedule:
       interval: "daily"
-  - package-ecosystem: "npm" # See documentation for possible values
-    directory: "/packages/squiggle-lang" # Location of package manifests
-    schedule:
-      interval: "daily"
-  - package-ecosystem: "npm" # See documentation for possible values
-    directory: "/packages/components" # Location of package manifests
-    schedule:
-      interval: "daily"
-  - package-ecosystem: "npm" # See documentation for possible values
-    directory: "/packages/website" # Location of package manifests
-    schedule:
-      interval: "daily"
-  - package-ecosystem: "npm" # See documentation for possible values
-    directory: "/packages/playground" # Location of package manifests
-    schedule:
-      interval: "daily"
.github/workflows/ci.yaml (vendored, deleted): 92 removed lines

@@ -1,92 +0,0 @@
name: Squiggle packages check

on:
  push: # Delete this line if there becomes a scarcity of build minutes.
  pull_request:
    branches:
      - master
      - staging

jobs:

  pre_check:
    name: Precheck for skipping redundant jobs
    runs-on: ubuntu-latest
    outputs:
      should_skip_lang: ${{ steps.skip_lang_check.outputs.should_skip }}
      should_skip_components: ${{ steps.skip_components_check.outputs.should_skip }}
      should_skip_website: ${{ steps.skip_website_check.outputs.should_skip }}
    steps:
      - id: skip_lang_check
        name: Check if the changes are about squiggle-lang src files
        uses: fkirc/skip-duplicate-actions@master
        with:
          paths: '["packages/squiggle-lang/**"]'
      - id: skip_components_check
        name: Check if the changes are about components src files
        uses: fkirc/skip-duplicate-actions@master
        with:
          paths: '["packages/components/**"]'
      - id: skip_website_check
        name: Check if the changes are about website src files
        uses: fkirc/skip-duplicate-actions@master
        with:
          paths: '["packages/website/**"]'

  lang-build-test:
    name: Language build and test
    runs-on: ubuntu-latest
    needs: pre_check
    if: ${{ needs.pre_check.outputs.should_skip_lang != 'true' }}
    defaults:
      run:
        shell: bash
        working-directory: packages/squiggle-lang
    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies from monorepo level
        run: cd ../../ && yarn
      - name: Build rescript codebase
        run: yarn build
      - name: Run tests
        run: yarn test
      - name: Run webpack
        run: yarn bundle

  components-build-test:
    name: Components build and test
    runs-on: ubuntu-latest
    needs: [pre_check]
    if: ${{ needs.pre_check.outputs.should_skip_components != 'true' }}
    defaults:
      run:
        shell: bash
        working-directory: packages/components
    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies from monorepo level
        run: cd ../../ && yarn
      - name: Build rescript codebase in squiggle-lang
        run: cd ../squiggle-lang && yarn build
      - name: Run webpack
        run: yarn bundle
      - name: Build storybook
        run: yarn build

  website-build:
    name: Website build
    runs-on: ubuntu-latest
    needs: pre_check
    if: ${{ needs.pre_check.outputs.should_skip_website != 'true' }}
    defaults:
      run:
        shell: bash
        working-directory: packages/website
    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies from monorepo level
        run: cd ../../ && yarn
      - name: Build rescript in squiggle-lang
        run: cd ../squiggle-lang && yarn build
      - name: Build website assets
        run: yarn build
.github/workflows/ci.yml (vendored, new file): 151 added lines

@@ -0,0 +1,151 @@
name: Squiggle packages check

on:
  push:
    branches:
      - master
      - develop
  pull_request:
    branches:
      - master
      - develop

jobs:
  pre_check:
    name: Precheck for skipping redundant jobs
    runs-on: ubuntu-latest
    outputs:
      should_skip_lang: ${{ steps.skip_lang_check.outputs.should_skip }}
      should_skip_components: ${{ steps.skip_components_check.outputs.should_skip }}
      should_skip_website: ${{ steps.skip_website_check.outputs.should_skip }}
    steps:
      - id: skip_lang_check
        name: Check if the changes are about squiggle-lang src files
        uses: fkirc/skip-duplicate-actions@v3.4.1
        with:
          paths: '["packages/squiggle-lang/**"]'
      - id: skip_components_check
        name: Check if the changes are about components src files
        uses: fkirc/skip-duplicate-actions@v3.4.1
        with:
          paths: '["packages/components/**"]'
      - id: skip_website_check
        name: Check if the changes are about website src files
        uses: fkirc/skip-duplicate-actions@v3.4.1
        with:
          paths: '["packages/website/**"]'

  lang-lint:
    name: Language lint
    runs-on: ubuntu-latest
    needs: pre_check
    if: ${{ needs.pre_check.outputs.should_skip_lang != 'true' }}
    defaults:
      run:
        shell: bash
        working-directory: packages/squiggle-lang
    steps:
      - uses: actions/checkout@v2
      - name: Install Dependencies
        run: cd ../../ && yarn
      - name: Check rescript lint
        run: yarn lint:rescript
      - name: Check javascript, typescript, and markdown lint
        uses: creyD/prettier_action@v4.2
        with:
          dry: true
          prettier_options: --check packages/squiggle-lang

  lang-build-test-bundle:
    name: Language build, test, and bundle
    runs-on: ubuntu-latest
    needs: pre_check
    if: ${{ needs.pre_check.outputs.should_skip_lang != 'true' }}
    defaults:
      run:
        shell: bash
        working-directory: packages/squiggle-lang
    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies from monorepo level
        run: cd ../../ && yarn
      - name: Build rescript codebase
        run: yarn build
      - name: Run tests
        run: yarn test
      - name: Run webpack
        run: yarn bundle
      - name: Upload coverage report
        run: yarn coverage:ci

  components-lint:
    name: Components lint
    runs-on: ubuntu-latest
    needs: pre_check
    if: ${{ needs.pre_check.outputs.should_skip_components != 'true' }}
    defaults:
      run:
        shell: bash
        working-directory: packages/components
    steps:
      - uses: actions/checkout@v2
      - name: Check javascript, typescript, and markdown lint
        uses: creyD/prettier_action@v4.2
        with:
          dry: true
          prettier_options: --check packages/components

  components-bundle-build:
    name: Components bundle and build
    runs-on: ubuntu-latest
    needs: pre_check
    if: ${{ (needs.pre_check.outputs.should_skip_components != 'true') || (needs.pre_check.outputs.should_skip_lang != 'true') }}
    defaults:
      run:
        shell: bash
        working-directory: packages/components
    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies from monorepo level
        run: cd ../../ && yarn
      - name: Build rescript codebase in squiggle-lang
        run: cd ../squiggle-lang && yarn build
      - name: Run webpack
        run: yarn bundle
      - name: Build storybook
        run: yarn build

  website-lint:
    name: Website lint
    runs-on: ubuntu-latest
    needs: pre_check
    if: ${{ needs.pre_check.outputs.should_skip_website != 'true' }}
    defaults:
      run:
        shell: bash
        working-directory: packages/website
    steps:
      - uses: actions/checkout@v2
      - name: Check javascript, typescript, and markdown lint
        uses: creyD/prettier_action@v4.2
        with:
          dry: true
          prettier_options: --check packages/website

  website-build:
    name: Website build
    runs-on: ubuntu-latest
    needs: pre_check
    if: ${{ (needs.pre_check.outputs.should_skip_website != 'true') || (needs.pre_check.outputs.should_skip_lang != 'true') || (needs.pre_check.outputs.should_skip_components != 'true') }}
    defaults:
      run:
        shell: bash
        working-directory: packages/website
    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies from monorepo level
        run: cd ../../ && yarn
      - name: Build rescript in squiggle-lang
        run: cd ../squiggle-lang && yarn build
      - name: Build website assets
        run: yarn build
.github/workflows/codeql-analysis.yml (vendored): 75 changed lines

@@ -13,16 +13,20 @@ name: "CodeQL"
 
 on:
   push:
     branches:
       - master
-      - staging
+      - production
+      - staging
+      - develop
   pull_request:
     # The branches below must be a subset of the branches above
     branches:
       - master
-      - staging
+      - production
+      - staging
+      - develop
   schedule:
-    - cron: '42 19 * * 0'
+    - cron: "42 19 * * 0"
 
 jobs:
   analyze:

@@ -36,39 +40,42 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: [ 'javascript' ]
+        language: ["javascript"]
         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
         # Learn more about CodeQL language support at https://git.io/codeql-language-support
 
     steps:
       - name: Checkout repository
         uses: actions/checkout@v2
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
         uses: github/codeql-action/init@v1
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
           # By default, queries listed here will override any specified in a config file.
           # Prefix the list here with "+" to use these queries and those in the config file.
           # queries: ./path/to/local/query, your-org/your-repo/queries@main
 
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
         uses: github/codeql-action/autobuild@v1
+      - name: Install dependencies
+        run: yarn
+      - name: Build rescript
+        run: cd packages/squiggle-lang && yarn build
 
       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 https://git.io/JvXDl
 
       # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
       #    and modify them (or add more) to build your code if your project
       #    uses a compiled language
 
       #- run: |
       #   make bootstrap
       #   make release
 
       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v1
.prettierignore (new file): 9 added lines

@@ -0,0 +1,9 @@
.direnv
*.bs.js
*.gen.tsx
packages/*/dist
packages/components/storybook-static
node_modules
packages/*/node_modules
packages/website/.docusaurus
packages/squiggle-lang/lib
CONTRIBUTING.md: 115 changed lines

@@ -2,9 +2,9 @@ _The current document was written quickly and not exhaustively, yet, it's unfini
 
 # Contributing to Squiggle
 
 We welcome contributions from developers, especially people in react/typescript, rescript, and interpreters/parsers. We also are keen to hear issues filed by users!
 
 Squiggle is currently pre-alpha.
 
 # Quick links
 

@@ -12,41 +12,130 @@ Squiggle is currently pre-alpha.
 - The team presently communicates via the **EA Forecasting and Epistemics** slack (channels `#squiggle` and `#squiggle-ops`), you can track down an invite by reaching out to Ozzie Gooen
 - [Squiggle documentation](https://www.squiggle-language.com/docs/Language)
 - [Rescript documentation](https://rescript-lang.org/docs/manual/latest/introduction)
 - You can email `quinn@quantifieduncertainty.org` if you need assistance in onboarding or if you have questions
 
 # Bug reports
 
 Anyone (with a github account) can file an issue at any time. Please allow Quinn, Sam, and Ozzie to triage, but otherwise just follow the suggestions in the issue templates.
 
 # Project structure
 
 Squiggle is a **monorepo** with four **packages**.
 
 - **components** is where we improve reactive interfacing with Squiggle
 - **playground** is the site `playground.squiggle-language.com`
 - **squiggle-lang** is where the magic happens: probability distributions, the interpreter, etc.
 - **website** is the site `squiggle-language.com`
 
 # Deployment ops
 
 We use netlify, and it should only concern Quinn, Sam, and Ozzie.
 
 # Development environment, building, testing, dev server
 
 You need `yarn`.
 
 Being a monorepo, where packages are connected by dependency, it's important to follow `README.md`s closely. Each package has it's own `README.md`, which is where the bulk of information is.
 
 We aspire for `ci.yaml` and `README.md`s to be in one-to-one correspondence.
 
 ## If you're on NixOS
 
 You'll need to run a command like this in order to get `yarn build` to run, especially in `packages/squiggle-lang`.
 
 ```sh
 patchelf --set-interpreter $(patchelf --print-interpreter $(which mkdir)) ./node_modules/gentype/gentype.exe
 ```
 
 See [here](https://github.com/NixOS/nixpkgs/issues/107375)
 
 # Pull request protocol
 
-Please work against `staging` branch. **Do not** work against `master`. Please do not merge without approval from some subset of Quinn, Sam, and Ozzie; they will be auto-pinged.
+Please work against `develop` branch. **Do not** work against `master`.
+
+- For rescript code: Quinn and Ozzie are reviewers
+- For js or typescript code: Sam and Ozzie are reviewers
+- For ops code (i.e. yaml, package.json): Quinn and Sam are reviewers
+
+Autopings are set up: if you are not autopinged, you are welcome to comment, but please do not use the formal review feature, send approvals, rejections, or merges.
+
+# Code Quality
+
+- Aim for at least 8/10\* quality in `/packages/squiggle-lang`, and 7/10 quality in `/packages/components`.
+- If you submit a PR that is under a 7, for some reason, describe the reasoning for this in the PR.
+
+* This quality score is subjective.
+
+# Rescript Style
+
+**Use `->` instead of `|>`**
+Note: Our codebase used to use `|>`, so there's a lot of that in the system. We'll gradually change it.
+
+**Use `x -> y -> z` instead of `let foo = y(x); let bar = z(foo)`**
+
+**Don't use anonymous functions with over three lines**
+Bad:
+
+```rescript
+foo
+-> E.O.fmap(r => {
+  let a = 34;
+  let b = 35;
+  let c = 48;
+  r + a + b + c
+}
+```
+
+Good:
+
+```rescript
+let addingFn = (r => {
+  let a = 34;
+  let b = 35;
+  let c = 48;
+  r + a + b + c
+}
+foo -> addingFn
+```
+
+**Write out types for everything, even if there's an interface file**
+We'll try this for one month (ending May 5, 2022), then revisit.
+
+**Use the Rescript optional default syntax**
+Rescript is clever about function inputs. There's custom syntax for default and optional arguments. In the cases where this applies, use it.
+
+From https://rescript-lang.org/docs/manual/latest/function:
+
+```rescript
+// radius can be omitted
+let drawCircle = (~color, ~radius=?, ()) => {
+  setColor(color)
+  switch radius {
+  | None => startAt(1, 1)
+  | Some(r_) => startAt(r_, r_)
+  }
+}
+```
+
+**Use named arguments**
+If a function is called externally (in a different file), and has either:
+
+1. Two arguments of the same type
+2. Three paramaters or more.
+
+**Module naming: Use x_y as module names**
+For example: `Myname_Myproject_Add.res`. Rescript/Ocaml both require files to have unique names, so long names are needed to keep different parts separate from each other.
+
+See [this page](https://dev.to/yawaramin/a-modular-ocaml-project-structure-1ikd) for more information. (Though note that they use two underscores, and we do one. We might refactor that later.
+
+**Module naming: Don't rename modules**
+We have some of this in the Reducer code, but generally discourage it.
+
+**Use interface files (.resi) for files with very public interfaces**
+
+### Recommended Rescript resources
+
+- https://dev.to/yawaramin/a-modular-ocaml-project-structure-1ikd
+- https://github.com/avohq/reasonml-code-style-guide
+- https://cs.brown.edu/courses/cs017/content/docs/reasonml-style.pdf
+- https://github.com/ostera/reason-design-patterns/
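The named-arguments rule in the CONTRIBUTING.md hunk above states its conditions without a snippet. As a minimal illustrative sketch only (hypothetical function and argument names, not part of this PR):

```rescript
// A function called from other files that takes two arguments of the same
// type: label the arguments so call sites stay unambiguous.
let mix = (~low: float, ~high: float) => (low +. high) /. 2.0

// At the call site the labels document which number is which.
let midpoint = mix(~low=1.0, ~high=9.0)
```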
README.md: 52 changed lines

@@ -1,51 +1,47 @@
 # Squiggle
-![Packages check](https://github.com/QURIresearch/squiggle/actions/workflows/ci.yaml/badge.svg)
 
-This is an experiment DSL/language for making probabilistic estimates. The full story can be found [here](https://www.lesswrong.com/s/rDe8QE5NvXcZYzgZ3).
+[![Packages check](https://github.com/quantified-uncertainty/squiggle/actions/workflows/ci.yml/badge.svg)](https://github.com/quantified-uncertainty/squiggle/actions/workflows/ci.yml)
+[![npm version](https://badge.fury.io/js/@quri%2Fsquiggle-lang.svg)](https://www.npmjs.com/package/@quri/squiggle-lang)
+[![npm version](https://badge.fury.io/js/@quri%2Fsquiggle-components.svg)](https://www.npmjs.com/package/@quri/squiggle-components)
+[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/quantified-uncertainty/squiggle/blob/develop/LICENSE)
+[![codecov](https://codecov.io/gh/quantified-uncertainty/squiggle/branch/develop/graph/badge.svg?token=QRLBL5CQ7C)](https://codecov.io/gh/quantified-uncertainty/squiggle)
+
+This is an experimental DSL/language for making probabilistic estimates. The full story can be found [here](https://www.lesswrong.com/s/rDe8QE5NvXcZYzgZ3).
 
 ## Our deployments
 
-- **website/docs prod**: https://squiggle-language.com
-- **website/docs staging**: https://staging--squiggle-documentation.netlify.app/
-- **old playground**: https://playground.squiggle-language.com
+- **website/docs prod**: https://squiggle-language.com [![Netlify Status](https://api.netlify.com/api/v1/badges/2139af5c-671d-473d-a9f6-66c96077d8a1/deploy-status)](https://app.netlify.com/sites/squiggle-documentation/deploys)
+- **website/docs staging**: https://develop--squiggle-documentation.netlify.app/
+- **components storybook prod**: https://squiggle-components.netlify.app/ [![Netlify Status](https://api.netlify.com/api/v1/badges/b7f724aa-6b20-4d0e-bf86-3fcd1a3e9a70/deploy-status)](https://app.netlify.com/sites/squiggle-components/deploys)
+- **components storybook staging**: https://develop--squiggle-components.netlify.app/
+- **legacy (2020) playground**: https://playground.squiggle-language.com
 
 ## Packages
+
 This monorepo has several packages that can be used for various purposes. All
 the packages can be found in `packages`.
 
 - `@quri/squiggle-lang` in `packages/squiggle-lang` contains the core language, particularly
   an interface to parse squiggle expressions and return descriptions of distributions
   or results.
 - `@quri/squiggle-components` in `packages/components` contains React components that
   can be passed squiggle strings as props, and return a presentation of the result
   of the calculation.
-- `@quri/playground` in `packages/playground` contains a website for a playground
-  for squiggle. This website is hosted at `playground.squiggle-language.com`
 - `@quri/squiggle-website` in `packages/website` The main descriptive website for squiggle,
   it is hosted at `squiggle-language.com`.
 
 The playground depends on the components library which then depends on the language. This means that if you wish to work on the components library, you will need to build (no need to bundle) the language, and as of this writing playground doesn't really work.
 
 # Develop
 
 For any project in the repo, begin by running `yarn` in the top level (TODO: is this true?)
 
-``` sh
+```sh
 yarn
 ```
 
 See `packages/*/README.md` to work with whatever project you're interested in.
 
-## `codium` for `rescript`
-
-If you have `nix` installed with `flakes` enabled, you can build a `codium` in this repo for `rescript` development, if you don't want to pollute your machine's global editor with another mode/extension.
-
-``` sh
-nix develop
-codium
-```
-
-The `nix develop` shell also provides `yarn`.
-
 # Contributing
+
 See `CONTRIBUTING.md`.
@@ -1,9 +0,0 @@
let
  lock = builtins.fromJSON (builtins.readFile ./flake.lock);
  inherit (lock.nodes.flake-compat.locked) owner repo rev narHash;
  flake-compat = builtins.fetchTarball {
    url = "https://github.com/${owner}/${repo}/archive/${rev}.tar.gz";
    sha256 = narHash;
  };
in
import flake-compat { src = ./.; }
flake.lock (deleted): 44 removed lines

@@ -1,44 +0,0 @@
{
  "nodes": {
    "flake-compat": {
      "flake": false,
      "locked": {
        "lastModified": 1641205782,
        "narHash": "sha256-4jY7RCWUoZ9cKD8co0/4tFARpWB+57+r1bLLvXNJliY=",
        "owner": "edolstra",
        "repo": "flake-compat",
        "rev": "b7547d3eed6f32d06102ead8991ec52ab0a4f1a7",
        "type": "github"
      },
      "original": {
        "owner": "edolstra",
        "repo": "flake-compat",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1647893727,
        "narHash": "sha256-pOi7VdCb+s5Cwh5CS7YEZVRgH9uCmE87J5W7iXv29Ck=",
        "owner": "nixos",
        "repo": "nixpkgs",
        "rev": "1ec61dd4167f04be8d05c45780818826132eea0d",
        "type": "github"
      },
      "original": {
        "owner": "nixos",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "flake-compat": "flake-compat",
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (deleted): 94 removed lines

@@ -1,94 +0,0 @@
{
  description = "Building codium for rescript development";

  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
    flake-compat = {
      url = "github:edolstra/flake-compat";
      flake = false;
    };
  };

  outputs =
    { self
    , nixpkgs
    , flake-compat
    }:
    let
      # Generate a user-friendly version number.
      version = builtins.substring 0 8 self.lastModifiedDate;
      # System types to support.
      supportedSystems = [ "x86_64-linux" "aarch64-linux" "aarch64-darwin" ];
      # Helper function to generate an attrset '{ x86_64-linux = f "x86_64-linux"; ... }'.
      forAllSystems = nixpkgs.lib.genAttrs supportedSystems;
      # Nixpkgs instantiated for supported system types.
      nixpkgsFor = forAllSystems (system:
        import nixpkgs {
          inherit system;
          overlays = [ self.overlay ];
        });
    in
    {
      overlay = final: prev: { };
      # the default devShell used when running `nix develop`
      devShell = forAllSystems (system: self.devShells.${system}.defaultShell);
      devShells = forAllSystems (system:
        let
          pkgs = nixpkgsFor."${system}";
        in
        {
          # In case we don't want to provide an editor, this defaultShell will provide only coq packages we need.
          defaultShell = pkgs.mkShell {
            buildInputs = with pkgs; [
              yarn
              yarn2nix
              nodePackages.npm
              nodejs
              patchelf
              (pkgs.vscode-with-extensions.override {
                vscode = pkgs.vscodium;
                vscodeExtensions = pkgs.vscode-utils.extensionsFromVscodeMarketplace [
                  {
                    name = "rescript-vscode";
                    publisher = "chenglou92";
                    version = "1.2.1";
                    sha256 = "sha256-7/YakKtJ4WhgAR4rZltrq8g4TtM5QZ2spbrEUrNoXVg=";
                  }
                  {
                    name = "vim";
                    publisher = "vscodevim";
                    version = "1.22.2";
                    sha256 = "sha256-dtIlgODzRdoMKnG9050ZcCX3w15A/R3FaMc+ZylvBbU=";
                  }
                  {
                    name = "vscode-typescript-next";
                    publisher = "ms-vscode";
                    version = "4.7.20220323";
                    sha256 = "sha256-mjiBCyg5As/XAU9I5k6jEZWGJA3P6P5o1roe2bS7aUI=";
                  }
                  {
                    name = "nix-ide";
                    publisher = "jnoortheen";
                    version = "0.1.20";
                    sha256 = "sha256-Q6X41I68m0jaCXaQGEFOoAbSUrr/wFhfCH5KrduOtZo=";
                  }
                  {
                    name = "json";
                    publisher = "ZainChen";
                    version = "2.0.2";
                    sha256 = "sha256-nC3Q8KuCtn/jg1j/NaAxWGvnKe/ykrPm2PUjfsJz8aI=";
                  }
                  {
                    name = "prettier-vscode";
                    publisher = "esbenp";
                    version = "9.3.0";
                    sha256 = "sha256-hJgPjWf7a8+ltjmXTK8U/MwqgIZqBjmcCfHsAk2G3PA=";
                  }
                ];
              })
            ];
          };
        }
      );
    };
}
package.json: 10 changed lines

@@ -2,13 +2,19 @@
   "private": true,
   "name": "squiggle",
   "scripts": {
-    "nodeclean": "rm -r node_modules && rm -r packages/*/node_modules"
+    "nodeclean": "rm -r node_modules && rm -r packages/*/node_modules",
+    "format:all": "prettier --write . && cd packages/squiggle-lang && yarn format",
+    "lint:all": "prettier --check . && cd packages/squiggle-lang && yarn lint:rescript"
+  },
+  "devDependencies": {
+    "prettier": "^2.6.2"
   },
   "workspaces": [
     "packages/*"
   ],
   "resolutions": {
-    "@types/react": "^17.0.43"
+    "@types/react": "^18.0.1",
+    "react": "^18.0.0"
   },
   "packageManager": "yarn@1.22.17"
 }
@@ -1,5 +0,0 @@
dist
build
node_modules
storybook-static
.storybook
@@ -1,31 +1,37 @@
 //const TsconfigPathsPlugin = require('tsconfig-paths-webpack-plugin');
-const custom = require('../webpack.config.js');
+const custom = require("../webpack.config.js");
 
 module.exports = {
   webpackFinal: async (config) => {
     config.resolve.alias = custom.resolve.alias;
-    return { ...config, module: { ...config.module, rules: config.module.rules.concat(custom.module.rules.filter(x => x.loader === "ts-loader")) } };
+    return {
+      ...config,
+      module: {
+        ...config.module,
+        rules: config.module.rules.concat(
+          custom.module.rules.filter((x) => x.loader === "ts-loader")
+        ),
+      },
+    };
   },
-  "stories": [
-    "../src/**/*.stories.mdx",
-    "../src/**/*.stories.@(js|jsx|ts|tsx)"
-  ],
-  "addons": [
+  stories: ["../src/**/*.stories.mdx", "../src/**/*.stories.@(js|jsx|ts|tsx)"],
+  addons: [
     "@storybook/addon-links",
     "@storybook/addon-essentials",
-    "@storybook/preset-create-react-app"
+    "@storybook/preset-create-react-app",
   ],
-  "framework": "@storybook/react",
-  "core": {
-    "builder": "webpack5"
+  framework: "@storybook/react",
+  core: {
+    builder: "webpack5",
   },
   typescript: {
     check: false,
     checkOptions: {},
-    reactDocgen: 'react-docgen-typescript',
+    reactDocgen: "react-docgen-typescript",
     reactDocgenTypescriptOptions: {
       shouldExtractLiteralValuesFromEnum: true,
-      propFilter: (prop) => (prop.parent ? !/node_modules/.test(prop.parent.fileName) : true),
+      propFilter: (prop) =>
+        prop.parent ? !/node_modules/.test(prop.parent.fileName) : true,
     },
   },
-}
+};

@@ -6,4 +6,4 @@ export const parameters = {
     date: /Date$/,
   },
 },
-}
+};

packages/components/netlify.toml (new file): 8 added lines

@@ -0,0 +1,8 @@
[build]
  base = "packages/components/"
  command = "cd ../squiggle-lang && yarn build && cd ../components && yarn build"
  publish = "storybook-static/"
  ignore = "node -e 'process.exitCode = process.env.BRANCH.includes(\"dependabot\") ? 0 : 1' && git diff --quiet $CACHED_COMMIT_REF $COMMIT_REF . ../squiggle-lang"

[build.environment]
  NETLIFY_USE_YARN = "true"

@@ -3,22 +3,23 @@
   "version": "0.1.8",
   "dependencies": {
     "@quri/squiggle-lang": "0.2.2",
+    "@react-hook/size": "^2.1.2",
     "@testing-library/jest-dom": "^5.16.4",
-    "@testing-library/react": "^13.0.0",
+    "@testing-library/react": "^13.0.1",
     "@testing-library/user-event": "^14.0.4",
     "@types/jest": "^27.4.0",
     "@types/lodash": "^4.14.181",
-    "@types/node": "^17.0.23",
-    "@types/react": "^18.0.0",
-    "@types/react-dom": "^18.0.0",
+    "@types/node": "^17.0.24",
+    "@types/react": "^18.0.3",
+    "@types/react-dom": "^18.0.1",
     "antd": "^4.19.3",
     "cross-env": "^7.0.3",
     "lodash": "^4.17.21",
     "react": "^18.0.0",
+    "react-ace": "10.0.0",
     "react-dom": "^18.0.0",
-    "react-scripts": "5.0.0",
+    "react-scripts": "5.0.1",
     "react-vega": "^7.5.0",
-    "react-ace": "9.5.0",
     "styled-components": "^5.3.5",
     "tsconfig-paths-webpack-plugin": "^3.5.2",
     "typescript": "^4.6.3",

@@ -32,7 +33,9 @@
     "start": "cross-env REACT_APP_FAST_REFRESH=false && start-storybook -p 6006 -s public",
     "build": "tsc -b && build-storybook -s public",
     "bundle": "webpack",
-    "all": "yarn bundle && yarn build"
+    "all": "yarn bundle && yarn build",
+    "lint": "prettier --check .",
+    "format": "prettier --write ."
   },
   "eslintConfig": {
     "extends": [

@@ -63,21 +66,19 @@
     ]
   },
   "devDependencies": {
-    "@types/styled-components": "^5.1.24",
-    "css-loader": "^6.7.1",
-    "style-loader": "^3.3.1",
     "@babel/plugin-proposal-private-property-in-object": "^7.16.7",
-    "@storybook/addon-actions": "^6.4.20",
-    "@storybook/addon-essentials": "^6.4.20",
-    "@storybook/addon-links": "^6.4.20",
-    "@storybook/builder-webpack5": "^6.4.20",
-    "@storybook/manager-webpack5": "^6.4.20",
-    "@storybook/node-logger": "^6.4.20",
+    "@storybook/addon-actions": "^6.4.22",
+    "@storybook/addon-essentials": "^6.4.22",
+    "@storybook/addon-links": "^6.4.22",
+    "@storybook/builder-webpack5": "^6.4.22",
+    "@storybook/manager-webpack5": "^6.4.22",
+    "@storybook/node-logger": "^6.4.22",
     "@storybook/preset-create-react-app": "^4.1.0",
-    "@storybook/react": "^6.4.20",
-    "@types/webpack": "^4.41.32",
-    "prettier": "^2.6.2",
+    "@storybook/react": "^6.4.22",
+    "@types/styled-components": "^5.1.24",
+    "@types/webpack": "^5.28.0",
     "react-codejar": "^1.1.2",
+    "style-loader": "^3.3.1",
     "ts-loader": "^9.2.8",
     "webpack": "^5.72.0",
     "webpack-cli": "^4.9.2",
@@ -1,98 +0,0 @@
import * as React from "react";
import _ from "lodash";

const orderOfMagnitudeNum = (n: number) => {
  return Math.pow(10, n);
};

// 105 -> 3
const orderOfMagnitude = (n: number) => {
  return Math.floor(Math.log(n) / Math.LN10 + 0.000000001);
};

function withXSigFigs(number: number, sigFigs: number) {
  const withPrecision = number.toPrecision(sigFigs);
  const formatted = Number(withPrecision);
  return `${formatted}`;
}

class NumberShowerBuilder {
  number: number;
  precision: number;

  constructor(number: number, precision = 2) {
    this.number = number;
    this.precision = precision;
  }

  convert() {
    const number = Math.abs(this.number);
    const response = this.evaluate(number);
    if (this.number < 0) {
      response.value = "-" + response.value;
    }
    return response;
  }

  metricSystem(number: number, order: number) {
    const newNumber = number / orderOfMagnitudeNum(order);
    const precision = this.precision;
    return `${withXSigFigs(newNumber, precision)}`;
  }

  evaluate(number: number) {
    if (number === 0) {
      return { value: this.metricSystem(0, 0) };
    }

    const order = orderOfMagnitude(number);
    if (order < -2) {
      return { value: this.metricSystem(number, order), power: order };
    } else if (order < 4) {
      return { value: this.metricSystem(number, 0) };
    } else if (order < 6) {
      return { value: this.metricSystem(number, 3), symbol: "K" };
    } else if (order < 9) {
      return { value: this.metricSystem(number, 6), symbol: "M" };
    } else if (order < 12) {
      return { value: this.metricSystem(number, 9), symbol: "B" };
    } else if (order < 15) {
      return { value: this.metricSystem(number, 12), symbol: "T" };
    } else {
      return { value: this.metricSystem(number, order), power: order };
    }
  }
}

export function numberShow(number: number, precision = 2) {
  const ns = new NumberShowerBuilder(number, precision);
  return ns.convert();
}

export interface NumberShowerProps {
  number: number;
  precision?: number
}

export let NumberShower: React.FC<NumberShowerProps> = ({
  number,
  precision = 2
}: NumberShowerProps) => {
  let numberWithPresentation = numberShow(number, precision);
  return (
    <span>
      {numberWithPresentation.value}
      {numberWithPresentation.symbol}
      {numberWithPresentation.power ? (
        <span>
          {"\u00b710"}
          <span style={{ fontSize: "0.6em", verticalAlign: "super" }}>
            {numberWithPresentation.power}
          </span>
        </span>
      ) : (
        <></>
      )}
    </span>
  );
}
|
@ -1,346 +0,0 @@
|
||||||
import * as React from "react";
|
|
||||||
import _ from "lodash";
|
|
||||||
import type { Spec } from "vega";
|
|
||||||
import { run } from "@quri/squiggle-lang";
|
|
||||||
import type {
|
|
||||||
DistPlus,
|
|
||||||
SamplingInputs,
|
|
||||||
exportEnv,
|
|
||||||
exportDistribution,
|
|
||||||
} from "@quri/squiggle-lang";
|
|
||||||
import { createClassFromSpec } from "react-vega";
|
|
||||||
import * as chartSpecification from "./spec-distributions.json";
|
|
||||||
import * as percentilesSpec from "./spec-percentiles.json";
|
|
||||||
import { NumberShower } from "./NumberShower";
|
|
||||||
import styled from "styled-components";
|
|
||||||
|
|
||||||
let SquiggleVegaChart = createClassFromSpec({
|
|
||||||
spec: chartSpecification as Spec,
|
|
||||||
});
|
|
||||||
|
|
||||||
let SquigglePercentilesChart = createClassFromSpec({
|
|
||||||
spec: percentilesSpec as Spec,
|
|
||||||
});
|
|
||||||
|
|
||||||
export interface SquiggleChartProps {
|
|
||||||
/** The input string for squiggle */
|
|
||||||
squiggleString?: string;
|
|
||||||
|
|
||||||
/** If the output requires monte carlo sampling, the amount of samples */
|
|
||||||
sampleCount?: number;
|
|
||||||
/** The amount of points returned to draw the distribution */
|
|
||||||
outputXYPoints?: number;
|
|
||||||
kernelWidth?: number;
|
|
||||||
pointDistLength?: number;
|
|
||||||
/** If the result is a function, where the function starts */
|
|
||||||
diagramStart?: number;
|
|
||||||
/** If the result is a function, where the function ends */
|
|
||||||
diagramStop?: number;
|
|
||||||
/** If the result is a function, how many points along the function it samples */
|
|
||||||
diagramCount?: number;
|
|
||||||
/** variables declared before this expression */
|
|
||||||
environment?: exportEnv;
|
|
||||||
/** When the environment changes */
|
|
||||||
onEnvChange?(env: exportEnv): void;
|
|
||||||
/** CSS width of the element */
|
|
||||||
width?: number;
|
|
||||||
height?: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
const Error = styled.div`
|
|
||||||
border: 1px solid #792e2e;
|
|
||||||
background: #eee2e2;
|
|
||||||
padding: 0.4em 0.8em;
|
|
||||||
`;
|
|
||||||
|
|
||||||
const ShowError: React.FC<{ heading: string; children: React.ReactNode }> = ({
|
|
||||||
heading = "Error",
|
|
||||||
children,
|
|
||||||
}) => {
|
|
||||||
return (
|
|
||||||
<Error>
|
|
||||||
<h3>{heading}</h3>
|
|
||||||
{children}
|
|
||||||
</Error>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export const SquiggleChart: React.FC<SquiggleChartProps> = ({
|
|
||||||
squiggleString = "",
|
|
||||||
sampleCount = 1000,
|
|
||||||
outputXYPoints = 1000,
|
|
||||||
kernelWidth,
|
|
||||||
pointDistLength = 1000,
|
|
||||||
diagramStart = 0,
|
|
||||||
diagramStop = 10,
|
|
||||||
diagramCount = 20,
|
|
||||||
environment = [],
|
|
||||||
onEnvChange = () => {},
|
|
||||||
width = 500,
|
|
||||||
height = 60,
|
|
||||||
}: SquiggleChartProps) => {
|
|
||||||
let samplingInputs: SamplingInputs = {
|
|
||||||
    sampleCount: sampleCount,
    outputXYPoints: outputXYPoints,
    kernelWidth: kernelWidth,
    pointDistLength: pointDistLength,
  };

  let result = run(squiggleString, samplingInputs, environment);
  if (result.tag === "Ok") {
    let environment = result.value.environment;
    let exports = result.value.exports;
    onEnvChange(environment);
    let chartResults = exports.map((chartResult: exportDistribution) => {
      if (chartResult["NAME"] === "Float") {
        return <NumberShower precision={3} number={chartResult["VAL"]} />;
      } else if (chartResult["NAME"] === "DistPlus") {
        let shape = chartResult.VAL.pointSetDist;
        if (shape.tag === "Continuous") {
          let xyShape = shape.value.xyShape;
          let totalY = xyShape.ys.reduce((a, b) => a + b);
          let total = 0;
          let cdf = xyShape.ys.map((y) => {
            total += y;
            return total / totalY;
          });
          let values = _.zip(cdf, xyShape.xs, xyShape.ys).map(([c, x, y]) => ({
            cdf: (c * 100).toFixed(2) + "%",
            x: x,
            y: y,
          }));

          return (
            <SquiggleVegaChart
              width={width}
              height={height}
              data={{ con: values }}
              actions={false}
            />
          );
        } else if (shape.tag === "Discrete") {
          let xyShape = shape.value.xyShape;
          let totalY = xyShape.ys.reduce((a, b) => a + b);
          let total = 0;
          let cdf = xyShape.ys.map((y) => {
            total += y;
            return total / totalY;
          });
          let values = _.zip(cdf, xyShape.xs, xyShape.ys).map(([c, x, y]) => ({
            cdf: (c * 100).toFixed(2) + "%",
            x: x,
            y: y,
          }));

          return <SquiggleVegaChart data={{ dis: values }} actions={false} />;
        } else if (shape.tag === "Mixed") {
          let discreteShape = shape.value.discrete.xyShape;
          let totalDiscrete = discreteShape.ys.reduce((a, b) => a + b);

          let discretePoints = _.zip(discreteShape.xs, discreteShape.ys);
          let continuousShape = shape.value.continuous.xyShape;
          let continuousPoints = _.zip(continuousShape.xs, continuousShape.ys);

          interface labeledPoint {
            x: number;
            y: number;
            type: "discrete" | "continuous";
          }

          let markedDisPoints: labeledPoint[] = discretePoints.map(
            ([x, y]) => ({ x: x, y: y, type: "discrete" })
          );
          let markedConPoints: labeledPoint[] = continuousPoints.map(
            ([x, y]) => ({ x: x, y: y, type: "continuous" })
          );

          let sortedPoints = _.sortBy(
            markedDisPoints.concat(markedConPoints),
            "x"
          );

          let totalContinuous = 1 - totalDiscrete;
          let totalY = continuousShape.ys.reduce(
            (a: number, b: number) => a + b
          );

          let total = 0;
          let cdf = sortedPoints.map((point: labeledPoint) => {
            if (point.type === "discrete") {
              total += point.y;
              return total;
            } else if (point.type === "continuous") {
              total += (point.y / totalY) * totalContinuous;
              return total;
            }
          });

          interface cdfLabeledPoint {
            cdf: string;
            x: number;
            y: number;
            type: "discrete" | "continuous";
          }
          let cdfLabeledPoint: cdfLabeledPoint[] = _.zipWith(
            cdf,
            sortedPoints,
            (c: number, point: labeledPoint) => ({
              ...point,
              cdf: (c * 100).toFixed(2) + "%",
            })
          );
          let continuousValues = cdfLabeledPoint.filter(
            (x) => x.type === "continuous"
          );
          let discreteValues = cdfLabeledPoint.filter(
            (x) => x.type === "discrete"
          );

          return (
            <SquiggleVegaChart
              data={{ con: continuousValues, dis: discreteValues }}
              actions={false}
            />
          );
        }
      } else if (chartResult.NAME === "Function") {
        // We are looking at a function. In this case, we draw a Percentiles chart
        let start = diagramStart;
        let stop = diagramStop;
        let count = diagramCount;
        let step = (stop - start) / count;
        let data = _.range(start, stop, step).map((x) => {
          if (chartResult.NAME === "Function") {
            let result = chartResult.VAL(x);
            if (result.tag === "Ok") {
              let percentileArray = [
                0.01, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95,
                0.99,
              ];

              let percentiles = getPercentiles(percentileArray, result.value);
              return {
                x: x,
                p1: percentiles[0],
                p5: percentiles[1],
                p10: percentiles[2],
                p20: percentiles[3],
                p30: percentiles[4],
                p40: percentiles[5],
                p50: percentiles[6],
                p60: percentiles[7],
                p70: percentiles[8],
                p80: percentiles[9],
                p90: percentiles[10],
                p95: percentiles[11],
                p99: percentiles[12],
              };
            }
            return null;
          }
        });
        return (
          <SquigglePercentilesChart
            data={{ facet: data.filter((x) => x !== null) }}
            actions={false}
          />
        );
      }
    });
    return <>{chartResults}</>;
  } else if (result.tag === "Error") {
    // At this point, we came across an error. What was our error?
    return (
      <ShowError heading={"Parse Error"}>
        {result.value}
      </ShowError>
    );
  }
  return <p>{"Invalid Response"}</p>;
};

function getPercentiles(percentiles: number[], t: DistPlus) {
  if (t.pointSetDist.tag === "Discrete") {
    let total = 0;
    let maxX = _.max(t.pointSetDist.value.xyShape.xs);
    let bounds = percentiles.map((_) => maxX);
    _.zipWith(
      t.pointSetDist.value.xyShape.xs,
      t.pointSetDist.value.xyShape.ys,
      (x, y) => {
        total += y;
        percentiles.forEach((v, i) => {
          if (total > v && bounds[i] === maxX) {
            bounds[i] = x;
          }
        });
      }
    );
    return bounds;
  } else if (t.pointSetDist.tag === "Continuous") {
    let total = 0;
    let maxX = _.max(t.pointSetDist.value.xyShape.xs);
    let totalY = _.sum(t.pointSetDist.value.xyShape.ys);
    let bounds = percentiles.map((_) => maxX);
    _.zipWith(
      t.pointSetDist.value.xyShape.xs,
      t.pointSetDist.value.xyShape.ys,
      (x, y) => {
        total += y / totalY;
        percentiles.forEach((v, i) => {
          if (total > v && bounds[i] === maxX) {
            bounds[i] = x;
          }
        });
      }
    );
    return bounds;
  } else if (t.pointSetDist.tag === "Mixed") {
    let discreteShape = t.pointSetDist.value.discrete.xyShape;
    let totalDiscrete = discreteShape.ys.reduce((a, b) => a + b);

    let discretePoints = _.zip(discreteShape.xs, discreteShape.ys);
    let continuousShape = t.pointSetDist.value.continuous.xyShape;
    let continuousPoints = _.zip(continuousShape.xs, continuousShape.ys);

    interface labeledPoint {
      x: number;
      y: number;
      type: "discrete" | "continuous";
    }

    let markedDisPoints: labeledPoint[] = discretePoints.map(([x, y]) => ({
      x: x,
      y: y,
      type: "discrete",
    }));
    let markedConPoints: labeledPoint[] = continuousPoints.map(([x, y]) => ({
      x: x,
      y: y,
      type: "continuous",
    }));

    let sortedPoints = _.sortBy(markedDisPoints.concat(markedConPoints), "x");

    let totalContinuous = 1 - totalDiscrete;
    let totalY = continuousShape.ys.reduce((a: number, b: number) => a + b);

    let total = 0;
    let maxX = _.max(sortedPoints.map((x) => x.x));
    let bounds = percentiles.map((_) => maxX);
    sortedPoints.map((point: labeledPoint) => {
      if (point.type === "discrete") {
        total += point.y;
      } else if (point.type === "continuous") {
        total += (point.y / totalY) * totalContinuous;
      }
      percentiles.forEach((v, i) => {
        if (total > v && bounds[i] === maxX) {
          bounds[i] = total;
        }
      });
      return total;
    });
    return bounds;
  }
}
@ -1,131 +0,0 @@ (deleted file)
import _ from "lodash";
import React, { FC, useState } from "react";
import ReactDOM from "react-dom";
import { SquiggleChart } from "./SquiggleChart";
import CodeEditor from "./CodeEditor";
import { Form, Input, Card, Row, Col } from "antd";
import "antd/dist/antd.css";

interface FieldFloatProps {
  label: string;
  className?: string;
  value: number;
  onChange: (value: number) => void;
}

function FieldFloat(Props: FieldFloatProps) {
  let [contents, setContents] = useState(Props.value + "");
  return (
    <Form.Item label={Props.label}>
      <Input
        value={contents}
        className={Props.className ? Props.className : ""}
        onChange={(e) => {
          setContents(e.target.value);
          let result = parseFloat(contents);
          if (_.isFinite(result)) {
            Props.onChange(result);
          }
        }}
      />
    </Form.Item>
  );
}

interface Props {
  initialSquiggleString: string;
}

let SquigglePlayground: FC<Props> = (props) => {
  let [squiggleString, setSquiggleString] = useState(
    props.initialSquiggleString
  );
  let [sampleCount, setSampleCount] = useState(1000);
  let [outputXYPoints, setOutputXYPoints] = useState(1000);
  let [pointDistLength, setPointDistLength] = useState(1000);
  let [diagramStart, setDiagramStart] = useState(0);
  let [diagramStop, setDiagramStop] = useState(10);
  let [diagramCount, setDiagramCount] = useState(20);
  var demoDist = (
    <SquiggleChart
      squiggleString={squiggleString}
      sampleCount={sampleCount}
      outputXYPoints={outputXYPoints}
      diagramStart={diagramStart}
      diagramStop={diagramStop}
      diagramCount={diagramCount}
      pointDistLength={pointDistLength}
      height={150}
    />
  );
  return (
    <Row>
      <Col span={12}>
        <Card title="Distribution Form">
          <Form>
            <Row gutter={16}>
              <Col span={24}>
                <CodeEditor
                  value={squiggleString}
                  onChange={setSquiggleString}
                  oneLine={false}
                />
              </Col>
            </Row>
            <Row gutter={16}>
              <Col span={12}>
                <FieldFloat
                  value={sampleCount}
                  label="Sample Count"
                  onChange={setSampleCount}
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={outputXYPoints}
                  onChange={setOutputXYPoints}
                  label="Output XY-points"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={pointDistLength}
                  onChange={setPointDistLength}
                  label="Downsample To"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramStart}
                  onChange={setDiagramStart}
                  label="Diagram Start"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramStop}
                  onChange={setDiagramStop}
                  label="Diagram Stop"
                />
              </Col>
              <Col span={12}>
                <FieldFloat
                  value={diagramCount}
                  onChange={setDiagramCount}
                  label="Diagram Count"
                />
              </Col>
            </Row>
          </Form>
        </Card>
      </Col>
      <Col span={12}>{demoDist}</Col>
    </Row>
  );
};
export default SquigglePlayground;
export function renderSquigglePlaygroundToDom(props: Props) {
  let parent = document.createElement("div");
  ReactDOM.render(<SquigglePlayground {...props} />, parent);
  return parent;
}
@ -10,12 +10,16 @@ interface CodeEditorProps {
   onChange: (value: string) => void;
   oneLine?: boolean;
   width?: number;
+  height: number;
+  showGutter?: boolean;
 }

 export let CodeEditor: FC<CodeEditorProps> = ({
   value,
   onChange,
   oneLine = false,
+  showGutter = false,
+  height,
 }: CodeEditorProps) => {
   let lineCount = value.split("\n").length;
   let id = _.uniqueId();
@ -25,9 +29,10 @@ export let CodeEditor: FC<CodeEditorProps> = ({
       mode="golang"
       theme="github"
       width={"100%"}
-      minLines={oneLine ? lineCount : 15}
-      maxLines={oneLine ? lineCount : 15}
-      showGutter={false}
+      height={String(height) + "px"}
+      minLines={oneLine ? lineCount : undefined}
+      maxLines={oneLine ? lineCount : undefined}
+      showGutter={showGutter}
       highlightActiveLine={false}
       showPrintMargin={false}
       onChange={onChange}
packages/components/src/components/DistributionChart.tsx (new file, 42 lines)
@ -0,0 +1,42 @@
import * as React from "react";
import _ from "lodash";
import type { Spec } from "vega";
import type { Distribution } from "@quri/squiggle-lang";
import { distributionErrorToString } from "@quri/squiggle-lang";
import { createClassFromSpec } from "react-vega";
import * as chartSpecification from "../vega-specs/spec-distributions.json";
import { ErrorBox } from "./ErrorBox";

let SquiggleVegaChart = createClassFromSpec({
  spec: chartSpecification as Spec,
});

type DistributionChartProps = {
  distribution: Distribution;
  width: number;
  height: number;
};

export const DistributionChart: React.FC<DistributionChartProps> = ({
  distribution,
  width,
  height,
}: DistributionChartProps) => {
  let shape = distribution.pointSet();
  if (shape.tag === "Ok") {
    return (
      <SquiggleVegaChart
        data={{ con: shape.value.continuous, dis: shape.value.discrete }}
        width={width - 20}
        height={height}
        actions={false}
      />
    );
  } else {
    return (
      <ErrorBox heading="Distribution Error">
        {distributionErrorToString(shape.value)}
      </ErrorBox>
    );
  }
};
packages/components/src/components/ErrorBox.tsx (new file, 20 lines)
@ -0,0 +1,20 @@
import * as React from "react";
import styled from "styled-components";

const ShowError = styled.div`
  border: 1px solid #792e2e;
  background: #eee2e2;
  padding: 0.4em 0.8em;
`;

export const ErrorBox: React.FC<{
  heading: string;
  children: React.ReactNode;
}> = ({ heading = "Error", children }) => {
  return (
    <ShowError>
      <h3>{heading}</h3>
      {children}
    </ShowError>
  );
};
packages/components/src/components/FunctionChart.tsx (new file, 115 lines)
@ -0,0 +1,115 @@
import * as React from "react";
import _ from "lodash";
import type { Spec } from "vega";
import type { Distribution, errorValue, result } from "@quri/squiggle-lang";
import { createClassFromSpec } from "react-vega";
import * as percentilesSpec from "../vega-specs/spec-percentiles.json";
import { DistributionChart } from "./DistributionChart";
import { ErrorBox } from "./ErrorBox";

let SquigglePercentilesChart = createClassFromSpec({
  spec: percentilesSpec as Spec,
});

type distPlusFn = (a: number) => result<Distribution, errorValue>;

const _rangeByCount = (start: number, stop: number, count: number) => {
  const step = (stop - start) / (count - 1);
  const items = _.range(start, stop, step);
  const result = items.concat([stop]);
  return result;
};

function unwrap<a, b>(x: result<a, b>): a {
  if (x.tag === "Ok") {
    return x.value;
  } else {
    throw Error("FAILURE TO UNWRAP");
  }
}

function mapFilter<a, b>(xs: a[], f: (x: a) => b | undefined): b[] {
  let initial: b[] = [];
  return xs.reduce((previous, current) => {
    let value: b | undefined = f(current);
    if (value !== undefined) {
      return previous.concat([value]);
    } else {
      return previous;
    }
  }, initial);
}

export const FunctionChart: React.FC<{
  distPlusFn: distPlusFn;
  diagramStart: number;
  diagramStop: number;
  diagramCount: number;
}> = ({ distPlusFn, diagramStart, diagramStop, diagramCount }) => {
  let [mouseOverlay, setMouseOverlay] = React.useState(0);
  function handleHover(...args) {
    setMouseOverlay(args[1]);
  }
  function handleOut() {
    setMouseOverlay(NaN);
  }
  const signalListeners = { mousemove: handleHover, mouseout: handleOut };
  let mouseItem = distPlusFn(mouseOverlay);
  let showChart =
    mouseItem.tag === "Ok" ? (
      <DistributionChart
        distribution={mouseItem.value}
        width={400}
        height={140}
      />
    ) : (
      <></>
    );
  let data1 = _rangeByCount(diagramStart, diagramStop, diagramCount);
  let valueData = mapFilter(data1, (x) => {
    let result = distPlusFn(x);
    if (result.tag === "Ok") {
      return { x: x, value: result.value };
    }
  }).map(({ x, value }) => {
    return {
      x: x,
      p1: unwrap(value.inv(0.01)),
      p5: unwrap(value.inv(0.05)),
      p10: unwrap(value.inv(0.12)),
      p20: unwrap(value.inv(0.2)),
      p30: unwrap(value.inv(0.3)),
      p40: unwrap(value.inv(0.4)),
      p50: unwrap(value.inv(0.5)),
      p60: unwrap(value.inv(0.6)),
      p70: unwrap(value.inv(0.7)),
      p80: unwrap(value.inv(0.8)),
      p90: unwrap(value.inv(0.9)),
      p95: unwrap(value.inv(0.95)),
      p99: unwrap(value.inv(0.99)),
    };
  });

  let errorData = mapFilter(data1, (x) => {
    let result = distPlusFn(x);
    if (result.tag === "Error") {
      return { x: x, error: result.value };
    }
  });
  let error2 = _.groupBy(errorData, (x) => x.error);
  return (
    <>
      <SquigglePercentilesChart
        data={{ facet: valueData }}
        actions={false}
        signalListeners={signalListeners}
      />
      {showChart}
      {_.keysIn(error2).map((k) => (
        <ErrorBox heading={k}>
          {`Values: [${error2[k].map((r) => r.x.toFixed(2)).join(",")}]`}
        </ErrorBox>
      ))}
    </>
  );
};
packages/components/src/components/NumberShower.tsx (new file, 98 lines)
@ -0,0 +1,98 @@
import * as React from "react";
import _ from "lodash";

const orderOfMagnitudeNum = (n: number) => {
  return Math.pow(10, n);
};

// 105 -> 3
const orderOfMagnitude = (n: number) => {
  return Math.floor(Math.log(n) / Math.LN10 + 0.000000001);
};

function withXSigFigs(number: number, sigFigs: number) {
  const withPrecision = number.toPrecision(sigFigs);
  const formatted = Number(withPrecision);
  return `${formatted}`;
}

class NumberShowerBuilder {
  number: number;
  precision: number;

  constructor(number: number, precision = 2) {
    this.number = number;
    this.precision = precision;
  }

  convert() {
    const number = Math.abs(this.number);
    const response = this.evaluate(number);
    if (this.number < 0) {
      response.value = "-" + response.value;
    }
    return response;
  }

  metricSystem(number: number, order: number) {
    const newNumber = number / orderOfMagnitudeNum(order);
    const precision = this.precision;
    return `${withXSigFigs(newNumber, precision)}`;
  }

  evaluate(number: number) {
    if (number === 0) {
      return { value: this.metricSystem(0, 0) };
    }

    const order = orderOfMagnitude(number);
    if (order < -2) {
      return { value: this.metricSystem(number, order), power: order };
    } else if (order < 4) {
      return { value: this.metricSystem(number, 0) };
    } else if (order < 6) {
      return { value: this.metricSystem(number, 3), symbol: "K" };
    } else if (order < 9) {
      return { value: this.metricSystem(number, 6), symbol: "M" };
    } else if (order < 12) {
      return { value: this.metricSystem(number, 9), symbol: "B" };
    } else if (order < 15) {
      return { value: this.metricSystem(number, 12), symbol: "T" };
    } else {
      return { value: this.metricSystem(number, order), power: order };
    }
  }
}

export function numberShow(number: number, precision = 2) {
  const ns = new NumberShowerBuilder(number, precision);
  return ns.convert();
}

export interface NumberShowerProps {
  number: number;
  precision?: number;
}

export let NumberShower: React.FC<NumberShowerProps> = ({
  number,
  precision = 2,
}: NumberShowerProps) => {
  let numberWithPresentation = numberShow(number, precision);
  return (
    <span>
      {numberWithPresentation.value}
      {numberWithPresentation.symbol}
      {numberWithPresentation.power ? (
        <span>
          {"\u00b710"}
          <span style={{ fontSize: "0.6em", verticalAlign: "super" }}>
            {numberWithPresentation.power}
          </span>
        </span>
      ) : (
        <></>
      )}
    </span>
  );
};
packages/components/src/components/SquiggleChart.tsx (new file, 175 lines)
@ -0,0 +1,175 @@
import * as React from "react";
import _ from "lodash";
import styled from "styled-components";
import {
  run,
  errorValueToString,
  squiggleExpression,
} from "@quri/squiggle-lang";
import type { samplingParams, exportEnv } from "@quri/squiggle-lang";
import { NumberShower } from "./NumberShower";
import { DistributionChart } from "./DistributionChart";
import { ErrorBox } from "./ErrorBox";
import useSize from "@react-hook/size";

const variableBox = {
  Component: styled.div`
    background: white;
    border: 1px solid #eee;
    border-radius: 2px;
    margin-bottom: 0.4em;
  `,
  Heading: styled.div`
    border-bottom: 1px solid #eee;
    padding-left: 0.8em;
    padding-right: 0.8em;
    padding-top: 0.1em;
  `,
  Body: styled.div`
    padding: 0.4em 0.8em;
  `,
};

export const VariableBox: React.FC<{
  heading: string;
  children: React.ReactNode;
}> = ({ heading = "Error", children }) => {
  return (
    <variableBox.Component>
      <variableBox.Heading>
        <h3>{heading}</h3>
      </variableBox.Heading>
      <variableBox.Body>{children}</variableBox.Body>
    </variableBox.Component>
  );
};

export interface SquiggleItemProps {
  /** The input string for squiggle */
  expression: squiggleExpression;
  width: number;
  height: number;
}

const SquiggleItem: React.FC<SquiggleItemProps> = ({
  expression,
  width,
  height,
}: SquiggleItemProps) => {
  switch (expression.tag) {
    case "number":
      return (
        <VariableBox heading="Number">
          <NumberShower precision={3} number={expression.value} />
        </VariableBox>
      );
    case "distribution": {
      let distType = expression.value.type();
      return (
        <VariableBox heading={`Distribution (${distType})`}>
          {distType === "Symbolic" ? (
            <>
              <div>{expression.value.toString()}</div>
            </>
          ) : (
            <></>
          )}
          <DistributionChart
            distribution={expression.value}
            height={height}
            width={width}
          />
        </VariableBox>
      );
    }
    case "string":
      return (
        <VariableBox heading="String">{`"${expression.value}"`}</VariableBox>
      );
    case "boolean":
      return (
        <VariableBox heading="Boolean">
          {expression.value.toString()}
        </VariableBox>
      );
    case "symbol":
      return <VariableBox heading="Symbol">{expression.value}</VariableBox>;
    case "call":
      return <VariableBox heading="Call">{expression.value}</VariableBox>;
    case "array":
      return (
        <VariableBox heading="Array">
          {expression.value.map((r) => (
            <SquiggleItem expression={r} width={width - 20} height={50} />
          ))}
        </VariableBox>
      );
    default:
      return (
        <ErrorBox heading="No Viewer">
          {"We don't currently have a working viewer for record types."}
        </ErrorBox>
      );
  }
};

export interface SquiggleChartProps {
  /** The input string for squiggle */
  squiggleString?: string;
  /** If the output requires monte carlo sampling, the amount of samples */
  sampleCount?: number;
  /** The amount of points returned to draw the distribution */
  outputXYPoints?: number;
  kernelWidth?: number;
  pointDistLength?: number;
  /** If the result is a function, where the function starts */
  diagramStart?: number;
  /** If the result is a function, where the function ends */
  diagramStop?: number;
  /** If the result is a function, how many points along the function it samples */
  diagramCount?: number;
  /** variables declared before this expression */
  environment?: exportEnv;
  /** When the environment changes */
  onEnvChange?(env: exportEnv): void;
  /** CSS width of the element */
  width?: number;
  height?: number;
}

export const SquiggleChart: React.FC<SquiggleChartProps> = ({
  squiggleString = "",
  sampleCount = 1000,
  outputXYPoints = 1000,
  environment = [],
  onEnvChange = () => {},
  height = 60,
  width = NaN,
}: SquiggleChartProps) => {
  const target = React.useRef(null);
  const [componentWidth] = useSize(target);
  // I would have wanted to just use componentWidth, but this created infinite loops with SquiggleChart.stories.
  // So you can manually add a width, as an escape hatch.
  let _width = width || componentWidth;
  let samplingInputs: samplingParams = {
    sampleCount: sampleCount,
    xyPointLength: outputXYPoints,
  };
  let expressionResult = run(squiggleString, samplingInputs, environment);
  let internal: JSX.Element;
  if (expressionResult.tag === "Ok") {
    onEnvChange(environment);
    let expression = expressionResult.value;
    internal = (
      <SquiggleItem expression={expression} width={_width} height={height} />
    );
  } else {
    // At this point, we came across an error. What was our error?
    internal = (
      <ErrorBox heading={"Parse Error"}>
        {errorValueToString(expressionResult.value)}
      </ErrorBox>
    );
  }
  return <div ref={target}>{internal}</div>;
};
@ -3,7 +3,7 @@ import * as ReactDOM from "react-dom";
 import { SquiggleChart } from "./SquiggleChart";
 import { CodeEditor } from "./CodeEditor";
 import type { exportEnv } from "@quri/squiggle-lang";
-import styled from 'styled-components'
+import styled from "styled-components";

 export interface SquiggleEditorProps {
   /** The input string for squiggle */
@ -55,6 +55,8 @@ export let SquiggleEditor: React.FC<SquiggleEditorProps> = ({
         value={expression}
         onChange={setExpression}
         oneLine={true}
+        showGutter={false}
+        height={20}
       />
     </Input>
     <SquiggleChart
packages/components/src/components/SquigglePlayground.tsx (new file, 114 lines)
@ -0,0 +1,114 @@
import _ from "lodash";
import React, { FC, useState } from "react";
import ReactDOM from "react-dom";
import { SquiggleChart } from "./SquiggleChart";
import CodeEditor from "./CodeEditor";
import { Form, Input, Row, Col } from "antd";
import styled from "styled-components";
import "antd/dist/antd.css";

interface FieldFloatProps {
  label: string;
  className?: string;
  value: number;
  onChange: (value: number) => void;
}

function FieldFloat(Props: FieldFloatProps) {
  let [contents, setContents] = useState(Props.value + "");
  return (
    <Form.Item label={Props.label}>
      <Input
        value={contents}
        className={Props.className ? Props.className : ""}
        onChange={(e) => {
          setContents(e.target.value);
          let result = parseFloat(contents);
          if (_.isFinite(result)) {
            Props.onChange(result);
          }
        }}
      />
    </Form.Item>
  );
}

interface Props {
  initialSquiggleString?: string;
  height?: number;
}

interface Props2 {
  height: number;
}

const ShowBox = styled.div<Props2>`
  border: 1px solid #eee;
  border-radius: 2px;
  height: ${(props) => props.height};
`;

const MyComponent = styled.div`
  color: ${(props) => props.theme.colors.main};
`;

interface TitleProps {
  readonly maxHeight: number;
}

const Display = styled.div<TitleProps>`
  background: #f6f6f6;
  border-left: 1px solid #eee;
  height: 100vh;
  padding: 3px;
  overflow-y: auto;
  max-height: ${(props) => props.maxHeight}px;
`;

let SquigglePlayground: FC<Props> = ({
  initialSquiggleString = "",
  height = 300,
}: Props) => {
  let [squiggleString, setSquiggleString] = useState(initialSquiggleString);
  let [sampleCount, setSampleCount] = useState(1000);
  let [outputXYPoints, setOutputXYPoints] = useState(1000);
  let [pointDistLength, setPointDistLength] = useState(1000);
  let [diagramStart, setDiagramStart] = useState(0);
  let [diagramStop, setDiagramStop] = useState(10);
  let [diagramCount, setDiagramCount] = useState(20);
  return (
    <ShowBox height={height}>
      <Row>
        <Col span={12}>
          <CodeEditor
            value={squiggleString}
            onChange={setSquiggleString}
            oneLine={false}
            showGutter={true}
            height={height - 3}
          />
        </Col>
        <Col span={12}>
          <Display maxHeight={height - 3}>
            <SquiggleChart
              squiggleString={squiggleString}
              sampleCount={sampleCount}
              outputXYPoints={outputXYPoints}
              diagramStart={diagramStart}
              diagramStop={diagramStop}
              diagramCount={diagramCount}
              pointDistLength={pointDistLength}
              height={150}
            />
          </Display>
        </Col>
      </Row>
    </ShowBox>
  );
};
export default SquigglePlayground;
export function renderSquigglePlaygroundToDom(props: Props) {
  let parent = document.createElement("div");
  ReactDOM.render(<SquigglePlayground {...props} />, parent);
  return parent;
}
@ -1,6 +1,9 @@
-export { SquiggleChart } from "./SquiggleChart";
+export { SquiggleChart } from "./components/SquiggleChart";
-export { SquiggleEditor, renderSquiggleEditorToDom } from "./SquiggleEditor";
+export {
+  SquiggleEditor,
+  renderSquiggleEditorToDom,
+} from "./components/SquiggleEditor";
 import SquigglePlayground, {
   renderSquigglePlaygroundToDom,
-} from "./SquigglePlayground";
+} from "./components/SquigglePlayground";
 export { SquigglePlayground, renderSquigglePlaygroundToDom };
@ -3,4 +3,4 @@ import { Meta } from "@storybook/addon-docs";
 <Meta title="Squiggle/Introduction" />

 This is the component library for Squiggle. These are React
 components, and can be used in any application that you see fit.
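A minimal usage sketch to make that sentence concrete. This snippet is not part of the diff; the package name `@quri/squiggle-components` is an assumption on my part, while `SquiggleChart` and its `squiggleString`, `width`, and `height` props come from the `SquiggleChart.tsx` added above.

```tsx
// Hypothetical consumer code; the package name is assumed, not confirmed by this PR.
import * as React from "react";
import { SquiggleChart } from "@quri/squiggle-components";

export const Example: React.FC = () => (
  // squiggleString, width and height are real props of SquiggleChart in this PR.
  <SquiggleChart squiggleString="normal(5, 2)" width={600} height={150} />
);
```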
@ -1,4 +1,4 @@
-import { NumberShower } from "../NumberShower";
+import { NumberShower } from "../components/NumberShower";
 import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";

 <Meta title="Squiggle/NumberShower" component={NumberShower} />
@ -14,10 +14,10 @@ It uses the symbols "K", "M", "B", and "T", to represent thousands, millions, bi
     name="Ten Thousand"
     args={{
       number: 10000,
-      precision: 2
+      precision: 2,
     }}
   >
-    {args => <NumberShower {...args}/>}
+    {(args) => <NumberShower {...args} />}
   </Story>
 </Canvas>

@ -26,10 +26,10 @@ It uses the symbols "K", "M", "B", and "T", to represent thousands, millions, bi
     name="Ten Billion"
     args={{
       number: 10000000000,
-      precision: 2
+      precision: 2,
     }}
   >
-    {args => <NumberShower {...args}/>}
+    {(args) => <NumberShower {...args} />}
   </Story>
 </Canvas>

@ -38,10 +38,10 @@ It uses the symbols "K", "M", "B", and "T", to represent thousands, millions, bi
     name="1.2*10^15"
     args={{
       number: 1200000000000000,
-      precision: 2
+      precision: 2,
     }}
   >
-    {args => <NumberShower {...args}/>}
+    {(args) => <NumberShower {...args} />}
   </Story>
 </Canvas>

@ -50,10 +50,10 @@ It uses the symbols "K", "M", "B", and "T", to represent thousands, millions, bi
     name="1.35*10^-13"
     args={{
       number: 0.000000000000135,
-      precision: 2
+      precision: 2,
     }}
   >
-    {args => <NumberShower {...args}/>}
+    {(args) => <NumberShower {...args} />}
   </Story>
 </Canvas>
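For reference, here is what the `numberShow` helper in the new `NumberShower.tsx` above produces for a few of these story inputs. This is a hand-traced sketch of the builder's branches, not output captured from the package.

```ts
import { numberShow } from "./NumberShower"; // path as in this PR's components package

numberShow(10000); //             { value: "10", symbol: "K" }  -> rendered as "10K"
numberShow(10000000000); //       { value: "10", symbol: "B" }  -> rendered as "10B"
numberShow(1200000000000000); //  { value: "1.2", power: 15 }   -> rendered as "1.2·10^15"
```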
@ -1,9 +1,14 @@
-import { SquiggleChart } from "../SquiggleChart";
+import { SquiggleChart } from "../components/SquiggleChart";
 import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";

 <Meta title="Squiggle/SquiggleChart" component={SquiggleChart} />

 export const Template = SquiggleChart;
+/*
+We have to hardcode a width here, because otherwise some interaction with
+Storybook creates an infinite loop with the internal width
+*/
+const width = 600;
+
 # Squiggle Chart

@ -18,13 +23,42 @@ could be continuous, discrete or mixed.

 ## Distributions

-### Continuous Distributions
+### Continuous Distributions (Symbolic)

 <Canvas>
   <Story
-    name="Normal"
+    name="Continuous Symbolic"
     args={{
       squiggleString: "normal(5,2)",
+      width,
+    }}
+  >
+    {Template.bind({})}
+  </Story>
+</Canvas>
+
+### Continuous Distributions (PointSet)
+
+<Canvas>
+  <Story
+    name="Continuous Pointset"
+    args={{
+      squiggleString: "toPointSet(normal(5,2))",
+      width,
+    }}
+  >
+    {Template.bind({})}
+  </Story>
+</Canvas>
+
+### Continuous Distributions (SampleSet)
+
+<Canvas>
+  <Story
+    name="Continuous SampleSet"
+    args={{
+      squiggleString: "toSampleSet(normal(5,2), 1000)",
+      width,
     }}
   >
     {Template.bind({})}
@ -37,7 +71,8 @@ could be continuous, discrete or mixed.
   <Story
     name="Discrete"
     args={{
-      squiggleString: "mm(0, 1, 3, 5, 8, 10, [0.1, 0.8, 0.5, 0.3, 0.2, 0.1])",
+      squiggleString: "mx(0, 1, 3, 5, 8, 10, [0.1, 0.8, 0.5, 0.3, 0.2, 0.1])",
+      width,
     }}
   >
     {Template.bind({})}
@ -50,7 +85,9 @@ could be continuous, discrete or mixed.
   <Story
     name="Mixed"
     args={{
-      squiggleString: "mm(0, 1, 3, 5, 8, normal(8, 1), [0.1, 0.3, 0.4, 0.35, 0.2, 0.8])",
+      squiggleString:
+        "mx(0, 1, 3, 5, 8, normal(8, 1), [0.1, 0.3, 0.4, 0.35, 0.2, 0.8])",
+      width,
     }}
   >
     {Template.bind({})}
@ -67,23 +104,21 @@ to allow large and small numbers being printed cleanly.
     name="Constant"
     args={{
       squiggleString: "500000000",
+      width,
     }}
   >
     {Template.bind({})}
   </Story>
 </Canvas>

-## Functions
+## Arrays

-Full functions can be returned. These plot out the results of distributions between a set of x-coordinates.
-
-The default is show 10 points between 0 and 10.

 <Canvas>
   <Story
-    name="Function"
+    name="Array"
     args={{
-      squiggleString: "f(x) = normal(x^2,x^1.8)\nf",
+      squiggleString: "[normal(5,2), normal(10,1), normal(40,2), 400000]",
+      width,
     }}
   >
     {Template.bind({})}
@ -97,6 +132,49 @@ The default is show 10 points between 0 and 10.
     name="Error"
     args={{
       squiggleString: "f(x) = normal(",
+      width,
+    }}
+  >
+    {Template.bind({})}
+  </Story>
+</Canvas>
+
+## Booleans
+
+<Canvas>
+  <Story
+    name="Boolean"
+    args={{
+      squiggleString: "3 == 3",
+      width,
+    }}
+  >
+    {Template.bind({})}
+  </Story>
+</Canvas>
+
+## Records
+
+<Canvas>
+  <Story
+    name="Record"
+    args={{
+      squiggleString: "{foo: 35 to 50, bar: [1,2,3]}",
+      width,
+    }}
+  >
+    {Template.bind({})}
+  </Story>
+</Canvas>
+
+## Strings
+
+<Canvas>
+  <Story
+    name="String"
+    args={{
+      squiggleString: '"Lucky day!"',
+      width,
     }}
   >
     {Template.bind({})}
@ -1,4 +1,4 @@
-import { SquiggleEditor } from "../SquiggleEditor";
+import { SquiggleEditor } from "../components/SquiggleEditor";
 import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";

 <Meta title="Squiggle/SquiggleEditor" component={SquiggleEditor} />
@ -1,5 +1,6 @@
-import SquigglePlayground from "../SquigglePlayground";
+import SquigglePlayground from "../components/SquigglePlayground";
 import { Canvas, Meta, Story, Props } from "@storybook/addon-docs";
+import styled from "styled-components";

 <Meta title="Squiggle/SquigglePlayground" component={SquigglePlayground} />

@ -15,6 +16,7 @@ including sampling settings, in squiggle.
     name="Normal"
     args={{
       initialSquiggleString: "normal(5,2)",
+      height: 500,
     }}
   >
     {Template.bind({})}
@ -82,10 +82,13 @@
     {
       "orient": "bottom",
       "scale": "xscale",
-      "labelColor": "#666",
-      "tickColor": "#ddd",
+      "labelColor": "#727d93",
+      "tickColor": "#fff",
+      "tickOpacity": 0.0,
+      "domainColor": "#fff",
+      "domainOpacity": 0.0,
       "format": "~s",
-      "tickCount": 20
+      "tickCount": 10
     }
   ],
   "marks": [
@ -127,10 +130,6 @@
       },
       "encode": {
         "enter": {
-          "y2": {
-            "scale": "yscale",
-            "value": 0
-          },
           "width": {
             "value": 1
           }
@ -143,6 +142,10 @@
           "y": {
             "scale": "yscale",
            "field": "y"
+          },
+          "y2": {
+            "scale": "yscale",
+            "value": 0
           }
         }
       }
@ -157,9 +160,7 @@
           "shape": {
             "value": "circle"
           },
-          "width": {
-            "value": 5
-          },
+          "size": [{ "value": 100 }],
           "tooltip": {
             "signal": "datum.y"
           }
@ -1,7 +1,7 @@
 {
   "$schema": "https://vega.github.io/schema/vega/v5.json",
   "width": 500,
-  "height": 400,
+  "height": 200,
   "padding": 5,
   "data": [
     {
@ -93,54 +93,49 @@
       }
     }
   ],
+  "signals": [
+    {
+      "name": "mousemove",
+      "on": [{ "events": "mousemove", "update": "invert('xscale', x())" }]
+    },
+    {
+      "name": "mouseout",
+      "on": [{ "events": "mouseout", "update": "invert('xscale', x())" }]
+    }
+  ],
   "axes": [
     {
       "orient": "bottom",
       "scale": "xscale",
       "grid": false,
-      "tickSize": 2,
-      "encode": {
-        "grid": {
-          "enter": {
-            "stroke": {
-              "value": "#ccc"
-            }
-          }
-        },
-        "ticks": {
-          "enter": {
-            "stroke": {
-              "value": "#ccc"
-            }
-          }
-        }
-      }
+      "labelColor": "#727d93",
+      "tickColor": "#fff",
+      "tickOpacity": 0.0,
+      "domainColor": "#727d93",
+      "domainOpacity": 0.1,
+      "tickCount": 5
     },
     {
       "orient": "left",
       "scale": "yscale",
       "grid": false,
-      "domain": false,
-      "tickSize": 2,
-      "encode": {
-        "grid": {
-          "enter": {
-            "stroke": {
-              "value": "#ccc"
-            }
-          }
-        },
-        "ticks": {
-          "enter": {
-            "stroke": {
-              "value": "#ccc"
-            }
-          }
-        }
-      }
+      "labelColor": "#727d93",
+      "tickColor": "#fff",
+      "tickOpacity": 0.0,
+      "domainColor": "#727d93",
+      "domainOpacity": 0.1,
+      "tickCount": 5
     }
   ],
   "marks": [
+    {
+      "type": "rule",
+      "encode": {
+        "update": {
+          "xscale": { "scale": "xscale", "signal": "mousemove" }
+        }
+      }
+    },
     {
       "type": "area",
       "from": {
@ -5,10 +5,12 @@
     },
     "module": "commonjs",
     "jsx": "react",
+    "skipLibCheck": true,
     "resolveJsonModule": true,
     "noImplicitAny": false,
     "esModuleInterop": true,
     "removeComments": true,
+    "strict": true,
     "preserveConstEnums": true,
     "composite": true,
     "outDir": "./dist",
@ -16,7 +18,10 @@
     "declaration": true,
     "sourceMap": true
   },
-  "files": ["src/spec-distributions.json", "src/spec-percentiles.json"],
+  "files": [
+    "src/vega-specs/spec-distributions.json",
+    "src/vega-specs/spec-percentiles.json"
+  ],
   "target": "ES6",
   "include": ["src/**/*", "src/*"],
   "exclude": ["node_modules", "**/*.spec.ts", "webpack.config.js"],
@ -3,13 +3,14 @@ const path = require("path");
 module.exports = {
   mode: "production",
   devtool: "source-map",
+  profile: true,
   entry: "./src/index.ts",
   module: {
     rules: [
       {
         test: /\.tsx?$/,
         loader: "ts-loader",
-        options: { projectReferences: true },
+        options: { projectReferences: true, transpileOnly: true },
         exclude: /node_modules/,
       },
       {
packages/squiggle-lang/.prettierignore (new file, 4 lines)
@ -0,0 +1,4 @@
dist
lib
*.bs.js
*.gen.tsx
@ -1,24 +1,28 @@
 # Squiggle language

 ## Build for development
-We assume that you ran `yarn` at the monorepo level.
-``` sh
+
+We assume that you ran `yarn` at the monorepo level.
+
+```sh
 yarn build
 ```

 `yarn bundle` is needed for a deployment.

 Other:
-``` sh
+
+```sh
 yarn start # listens to files and recompiles at every mutation
 yarn test
 yarn test:watch # keeps an active session and runs all tests at every mutation

 # where o := open in osx and o := xdg-open in linux,
 yarn coverage; o _coverage/index.html # produces coverage report and opens it in browser
 ```

 ## Information

 Squiggle is a language for representing probability distributions, as well as functions that return probability distributions. Its original intended use is for improving epistemics around EA decisions.

 This package, `@quri/squiggle-lang`, contains the core language of squiggle. The main feature revolves around evaluating squiggle expressions. Currently the package only exports a single function, named "run", which from a squiggle string returns an object representing the result of the evaluation.
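A small usage sketch of that `run` function, pieced together from how the components package calls it elsewhere in this PR (see `SquiggleChart.tsx`). Treat the argument shapes and the empty environment as assumptions; they mirror that component, not official documentation.

```ts
import { run, errorValueToString } from "@quri/squiggle-lang";

// samplingParams shape ({ sampleCount, xyPointLength }) and the [] environment
// are copied from SquiggleChart.tsx in this PR.
const result = run("normal(5, 2)", { sampleCount: 1000, xyPointLength: 1000 }, []);

if (result.tag === "Ok") {
  console.log(result.value); // a squiggleExpression: number, distribution, array, ...
} else {
  console.error(errorValueToString(result.value));
}
```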
@ -32,7 +36,9 @@ ReScript has an interesting philosophy of not providing much in the way of effec
 `.gen.ts` files are created by the [`@genType`](https://rescript-lang.org/docs/gentype/latest/getting-started) decorator, which creates typescript typings for needed parts of the codebase so that they can be easily used in typescript. These .gen.ts files reference the .bs.js files generated by rescript.

 ### Errors regarding the `rationale` package

 You may notice sometimes, that there are errors about the `rationale` package. If you ever get these errors, `yarn build` should fix this issue. These errors occur because `yarn build` also needs to create build files that are in `node_modules`. So if you replace `node_modules` you may need to rebuild to get those files back.

 ## Distributing this package or using this package from other monorepo packages
-As it says in the other `packages/*/README.md`s, building this package is an essential step of building other packages.
+
+As it says in the other `packages/*/README.md`s, building this package is an essential step of building other packages.
@ -4,10 +4,10 @@ open Expect
 describe("Bandwidth", () => {
   test("nrd0()", () => {
     let data = [1., 4., 3., 2.]
-    expect(SampleSetDist_Bandwidth.nrd0(data)) -> toEqual(0.7625801874014622)
+    expect(SampleSetDist_Bandwidth.nrd0(data))->toEqual(0.7625801874014622)
   })
   test("nrd()", () => {
     let data = [1., 4., 3., 2.]
-    expect(SampleSetDist_Bandwidth.nrd(data)) -> toEqual(0.8981499984950554)
+    expect(SampleSetDist_Bandwidth.nrd(data))->toEqual(0.8981499984950554)
   })
 })
@ -18,11 +18,9 @@ let {
   triangularDist,
   exponentialDist,
 } = module(GenericDist_Fixtures)
-let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))

-let {toFloat, toDist, toString, toError} = module(DistributionOperation.Output)
+let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Output)
 let {run} = module(DistributionOperation)
-let {fmap} = module(DistributionOperation.Output)
 let run = run(~env)
 let outputMap = fmap(~env)
 let toExt: option<'a> => 'a = E.O.toExt(
@ -6,6 +6,9 @@ let normalDist: GenericDist_Types.genericDist = normalDist5
 let betaDist: GenericDist_Types.genericDist = Symbolic(#Beta({alpha: 2.0, beta: 5.0}))
 let lognormalDist: GenericDist_Types.genericDist = Symbolic(#Lognormal({mu: 0.0, sigma: 1.0}))
 let cauchyDist: GenericDist_Types.genericDist = Symbolic(#Cauchy({local: 1.0, scale: 1.0}))
-let triangularDist: GenericDist_Types.genericDist = Symbolic(#Triangular({low: 1.0, medium: 2.0, high: 3.0}))
+let triangularDist: GenericDist_Types.genericDist = Symbolic(
+  #Triangular({low: 1.0, medium: 2.0, high: 3.0}),
+)
 let exponentialDist: GenericDist_Types.genericDist = Symbolic(#Exponential({rate: 2.0}))
 let uniformDist: GenericDist_Types.genericDist = Symbolic(#Uniform({low: 9.0, high: 10.0}))
+let floatDist: GenericDist_Types.genericDist = Symbolic(#Float(1e1))
@ -0,0 +1,368 @@
|
||||||
|
/*
|
||||||
|
This file is like a half measure between one-off unit tests and proper invariant validation.
|
||||||
|
As such, I'm not that excited about it, though it does provide some structure and will alarm us
|
||||||
|
when things substantially change.
|
||||||
|
Also, there are some open comments in https://github.com/quantified-uncertainty/squiggle/pull/232 that haven't been addressed.
|
||||||
|
*/
|
||||||
|
|
||||||
|
open Jest
|
||||||
|
open Expect
|
||||||
|
open TestHelpers
|
||||||
|
|
||||||
|
let {
|
||||||
|
normalDist5, // mean=5, stdev=2
|
||||||
|
normalDist10, // mean=10, stdev=2
|
||||||
|
normalDist20, // mean=20, stdev=2
|
||||||
|
normalDist, // mean=5; stdev=2
|
||||||
|
uniformDist, // low=9; high=10
|
||||||
|
betaDist, // alpha=2; beta=5
|
||||||
|
lognormalDist, // mu=0; sigma=1
|
||||||
|
cauchyDist, // local=1; scale=1
|
||||||
|
triangularDist, // low=1; medium=2; high=3;
|
||||||
|
exponentialDist, // rate=2
|
||||||
|
} = module(GenericDist_Fixtures)
|
||||||
|
|
||||||
|
let {
|
||||||
|
algebraicAdd,
|
||||||
|
algebraicMultiply,
|
||||||
|
algebraicDivide,
|
||||||
|
algebraicSubtract,
|
||||||
|
algebraicLogarithm,
|
||||||
|
algebraicPower,
|
||||||
|
} = module(DistributionOperation.Constructors)
|
||||||
|
|
||||||
|
let algebraicAdd = algebraicAdd(~env)
|
||||||
|
let algebraicMultiply = algebraicMultiply(~env)
|
||||||
|
let algebraicDivide = algebraicDivide(~env)
|
||||||
|
let algebraicSubtract = algebraicSubtract(~env)
|
||||||
|
let algebraicLogarithm = algebraicLogarithm(~env)
|
||||||
|
let algebraicPower = algebraicPower(~env)
|
||||||
|
|
||||||
|
describe("(Algebraic) addition of distributions", () => {
|
||||||
|
describe("mean", () => {
|
||||||
|
test("normal(mean=5) + normal(mean=20)", () => {
|
||||||
|
normalDist5
|
||||||
|
->algebraicAdd(normalDist20)
|
||||||
|
->E.R2.fmap(GenericDist_Types.Constructors.UsingDists.mean)
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
->expect
|
||||||
|
->toBe(Some(2.5e1))
|
||||||
|
})
|
||||||
|
|
||||||
|
test("uniform(low=9, high=10) + beta(alpha=2, beta=5)", () => {
|
||||||
|
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
||||||
|
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
||||||
|
let received =
|
||||||
|
uniformDist
|
||||||
|
->algebraicAdd(betaDist)
|
||||||
|
->E.R2.fmap(GenericDist_Types.Constructors.UsingDists.mean)
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic: CI could fail here, then pass on a rerun, which is bad.
|
||||||
|
// sometimes it works with ~digits=2.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.01927225696028752, ~digits=1) // (uniformMean +. betaMean)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("beta(alpha=2, beta=5) + uniform(low=9, high=10)", () => {
|
||||||
|
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
||||||
|
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
||||||
|
let received =
|
||||||
|
betaDist
|
||||||
|
->algebraicAdd(uniformDist)
|
||||||
|
->E.R2.fmap(GenericDist_Types.Constructors.UsingDists.mean)
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic: CI could fail here, then pass on a rerun, which is bad.
|
||||||
|
// sometimes it works with ~digits=2.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.019275414920485248, ~digits=1) // (uniformMean +. betaMean)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
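The invariant behind this mean block is linearity of expectation: mean(X + Y) = mean(X) + mean(Y). A Monte Carlo sketch of the same check, with a stand-in sampler rather than the library's sample-set machinery:

// Box-Muller normal sampler; a stand-in for the library's sampling path, for illustration only.
const sampleNormal = (mean: number, stdev: number): number => {
  const u1 = 1 - Math.random(); // avoid log(0)
  const u2 = Math.random();
  return mean + stdev * Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);
};

const meanOf = (xs: number[]): number => xs.reduce((a, b) => a + b, 0) / xs.length;

const sums = Array.from({ length: 10000 }, () => sampleNormal(5, 2) + sampleNormal(20, 2));
console.log(meanOf(sums)); // ≈ 25, i.e. mean(normal(5, 2)) + mean(normal(20, 2))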
|
describe("pdf", () => {
|
||||||
|
// TEST IS WRONG. SEE STDEV ADDITION EXPRESSION.
|
||||||
|
testAll(
|
||||||
|
"(normal(mean=5) + normal(mean=5)).pdf (imprecise)",
|
||||||
|
list{8e0, 1e1, 1.2e1, 1.4e1},
|
||||||
|
x => {
|
||||||
|
let received =
|
||||||
|
normalDist10 // this should be normal(10, sqrt(8))
|
||||||
|
->Ok
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, x))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
let calculated =
|
||||||
|
normalDist5
|
||||||
|
->algebraicAdd(normalDist5)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, x))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
|
||||||
|
switch received {
|
||||||
|
| None =>
|
||||||
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
|
->expect
|
||||||
|
->toBe("never")
|
||||||
|
| Some(x) =>
|
||||||
|
switch calculated {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
test("(normal(mean=10) + normal(mean=10)).pdf(1.9e1)", () => {
|
||||||
|
let received =
|
||||||
|
normalDist20
|
||||||
|
->Ok
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, 1.9e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
let calculated =
|
||||||
|
normalDist10
|
||||||
|
->algebraicAdd(normalDist10)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, 1.9e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
switch received {
|
||||||
|
| None =>
|
||||||
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
|
->expect
|
||||||
|
->toBe("never")
|
||||||
|
| Some(x) =>
|
||||||
|
switch calculated {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)", () => {
|
||||||
|
let received =
|
||||||
|
uniformDist
|
||||||
|
->algebraicAdd(betaDist)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, 1e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic: CI could fail here, then pass on a rerun, which is bad.
|
||||||
|
// sometimes it works with ~digits=4.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.001978994877226945, ~digits=3)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)", () => {
|
||||||
|
let received =
|
||||||
|
betaDist
|
||||||
|
->algebraicAdd(uniformDist)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.pdf(d, 1e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic: CI could fail here, then pass on a rerun, which is bad.
|
||||||
|
// sometimes it works with ~digits=4.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.001978994877226945, ~digits=3)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
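As the inline comments in the pdf block note, the reference distribution has the wrong spread: the sum of two independent normal(5, 2) draws is normal(10, sqrt(8)), not normal(10, 2), because variances add. A small sketch of the exact densities, which also explains why only ~digits=0 agreement is possible:

const normalPdf = (x: number, mean: number, stdev: number): number =>
  Math.exp(-((x - mean) ** 2) / (2 * stdev ** 2)) / (stdev * Math.sqrt(2 * Math.PI));

// Sum of independent normals: means add, variances add.
const sumPdf = (x: number) => normalPdf(x, 5 + 5, Math.sqrt(2 ** 2 + 2 ** 2));

sumPdf(10);           // ≈ 0.141
normalPdf(10, 10, 2); // ≈ 0.199 (the reference used above, off in the first decimal place)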
|
describe("cdf", () => {
|
||||||
|
testAll("(normal(mean=5) + normal(mean=5)).cdf (imprecise)", list{6e0, 8e0, 1e1, 1.2e1}, x => {
|
||||||
|
let received =
|
||||||
|
normalDist10
|
||||||
|
->Ok
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, x))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
let calculated =
|
||||||
|
normalDist5
|
||||||
|
->algebraicAdd(normalDist5)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, x))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
|
||||||
|
switch received {
|
||||||
|
| None =>
|
||||||
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
|
->expect
|
||||||
|
->toBe("never")
|
||||||
|
| Some(x) =>
|
||||||
|
switch calculated {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(normal(mean=10) + normal(mean=10)).cdf(1.25e1)", () => {
|
||||||
|
let received =
|
||||||
|
normalDist20
|
||||||
|
->Ok
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, 1.25e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
let calculated =
|
||||||
|
normalDist10
|
||||||
|
->algebraicAdd(normalDist10)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, 1.25e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
switch received {
|
||||||
|
| None =>
|
||||||
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
|
->expect
|
||||||
|
->toBe("never")
|
||||||
|
| Some(x) =>
|
||||||
|
switch calculated {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)", () => {
|
||||||
|
let received =
|
||||||
|
uniformDist
|
||||||
|
->algebraicAdd(betaDist)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, 1e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic: CI could fail here, then pass on a rerun, which is bad.
|
||||||
|
// sometimes it works with ~digits=4.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.0013961779932477507, ~digits=3)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)", () => {
|
||||||
|
let received =
|
||||||
|
betaDist
|
||||||
|
->algebraicAdd(uniformDist)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.cdf(d, 1e1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic: CI could fail here, then pass on a rerun, which is bad.
|
||||||
|
// sometimes it works with ~digits=4.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(0.001388898111625753, ~digits=3)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("inv", () => {
|
||||||
|
testAll("(normal(mean=5) + normal(mean=5)).inv (imprecise)", list{5e-2, 4.2e-3, 9e-3}, x => {
|
||||||
|
let received =
|
||||||
|
normalDist10
|
||||||
|
->Ok
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, x))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
let calculated =
|
||||||
|
normalDist5
|
||||||
|
->algebraicAdd(normalDist5)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, x))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
|
||||||
|
switch received {
|
||||||
|
| None =>
|
||||||
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
|
->expect
|
||||||
|
->toBe("never")
|
||||||
|
| Some(x) =>
|
||||||
|
switch calculated {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(normal(mean=10) + normal(mean=10)).inv(1e-1)", () => {
|
||||||
|
let received =
|
||||||
|
normalDist20
|
||||||
|
->Ok
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, 1e-1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
let calculated =
|
||||||
|
normalDist10
|
||||||
|
->algebraicAdd(normalDist10)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, 1e-1))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toOption
|
||||||
|
->E.O.flatten
|
||||||
|
switch received {
|
||||||
|
| None =>
|
||||||
|
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||||
|
->expect
|
||||||
|
->toBe("never")
|
||||||
|
| Some(x) =>
|
||||||
|
switch calculated {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)", () => {
|
||||||
|
let received =
|
||||||
|
uniformDist
|
||||||
|
->algebraicAdd(betaDist)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, 2e-2))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic: CI could fail here, then pass on a rerun, which is bad.
|
||||||
|
// sometimes it works with ~digits=2.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(10.927078217530806, ~digits=0)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)", () => {
|
||||||
|
let received =
|
||||||
|
betaDist
|
||||||
|
->algebraicAdd(uniformDist)
|
||||||
|
->E.R2.fmap(d => GenericDist_Types.Constructors.UsingDists.inv(d, 2e-2))
|
||||||
|
->E.R2.fmap(run)
|
||||||
|
->E.R2.fmap(toFloat)
|
||||||
|
->E.R.toExn
|
||||||
|
switch received {
|
||||||
|
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||||
|
// This is nondeterministic: CI could fail here, then pass on a rerun, which is bad.
|
||||||
|
// sometimes it works with ~digits=2.
|
||||||
|
| Some(x) => x->expect->toBeSoCloseTo(10.915396627014363, ~digits=0)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
|
@@ -0,0 +1,156 @@
|
||||||
|
/*
|
||||||
|
This is the most basic file in our invariants family of tests.
|
||||||
|
|
||||||
|
Validate that the addition of means equals the mean of the addition, and similarly for subtraction and multiplication.
|
||||||
|
|
||||||
|
Details in https://develop--squiggle-documentation.netlify.app/docs/internal/invariants/
|
||||||
|
|
||||||
|
Note: epsilon of 1e3 means the invariants are, in general, not being satisfied.
|
||||||
|
*/
|
||||||
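A minimal sketch of the invariant this file checks, with the tolerance written out explicitly. expectErrorToBeBounded comes from TestHelpers; the absolute-error form below is an assumption for illustration.

// Invariant: mean(dist1 op dist2) should be within epsilon of floatOp(mean(dist1), mean(dist2))
// for op in {+, -, *}. Hypothetical bound; TestHelpers may use a relative error instead.
const errorIsBounded = (received: number, expected: number, epsilon: number): boolean =>
  Math.abs(received - expected) <= epsilon;

// e.g. for X ~ normal(4, 1) and Y ~ beta(2, 4):
// mean(X + Y) should be within epsilon of mean(X) + mean(Y) = 4 + 2 / 6 ≈ 4.33.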
|
|
||||||
|
open Jest
|
||||||
|
open Expect
|
||||||
|
open TestHelpers
|
||||||
|
|
||||||
|
module Internals = {
|
||||||
|
let epsilon = 5e1
|
||||||
|
|
||||||
|
let mean = GenericDist_Types.Constructors.UsingDists.mean
|
||||||
|
|
||||||
|
let expectImpossiblePath: string => assertion = algebraicOp =>
|
||||||
|
`${algebraicOp} has`->expect->toEqual("failed")
|
||||||
|
|
||||||
|
let distributions = list{
|
||||||
|
normalMake(4e0, 1e0),
|
||||||
|
betaMake(2e0, 4e0),
|
||||||
|
exponentialMake(1.234e0),
|
||||||
|
uniformMake(7e0, 1e1),
|
||||||
|
// cauchyMake(1e0, 1e0),
|
||||||
|
lognormalMake(2e0, 1e0),
|
||||||
|
triangularMake(1e0, 1e1, 5e1),
|
||||||
|
Ok(floatMake(1e1)),
|
||||||
|
}
|
||||||
|
let pairsOfDifferentDistributions = E.L.combinations2(distributions)
|
||||||
|
|
||||||
|
let runMean: DistributionTypes.genericDist => float = dist => {
|
||||||
|
dist->mean->run->toFloat->E.O2.toExn("Shouldn't see this because we trust testcase input")
|
||||||
|
}
|
||||||
|
|
||||||
|
let testOperationMean = (
|
||||||
|
distOp: (
|
||||||
|
DistributionTypes.genericDist,
|
||||||
|
DistributionTypes.genericDist,
|
||||||
|
) => result<DistributionTypes.genericDist, DistributionTypes.error>,
|
||||||
|
description: string,
|
||||||
|
floatOp: (float, float) => float,
|
||||||
|
dist1': SymbolicDistTypes.symbolicDist,
|
||||||
|
dist2': SymbolicDistTypes.symbolicDist,
|
||||||
|
~epsilon: float,
|
||||||
|
) => {
|
||||||
|
let dist1 = dist1'->DistributionTypes.Symbolic
|
||||||
|
let dist2 = dist2'->DistributionTypes.Symbolic
|
||||||
|
let received =
|
||||||
|
distOp(dist1, dist2)->E.R2.fmap(mean)->E.R2.fmap(run)->E.R2.fmap(toFloat)->E.R.toExn
|
||||||
|
let expected = floatOp(runMean(dist1), runMean(dist2))
|
||||||
|
switch received {
|
||||||
|
| None => expectImpossiblePath(description)
|
||||||
|
| Some(x) => expectErrorToBeBounded(x, expected, ~epsilon)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let {
|
||||||
|
algebraicAdd,
|
||||||
|
algebraicMultiply,
|
||||||
|
algebraicDivide,
|
||||||
|
algebraicSubtract,
|
||||||
|
algebraicLogarithm,
|
||||||
|
algebraicPower,
|
||||||
|
} = module(DistributionOperation.Constructors)
|
||||||
|
|
||||||
|
let algebraicAdd = algebraicAdd(~env)
|
||||||
|
let algebraicMultiply = algebraicMultiply(~env)
|
||||||
|
let algebraicDivide = algebraicDivide(~env)
|
||||||
|
let algebraicSubtract = algebraicSubtract(~env)
|
||||||
|
let algebraicLogarithm = algebraicLogarithm(~env)
|
||||||
|
let algebraicPower = algebraicPower(~env)
|
||||||
|
|
||||||
|
let {testOperationMean, distributions, pairsOfDifferentDistributions, epsilon} = module(Internals)
|
||||||
|
|
||||||
|
describe("Means are invariant", () => {
|
||||||
|
describe("for addition", () => {
|
||||||
|
let testAdditionMean = testOperationMean(algebraicAdd, "algebraicAdd", \"+.", ~epsilon)
|
||||||
|
|
||||||
|
testAll("with two of the same distribution", distributions, dist => {
|
||||||
|
E.R.liftM2(testAdditionMean, dist, dist)->E.R.toExn
|
||||||
|
})
|
||||||
|
|
||||||
|
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
|
||||||
|
let (dist1, dist2) = dists
|
||||||
|
E.R.liftM2(testAdditionMean, dist1, dist2)->E.R.toExn
|
||||||
|
})
|
||||||
|
|
||||||
|
testAll(
|
||||||
|
"with two different distributions in swapped order",
|
||||||
|
pairsOfDifferentDistributions,
|
||||||
|
dists => {
|
||||||
|
let (dist1, dist2) = dists
|
||||||
|
E.R.liftM2(testAdditionMean, dist2, dist1)->E.R.toExn
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("for subtraction", () => {
|
||||||
|
let testSubtractionMean = testOperationMean(
|
||||||
|
algebraicSubtract,
|
||||||
|
"algebraicSubtract",
|
||||||
|
\"-.",
|
||||||
|
~epsilon,
|
||||||
|
)
|
||||||
|
|
||||||
|
testAll("with two of the same distribution", distributions, dist => {
|
||||||
|
E.R.liftM2(testSubtractionMean, dist, dist)->E.R.toExn
|
||||||
|
})
|
||||||
|
|
||||||
|
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
|
||||||
|
let (dist1, dist2) = dists
|
||||||
|
E.R.liftM2(testSubtractionMean, dist1, dist2)->E.R.toExn
|
||||||
|
})
|
||||||
|
|
||||||
|
testAll(
|
||||||
|
"with two different distributions in swapped order",
|
||||||
|
pairsOfDifferentDistributions,
|
||||||
|
dists => {
|
||||||
|
let (dist1, dist2) = dists
|
||||||
|
E.R.liftM2(testSubtractionMean, dist2, dist1)->E.R.toExn
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("for multiplication", () => {
|
||||||
|
let testMultiplicationMean = testOperationMean(
|
||||||
|
algebraicMultiply,
|
||||||
|
"algebraicMultiply",
|
||||||
|
\"*.",
|
||||||
|
~epsilon,
|
||||||
|
)
|
||||||
|
|
||||||
|
testAll("with two of the same distribution", distributions, dist => {
|
||||||
|
E.R.liftM2(testMultiplicationMean, dist, dist)->E.R.toExn
|
||||||
|
})
|
||||||
|
|
||||||
|
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
|
||||||
|
let (dist1, dist2) = dists
|
||||||
|
E.R.liftM2(testMultiplicationMean, dist1, dist2)->E.R.toExn
|
||||||
|
})
|
||||||
|
|
||||||
|
testAll(
|
||||||
|
"with two different distributions in swapped order",
|
||||||
|
pairsOfDifferentDistributions,
|
||||||
|
dists => {
|
||||||
|
let (dist1, dist2) = dists
|
||||||
|
E.R.liftM2(testMultiplicationMean, dist2, dist1)->E.R.toExn
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
|
@@ -1,70 +1,65 @@
|
||||||
open Jest
|
open Jest
|
||||||
open Expect
|
open Expect
|
||||||
open TestHelpers
|
open TestHelpers
|
||||||
|
|
||||||
// TODO: use Normal.make (etc.), but preferably after the new validation dispatch is in.
|
|
||||||
let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
|
|
||||||
let mkBeta = (alpha, beta) => GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
|
|
||||||
let mkExponential = rate => GenericDist_Types.Symbolic(#Exponential({rate: rate}))
|
|
||||||
let mkUniform = (low, high) => GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
|
|
||||||
let mkCauchy = (local, scale) => GenericDist_Types.Symbolic(#Cauchy({local: local, scale: scale}))
|
|
||||||
let mkLognormal = (mu, sigma) => GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
|
|
||||||
|
|
||||||
describe("mixture", () => {
|
describe("mixture", () => {
|
||||||
testAll("fair mean of two normal distributions", list{(0.0, 1e2), (-1e1, -1e-4), (-1e1, 1e2), (-1e1, 1e1)}, tup => { // should be property
|
|
||||||
let (mean1, mean2) = tup
|
|
||||||
let meanValue = {
|
|
||||||
run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))
|
|
||||||
-> outputMap(FromDist(ToFloat(#Mean)))
|
|
||||||
}
|
|
||||||
meanValue -> unpackFloat -> expect -> toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
|
|
||||||
})
|
|
||||||
testAll(
|
testAll(
|
||||||
"weighted mean of a beta and an exponential",
|
"fair mean of two normal distributions",
|
||||||
// This would not survive property testing, it was easy for me to find cases that NaN'd out.
|
list{(0.0, 1e2), (-1e1, -1e-4), (-1e1, 1e2), (-1e1, 1e1)},
|
||||||
list{((128.0, 1.0), 2.0), ((2e-1, 64.0), 16.0), ((1e0, 1e0), 64.0)},
|
tup => {
|
||||||
tup => {
|
// should be property
|
||||||
let ((alpha, beta), rate) = tup
|
let (mean1, mean2) = tup
|
||||||
let betaWeight = 0.25
|
let meanValue = {
|
||||||
let exponentialWeight = 0.75
|
run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))->outputMap(
|
||||||
let meanValue = {
|
FromDist(ToFloat(#Mean)),
|
||||||
run(Mixture(
|
|
||||||
[
|
|
||||||
(mkBeta(alpha, beta), betaWeight),
|
|
||||||
(mkExponential(rate), exponentialWeight)
|
|
||||||
]
|
|
||||||
)) -> outputMap(FromDist(ToFloat(#Mean)))
|
|
||||||
}
|
|
||||||
let betaMean = 1.0 /. (1.0 +. beta /. alpha)
|
|
||||||
let exponentialMean = 1.0 /. rate
|
|
||||||
meanValue
|
|
||||||
-> unpackFloat
|
|
||||||
-> expect
|
|
||||||
-> toBeSoCloseTo(
|
|
||||||
betaWeight *. betaMean +. exponentialWeight *. exponentialMean,
|
|
||||||
~digits=-1
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
meanValue->unpackFloat->expect->toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
|
||||||
|
},
|
||||||
)
|
)
|
||||||
testAll(
|
testAll(
|
||||||
"weighted mean of lognormal and uniform",
|
"weighted mean of a beta and an exponential",
|
||||||
// Would not survive property tests: very easy to find cases that NaN out.
|
// This would not survive property testing, it was easy for me to find cases that NaN'd out.
|
||||||
list{((-1e2,1e1), (2e0,1e0)), ((-1e-16,1e-16), (1e-8,1e0)), ((0.0,1e0), (1e0,1e-2))},
|
list{((128.0, 1.0), 2.0), ((2e-1, 64.0), 16.0), ((1e0, 1e0), 64.0)},
|
||||||
tup => {
|
tup => {
|
||||||
let ((low, high), (mu, sigma)) = tup
|
let ((alpha, beta), rate) = tup
|
||||||
let uniformWeight = 0.6
|
let betaWeight = 0.25
|
||||||
let lognormalWeight = 0.4
|
let exponentialWeight = 0.75
|
||||||
let meanValue = {
|
let meanValue = {
|
||||||
run(Mixture([(mkUniform(low, high), uniformWeight), (mkLognormal(mu, sigma), lognormalWeight)]))
|
run(
|
||||||
-> outputMap(FromDist(ToFloat(#Mean)))
|
Mixture([(mkBeta(alpha, beta), betaWeight), (mkExponential(rate), exponentialWeight)]),
|
||||||
}
|
)->outputMap(FromDist(ToFloat(#Mean)))
|
||||||
let uniformMean = (low +. high) /. 2.0
|
|
||||||
let lognormalMean = mu +. sigma ** 2.0 /. 2.0
|
|
||||||
meanValue
|
|
||||||
-> unpackFloat
|
|
||||||
-> expect
|
|
||||||
-> toBeSoCloseTo(uniformWeight *. uniformMean +. lognormalWeight *. lognormalMean, ~digits=-1)
|
|
||||||
}
|
}
|
||||||
|
let betaMean = 1.0 /. (1.0 +. beta /. alpha)
|
||||||
|
let exponentialMean = 1.0 /. rate
|
||||||
|
meanValue
|
||||||
|
->unpackFloat
|
||||||
|
->expect
|
||||||
|
->toBeSoCloseTo(betaWeight *. betaMean +. exponentialWeight *. exponentialMean, ~digits=-1)
|
||||||
|
},
|
||||||
|
)
|
||||||
|
testAll(
|
||||||
|
"weighted mean of lognormal and uniform",
|
||||||
|
// Would not survive property tests: very easy to find cases that NaN out.
|
||||||
|
list{((-1e2, 1e1), (2e0, 1e0)), ((-1e-16, 1e-16), (1e-8, 1e0)), ((0.0, 1e0), (1e0, 1e-2))},
|
||||||
|
tup => {
|
||||||
|
let ((low, high), (mu, sigma)) = tup
|
||||||
|
let uniformWeight = 0.6
|
||||||
|
let lognormalWeight = 0.4
|
||||||
|
let meanValue = {
|
||||||
|
run(
|
||||||
|
Mixture([
|
||||||
|
(mkUniform(low, high), uniformWeight),
|
||||||
|
(mkLognormal(mu, sigma), lognormalWeight),
|
||||||
|
]),
|
||||||
|
)->outputMap(FromDist(ToFloat(#Mean)))
|
||||||
|
}
|
||||||
|
let uniformMean = (low +. high) /. 2.0
|
||||||
|
let lognormalMean = mu +. sigma ** 2.0 /. 2.0
|
||||||
|
meanValue
|
||||||
|
->unpackFloat
|
||||||
|
->expect
|
||||||
|
->toBeSoCloseTo(uniformWeight *. uniformMean +. lognormalWeight *. lognormalMean, ~digits=-1)
|
||||||
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
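The identity these mixture tests lean on: the mean of a weighted mixture is the weighted sum of the component means (weights assumed to be normalized). A small sketch using the component means from the beta/exponential case above:

const mixtureMean = (components: { mean: number; weight: number }[]): number =>
  components.reduce((acc, c) => acc + c.weight * c.mean, 0);

// e.g. with (alpha, beta) = (128, 1) and rate = 2:
const betaMean = 1 / (1 + 1 / 128); // ≈ 0.992
const exponentialMean = 1 / 2;
mixtureMean([
  { mean: betaMean, weight: 0.25 },
  { mean: exponentialMean, weight: 0.75 },
]); // ≈ 0.623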
|
|
||||||
|
|
|
@@ -38,4 +38,3 @@ describe("Continuous and discrete splits", () => {
|
||||||
let toArr2 = discrete2 |> E.FloatFloatMap.toArray
|
let toArr2 = discrete2 |> E.FloatFloatMap.toArray
|
||||||
makeTest("splitMedium at count=500", toArr2 |> Belt.Array.length, 500)
|
makeTest("splitMedium at count=500", toArr2 |> Belt.Array.length, 500)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
|
@@ -3,131 +3,115 @@ open Expect
|
||||||
open TestHelpers
|
open TestHelpers
|
||||||
|
|
||||||
// TODO: use Normal.make (but preferably after the new validation dispatch is in)
|
// TODO: use Normal.make (but preferably after the new validation dispatch is in)
|
||||||
let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
|
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
|
||||||
|
|
||||||
describe("(Symbolic) normalize", () => {
|
describe("(Symbolic) normalize", () => {
|
||||||
testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
|
testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
|
||||||
let normalValue = mkNormal(mean, 2.0)
|
let normalValue = mkNormal(mean, 2.0)
|
||||||
let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
|
let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
|
||||||
normalizedValue
|
normalizedValue->unpackDist->expect->toEqual(normalValue)
|
||||||
-> unpackDist
|
|
||||||
-> expect
|
|
||||||
-> toEqual(normalValue)
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("(Symbolic) mean", () => {
|
describe("(Symbolic) mean", () => {
|
||||||
testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
|
testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
|
||||||
run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))
|
run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
|
||||||
-> unpackFloat
|
|
||||||
-> expect
|
|
||||||
-> toBeCloseTo(mean)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
Skip.test("of normal(0, -1) (it NaNs out)", () => {
|
Skip.test("of normal(0, -1) (it NaNs out)", () => {
|
||||||
run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))
|
run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
|
||||||
-> unpackFloat
|
|
||||||
-> expect
|
|
||||||
-> ExpectJs.toBeFalsy
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
|
test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
|
||||||
run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))
|
run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
|
||||||
-> unpackFloat
|
|
||||||
-> expect
|
|
||||||
-> toBeCloseTo(0.0)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
|
testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
|
||||||
let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Exponential({rate: rate}))))
|
let meanValue = run(
|
||||||
meanValue -> unpackFloat -> expect -> toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
|
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Exponential({rate: rate}))),
|
||||||
|
)
|
||||||
|
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
|
||||||
})
|
})
|
||||||
|
|
||||||
test("of a cauchy distribution", () => {
|
test("of a cauchy distribution", () => {
|
||||||
let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))))
|
let meanValue = run(
|
||||||
meanValue
|
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
|
||||||
-> unpackFloat
|
)
|
||||||
-> expect
|
meanValue->unpackFloat->expect->toBeSoCloseTo(1.0098094001641797, ~digits=5)
|
||||||
-> toBeCloseTo(2.01868297874546)
|
|
||||||
//-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
|
//-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
|
||||||
})
|
})
|
||||||
|
|
||||||
testAll("of triangular distributions", list{(1.0,2.0,3.0), (-1e7,-1e-7,1e-7), (-1e-7,1e0,1e7), (-1e-16,0.0,1e-16)}, tup => {
|
testAll(
|
||||||
let (low, medium, high) = tup
|
"of triangular distributions",
|
||||||
let meanValue = run(FromDist(
|
list{(1.0, 2.0, 3.0), (-1e7, -1e-7, 1e-7), (-1e-7, 1e0, 1e7), (-1e-16, 0.0, 1e-16)},
|
||||||
ToFloat(#Mean),
|
tup => {
|
||||||
GenericDist_Types.Symbolic(#Triangular({low: low, medium: medium, high: high}))
|
let (low, medium, high) = tup
|
||||||
))
|
let meanValue = run(
|
||||||
meanValue
|
FromDist(
|
||||||
-> unpackFloat
|
ToFloat(#Mean),
|
||||||
-> expect
|
DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
|
||||||
-> toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
|
),
|
||||||
})
|
)
|
||||||
|
meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
// TODO: nonpositive inputs are SUPPOSED to crash.
|
// TODO: nonpositive inputs are SUPPOSED to crash.
|
||||||
testAll("of beta distributions", list{(1e-4, 6.4e1), (1.28e2, 1e0), (1e-16, 1e-16), (1e16, 1e16), (-1e4, 1e1), (1e1, -1e4)}, tup => {
|
testAll(
|
||||||
let (alpha, beta) = tup
|
"of beta distributions",
|
||||||
let meanValue = run(FromDist(
|
list{(1e-4, 6.4e1), (1.28e2, 1e0), (1e-16, 1e-16), (1e16, 1e16), (-1e4, 1e1), (1e1, -1e4)},
|
||||||
ToFloat(#Mean),
|
tup => {
|
||||||
GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
|
let (alpha, beta) = tup
|
||||||
))
|
let meanValue = run(
|
||||||
meanValue
|
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
|
||||||
-> unpackFloat
|
)
|
||||||
-> expect
|
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
|
||||||
-> toBeCloseTo(1.0 /. (1.0 +. (beta /. alpha))) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
|
},
|
||||||
})
|
)
|
||||||
|
|
||||||
// TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
|
// TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
|
||||||
test("of beta(0, 0)", () => {
|
test("of beta(0, 0)", () => {
|
||||||
let meanValue = run(FromDist(
|
let meanValue = run(
|
||||||
ToFloat(#Mean),
|
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
|
||||||
GenericDist_Types.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))
|
)
|
||||||
))
|
meanValue->unpackFloat->expect->ExpectJs.toBeFalsy
|
||||||
meanValue
|
|
||||||
-> unpackFloat
|
|
||||||
-> expect
|
|
||||||
-> ExpectJs.toBeFalsy
|
|
||||||
})
|
})
|
||||||
|
|
||||||
testAll("of lognormal distributions", list{(2.0, 4.0), (1e-7, 1e-2), (-1e6, 10.0), (1e3, -1e2), (-1e8, -1e4), (1e2, 1e-5)}, tup => {
|
testAll(
|
||||||
let (mu, sigma) = tup
|
"of lognormal distributions",
|
||||||
let meanValue = run(FromDist(
|
list{(2.0, 4.0), (1e-7, 1e-2), (-1e6, 10.0), (1e3, -1e2), (-1e8, -1e4), (1e2, 1e-5)},
|
||||||
ToFloat(#Mean),
|
tup => {
|
||||||
GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
|
let (mu, sigma) = tup
|
||||||
))
|
let meanValue = run(
|
||||||
meanValue
|
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
|
||||||
-> unpackFloat
|
)
|
||||||
-> expect
|
meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
|
||||||
-> toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0 )) // https://brilliant.org/wiki/log-normal-distribution/
|
},
|
||||||
})
|
)
|
||||||
|
|
||||||
testAll("of uniform distributions", list{(1e-5, 12.345), (-1e4, 1e4), (-1e16, -1e2), (5.3e3, 9e9)}, tup => {
|
testAll(
|
||||||
let (low, high) = tup
|
"of uniform distributions",
|
||||||
let meanValue = run(FromDist(
|
list{(1e-5, 12.345), (-1e4, 1e4), (-1e16, -1e2), (5.3e3, 9e9)},
|
||||||
ToFloat(#Mean),
|
tup => {
|
||||||
GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
|
let (low, high) = tup
|
||||||
))
|
let meanValue = run(
|
||||||
meanValue
|
FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
|
||||||
-> unpackFloat
|
)
|
||||||
-> expect
|
meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
|
||||||
-> toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
|
},
|
||||||
})
|
)
|
||||||
|
|
||||||
test("of a float", () => {
|
test("of a float", () => {
|
||||||
let meanValue = run(FromDist(
|
let meanValue = run(FromDist(ToFloat(#Mean), DistributionTypes.Symbolic(#Float(7.7))))
|
||||||
ToFloat(#Mean),
|
meanValue->unpackFloat->expect->toBeCloseTo(7.7)
|
||||||
GenericDist_Types.Symbolic(#Float(7.7))
|
|
||||||
))
|
|
||||||
meanValue -> unpackFloat -> expect -> toBeCloseTo(7.7)
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
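The closed-form means these symbolic tests compare against, collected in one place (the same formulas as the references linked in the comments):

const exponentialMean = (rate: number): number => 1 / rate;
const triangularMean = (low: number, medium: number, high: number): number =>
  (low + medium + high) / 3;
const betaMean = (alpha: number, beta: number): number => 1 / (1 + beta / alpha); // = alpha / (alpha + beta)
const lognormalMean = (mu: number, sigma: number): number => Math.exp(mu + sigma ** 2 / 2);
const uniformMean = (low: number, high: number): number => (low + high) / 2;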
|
|
||||||
describe("Normal distribution with sparklines", () => {
|
describe("Normal distribution with sparklines", () => {
|
||||||
|
|
||||||
let parameterWiseAdditionPdf = (n1: SymbolicDistTypes.normal, n2: SymbolicDistTypes.normal) => {
|
let parameterWiseAdditionPdf = (n1: SymbolicDistTypes.normal, n2: SymbolicDistTypes.normal) => {
|
||||||
let normalDistAtSumMeanConstr = SymbolicDist.Normal.add(n1, n2)
|
let normalDistAtSumMeanConstr = SymbolicDist.Normal.add(n1, n2)
|
||||||
let normalDistAtSumMean: SymbolicDistTypes.normal = switch normalDistAtSumMeanConstr {
|
let normalDistAtSumMean: SymbolicDistTypes.normal = switch normalDistAtSumMeanConstr {
|
||||||
| #Normal(params) => params
|
| #Normal(params) => params
|
||||||
}
|
}
|
||||||
x => SymbolicDist.Normal.pdf(x, normalDistAtSumMean)
|
x => SymbolicDist.Normal.pdf(x, normalDistAtSumMean)
|
||||||
}
|
}
|
||||||
|
@@ -138,24 +122,25 @@ describe("Normal distribution with sparklines", () => {
|
||||||
|
|
||||||
test("mean=5 pdf", () => {
|
test("mean=5 pdf", () => {
|
||||||
let pdfNormalDistAtMean5 = x => SymbolicDist.Normal.pdf(x, normalDistAtMean5)
|
let pdfNormalDistAtMean5 = x => SymbolicDist.Normal.pdf(x, normalDistAtMean5)
|
||||||
let sparklineMean5 = fnImage(pdfNormalDistAtMean5, range20Float)
|
let sparklineMean5 = fnImage(pdfNormalDistAtMean5, range20Float)
|
||||||
Sparklines.create(sparklineMean5, ())
|
Sparklines.create(sparklineMean5, ())
|
||||||
-> expect
|
->expect
|
||||||
-> toEqual(`▁▂▃▆██▇▅▂▁▁▁▁▁▁▁▁▁▁▁`)
|
->toEqual(`▁▂▃▆██▇▅▂▁▁▁▁▁▁▁▁▁▁▁`)
|
||||||
})
|
})
|
||||||
|
|
||||||
test("parameter-wise addition of two normal distributions", () => {
|
test("parameter-wise addition of two normal distributions", () => {
|
||||||
let sparklineMean15 = normalDistAtMean5 -> parameterWiseAdditionPdf(normalDistAtMean10) -> fnImage(range20Float)
|
let sparklineMean15 =
|
||||||
|
normalDistAtMean5->parameterWiseAdditionPdf(normalDistAtMean10)->fnImage(range20Float)
|
||||||
Sparklines.create(sparklineMean15, ())
|
Sparklines.create(sparklineMean15, ())
|
||||||
-> expect
|
->expect
|
||||||
-> toEqual(`▁▁▁▁▁▁▁▁▁▂▃▄▆███▇▅▄▂`)
|
->toEqual(`▁▁▁▁▁▁▁▁▁▂▃▄▆███▇▅▄▂`)
|
||||||
})
|
})
|
||||||
|
|
||||||
test("mean=10 cdf", () => {
|
test("mean=10 cdf", () => {
|
||||||
let cdfNormalDistAtMean10 = x => SymbolicDist.Normal.cdf(x, normalDistAtMean10)
|
let cdfNormalDistAtMean10 = x => SymbolicDist.Normal.cdf(x, normalDistAtMean10)
|
||||||
let sparklineMean10 = fnImage(cdfNormalDistAtMean10, range20Float)
|
let sparklineMean10 = fnImage(cdfNormalDistAtMean10, range20Float)
|
||||||
Sparklines.create(sparklineMean10, ())
|
Sparklines.create(sparklineMean10, ())
|
||||||
-> expect
|
->expect
|
||||||
-> toEqual(`▁▁▁▁▁▁▁▁▂▄▅▇████████`)
|
->toEqual(`▁▁▁▁▁▁▁▁▂▄▅▇████████`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
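A hypothetical sketch of what these sparkline assertions visualize: evaluate the pdf or cdf over the 0..20 range and bucket each value into one of eight block characters. Sparklines.create in the codebase may scale differently; this is only illustrative.

const blocks = ["▁", "▂", "▃", "▄", "▅", "▆", "▇", "█"];

const sparkline = (values: number[]): string => {
  const min = Math.min(...values);
  const max = Math.max(...values);
  const span = max === min ? 1 : max - min;
  return values
    .map((v) => blocks[Math.min(7, Math.floor(((v - min) / span) * 8))])
    .join("");
};

// e.g. sparkline(range20Float.map(pdfNormalDistAtMean5)) produces a bump around x = 5,
// much like the `▁▂▃▆██▇▅▂…` string asserted above.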
|
|
|
@@ -1,53 +1,56 @@
|
||||||
import { run, GenericDist, resultMap, makeSampleSetDist } from "../src/js/index";
|
import {
|
||||||
|
run,
|
||||||
|
Distribution,
|
||||||
|
resultMap,
|
||||||
|
squiggleExpression,
|
||||||
|
errorValueToString,
|
||||||
|
} from "../src/js/index";
|
||||||
|
|
||||||
let testRun = (x: string) => {
|
let testRun = (x: string): squiggleExpression => {
|
||||||
let result = run(x);
|
let result = run(x, { sampleCount: 100, xyPointLength: 100 });
|
||||||
if (result.tag == "Ok") {
|
expect(result.tag).toEqual("Ok");
|
||||||
return { tag: "Ok", value: result.value.exports };
|
if (result.tag === "Ok") {
|
||||||
|
return result.value;
|
||||||
} else {
|
} else {
|
||||||
return result;
|
throw Error(
|
||||||
|
"Expected squiggle expression to evaluate but got error: " +
|
||||||
|
errorValueToString(result.value)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
function Ok<b>(x: b) {
|
||||||
|
return { tag: "Ok", value: x };
|
||||||
|
}
|
||||||
|
|
||||||
describe("Simple calculations and results", () => {
|
describe("Simple calculations and results", () => {
|
||||||
test("mean(normal(5,2))", () => {
|
test("mean(normal(5,2))", () => {
|
||||||
expect(testRun("mean(normal(5,2))")).toEqual({
|
expect(testRun("mean(normal(5,2))")).toEqual({
|
||||||
tag: "Ok",
|
tag: "number",
|
||||||
value: [{ NAME: "Float", VAL: 5 }],
|
value: 5,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
test("10+10", () => {
|
test("10+10", () => {
|
||||||
let foo = testRun("10 + 10");
|
let foo = testRun("10 + 10");
|
||||||
expect(foo).toEqual({ tag: "Ok", value: [{ NAME: "Float", VAL: 20 }] });
|
expect(foo).toEqual({ tag: "number", value: 20 });
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
describe("Log function", () => {
|
describe("Log function", () => {
|
||||||
test("log(1) = 0", () => {
|
test("log(1) = 0", () => {
|
||||||
let foo = testRun("log(1)");
|
let foo = testRun("log(1)");
|
||||||
expect(foo).toEqual({ tag: "Ok", value: [{ NAME: "Float", VAL: 0 }] });
|
expect(foo).toEqual({ tag: "number", value: 0 });
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("Multimodal too many weights error", () => {
|
describe("Distribution", () => {
|
||||||
test("mm(0,0,[0,0,0])", () => {
|
|
||||||
let foo = testRun("mm(0,0,[0,0,0])");
|
|
||||||
expect(foo).toEqual({
|
|
||||||
tag: "Error",
|
|
||||||
value: "Function multimodal error: Too many weights provided",
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("GenericDist", () => {
|
|
||||||
|
|
||||||
// It's important that sampleCount is less than 9. If it's more, then that will create randomness.
|
// It's important that sampleCount is less than 9. If it's more, then that will create randomness.
|
||||||
// Also, note that the value should be created using makeSampleSetDist() later on.
|
// Also, note that the value should be created using makeSampleSetDist() later on.
|
||||||
let env = { sampleCount: 8, xyPointLength: 100 };
|
let env = { sampleCount: 8, xyPointLength: 100 };
|
||||||
let dist = new GenericDist(
|
let dist = new Distribution(
|
||||||
{ tag: "SampleSet", value: [3, 4, 5, 6, 6, 7, 10, 15, 30] },
|
{ tag: "SampleSet", value: [3, 4, 5, 6, 6, 7, 10, 15, 30] },
|
||||||
env
|
env
|
||||||
);
|
);
|
||||||
let dist2 = new GenericDist(
|
let dist2 = new Distribution(
|
||||||
{ tag: "SampleSet", value: [20, 22, 24, 29, 30, 35, 38, 44, 52] },
|
{ tag: "SampleSet", value: [20, 22, 24, 29, 30, 35, 38, 44, 52] },
|
||||||
env
|
env
|
||||||
);
|
);
|
||||||
|
@@ -66,22 +69,24 @@ describe("GenericDist", () => {
|
||||||
});
|
});
|
||||||
test("toPointSet", () => {
|
test("toPointSet", () => {
|
||||||
expect(
|
expect(
|
||||||
resultMap(dist.toPointSet(), (r: GenericDist) => r.toString()).value.value
|
resultMap(dist.toPointSet(), (r: Distribution) => r.toString())
|
||||||
).toBe("Point Set Distribution");
|
).toEqual(Ok("Point Set Distribution"));
|
||||||
});
|
});
|
||||||
test("toSparkline", () => {
|
test("toSparkline", () => {
|
||||||
expect(dist.toSparkline(20).value).toBe("▁▁▃▅███▆▄▃▂▁▁▂▂▃▂▁▁▁");
|
expect(dist.toSparkline(20).value).toEqual("▁▁▃▅███▆▄▃▂▁▁▂▂▃▂▁▁▁");
|
||||||
});
|
});
|
||||||
test("algebraicAdd", () => {
|
test("algebraicAdd", () => {
|
||||||
expect(
|
expect(
|
||||||
resultMap(dist.algebraicAdd(dist2), (r: GenericDist) => r.toSparkline(20))
|
resultMap(dist.algebraicAdd(dist2), (r: Distribution) =>
|
||||||
.value.value
|
r.toSparkline(20)
|
||||||
).toBe("▁▁▂▄▆████▇▆▄▄▃▃▃▂▁▁▁");
|
).value
|
||||||
|
).toEqual(Ok("▁▁▂▄▆████▇▆▄▄▃▃▃▂▁▁▁"));
|
||||||
});
|
});
|
||||||
test("pointwiseAdd", () => {
|
test("pointwiseAdd", () => {
|
||||||
expect(
|
expect(
|
||||||
resultMap(dist.pointwiseAdd(dist2), (r: GenericDist) => r.toSparkline(20))
|
resultMap(dist.pointwiseAdd(dist2), (r: Distribution) =>
|
||||||
.value.value
|
r.toSparkline(20)
|
||||||
).toBe("▁▂▅██▅▅▅▆▇█▆▅▃▃▂▂▁▁▁");
|
).value
|
||||||
|
).toEqual(Ok("▁▂▅██▅▅▅▆▇█▆▅▃▃▂▂▁▁▁"));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
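For readers of the toPointSet/algebraicAdd assertions above: resultMap maps a function over the Ok branch of a result. A hypothetical shape of the type and helper, matching how the tests consume them; the real definitions in ../src/js/index may differ in detail.

type result<T, E> = { tag: "Ok"; value: T } | { tag: "Error"; value: E };

function resultMap<T, U, E>(r: result<T, E>, f: (x: T) => U): result<U, E> {
  return r.tag === "Ok" ? { tag: "Ok", value: f(r.value) } : r;
}

// resultMap(dist.toPointSet(), (d) => d.toString()) is then either
// { tag: "Ok", value: "Point Set Distribution" } or the original error.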
|
|
|
@@ -3,8 +3,8 @@ open Expect
|
||||||
|
|
||||||
let makeTest = (~only=false, str, item1, item2) =>
|
let makeTest = (~only=false, str, item1, item2) =>
|
||||||
only
|
only
|
||||||
? Only.test(str, () => expect(item1) -> toEqual(item2))
|
? Only.test(str, () => expect(item1)->toEqual(item2))
|
||||||
: test(str, () => expect(item1) -> toEqual(item2))
|
: test(str, () => expect(item1)->toEqual(item2))
|
||||||
|
|
||||||
describe("Lodash", () =>
|
describe("Lodash", () =>
|
||||||
describe("Lodash", () => {
|
describe("Lodash", () => {
|
||||||
|
|
|
@@ -4,17 +4,19 @@ open Jest
|
||||||
open Expect
|
open Expect
|
||||||
|
|
||||||
let expectEvalToBe = (expr: string, answer: string) =>
|
let expectEvalToBe = (expr: string, answer: string) =>
|
||||||
Reducer.eval(expr)->ExpressionValue.toStringResult->expect->toBe(answer)
|
Reducer.evaluate(expr)->ExpressionValue.toStringResult->expect->toBe(answer)
|
||||||
|
|
||||||
|
let testEval = (expr, answer) => test(expr, () => expectEvalToBe(expr, answer))
|
||||||
|
|
||||||
describe("builtin", () => {
|
describe("builtin", () => {
|
||||||
// All MathJs operators and functions are available for string, number and boolean
|
// All MathJs operators and functions are available for string, number and boolean
|
||||||
// e.g. + - / * > >= < <= == /= not and or
|
// e.g. + - / * > >= < <= == /= not and or
|
||||||
// See https://mathjs.org/docs/expressions/syntax.html
|
// See https://mathjs.org/docs/expressions/syntax.html
|
||||||
// See https://mathjs.org/docs/reference/functions.html
|
// See https://mathjs.org/docs/reference/functions.html
|
||||||
test("-1", () => expectEvalToBe("-1", "Ok(-1)"))
|
testEval("-1", "Ok(-1)")
|
||||||
test("1-1", () => expectEvalToBe("1-1", "Ok(0)"))
|
testEval("1-1", "Ok(0)")
|
||||||
test("2>1", () => expectEvalToBe("2>1", "Ok(true)"))
|
testEval("2>1", "Ok(true)")
|
||||||
test("concat('a','b')", () => expectEvalToBe("concat('a','b')", "Ok('ab')"))
|
testEval("concat('a','b')", "Ok('ab')")
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("builtin exception", () => {
|
describe("builtin exception", () => {
|
||||||
|
|
|
@@ -7,45 +7,62 @@ open Expect
|
||||||
let expectParseToBe = (expr, answer) =>
|
let expectParseToBe = (expr, answer) =>
|
||||||
Parse.parse(expr)->Result.flatMap(Parse.castNodeType)->Parse.toStringResult->expect->toBe(answer)
|
Parse.parse(expr)->Result.flatMap(Parse.castNodeType)->Parse.toStringResult->expect->toBe(answer)
|
||||||
|
|
||||||
|
let testParse = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))
|
||||||
|
|
||||||
|
let testDescriptionParse = (desc, expr, answer) => test(desc, () => expectParseToBe(expr, answer))
|
||||||
|
|
||||||
|
module MySkip = {
|
||||||
|
let testParse = (expr, answer) => Skip.test(expr, () => expectParseToBe(expr, answer))
|
||||||
|
|
||||||
|
let testDescriptionParse = (desc, expr, answer) =>
|
||||||
|
Skip.test(desc, () => expectParseToBe(expr, answer))
|
||||||
|
}
|
||||||
|
|
||||||
describe("MathJs parse", () => {
|
describe("MathJs parse", () => {
|
||||||
describe("literals operators paranthesis", () => {
|
describe("literals operators paranthesis", () => {
|
||||||
test("1", () => expectParseToBe("1", "1"))
|
testParse("1", "1")
|
||||||
test("'hello'", () => expectParseToBe("'hello'", "'hello'"))
|
testParse("'hello'", "'hello'")
|
||||||
test("true", () => expectParseToBe("true", "true"))
|
testParse("true", "true")
|
||||||
test("1+2", () => expectParseToBe("1+2", "add(1, 2)"))
|
testParse("1+2", "add(1, 2)")
|
||||||
test("add(1,2)", () => expectParseToBe("add(1,2)", "add(1, 2)"))
|
testParse("add(1,2)", "add(1, 2)")
|
||||||
test("(1)", () => expectParseToBe("(1)", "(1)"))
|
testParse("(1)", "(1)")
|
||||||
test("(1+2)", () => expectParseToBe("(1+2)", "(add(1, 2))"))
|
testParse("(1+2)", "(add(1, 2))")
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("multi-line", () => {
|
||||||
|
testParse("1; 2", "{1; 2}")
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("variables", () => {
|
describe("variables", () => {
|
||||||
Skip.test("define", () => expectParseToBe("x = 1", "???"))
|
testParse("x = 1", "x = 1")
|
||||||
Skip.test("use", () => expectParseToBe("x", "???"))
|
testParse("x", "x")
|
||||||
|
testParse("x = 1; x", "{x = 1; x}")
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("functions", () => {
|
describe("functions", () => {
|
||||||
Skip.test("define", () => expectParseToBe("identity(x) = x", "???"))
|
MySkip.testParse("identity(x) = x", "???")
|
||||||
Skip.test("use", () => expectParseToBe("identity(x)", "???"))
|
MySkip.testParse("identity(x)", "???")
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("arrays", () => {
|
describe("arrays", () => {
|
||||||
test("empty", () => expectParseToBe("[]", "[]"))
|
testDescriptionParse("empty", "[]", "[]")
|
||||||
test("define", () => expectParseToBe("[0, 1, 2]", "[0, 1, 2]"))
|
testDescriptionParse("define", "[0, 1, 2]", "[0, 1, 2]")
|
||||||
test("define with strings", () => expectParseToBe("['hello', 'world']", "['hello', 'world']"))
|
testDescriptionParse("define with strings", "['hello', 'world']", "['hello', 'world']")
|
||||||
Skip.test("range", () => expectParseToBe("range(0, 4)", "range(0, 4)"))
|
MySkip.testParse("range(0, 4)", "range(0, 4)")
|
||||||
test("index", () => expectParseToBe("([0,1,2])[1]", "([0, 1, 2])[1]"))
|
testDescriptionParse("index", "([0,1,2])[1]", "([0, 1, 2])[1]")
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("records", () => {
|
describe("records", () => {
|
||||||
test("define", () => expectParseToBe("{a: 1, b: 2}", "{a: 1, b: 2}"))
|
testDescriptionParse("define", "{a: 1, b: 2}", "{a: 1, b: 2}")
|
||||||
test("use", () => expectParseToBe("record.property", "record['property']"))
|
testDescriptionParse("use", "record.property", "record['property']")
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("comments", () => {
|
describe("comments", () => {
|
||||||
Skip.test("define", () => expectParseToBe("# This is a comment", "???"))
|
MySkip.testDescriptionParse("define", "# This is a comment", "???")
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("if statement", () => {
|
describe("if statement", () => {
|
||||||
Skip.test("define", () => expectParseToBe("if (true) { 1 } else { 0 }", "???"))
|
// TODO Ternary operator instead
|
||||||
|
MySkip.testDescriptionParse("define", "if (true) { 1 } else { 0 }", "???")
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@@ -8,4 +8,4 @@ let expectParseToBe = (expr: string, answer: string) =>
|
||||||
Reducer.parse(expr)->Expression.toStringResult->expect->toBe(answer)
|
Reducer.parse(expr)->Expression.toStringResult->expect->toBe(answer)
|
||||||
|
|
||||||
let expectEvalToBe = (expr: string, answer: string) =>
|
let expectEvalToBe = (expr: string, answer: string) =>
|
||||||
Reducer.eval(expr)->ExpressionValue.toStringResult->expect->toBe(answer)
|
Reducer.evaluate(expr)->ExpressionValue.toStringResult->expect->toBe(answer)
|
||||||
|
|
|
@@ -1,6 +1,15 @@
|
||||||
open Jest
|
open Jest
|
||||||
open Reducer_TestHelpers
|
open Reducer_TestHelpers
|
||||||
|
|
||||||
|
let testParseToBe = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))
|
||||||
|
|
||||||
|
let testDescriptionParseToBe = (desc, expr, answer) =>
|
||||||
|
test(desc, () => expectParseToBe(expr, answer))
|
||||||
|
|
||||||
|
let testEvalToBe = (expr, answer) => test(expr, () => expectEvalToBe(expr, answer))
|
||||||
|
|
||||||
|
let testDescriptionEvalToBe = (desc, expr, answer) => test(desc, () => expectEvalToBe(expr, answer))
|
||||||
|
|
||||||
describe("reducer using mathjs parse", () => {
|
describe("reducer using mathjs parse", () => {
|
||||||
// Test the MathJs parser compatibility
|
// Test the MathJs parser compatibility
|
||||||
// These tests use toString to check that there is a semantic mapping from MathJs to Expression
|
// These tests use toString to check that there is a semantic mapping from MathJs to Expression
|
||||||
|
@@ -10,31 +19,45 @@ describe("reducer using mathjs parse", () => {
|
||||||
// These tests use toString to check that we are converting the mathjs parse tree to what we need
|
// These tests use toString to check that we are converting the mathjs parse tree to what we need
|
||||||
|
|
||||||
describe("expressions", () => {
|
describe("expressions", () => {
|
||||||
test("1", () => expectParseToBe("1", "Ok(1)"))
|
testParseToBe("1", "Ok(1)")
|
||||||
test("(1)", () => expectParseToBe("(1)", "Ok(1)"))
|
testParseToBe("(1)", "Ok(1)")
|
||||||
test("1+2", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
|
testParseToBe("1+2", "Ok((:add 1 2))")
|
||||||
test("(1+2)", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
|
testParseToBe("1+2", "Ok((:add 1 2))")
|
||||||
test("add(1,2)", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
|
testParseToBe("1+2", "Ok((:add 1 2))")
|
||||||
test("1+2*3", () => expectParseToBe("1+2*3", "Ok((:add 1 (:multiply 2 3)))"))
|
testParseToBe("1+2*3", "Ok((:add 1 (:multiply 2 3)))")
|
||||||
})
|
})
|
||||||
describe("arrays", () => {
|
describe("arrays", () => {
|
||||||
// Note: () is an empty list in Lisp
|
// Note: () is an empty list in Lisp
|
||||||
// The only builtin structure in Lisp is list. There are no arrays
|
// The only builtin structure in Lisp is list. There are no arrays
|
||||||
// [1,2,3] becomes (1 2 3)
|
// [1,2,3] becomes (1 2 3)
|
||||||
test("empty", () => expectParseToBe("[]", "Ok(())"))
|
testDescriptionParseToBe("empty", "[]", "Ok(())")
|
||||||
test("[1, 2, 3]", () => expectParseToBe("[1, 2, 3]", "Ok((1 2 3))"))
|
testParseToBe("[1, 2, 3]", "Ok((1 2 3))")
|
||||||
test("['hello', 'world']", () => expectParseToBe("['hello', 'world']", "Ok(('hello' 'world'))"))
|
testParseToBe("['hello', 'world']", "Ok(('hello' 'world'))")
|
||||||
test("index", () => expectParseToBe("([0,1,2])[1]", "Ok((:$atIndex (0 1 2) (1)))"))
|
testDescriptionParseToBe("index", "([0,1,2])[1]", "Ok((:$atIndex (0 1 2) (1)))")
|
||||||
})
|
})
|
||||||
describe("records", () => {
|
describe("records", () => {
|
||||||
test("define", () =>
|
testDescriptionParseToBe("define", "{a: 1, b: 2}", "Ok((:$constructRecord (('a' 1) ('b' 2))))")
|
||||||
expectParseToBe("{a: 1, b: 2}", "Ok((:$constructRecord (('a' 1) ('b' 2))))")
|
testDescriptionParseToBe(
|
||||||
|
"use",
|
||||||
|
"{a: 1, b: 2}.a",
|
||||||
|
"Ok((:$atIndex (:$constructRecord (('a' 1) ('b' 2))) ('a')))",
|
||||||
)
|
)
|
||||||
test("use", () =>
|
})
|
||||||
expectParseToBe(
|
describe("multi-line", () => {
|
||||||
"{a: 1, b: 2}.a",
|
testParseToBe("1; 2", "Ok((:$$bindExpression (:$$bindStatement (:$$bindings) 1) 2))")
|
||||||
"Ok((:$atIndex (:$constructRecord (('a' 1) ('b' 2))) ('a')))",
|
testParseToBe(
|
||||||
)
|
"1+1; 2+1",
|
||||||
|
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:add 1 1)) (:add 2 1)))",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
describe("assignment", () => {
|
||||||
|
testParseToBe(
|
||||||
|
"x=1; x",
|
||||||
|
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :x 1)) :x))",
|
||||||
|
)
|
||||||
|
testParseToBe(
|
||||||
|
"x=1+1; x+1",
|
||||||
|
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :x (:add 1 1))) (:add :x 1)))",
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@@ -45,37 +68,51 @@ describe("eval", () => {
|
||||||
// See https://mathjs.org/docs/expressions/syntax.html
|
// See https://mathjs.org/docs/expressions/syntax.html
|
||||||
// See https://mathjs.org/docs/reference/functions.html
|
// See https://mathjs.org/docs/reference/functions.html
|
||||||
describe("expressions", () => {
|
describe("expressions", () => {
|
||||||
test("1", () => expectEvalToBe("1", "Ok(1)"))
|
testEvalToBe("1", "Ok(1)")
|
||||||
test("1+2", () => expectEvalToBe("1+2", "Ok(3)"))
|
testEvalToBe("1+2", "Ok(3)")
|
||||||
test("(1+2)*3", () => expectEvalToBe("(1+2)*3", "Ok(9)"))
|
testEvalToBe("(1+2)*3", "Ok(9)")
|
||||||
test("2>1", () => expectEvalToBe("2>1", "Ok(true)"))
|
testEvalToBe("2>1", "Ok(true)")
|
||||||
test("concat('a ', 'b')", () => expectEvalToBe("concat('a ', 'b')", "Ok('a b')"))
|
testEvalToBe("concat('a ', 'b')", "Ok('a b')")
|
||||||
test("log(10)", () => expectEvalToBe("log(10)", "Ok(2.302585092994046)"))
|
testEvalToBe("log(10)", "Ok(2.302585092994046)")
|
||||||
test("cos(10)", () => expectEvalToBe("cos(10)", "Ok(-0.8390715290764524)"))
|
testEvalToBe("cos(10)", "Ok(-0.8390715290764524)")
|
||||||
// TODO more built ins
|
// TODO more built ins
|
||||||
})
|
})
|
||||||
describe("arrays", () => {
|
describe("arrays", () => {
|
||||||
test("empty array", () => expectEvalToBe("[]", "Ok([])"))
|
test("empty array", () => expectEvalToBe("[]", "Ok([])"))
|
||||||
test("[1, 2, 3]", () => expectEvalToBe("[1, 2, 3]", "Ok([1, 2, 3])"))
|
testEvalToBe("[1, 2, 3]", "Ok([1, 2, 3])")
|
||||||
test("['hello', 'world']", () => expectEvalToBe("['hello', 'world']", "Ok(['hello', 'world'])"))
|
testEvalToBe("['hello', 'world']", "Ok(['hello', 'world'])")
|
||||||
test("index", () => expectEvalToBe("([0,1,2])[1]", "Ok(1)"))
|
testEvalToBe("([0,1,2])[1]", "Ok(1)")
|
||||||
test("index not found", () =>
|
testDescriptionEvalToBe("index not found", "([0,1,2])[10]", "Error(Array index not found: 10)")
|
||||||
expectEvalToBe("([0,1,2])[10]", "Error(Array index not found: 10)")
|
|
||||||
)
|
|
||||||
})
|
})
|
||||||
describe("records", () => {
|
describe("records", () => {
|
||||||
test("define", () => expectEvalToBe("{a: 1, b: 2}", "Ok({a: 1, b: 2})"))
|
test("define", () => expectEvalToBe("{a: 1, b: 2}", "Ok({a: 1, b: 2})"))
|
||||||
test("index", () => expectEvalToBe("{a: 1}.a", "Ok(1)"))
|
test("index", () => expectEvalToBe("{a: 1}.a", "Ok(1)"))
|
||||||
test("index not found", () => expectEvalToBe("{a: 1}.b", "Error(Record property not found: b)"))
|
test("index not found", () => expectEvalToBe("{a: 1}.b", "Error(Record property not found: b)"))
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe("multi-line", () => {
|
||||||
|
testEvalToBe("1; 2", "Error(Assignment expected)")
|
||||||
|
testEvalToBe("1+1; 2+1", "Error(Assignment expected)")
|
||||||
|
})
|
||||||
|
describe("assignment", () => {
|
||||||
|
testEvalToBe("x=1; x", "Ok(1)")
|
||||||
|
testEvalToBe("x=1+1; x+1", "Ok(3)")
|
||||||
|
testEvalToBe("x=1; y=x+1; y+1", "Ok(3)")
|
||||||
|
testEvalToBe("1; x=1", "Error(Assignment expected)")
|
||||||
|
testEvalToBe("1; 1", "Error(Assignment expected)")
|
||||||
|
testEvalToBe("x=1; x=1", "Error(Expression expected)")
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("test exceptions", () => {
|
describe("test exceptions", () => {
|
||||||
test("javascript exception", () =>
|
testDescriptionEvalToBe(
|
||||||
expectEvalToBe("jsraise('div by 0')", "Error(JS Exception: Error: 'div by 0')")
|
"javascript exception",
|
||||||
|
"javascriptraise('div by 0')",
|
||||||
|
"Error(JS Exception: Error: 'div by 0')",
|
||||||
)
|
)
|
||||||
|
testDescriptionEvalToBe(
|
||||||
test("rescript exception", () =>
|
"rescript exception",
|
||||||
expectEvalToBe("resraise()", "Error(TODO: unhandled rescript exception)")
|
"rescriptraise()",
|
||||||
|
"Error(TODO: unhandled rescript exception)",
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
|
@@ -19,12 +19,12 @@ describe("eval on distribution functions", () => {
    testEval("lognormal(5,2)", "Ok(Lognormal(5,2))")
  })
  describe("unaryMinus", () => {
-    testEval("mean(-normal(5,2))", "Ok(-5.002887370380851)")
+    testEval("mean(-normal(5,2))", "Ok(-5)")
  })
  describe("to", () => {
    testEval("5 to 2", "Error(TODO: Low value must be less than high value.)")
-    testEval("to(2,5)", "Ok(Lognormal(1.1512925464970227,0.278507821238345))")
+    testEval("to(2,5)", "Ok(Lognormal(1.1512925464970227,0.27853260523016377))")
-    testEval("to(-2,2)", "Ok(Normal(0,1.215913388057542))")
+    testEval("to(-2,2)", "Ok(Normal(0,1.2159136638235384))")
  })
  describe("mean", () => {
    testEval("mean(normal(5,2))", "Ok(5)")
@@ -45,10 +45,30 @@ describe("eval on distribution functions", () => {
  describe("add", () => {
    testEval("add(normal(5,2), normal(10,2))", "Ok(Normal(15,2.8284271247461903))")
    testEval("add(normal(5,2), lognormal(10,2))", "Ok(Sample Set Distribution)")
-    testEval("add(normal(5,2), 3)", "Ok(Point Set Distribution)")
+    testEval("add(normal(5,2), 3)", "Ok(Normal(8,2))")
-    testEval("add(3, normal(5,2))", "Ok(Point Set Distribution)")
+    testEval("add(3, normal(5,2))", "Ok(Normal(8,2))")
-    testEval("3+normal(5,2)", "Ok(Point Set Distribution)")
+    testEval("3+normal(5,2)", "Ok(Normal(8,2))")
-    testEval("normal(5,2)+3", "Ok(Point Set Distribution)")
+    testEval("normal(5,2)+3", "Ok(Normal(8,2))")
+  })
+  describe("subtract", () => {
+    testEval("10 - normal(5, 1)", "Ok(Normal(5,1))")
+    testEval("normal(5, 1) - 10", "Ok(Normal(-5,1))")
+  })
+  describe("multiply", () => {
+    testEval("normal(10, 2) * 2", "Ok(Normal(20,4))")
+    testEval("2 * normal(10, 2)", "Ok(Normal(20,4))")
+    testEval("lognormal(5,2) * lognormal(10,2)", "Ok(Lognormal(15,2.8284271247461903))")
+    testEval("lognormal(10, 2) * lognormal(5, 2)", "Ok(Lognormal(15,2.8284271247461903))")
+    testEval("2 * lognormal(5, 2)", "Ok(Lognormal(5.693147180559945,2))")
+    testEval("lognormal(5, 2) * 2", "Ok(Lognormal(5.693147180559945,2))")
+  })
+  describe("division", () => {
+    testEval("lognormal(5,2) / lognormal(10,2)", "Ok(Lognormal(-5,2.8284271247461903))")
+    testEval("lognormal(10,2) / lognormal(5,2)", "Ok(Lognormal(5,2.8284271247461903))")
+    testEval("lognormal(5, 2) / 2", "Ok(Lognormal(4.306852819440055,2))")
+    testEval("2 / lognormal(5, 2)", "Ok(Lognormal(-4.306852819440055,2))")
+    testEval("2 / normal(10, 2)", "Ok(Point Set Distribution)")
+    testEval("normal(10, 2) / 2", "Ok(Normal(5,1))")
  })
})
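The expected strings in the hunk above follow the usual closed forms for independent distributions; the following recap is added here for the reader and is not part of the changeset:

```latex
% Sum of independent normals: means add, standard deviations combine in quadrature.
N(\mu_1,\sigma_1) + N(\mu_2,\sigma_2) = N\!\left(\mu_1+\mu_2,\ \sqrt{\sigma_1^2+\sigma_2^2}\right)
  \quad\Rightarrow\quad \sqrt{2^2+2^2} = 2\sqrt{2} \approx 2.8284271247461903
% Product of independent lognormals: log-space parameters behave like the normal case.
\mathrm{Lognormal}(\mu_1,\sigma_1)\cdot\mathrm{Lognormal}(\mu_2,\sigma_2)
  = \mathrm{Lognormal}\!\left(\mu_1+\mu_2,\ \sqrt{\sigma_1^2+\sigma_2^2}\right)
% Scaling a lognormal by a constant k shifts mu by ln k.
k\cdot\mathrm{Lognormal}(\mu,\sigma) = \mathrm{Lognormal}(\mu+\ln k,\ \sigma)
  \quad\Rightarrow\quad 5+\ln 2 \approx 5.693147180559945
```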
describe("truncate", () => {
|
describe("truncate", () => {
|
||||||
testEval("truncateLeft(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
testEval("truncateLeft(normal(5,2), 3)", "Ok(Point Set Distribution)")
|
||||||
|
@@ -90,16 +110,8 @@ describe("eval on distribution functions", () => {
  })

  describe("mixture", () => {
-    testEval(
-      ~skip=true,
-      "mx(normal(5,2), normal(10,1), normal(15, 1))",
-      "Ok(Point Set Distribution)",
-    )
-    testEval(
-      ~skip=true,
-      "mixture(normal(5,2), normal(10,1), [.2,, .4])",
-      "Ok(Point Set Distribution)",
-    )
+    testEval("mx(normal(5,2), normal(10,1), normal(15, 1))", "Ok(Point Set Distribution)")
+    testEval("mixture(normal(5,2), normal(10,1), [0.2, 0.4])", "Ok(Point Set Distribution)")
  })
})
@@ -109,9 +121,17 @@ describe("parse on distribution functions", () => {
    testParse("3 ^ normal(5,1)", "Ok((:pow 3 (:normal 5 1)))")
    testParse("normal(5,2) ^ 3", "Ok((:pow (:normal 5 2) 3))")
  })
+  describe("subtraction", () => {
+    testParse("10 - normal(5,1)", "Ok((:subtract 10 (:normal 5 1)))")
+    testParse("normal(5,1) - 10", "Ok((:subtract (:normal 5 1) 10))")
+  })
  describe("pointwise arithmetic expressions", () => {
    testParse(~skip=true, "normal(5,2) .+ normal(5,1)", "Ok((:dotAdd (:normal 5 2) (:normal 5 1)))")
-    testParse(~skip=true, "normal(5,2) .- normal(5,1)", "Ok((:dotSubtract (:normal 5 2) (:normal 5 1)))")
+    testParse(
+      ~skip=true,
+      "normal(5,2) .- normal(5,1)",
+      "Ok((:dotSubtract (:normal 5 2) (:normal 5 1)))",
+    )
    testParse("normal(5,2) .* normal(5,1)", "Ok((:dotMultiply (:normal 5 2) (:normal 5 1)))")
    testParse("normal(5,2) ./ normal(5,1)", "Ok((:dotDivide (:normal 5 2) (:normal 5 1)))")
    testParse("normal(5,2) .^ normal(5,1)", "Ok((:dotPow (:normal 5 2) (:normal 5 1)))")
@@ -1,26 +1,62 @@
open Jest
open Expect

+/*
+This encodes the expression for percent error
+The test says "the percent error of received against expected is bounded by epsilon"
+
+However, the semantics are degraded by catching some numerical instability:
+when expected is too small, the return of this function might blow up to infinity.
+So we capture that by taking the max of abs(expected) against a 1.
+
+A sanity check of this function would be welcome, in general it is a better way of approaching
+squiggle-lang tests than toBeSoCloseTo.
+*/
+let expectErrorToBeBounded = (received, expected, ~epsilon) => {
+  let distance = Js.Math.abs_float(received -. expected)
+  let expectedAbs = Js.Math.abs_float(expected)
+  let normalizingDenom = Js.Math.max_float(expectedAbs, 1e0)
+  let error = distance /. normalizingDenom
+  error->expect->toBeLessThan(epsilon)
+}
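The helper above bounds the relative error |received − expected| / max(|expected|, 1) by epsilon. A minimal TypeScript/Jest sketch of the same idea, for readers on the JS side — the name and placement are illustrative only, not part of the package:

```typescript
// Illustrative sketch of the same relative-error bound used by expectErrorToBeBounded.
// The max(|expected|, 1) denominator keeps the ratio finite when expected is near 0.
export function expectErrorToBeBounded(
  received: number,
  expected: number,
  epsilon: number
): void {
  const distance = Math.abs(received - expected);
  const normalizingDenom = Math.max(Math.abs(expected), 1);
  expect(distance / normalizingDenom).toBeLessThan(epsilon);
}

// Example: a sampled mean of normal(5, 2) should be within 0.1% of 5.
// expectErrorToBeBounded(sampledMean, 5, 0.001);
```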
let makeTest = (~only=false, str, item1, item2) =>
  only
-    ? Only.test(str, () => expect(item1) -> toEqual(item2))
+    ? Only.test(str, () => expect(item1)->toEqual(item2))
-    : test(str, () => expect(item1) -> toEqual(item2))
+    : test(str, () => expect(item1)->toEqual(item2))

let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Output)

let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)

let env: DistributionOperation.env = {
-  sampleCount: 100,
+  sampleCount: 10000,
-  xyPointLength: 100,
+  xyPointLength: 1000,
}

let run = DistributionOperation.run(~env)
let outputMap = fmap(~env)
let unreachableInTestFileMessage = "Should be impossible to reach (This error is in test file)"
let toExtFloat: option<float> => float = E.O.toExt(unreachableInTestFileMessage)
-let toExtDist: option<GenericDist_Types.genericDist> => GenericDist_Types.genericDist = E.O.toExt(unreachableInTestFileMessage)
+let toExtDist: option<DistributionTypes.genericDist> => DistributionTypes.genericDist = E.O.toExt(
+  unreachableInTestFileMessage,
+)
// let toExt: option<'a> => 'a = E.O.toExt(unreachableInTestFileMessage)
-let unpackFloat = x => x -> toFloat -> toExtFloat
+let unpackFloat = x => x->toFloat->toExtFloat
-let unpackDist = y => y -> toDist -> toExtDist
+let unpackDist = y => y->toDist->toExtDist

+let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
+let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))
+let mkExponential = rate => DistributionTypes.Symbolic(#Exponential({rate: rate}))
+let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low: low, high: high}))
+let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local: local, scale: scale}))
+let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
+
+let normalMake = SymbolicDist.Normal.make
+let betaMake = SymbolicDist.Beta.make
+let exponentialMake = SymbolicDist.Exponential.make
+let uniformMake = SymbolicDist.Uniform.make
+let cauchyMake = SymbolicDist.Cauchy.make
+let lognormalMake = SymbolicDist.Lognormal.make
+let triangularMake = SymbolicDist.Triangular.make
+let floatMake = SymbolicDist.Float.make
packages/squiggle-lang/__tests__/Utility_test.res (new file, 10 lines)
@@ -0,0 +1,10 @@
+open Jest
+open Expect
+
+describe("E.L.combinations2", () => {
+  test("size three", () => {
+    E.L.combinations2(list{"alice", "bob", "eve"})
+    ->expect
+    ->toEqual(list{("alice", "bob"), ("alice", "eve"), ("bob", "eve")})
+  })
+})
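E.L.combinations2 returns all unordered pairs of a list, as the test above shows. A minimal TypeScript sketch of the same behaviour (the function name here is illustrative, not the package's API):

```typescript
// Illustrative sketch: all unordered pairs of a list, matching the test above.
function combinations2<T>(xs: T[]): [T, T][] {
  const pairs: [T, T][] = [];
  for (let i = 0; i < xs.length; i++) {
    for (let j = i + 1; j < xs.length; j++) {
      pairs.push([xs[i], xs[j]]);
    }
  }
  return pairs;
}

// combinations2(["alice", "bob", "eve"])
// => [["alice", "bob"], ["alice", "eve"], ["bob", "eve"]]
```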
@@ -3,8 +3,8 @@ open Expect

let makeTest = (~only=false, str, item1, item2) =>
  only
-    ? Only.test(str, () => expect(item1) -> toEqual(item2))
+    ? Only.test(str, () => expect(item1)->toEqual(item2))
-    : test(str, () => expect(item1) -> toEqual(item2))
+    : test(str, () => expect(item1)->toEqual(item2))

let pointSetDist1: PointSetTypes.xyShape = {xs: [1., 4., 8.], ys: [0.2, 0.4, 0.8]}

@@ -21,7 +21,11 @@ let pointSetDist3: PointSetTypes.xyShape = {
describe("XYShapes", () => {
  describe("logScorePoint", () => {
    makeTest("When identical", XYShape.logScorePoint(30, pointSetDist1, pointSetDist1), Some(0.0))
-    makeTest("When similar", XYShape.logScorePoint(30, pointSetDist1, pointSetDist2), Some(1.658971191043856))
+    makeTest(
+      "When similar",
+      XYShape.logScorePoint(30, pointSetDist1, pointSetDist2),
+      Some(1.658971191043856),
+    )
    makeTest(
      "When very different",
      XYShape.logScorePoint(30, pointSetDist1, pointSetDist3),
@@ -11,11 +11,7 @@
        "subdirs": true
      }
    ],
-  "bsc-flags": [
-    "-bs-super-errors",
-    "-bs-no-version-header",
-    "-bs-g"
-  ],
+  "bsc-flags": ["-bs-super-errors", "-bs-no-version-header", "-bs-g"],
  "package-specs": [
    {
      "module": "commonjs",
@@ -27,13 +23,13 @@
  "bs-dependencies": [
    "@glennsl/rescript-jest",
    "@glennsl/bs-json",
    "rationale",
    "bisect_ppx"
  ],
  "gentypeconfig": {
    "language": "typescript",
    "module": "commonjs",
-    "shims": {"Js": "Js"},
+    "shims": { "Js": "Js" },
    "debug": {
      "all": false,
      "basic": false
@@ -44,10 +40,6 @@
    "number": "+A-42-48-9-30-4-102-20-27-41"
  },
  "ppx-flags": [
-    [
-      "../../node_modules/bisect_ppx/ppx",
-      "--exclude-files",
-      ".*_test\\.res$$"
-    ]
+    ["../../node_modules/bisect_ppx/ppx", "--exclude-files", ".*_test\\.res$$"]
  ]
}
packages/squiggle-lang/lint.sh (new executable file, 41 lines)
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+# Hat tip to @dfalling
+# https://forum.rescript-lang.org/t/rescript-9-1-how-can-we-format-to-standard-out/1590/2?u=quinn-dougherty
+
+errors=false
+
+files=`ls src/rescript/**/**/*.res src/rescript/**/*.res src/rescript/*.res`
+for file in $files
+do
+  current=`cat $file`
+  linted=`echo "${current}" | rescript format -stdin .res`
+  diff=`diff <(echo $current) <(echo $linted)`
+
+  if [ ${#diff} -gt 0 ]
+  then
+    echo "ERROR: $file doesn't pass lint"
+    errors=true
+  fi
+done
+
+files=`ls src/rescript/**/**/*.resi src/rescript/**/*.resi` # src/rescript/*/resi
+for file in $files
+do
+  current=`cat $file`
+  linted=`echo "${current}" | rescript format -stdin .resi`
+  diff=`diff <(echo $current) <(echo $linted)`
+  if [ ${#diff} -gt 0 ]
+  then
+    echo "ERROR: $file doesn't pass lint"
+    errors=true
+  fi
+done
+
+if $errors
+then
+  exit 1
+else
+  echo "All files pass lint"
+fi
@@ -10,7 +10,13 @@
    "test:reducer": "jest --testPathPattern '.*__tests__/Reducer.*'",
    "test": "jest",
    "test:watch": "jest --watchAll",
-    "coverage": "rm -f *.coverage; yarn clean; BISECT_ENABLE=yes yarn build; yarn test; bisect-ppx-report html",
+    "test:quick": "jest --modulePathIgnorePatterns=__tests__/Distributions/Invariants/*",
+    "coverage": "rm -f *.coverage; yarn clean; BISECT_ENABLE=yes yarn build; yarn test; bisect-ppx-report html",
+    "coverage:ci": "yarn clean; BISECT_ENABLE=yes yarn build; yarn test; bisect-ppx-report send-to Codecov",
+    "lint:rescript": "./lint.sh",
+    "lint:prettier": "prettier --check .",
+    "lint": "yarn lint:rescript && yarn lint:prettier",
+    "format": "rescript format -all && prettier --write .",
    "all": "yarn build && yarn bundle && yarn test"
  },
  "keywords": [
@@ -25,7 +31,7 @@
    "mathjs": "10.4.3",
    "pdfast": "^0.2.0",
    "rationale": "0.2.0",
    "rescript": "^9.1.4",
    "bisect_ppx": "^2.7.1"
  },
  "devDependencies": {
@@ -1,20 +1,26 @@
-import { runAll } from "../rescript/ProgramEvaluator.gen";
+import * as _ from "lodash";
import type {
-  Inputs_SamplingInputs_t as SamplingInputs,
  exportEnv,
-  exportType,
  exportDistribution,
} from "../rescript/ProgramEvaluator.gen";
-export type { SamplingInputs, exportEnv, exportDistribution };
+export type { exportEnv, exportDistribution };
-export type { t as DistPlus } from "../rescript/OldInterpreter/DistPlus.gen";
import {
  genericDist,
-  env,
+  samplingParams,
-  resultDist,
+  evaluate,
-  resultFloat,
+  expressionValue,
-  resultString,
+  errorValue,
+  distributionError,
+  toPointSet,
+  continuousShape,
+  discreteShape,
+  distributionErrorToString,
+} from "../rescript/TypescriptInterface.gen";
+export {
+  makeSampleSetDist,
+  errorValueToString,
+  distributionErrorToString,
} from "../rescript/TypescriptInterface.gen";
-export {makeSampleSetDist} from "../rescript/TypescriptInterface.gen";
import {
  Constructors_mean,
  Constructors_sample,
@@ -22,6 +28,7 @@ import {
  Constructors_cdf,
  Constructors_inv,
  Constructors_normalize,
+  Constructors_isNormalized,
  Constructors_toPointSet,
  Constructors_toSampleSet,
  Constructors_truncate,
@@ -41,38 +48,28 @@ import {
  Constructors_pointwiseLogarithm,
  Constructors_pointwisePower,
} from "../rescript/Distributions/DistributionOperation/DistributionOperation.gen";
+import { pointSetDistFn } from "../rescript/OldInterpreter/DistPlus.bs";
+export type { samplingParams, errorValue };

-export let defaultSamplingInputs: SamplingInputs = {
+export let defaultSamplingInputs: samplingParams = {
  sampleCount: 10000,
-  outputXYPoints: 10000,
+  xyPointLength: 10000,
-  pointDistLength: 1000,
};

-export function run(
-  squiggleString: string,
-  samplingInputs?: SamplingInputs,
-  environment?: exportEnv
-): { tag: "Ok"; value: exportType } | { tag: "Error"; value: string } {
-  let si: SamplingInputs = samplingInputs
-    ? samplingInputs
-    : defaultSamplingInputs;
-  let env: exportEnv = environment ? environment : [];
-  return runAll(squiggleString, si, env);
-}
-
-//This is clearly not fully typed. I think later we should use a functional library to
-// provide a better Either type and corresponding functions.
-type result =
+export type result<a, b> =
  | {
      tag: "Ok";
-      value: any;
+      value: a;
    }
  | {
      tag: "Error";
-      value: any;
+      value: b;
    };

-export function resultMap(r: result, mapFn: any): result {
+export function resultMap<a, b, c>(
+  r: result<a, c>,
+  mapFn: (x: a) => b
+): result<b, c> {
  if (r.tag === "Ok") {
    return { tag: "Ok", value: mapFn(r.value) };
  } else {
@@ -80,147 +77,275 @@ export function resultMap(r: result, mapFn: any): result {
  }
}
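resultMap, as redefined above, only transforms the Ok branch and leaves errors untouched. A small usage sketch against those definitions (the values are illustrative):

```typescript
// Illustrative usage of the result type and resultMap defined above.
const ok: result<number, string> = { tag: "Ok", value: 3 };
resultMap(ok, (x) => x * 2); // { tag: "Ok", value: 6 }

const err: result<number, string> = { tag: "Error", value: "parse failure" };
resultMap(err, (x) => x * 2); // { tag: "Error", value: "parse failure" } — error passes through unchanged
```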
-export function resultExn(r: result): any {
-  r.value
+function Ok<a, b>(x: a): result<a, b> {
+  return { tag: "Ok", value: x };
}

-export class GenericDist {
-  t: genericDist;
-  env: env;
-
-  constructor(t: genericDist, env: env) {
+type tagged<a, b> = { tag: a; value: b };
+
+function tag<a, b>(x: a, y: b): tagged<a, b> {
+  return { tag: x, value: y };
+}
+
+export type squiggleExpression =
+  | tagged<"symbol", string>
+  | tagged<"string", string>
+  | tagged<"call", string>
+  | tagged<"array", squiggleExpression[]>
+  | tagged<"boolean", boolean>
+  | tagged<"distribution", Distribution>
+  | tagged<"number", number>
+  | tagged<"record", { [key: string]: squiggleExpression }>;
+
+export function run(
+  squiggleString: string,
+  samplingInputs?: samplingParams,
+  _environment?: exportEnv
+): result<squiggleExpression, errorValue> {
+  let si: samplingParams = samplingInputs
+    ? samplingInputs
+    : defaultSamplingInputs;
+  let result: result<expressionValue, errorValue> = evaluate(squiggleString);
+  return resultMap(result, (x) => createTsExport(x, si));
+}
+
+function createTsExport(
+  x: expressionValue,
+  sampEnv: samplingParams
+): squiggleExpression {
+  switch (x.tag) {
+    case "EvArray":
+      return tag(
+        "array",
+        x.value.map((x) => createTsExport(x, sampEnv))
+      );
+    case "EvBool":
+      return tag("boolean", x.value);
+    case "EvCall":
+      return tag("call", x.value);
+    case "EvDistribution":
+      return tag("distribution", new Distribution(x.value, sampEnv));
+    case "EvNumber":
+      return tag("number", x.value);
+    case "EvRecord":
+      return tag(
+        "record",
+        _.mapValues(x.value, (x) => createTsExport(x, sampEnv))
+      );
+    case "EvString":
+      return tag("string", x.value);
+    case "EvSymbol":
+      return tag("symbol", x.value);
+  }
+}
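The run() added above parses and evaluates a Squiggle string and hands back a tagged squiggleExpression. A usage sketch of that API — the program string is taken from the test suite in this PR, the rest is illustrative:

```typescript
// Illustrative usage of the run() defined above.
const res = run("mean(normal(5, 2))");
if (res.tag === "Ok") {
  const value = res.value;
  if (value.tag === "number") {
    console.log("mean:", value.value); // 5
  } else if (value.tag === "distribution") {
    console.log(value.value.toString());
  }
} else {
  console.log("error:", res.value); // an errorValue from the ReScript side
}
```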
+export function resultExn<a, c>(r: result<a, c>): a | c {
+  return r.value;
+}
+
+export type point = { x: number; y: number };
+
+export type shape = {
+  continuous: point[];
+  discrete: point[];
+};
+
+function shapePoints(x: continuousShape | discreteShape): point[] {
+  let xs = x.xyShape.xs;
+  let ys = x.xyShape.ys;
+  return _.zipWith(xs, ys, (x, y) => ({ x, y }));
+}
+
+export class Distribution {
+  t: genericDist;
+  env: samplingParams;
+
+  constructor(t: genericDist, env: samplingParams) {
    this.t = t;
    this.env = env;
    return this;
  }

-  mapResultDist(r: resultDist) {
-    return resultMap(r, (v: genericDist) => new GenericDist(v, this.env));
+  mapResultDist(
+    r: result<genericDist, distributionError>
+  ): result<Distribution, distributionError> {
+    return resultMap(r, (v: genericDist) => new Distribution(v, this.env));
  }

-  mean() {
+  mean(): result<number, distributionError> {
    return Constructors_mean({ env: this.env }, this.t);
  }

-  sample(): resultFloat {
+  sample(): result<number, distributionError> {
    return Constructors_sample({ env: this.env }, this.t);
  }

-  pdf(n: number): resultFloat {
+  pdf(n: number): result<number, distributionError> {
    return Constructors_pdf({ env: this.env }, this.t, n);
  }

-  cdf(n: number): resultFloat {
+  cdf(n: number): result<number, distributionError> {
    return Constructors_cdf({ env: this.env }, this.t, n);
  }

-  inv(n: number): resultFloat {
+  inv(n: number): result<number, distributionError> {
    return Constructors_inv({ env: this.env }, this.t, n);
  }

-  normalize() {
+  isNormalized(): result<boolean, distributionError> {
+    return Constructors_isNormalized({ env: this.env }, this.t);
+  }
+
+  normalize(): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_normalize({ env: this.env }, this.t)
    );
  }

-  toPointSet() {
+  type() {
+    return this.t.tag;
+  }
+
+  pointSet(): result<shape, distributionError> {
+    let pointSet = toPointSet(
+      this.t,
+      {
+        xyPointLength: this.env.xyPointLength,
+        sampleCount: this.env.sampleCount,
+      },
+      undefined
+    );
+    if (pointSet.tag === "Ok") {
+      let distribution = pointSet.value;
+      if (distribution.tag === "Continuous") {
+        return Ok({
+          continuous: shapePoints(distribution.value),
+          discrete: [],
+        });
+      } else if (distribution.tag === "Discrete") {
+        return Ok({
+          discrete: shapePoints(distribution.value),
+          continuous: [],
+        });
+      } else {
+        return Ok({
+          discrete: shapePoints(distribution.value.discrete),
+          continuous: shapePoints(distribution.value.continuous),
+        });
+      }
+    } else {
+      return pointSet;
+    }
+  }
+  toPointSet(): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_toPointSet({ env: this.env }, this.t)
    );
  }

-  toSampleSet(n: number) {
+  toSampleSet(n: number): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_toSampleSet({ env: this.env }, this.t, n)
    );
  }

-  truncate(left: number, right: number) {
+  truncate(
+    left: number,
+    right: number
+  ): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_truncate({ env: this.env }, this.t, left, right)
    );
  }

-  inspect() {
+  inspect(): result<Distribution, distributionError> {
    return this.mapResultDist(Constructors_inspect({ env: this.env }, this.t));
  }

-  toString(): resultString {
-    return Constructors_toString({ env: this.env }, this.t);
+  toString(): string {
+    let result = Constructors_toString({ env: this.env }, this.t);
+    if (result.tag === "Ok") {
+      return result.value;
+    } else {
+      return distributionErrorToString(result.value);
+    }
  }

-  toSparkline(n: number): resultString {
+  toSparkline(n: number): result<string, distributionError> {
    return Constructors_toSparkline({ env: this.env }, this.t, n);
  }

-  algebraicAdd(d2: GenericDist) {
+  algebraicAdd(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_algebraicAdd({ env: this.env }, this.t, d2.t)
    );
  }

-  algebraicMultiply(d2: GenericDist) {
+  algebraicMultiply(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_algebraicMultiply({ env: this.env }, this.t, d2.t)
    );
  }

-  algebraicDivide(d2: GenericDist) {
+  algebraicDivide(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_algebraicDivide({ env: this.env }, this.t, d2.t)
    );
  }

-  algebraicSubtract(d2: GenericDist) {
+  algebraicSubtract(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_algebraicSubtract({ env: this.env }, this.t, d2.t)
    );
  }

-  algebraicLogarithm(d2: GenericDist) {
+  algebraicLogarithm(
+    d2: Distribution
+  ): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_algebraicLogarithm({ env: this.env }, this.t, d2.t)
    );
  }

-  algebraicPower(d2: GenericDist) {
+  algebraicPower(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_algebraicPower({ env: this.env }, this.t, d2.t)
    );
  }

-  pointwiseAdd(d2: GenericDist) {
+  pointwiseAdd(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_pointwiseAdd({ env: this.env }, this.t, d2.t)
    );
  }

-  pointwiseMultiply(d2: GenericDist) {
+  pointwiseMultiply(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_pointwiseMultiply({ env: this.env }, this.t, d2.t)
    );
  }

-  pointwiseDivide(d2: GenericDist) {
+  pointwiseDivide(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_pointwiseDivide({ env: this.env }, this.t, d2.t)
    );
  }

-  pointwiseSubtract(d2: GenericDist) {
+  pointwiseSubtract(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_pointwiseSubtract({ env: this.env }, this.t, d2.t)
    );
  }

-  pointwiseLogarithm(d2: GenericDist) {
+  pointwiseLogarithm(
+    d2: Distribution
+  ): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_pointwiseLogarithm({ env: this.env }, this.t, d2.t)
    );
  }

-  pointwisePower(d2: GenericDist) {
+  pointwisePower(d2: Distribution): result<Distribution, distributionError> {
    return this.mapResultDist(
      Constructors_pointwisePower({ env: this.env }, this.t, d2.t)
    );
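The Distribution class above wraps a genericDist together with a sampling environment and returns result-typed values everywhere. A short usage sketch — the distribution is obtained through run() rather than constructed by hand, since genericDist values come from the ReScript side; the specific numbers are illustrative:

```typescript
// Illustrative usage of the Distribution wrapper added above.
const res = run("normal(5, 2)");
if (res.tag === "Ok" && res.value.tag === "distribution") {
  const dist = res.value.value;
  console.log(dist.mean());   // { tag: "Ok", value: 5 } for the symbolic normal
  console.log(dist.cdf(5));   // { tag: "Ok", value: 0.5 }
  const truncated = dist.truncate(3, 7); // result<Distribution, distributionError>
  if (truncated.tag === "Ok") {
    console.log(truncated.value.toString());
  }
}
```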
@@ -1,6 +1,6 @@
type functionCallInfo = GenericDist_Types.Operation.genericFunctionCallInfo
-type genericDist = GenericDist_Types.genericDist
+type genericDist = DistributionTypes.genericDist
-type error = GenericDist_Types.error
+type error = DistributionTypes.error

// TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.

@@ -13,6 +13,7 @@ type outputType =
  | Dist(genericDist)
  | Float(float)
  | String(string)
+  | Bool(bool)
  | GenDistError(error)

/*
@@ -66,6 +67,18 @@ module OutputLocal = {
    | e => Error(toErrorOrUnreachable(e))
    }

+  let toBool = (t: t) =>
+    switch t {
+    | Bool(d) => Some(d)
+    | _ => None
+    }
+
+  let toBoolR = (t: t): result<bool, error> =>
+    switch t {
+    | Bool(r) => Ok(r)
+    | e => Error(toErrorOrUnreachable(e))
+    }
+
  //This is used to catch errors in other switch statements.
  let fromResult = (r: result<t, error>): outputType =>
    switch r {
@@ -107,8 +120,8 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
      (),
    )->OutputLocal.toDistR

-  let fromDistFn = (subFnName: GenericDist_Types.Operation.fromDist, dist: genericDist) =>
+  let fromDistFn = (subFnName: GenericDist_Types.Operation.fromDist, dist: genericDist) => {
-    switch subFnName {
+    let response = switch subFnName {
    | ToFloat(distToFloatOperation) =>
      GenericDist.toFloatOperation(dist, ~toPointSetFn, ~distToFloatOperation)
      ->E.R2.fmap(r => Float(r))
@@ -123,6 +136,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
        Dist(dist)
      }
    | ToDist(Normalize) => dist->GenericDist.normalize->Dist
+    | ToBool(IsNormalized) => dist->GenericDist.isNormalized->Bool
    | ToDist(Truncate(leftCutoff, rightCutoff)) =>
      GenericDist.truncate(~toPointSetFn, ~leftCutoff, ~rightCutoff, dist, ())
      ->E.R2.fmap(r => Dist(r))
@@ -154,6 +168,8 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
      ->E.R2.fmap(r => Dist(r))
      ->OutputLocal.fromResult
    }
+    response
+  }

  switch functionCallInfo {
  | FromDist(subFnName, dist) => fromDistFn(subFnName, dist)
@@ -201,6 +217,7 @@ module Constructors = {
  let inv = (~env, dist, f) => C.inv(dist, f)->run(~env)->toFloatR
  let pdf = (~env, dist, f) => C.pdf(dist, f)->run(~env)->toFloatR
  let normalize = (~env, dist) => C.normalize(dist)->run(~env)->toDistR
+  let isNormalized = (~env, dist) => C.isNormalized(dist)->run(~env)->toBoolR
  let toPointSet = (~env, dist) => C.toPointSet(dist)->run(~env)->toDistR
  let toSampleSet = (~env, dist, n) => C.toSampleSet(dist, n)->run(~env)->toDistR
  let truncate = (~env, dist, leftCutoff, rightCutoff) =>
@@ -11,6 +11,7 @@ type outputType =
  | Dist(genericDist)
  | Float(float)
  | String(string)
+  | Bool(bool)
  | GenDistError(error)

@genType
@@ -34,62 +35,61 @@ module Output: {
  let toFloatR: t => result<float, error>
  let toString: t => option<string>
  let toStringR: t => result<string, error>
+  let toBool: t => option<bool>
+  let toBoolR: t => result<bool, error>
  let toError: t => option<error>
  let fmap: (~env: env, t, GenericDist_Types.Operation.singleParamaterFunction) => t
}

module Constructors: {
  @genType
  let mean: (~env: env, genericDist) => result<float, error>
  @genType
  let sample: (~env: env, genericDist) => result<float, error>
  @genType
  let cdf: (~env: env, genericDist, float) => result<float, error>
  @genType
  let inv: (~env: env, genericDist, float) => result<float, error>
  @genType
  let pdf: (~env: env, genericDist, float) => result<float, error>
  @genType
  let normalize: (~env: env, genericDist) => result<genericDist, error>
  @genType
+  let isNormalized: (~env: env, genericDist) => result<bool, error>
+  @genType
  let toPointSet: (~env: env, genericDist) => result<genericDist, error>
  @genType
  let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
  @genType
-  let truncate: (
-    ~env: env,
-    genericDist,
-    option<float>,
-    option<float>,
-  ) => result<genericDist, error>
+  let truncate: (~env: env, genericDist, option<float>, option<float>) => result<genericDist, error>
  @genType
  let inspect: (~env: env, genericDist) => result<genericDist, error>
  @genType
  let toString: (~env: env, genericDist) => result<string, error>
  @genType
  let toSparkline: (~env: env, genericDist, int) => result<string, error>
  @genType
  let algebraicAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwiseLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
  @genType
  let pointwisePower: (~env: env, genericDist, genericDist) => result<genericDist, error>
}
@@ -1,12 +1,15 @@
+@genType
type genericDist =
  | PointSet(PointSetTypes.pointSetDist)
-  | SampleSet(array<float>)
+  | SampleSet(SampleSetDist.t)
  | Symbolic(SymbolicDistTypes.symbolicDist)

+@genType
type error =
  | NotYetImplemented
  | Unreachable
  | DistributionVerticalShiftIsInvalid
+  | ArgumentError(string)
  | Other(string)

module Operation = {
@@ -55,7 +58,11 @@ module DistributionOperation = {
  type fromDist =
    | ToFloat(Operation.toFloat)
    | ToDist(toDist)
-    | ToDistCombination(Operation.direction, Operation.arithmeticOperation, [#Dist(genericDist) | #Float(float)])
+    | ToDistCombination(
+        Operation.direction,
+        Operation.arithmeticOperation,
+        [#Dist(genericDist) | #Float(float)],
+      )
    | ToString

  type singleParamaterFunction =
@@ -1,6 +1,6 @@
//TODO: multimodal, add interface, test somehow, track performance, refactor sampleSet, refactor ASTEvaluator.res.
-type t = GenericDist_Types.genericDist
+type t = DistributionTypes.genericDist
-type error = GenericDist_Types.error
+type error = DistributionTypes.error
type toPointSetFn = t => result<PointSetTypes.pointSetDist, error>
type toSampleSetFn = t => result<SampleSetDist.t, error>
type scaleMultiplyFn = (t, float) => result<t, error>
@@ -32,6 +32,15 @@ let normalize = (t: t): t =>
  | SampleSet(_) => t
  }

+let integralEndY = (t: t): float =>
+  switch t {
+  | PointSet(r) => PointSetDist.T.integralEndY(r)
+  | Symbolic(_) => 1.0
+  | SampleSet(_) => 1.0
+  }
+
+let isNormalized = (t: t): bool => Js.Math.abs_float(integralEndY(t) -. 1.0) < 1e-7

let toFloatOperation = (
  t,
  ~toPointSetFn: toPointSetFn,
@@ -115,7 +124,7 @@ module Truncate = {
    | Some(r) => Ok(r)
    | None =>
      toPointSetFn(t)->E.R2.fmap(t => {
-        GenericDist_Types.PointSet(PointSetDist.T.truncate(leftCutoff, rightCutoff, t))
+        DistributionTypes.PointSet(PointSetDist.T.truncate(leftCutoff, rightCutoff, t))
      })
    }
  }
@@ -168,7 +177,7 @@ module AlgebraicCombination = {
    ->E.R.bind(((t1, t2)) => {
      SampleSetDist.map2(~fn, ~t1, ~t2)->GenericDist_Types.Error.resultStringToResultError
    })
-    ->E.R2.fmap(r => GenericDist_Types.SampleSet(r))
+    ->E.R2.fmap(r => DistributionTypes.SampleSet(r))
  }

  //I'm (Ozzie) really just guessing here, very little idea what's best
@@ -206,7 +215,7 @@ module AlgebraicCombination = {
        arithmeticOperation,
        t1,
        t2,
-      )->E.R2.fmap(r => GenericDist_Types.PointSet(r))
+      )->E.R2.fmap(r => DistributionTypes.PointSet(r))
    }
  }
}
@@ -229,7 +238,7 @@ let pointwiseCombination = (
      t2,
    )
  )
-  ->E.R2.fmap(r => GenericDist_Types.PointSet(r))
+  ->E.R2.fmap(r => DistributionTypes.PointSet(r))
}

let pointwiseCombinationFloat = (
@@ -239,7 +248,7 @@ let pointwiseCombinationFloat = (
  ~float: float,
): result<t, error> => {
  let m = switch arithmeticOperation {
-  | #Add | #Subtract => Error(GenericDist_Types.DistributionVerticalShiftIsInvalid)
+  | #Add | #Subtract => Error(DistributionTypes.DistributionVerticalShiftIsInvalid)
  | (#Multiply | #Divide | #Power | #Logarithm) as arithmeticOperation =>
    toPointSetFn(t)->E.R2.fmap(t => {
      //TODO: Move to PointSet codebase
@@ -254,7 +263,7 @@ let pointwiseCombinationFloat = (
      )
    })
  }
-  m->E.R2.fmap(r => GenericDist_Types.PointSet(r))
+  m->E.R2.fmap(r => DistributionTypes.PointSet(r))
}

//Note: The result should always cumulatively sum to 1. This would be good to test.
@@ -265,7 +274,7 @@ let mixture = (
  ~pointwiseAddFn: pointwiseAddFn,
) => {
  if E.A.length(values) == 0 {
-    Error(GenericDist_Types.Other("Mixture error: mixture must have at least 1 element"))
+    Error(DistributionTypes.Other("Mixture error: mixture must have at least 1 element"))
  } else {
    let totalWeight = values->E.A2.fmap(E.Tuple2.second)->E.A.Floats.sum
    let properlyWeightedValues =
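The note in the hunk above says a mixture's result should cumulatively sum to 1. One way such a check could be sketched from the TypeScript side, via the public run()/Distribution API this PR adds — the specific program string mirrors the test suite, the rest is illustrative:

```typescript
// Illustrative sketch: check that a mixture evaluates to an (approximately) normalized distribution.
const res = run("mixture(normal(5,2), normal(10,1), [0.2, 0.4])");
if (res.tag === "Ok" && res.value.tag === "distribution") {
  const normalized = res.value.value.isNormalized();
  // Expect { tag: "Ok", value: true } if the mixture weights were renormalized correctly.
  console.log(normalized);
}
```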
@@ -15,12 +15,15 @@ let toString: t => string

let normalize: t => t

+let isNormalized: t => bool
+
let toFloatOperation: (
  t,
  ~toPointSetFn: toPointSetFn,
  ~distToFloatOperation: Operation.distToFloatOperation,
) => result<float, error>

+@genType
let toPointSet: (
  t,
  ~xyPointLength: int,
@@ -1,20 +1,24 @@
-type genericDist =
-  | PointSet(PointSetTypes.pointSetDist)
-  | SampleSet(SampleSetDist.t)
-  | Symbolic(SymbolicDistTypes.symbolicDist)
+type genericDist = DistributionTypes.genericDist
 @genType
-type error =
-  | NotYetImplemented
-  | Unreachable
-  | DistributionVerticalShiftIsInvalid
-  | Other(string)
+type error = DistributionTypes.error

module Error = {
  type t = error

  let fromString = (s: string): t => Other(s)

+  @genType
+  let toString = (x: t) => {
+    switch x {
+    | NotYetImplemented => "Not Yet Implemented"
+    | Unreachable => "Unreachable"
+    | DistributionVerticalShiftIsInvalid => "Distribution Vertical Shift Is Invalid"
+    | ArgumentError(x) => `Argument Error: ${x}`
+    | Other(s) => s
+    }
+  }
+
  let resultStringToResultError: result<'a, string> => result<'a, error> = n =>
    n->E.R2.errMap(r => r->fromString->Error)
}
@@ -51,6 +55,7 @@ module Operation = {
    | #Sample
  ]

+  @genType
  type pointsetXSelection = [#Linear | #ByWeight]

  type toDist =
@@ -66,11 +71,14 @@ module Operation = {
    | ToString
    | ToSparkline(int)

+  type toBool = IsNormalized
+
  type fromDist =
    | ToFloat(toFloat)
    | ToDist(toDist)
    | ToDistCombination(direction, arithmeticOperation, [#Dist(genericDist) | #Float(float)])
    | ToString(toString)
+    | ToBool(toBool)

  type singleParamaterFunction =
    | FromDist(fromDist)
@@ -96,6 +104,7 @@ module Operation = {
    | ToDist(Inspect) => `inspect`
    | ToString(ToString) => `toString`
    | ToString(ToSparkline(n)) => `toSparkline(${E.I.toString(n)})`
+    | ToBool(IsNormalized) => `isNormalized`
    | ToDistCombination(Algebraic, _, _) => `algebraic`
    | ToDistCombination(Pointwise, _, _) => `pointwise`
    }
@@ -126,6 +135,7 @@ module Constructors = {
  let inv = (dist, x): t => FromDist(ToFloat(#Inv(x)), dist)
  let pdf = (dist, x): t => FromDist(ToFloat(#Pdf(x)), dist)
  let normalize = (dist): t => FromDist(ToDist(Normalize), dist)
+  let isNormalized = (dist): t => FromDist(ToBool(IsNormalized), dist)
  let toPointSet = (dist): t => FromDist(ToDist(ToPointSet), dist)
  let toSampleSet = (dist, r): t => FromDist(ToDist(ToSampleSet(r)), dist)
  let truncate = (dist, left, right): t => FromDist(ToDist(Truncate(left, right)), dist)
@@ -4,11 +4,12 @@ This library provides one interface to generic distributions. These distribution

 Different internal formats (symbolic, point set, sample set) allow for benefits and features. It's common for distributions to be converted into either point sets or sample sets to enable certain functions.

-In addition to this interface, there's a second, generic function, for calling functions on this generic distribution type. This ``genericOperation`` standardizes the inputs and outputs for these various function calls. See it's ``run()`` function.
+In addition to this interface, there's a second, generic function for calling functions on this generic distribution type. This `genericOperation` standardizes the inputs and outputs for these various function calls. See its `run()` function.

 Performance is very important. Some operations can take a long time to run, and even then, be inaccurate. Because of this, we plan to have a lot of logging and stack tracing functionality eventually built in.

 ## Diagram of Distribution Types

 ```mermaid
 graph TD
 A[Generic Distribution] -->B{Point Set}
@@ -34,7 +35,8 @@ graph TD
 ## Diagram of Generic Distribution Types

 ## Todo
+
 - [ ] Lots of cleanup
 - [ ] Simple test story
 - [ ] Provide decent stack traces for key calls in GenericOperation. This could be very useful for debugging.
 - [ ] Cleanup Sample Set library
@@ -45,4 +47,4 @@ graph TD
 - [ ] Allow these functions to be run on web workers
 - [ ] Refactor interpreter to use GenericDist. This might not be necessary, as the new reducer-inspired interpreter is integrated.

 ## More todos

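To make the interface described above concrete, here is a minimal sketch of building one of these operation values in ReScript. It relies only on the `fromDist` variants and the `Constructors` helper added in this diff; the value `dist` and the enclosing module opens are assumptions, not part of the change.

```rescript
// Minimal sketch, assuming a genericDist value `dist` is in scope and the
// relevant modules are opened. Both lines build the same operation value;
// the second uses the Constructors helper added in this diff.
let op = FromDist(ToBool(IsNormalized), dist)
let opViaConstructor = Constructors.isNormalized(dist)
```
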
@@ -100,7 +100,6 @@ let combineShapesContinuousContinuous = (
   s1: PointSetTypes.xyShape,
   s2: PointSetTypes.xyShape,
 ): PointSetTypes.xyShape => {
-
   // if we add the two distributions, we should probably use normal filters.
   // if we multiply the two distributions, we should probably use lognormal filters.
   let t1m = toDiscretePointMassesFromTriangulars(s1)

@@ -235,18 +235,10 @@ module T = Dist({
     let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0
     let indefiniteIntegralLinear = (p, a, b) => a *. p ** 2.0 /. 2.0 +. b *. p ** 3.0 /. 3.0

-    Analysis.integrate(
-      ~indefiniteIntegralStepwise,
-      ~indefiniteIntegralLinear,
-      t,
-    )
+    Analysis.integrate(~indefiniteIntegralStepwise, ~indefiniteIntegralLinear, t)
   }
   let variance = (t: t): float =>
-    XYShape.Analysis.getVarianceDangerously(
-      t,
-      mean,
-      Analysis.getMeanOfSquares,
-    )
+    XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
 })

 let downsampleEquallyOverX = (length, t): t =>

@@ -214,4 +214,4 @@ module T = Dist({
     let getMeanOfSquares = t => t |> shapeMap(XYShape.T.square) |> mean
     XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
   }
 })

@@ -47,6 +47,7 @@ module Dist = (T: dist) => {
   let truncate = T.truncate
   let mean = T.mean
   let variance = T.variance
+  let integralEndY = T.integralEndY

   let updateIntegralCache = T.updateIntegralCache

@@ -212,8 +212,7 @@ module T = Dist({
     let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum

     let getMeanOfSquares = ({discrete, continuous}: t) => {
-      let discreteMean =
-        discrete |> Discrete.shapeMap(XYShape.T.square) |> Discrete.T.mean
+      let discreteMean = discrete |> Discrete.shapeMap(XYShape.T.square) |> Discrete.T.mean
       let continuousMean = continuous |> Continuous.Analysis.getMeanOfSquares
       (discreteMean *. discreteIntegralSum +. continuousMean *. continuousIntegralSum) /.
         totalIntegralSum

@@ -207,4 +207,4 @@ let toSparkline = (t: t, bucketCount) =>
   T.toContinuous(t)
   ->E.O2.fmap(Continuous.downsampleEquallyOverX(bucketCount))
   ->E.O2.toResult("toContinous Error: Could not convert into continuous distribution")
   ->E.R2.fmap(r => Continuous.getShape(r).ys->Sparklines.create())

@@ -14,11 +14,12 @@ type distributionType = [
   | #CDF
 ]

-type xyShape = XYShape.xyShape;
-type interpolationStrategy = XYShape.interpolationStrategy;
-type extrapolationStrategy = XYShape.extrapolationStrategy;
-type interpolator = XYShape.extrapolationStrategy;
+type xyShape = XYShape.xyShape
+type interpolationStrategy = XYShape.interpolationStrategy
+type extrapolationStrategy = XYShape.extrapolationStrategy
+type interpolator = XYShape.extrapolationStrategy

+@genType
 type rec continuousShape = {
   xyShape: xyShape,
   interpolation: interpolationStrategy,
@@ -26,12 +27,14 @@ type rec continuousShape = {
   integralCache: option<continuousShape>,
 }

+@genType
 type discreteShape = {
   xyShape: xyShape,
   integralSumCache: option<float>,
   integralCache: option<continuousShape>,
 }

+@genType
 type mixedShape = {
   continuous: continuousShape,
   discrete: discreteShape,

@@ -1,4 +1,4 @@
-const pdfast = require('pdfast');
+const pdfast = require("pdfast");
 const _ = require("lodash");

 const samplesToContinuousPdf = (
@@ -6,13 +6,17 @@ const samplesToContinuousPdf = (
   size,
   width,
   min = false,
-  max = false,
+  max = false
 ) => {
   let _samples = _.filter(samples, _.isFinite);
-  if (_.isFinite(min)) { _samples = _.filter(_samples, r => r > min) };
-  if (_.isFinite(max)) { _samples = _.filter(_samples, r => r < max) };
+  if (_.isFinite(min)) {
+    _samples = _.filter(_samples, (r) => r > min);
+  }
+  if (_.isFinite(max)) {
+    _samples = _.filter(_samples, (r) => r < max);
+  }
   let pdf = pdfast.create(_samples, { size, width });
-  return {xs: pdf.map(r => r.x), ys: pdf.map(r => r.y)};
+  return { xs: pdf.map((r) => r.x), ys: pdf.map((r) => r.y) };
 };

 module.exports = {

@@ -1,5 +1,8 @@
 open SymbolicDistTypes

+let normal95confidencePoint = 1.6448536269514722
+// explained in website/docs/internal/ProcessingConfidenceIntervals
+
 module Normal = {
   type t = normal
   let make = (mean: float, stdev: float): result<symbolicDist, string> =>

@@ -11,7 +14,7 @@ module Normal = {

   let from90PercentCI = (low, high) => {
     let mean = E.A.Floats.mean([low, high])
-    let stdev = (high -. low) /. (2. *. 1.644854)
+    let stdev = (high -. low) /. (2. *. normal95confidencePoint)
     #Normal({mean: mean, stdev: stdev})
   }
   let inv = (p, t: t) => Jstat.Normal.inv(p, t.mean, t.stdev)

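For reference, `normal95confidencePoint` is the 95th-percentile quantile of the standard normal, so a symmetric 90% interval (low, high) maps to parameters as follows; this is standard quantile algebra rather than anything introduced by the diff:

$$\mu = \frac{\text{low} + \text{high}}{2}, \qquad \sigma = \frac{\text{high} - \text{low}}{2\,z_{0.95}}, \qquad z_{0.95} = \Phi^{-1}(0.95) \approx 1.6448536$$
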
@@ -21,12 +24,12 @@ module Normal = {

   let add = (n1: t, n2: t) => {
     let mean = n1.mean +. n2.mean
-    let stdev = sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
+    let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
     #Normal({mean: mean, stdev: stdev})
   }
   let subtract = (n1: t, n2: t) => {
     let mean = n1.mean -. n2.mean
-    let stdev = sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
+    let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
     #Normal({mean: mean, stdev: stdev})
   }

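Both branches use the usual rule for sums and differences of independent normals (independence is what the code assumes, though the diff does not state it):

$$X \pm Y \;\sim\; \mathcal{N}\!\left(\mu_X \pm \mu_Y,\ \sqrt{\sigma_X^2 + \sigma_Y^2}\right)$$
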
@@ -44,6 +47,23 @@ module Normal = {
     | #Subtract => Some(subtract(n1, n2))
     | _ => None
     }
+
+  let operateFloatFirst = (operation: Operation.Algebraic.t, n1: float, n2: t) =>
+    switch operation {
+    | #Add => Some(#Normal({mean: n1 +. n2.mean, stdev: n2.stdev}))
+    | #Subtract => Some(#Normal({mean: n1 -. n2.mean, stdev: n2.stdev}))
+    | #Multiply => Some(#Normal({mean: n1 *. n2.mean, stdev: n1 *. n2.stdev}))
+    | _ => None
+    }
+
+  let operateFloatSecond = (operation: Operation.Algebraic.t, n1: t, n2: float) =>
+    switch operation {
+    | #Add => Some(#Normal({mean: n1.mean +. n2, stdev: n1.stdev}))
+    | #Subtract => Some(#Normal({mean: n1.mean -. n2, stdev: n1.stdev}))
+    | #Multiply => Some(#Normal({mean: n1.mean *. n2, stdev: n1.stdev *. n2}))
+    | #Divide => Some(#Normal({mean: n1.mean /. n2, stdev: n1.stdev /. n2}))
+    | _ => None
+    }
 }

 module Exponential = {

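The two float helpers rely on the fact that shifting or scaling a normal keeps it normal. For a constant c (with |c| in the scale term; the code writes c·σ directly, which agrees when c > 0):

$$c + X \sim \mathcal{N}(c + \mu,\ \sigma), \qquad cX \sim \mathcal{N}(c\mu,\ |c|\,\sigma), \qquad X/c \sim \mathcal{N}(\mu/c,\ \sigma/|c|)$$
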
@@ -81,7 +101,7 @@ module Triangular = {
     low < medium && medium < high
       ? Ok(#Triangular({low: low, medium: medium, high: high}))
       : Error("Triangular values must be increasing order.")
   let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
   let cdf = (x, t: t) => Jstat.Triangular.cdf(x, t.low, t.high, t.medium)
   let inv = (p, t: t) => Jstat.Triangular.inv(p, t.low, t.high, t.medium)
   let sample = (t: t) => Jstat.Triangular.sample(t.low, t.high, t.medium)

@@ -115,19 +135,22 @@ module Lognormal = {
   let mean = (t: t) => Ok(Jstat.Lognormal.mean(t.mu, t.sigma))
   let sample = (t: t) => Jstat.Lognormal.sample(t.mu, t.sigma)
   let toString = ({mu, sigma}: t) => j`Lognormal($mu,$sigma)`

   let from90PercentCI = (low, high) => {
     let logLow = Js.Math.log(low)
     let logHigh = Js.Math.log(high)
     let mu = E.A.Floats.mean([logLow, logHigh])
-    let sigma = (logHigh -. logLow) /. (2.0 *. 1.645)
+    let sigma = (logHigh -. logLow) /. (2.0 *. normal95confidencePoint)
     #Lognormal({mu: mu, sigma: sigma})
   }
   let fromMeanAndStdev = (mean, stdev) => {
+    // https://math.stackexchange.com/questions/2501783/parameters-of-a-lognormal-distribution
+    // https://wikiless.org/wiki/Log-normal_distribution?lang=en#Generation_and_parameters
     if stdev > 0.0 {
-      let variance = Js.Math.pow_float(~base=stdev, ~exp=2.0)
-      let meanSquared = Js.Math.pow_float(~base=mean, ~exp=2.0)
-      let mu = Js.Math.log(mean) -. 0.5 *. Js.Math.log(variance /. meanSquared +. 1.0)
-      let sigma = Js.Math.pow_float(~base=Js.Math.log(variance /. meanSquared +. 1.0), ~exp=0.5)
+      let variance = stdev ** 2.
+      let meanSquared = mean ** 2.
+      let mu = 2. *. Js.Math.log(mean) -. 0.5 *. Js.Math.log(variance +. meanSquared)
+      let sigma = Js.Math.sqrt(Js.Math.log(variance /. meanSquared +. 1.))
       Ok(#Lognormal({mu: mu, sigma: sigma}))
     } else {
       Error("Lognormal standard deviation must be larger than 0")

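The old and new `mu` lines are the same moment-matching formula written two equivalent ways; for a target mean m and standard deviation s of the lognormal itself:

$$\mu = 2\ln m - \tfrac{1}{2}\ln\!\left(s^2 + m^2\right) = \ln\!\frac{m^2}{\sqrt{s^2 + m^2}}, \qquad \sigma = \sqrt{\ln\!\left(1 + \frac{s^2}{m^2}\right)}$$
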
@@ -135,13 +158,16 @@ module Lognormal = {
   }

   let multiply = (l1, l2) => {
+    // https://wikiless.org/wiki/Log-normal_distribution?lang=en#Multiplication_and_division_of_independent,_log-normal_random_variables
     let mu = l1.mu +. l2.mu
-    let sigma = l1.sigma +. l2.sigma
+    let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
     #Lognormal({mu: mu, sigma: sigma})
   }
   let divide = (l1, l2) => {
     let mu = l1.mu -. l2.mu
-    let sigma = l1.sigma +. l2.sigma
+    // We believe the ratiands will have covariance zero.
+    // See here https://stats.stackexchange.com/questions/21735/what-are-the-mean-and-variance-of-the-ratio-of-two-lognormal-variables for details
+    let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
     #Lognormal({mu: mu, sigma: sigma})
   }
   let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>

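The corrected `sigma` follows the rule for products and ratios of independent lognormals (the zero-covariance assumption the comment mentions):

$$X \cdot Y \sim \operatorname{Lognormal}\!\left(\mu_X + \mu_Y,\ \sqrt{\sigma_X^2 + \sigma_Y^2}\right), \qquad X / Y \sim \operatorname{Lognormal}\!\left(\mu_X - \mu_Y,\ \sqrt{\sigma_X^2 + \sigma_Y^2}\right)$$
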
@@ -150,6 +176,22 @@ module Lognormal = {
     | #Divide => Some(divide(n1, n2))
     | _ => None
     }
+
+  let operateFloatFirst = (operation: Operation.Algebraic.t, n1: float, n2: t) =>
+    switch operation {
+    | #Multiply =>
+      n1 > 0.0 ? Some(#Lognormal({mu: Js.Math.log(n1) +. n2.mu, sigma: n2.sigma})) : None
+    | #Divide => n1 > 0.0 ? Some(#Lognormal({mu: Js.Math.log(n1) -. n2.mu, sigma: n2.sigma})) : None
+    | _ => None
+    }
+
+  let operateFloatSecond = (operation: Operation.Algebraic.t, n1: t, n2: float) =>
+    switch operation {
+    | #Multiply =>
+      n2 > 0.0 ? Some(#Lognormal({mu: n1.mu +. Js.Math.log(n2), sigma: n1.sigma})) : None
+    | #Divide => n2 > 0.0 ? Some(#Lognormal({mu: n1.mu -. Js.Math.log(n2), sigma: n1.sigma})) : None
+    | _ => None
+    }
 }

 module Uniform = {

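For a positive constant c, multiplying or dividing a lognormal only shifts μ, which is exactly what the two float helpers above encode (and why they return None for c ≤ 0):

$$cX \sim \operatorname{Lognormal}(\mu + \ln c,\ \sigma), \qquad X/c \sim \operatorname{Lognormal}(\mu - \ln c,\ \sigma), \qquad c/X \sim \operatorname{Lognormal}(\ln c - \mu,\ \sigma)$$
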
@@ -341,12 +383,36 @@ module T = {
     }
   | (#Normal(v1), #Normal(v2)) =>
     Normal.operate(op, v1, v2) |> E.O.dimap(r => #AnalyticalSolution(r), () => #NoSolution)
+  | (#Float(v1), #Normal(v2)) =>
+    Normal.operateFloatFirst(op, v1, v2) |> E.O.dimap(
+      r => #AnalyticalSolution(r),
+      () => #NoSolution,
+    )
+  | (#Normal(v1), #Float(v2)) =>
+    Normal.operateFloatSecond(op, v1, v2) |> E.O.dimap(
+      r => #AnalyticalSolution(r),
+      () => #NoSolution,
+    )
   | (#Lognormal(v1), #Lognormal(v2)) =>
     Lognormal.operate(op, v1, v2) |> E.O.dimap(r => #AnalyticalSolution(r), () => #NoSolution)
+  | (#Float(v1), #Lognormal(v2)) =>
+    Lognormal.operateFloatFirst(op, v1, v2) |> E.O.dimap(
+      r => #AnalyticalSolution(r),
+      () => #NoSolution,
+    )
+  | (#Lognormal(v1), #Float(v2)) =>
+    Lognormal.operateFloatSecond(op, v1, v2) |> E.O.dimap(
+      r => #AnalyticalSolution(r),
+      () => #NoSolution,
+    )
   | _ => #NoSolution
   }

-let toPointSetDist = (~xSelection=#ByWeight, sampleCount, d: symbolicDist): PointSetTypes.pointSetDist =>
+let toPointSetDist = (
+  ~xSelection=#ByWeight,
+  sampleCount,
+  d: symbolicDist,
+): PointSetTypes.pointSetDist =>
   switch d {
   | #Float(v) => Discrete(Discrete.make(~integralSumCache=Some(1.0), {xs: [v], ys: [1.0]}))
   | _ =>

@ -21,4 +21,4 @@ let toPointSetDist = (samplingInputs, environment, node: node) =>
|
||||||
let runFunction = (samplingInputs, environment, inputs, fn: ASTTypes.Function.t) => {
|
let runFunction = (samplingInputs, environment, inputs, fn: ASTTypes.Function.t) => {
|
||||||
let params = envs(samplingInputs, environment)
|
let params = envs(samplingInputs, environment)
|
||||||
ASTTypes.Function.run(params, inputs, fn)
|
ASTTypes.Function.run(params, inputs, fn)
|
||||||
}
|
}
|
||||||
|
|
|
@ -213,19 +213,20 @@ module SamplingDistribution = {
|
||||||
let i1 = renderIfIsNotSamplingDistribution(evaluationParams, t1)
|
let i1 = renderIfIsNotSamplingDistribution(evaluationParams, t1)
|
||||||
let i2 = renderIfIsNotSamplingDistribution(evaluationParams, t2)
|
let i2 = renderIfIsNotSamplingDistribution(evaluationParams, t2)
|
||||||
E.R.merge(i1, i2) |> E.R.bind(_, ((a, b)) => {
|
E.R.merge(i1, i2) |> E.R.bind(_, ((a, b)) => {
|
||||||
let samples = getCombinationSamples(
|
let samples =
|
||||||
evaluationParams.samplingInputs.sampleCount,
|
getCombinationSamples(
|
||||||
algebraicOp,
|
evaluationParams.samplingInputs.sampleCount,
|
||||||
a,
|
algebraicOp,
|
||||||
b,
|
a,
|
||||||
) |> E.O.toResult("Could not get samples")
|
b,
|
||||||
|
) |> E.O.toResult("Could not get samples")
|
||||||
|
|
||||||
let sampleSetDist = samples -> E.R.bind(SampleSetDist.make)
|
let sampleSetDist = samples->E.R.bind(SampleSetDist.make)
|
||||||
|
|
||||||
let pointSetDist =
|
let pointSetDist =
|
||||||
sampleSetDist
|
sampleSetDist->E.R.bind(r =>
|
||||||
-> E.R.bind(r =>
|
SampleSetDist.toPointSetDist(~samplingInputs=evaluationParams.samplingInputs, ~samples=r)
|
||||||
SampleSetDist.toPointSetDist(~samplingInputs=evaluationParams.samplingInputs, ~samples=r));
|
)
|
||||||
pointSetDist |> E.R.fmap(r => #Normalize(#RenderedDist(r)))
|
pointSetDist |> E.R.fmap(r => #Normalize(#RenderedDist(r)))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,116 +1,87 @@
|
||||||
open PointSetTypes;
|
open PointSetTypes
|
||||||
|
|
||||||
@genType
|
@genType
|
||||||
type t = PointSetTypes.distPlus;
|
type t = PointSetTypes.distPlus
|
||||||
|
|
||||||
let pointSetDistIntegral = pointSetDist => PointSetDist.T.Integral.get(pointSetDist);
|
let pointSetDistIntegral = pointSetDist => PointSetDist.T.Integral.get(pointSetDist)
|
||||||
let make =
|
let make = (~pointSetDist, ~squiggleString, ()): t => {
|
||||||
(
|
let integral = pointSetDistIntegral(pointSetDist)
|
||||||
~pointSetDist,
|
{pointSetDist: pointSetDist, integralCache: integral, squiggleString: squiggleString}
|
||||||
~squiggleString,
|
}
|
||||||
(),
|
|
||||||
)
|
|
||||||
: t => {
|
|
||||||
let integral = pointSetDistIntegral(pointSetDist);
|
|
||||||
{pointSetDist, integralCache: integral, squiggleString};
|
|
||||||
};
|
|
||||||
|
|
||||||
let update =
|
let update = (~pointSetDist=?, ~integralCache=?, ~squiggleString=?, t: t) => {
|
||||||
(
|
|
||||||
~pointSetDist=?,
|
|
||||||
~integralCache=?,
|
|
||||||
~squiggleString=?,
|
|
||||||
t: t,
|
|
||||||
) => {
|
|
||||||
pointSetDist: E.O.default(t.pointSetDist, pointSetDist),
|
pointSetDist: E.O.default(t.pointSetDist, pointSetDist),
|
||||||
integralCache: E.O.default(t.integralCache, integralCache),
|
integralCache: E.O.default(t.integralCache, integralCache),
|
||||||
squiggleString: E.O.default(t.squiggleString, squiggleString),
|
squiggleString: E.O.default(t.squiggleString, squiggleString),
|
||||||
};
|
}
|
||||||
|
|
||||||
let updateShape = (pointSetDist, t) => {
|
let updateShape = (pointSetDist, t) => {
|
||||||
let integralCache = pointSetDistIntegral(pointSetDist);
|
let integralCache = pointSetDistIntegral(pointSetDist)
|
||||||
update(~pointSetDist, ~integralCache, t);
|
update(~pointSetDist, ~integralCache, t)
|
||||||
};
|
}
|
||||||
|
|
||||||
let toPointSetDist = ({pointSetDist, _}: t) => pointSetDist;
|
let toPointSetDist = ({pointSetDist, _}: t) => pointSetDist
|
||||||
|
|
||||||
let pointSetDistFn = (fn, {pointSetDist}: t) => fn(pointSetDist);
|
let pointSetDistFn = (fn, {pointSetDist}: t) => fn(pointSetDist)
|
||||||
|
|
||||||
module T =
|
module T = Distributions.Dist({
|
||||||
Distributions.Dist({
|
type t = PointSetTypes.distPlus
|
||||||
type t = PointSetTypes.distPlus;
|
type integral = PointSetTypes.distPlus
|
||||||
type integral = PointSetTypes.distPlus;
|
let toPointSetDist = toPointSetDist
|
||||||
let toPointSetDist = toPointSetDist;
|
let toContinuous = pointSetDistFn(PointSetDist.T.toContinuous)
|
||||||
let toContinuous = pointSetDistFn(PointSetDist.T.toContinuous);
|
let toDiscrete = pointSetDistFn(PointSetDist.T.toDiscrete)
|
||||||
let toDiscrete = pointSetDistFn(PointSetDist.T.toDiscrete);
|
|
||||||
|
|
||||||
let normalize = (t: t): t => {
|
let normalize = (t: t): t => {
|
||||||
let normalizedShape = t |> toPointSetDist |> PointSetDist.T.normalize;
|
let normalizedShape = t |> toPointSetDist |> PointSetDist.T.normalize
|
||||||
t |> updateShape(normalizedShape);
|
t |> updateShape(normalizedShape)
|
||||||
};
|
}
|
||||||
|
|
||||||
let truncate = (leftCutoff, rightCutoff, t: t): t => {
|
let truncate = (leftCutoff, rightCutoff, t: t): t => {
|
||||||
let truncatedShape =
|
let truncatedShape = t |> toPointSetDist |> PointSetDist.T.truncate(leftCutoff, rightCutoff)
|
||||||
t
|
|
||||||
|> toPointSetDist
|
|
||||||
|> PointSetDist.T.truncate(leftCutoff, rightCutoff);
|
|
||||||
|
|
||||||
t |> updateShape(truncatedShape);
|
t |> updateShape(truncatedShape)
|
||||||
};
|
}
|
||||||
|
|
||||||
let xToY = (f, t: t) =>
|
let xToY = (f, t: t) => t |> toPointSetDist |> PointSetDist.T.xToY(f)
|
||||||
t
|
|
||||||
|> toPointSetDist
|
|
||||||
|> PointSetDist.T.xToY(f);
|
|
||||||
|
|
||||||
let minX = pointSetDistFn(PointSetDist.T.minX);
|
let minX = pointSetDistFn(PointSetDist.T.minX)
|
||||||
let maxX = pointSetDistFn(PointSetDist.T.maxX);
|
let maxX = pointSetDistFn(PointSetDist.T.maxX)
|
||||||
let toDiscreteProbabilityMassFraction =
|
let toDiscreteProbabilityMassFraction = pointSetDistFn(
|
||||||
pointSetDistFn(PointSetDist.T.toDiscreteProbabilityMassFraction);
|
PointSetDist.T.toDiscreteProbabilityMassFraction,
|
||||||
|
)
|
||||||
|
|
||||||
// This bit is kind of awkward, could probably use rethinking.
|
// This bit is kind of awkward, could probably use rethinking.
|
||||||
let integral = (t: t) =>
|
let integral = (t: t) => updateShape(Continuous(t.integralCache), t)
|
||||||
updateShape(Continuous(t.integralCache), t);
|
|
||||||
|
|
||||||
let updateIntegralCache = (integralCache: option<PointSetTypes.continuousShape>, t) =>
|
let updateIntegralCache = (integralCache: option<PointSetTypes.continuousShape>, t) =>
|
||||||
update(~integralCache=E.O.default(t.integralCache, integralCache), t);
|
update(~integralCache=E.O.default(t.integralCache, integralCache), t)
|
||||||
|
|
||||||
let downsample = (i, t): t =>
|
let downsample = (i, t): t => updateShape(t |> toPointSetDist |> PointSetDist.T.downsample(i), t)
|
||||||
updateShape(t |> toPointSetDist |> PointSetDist.T.downsample(i), t);
|
// todo: adjust for limit, maybe?
|
||||||
// todo: adjust for limit, maybe?
|
let mapY = (
|
||||||
let mapY =
|
~integralSumCacheFn=previousIntegralSum => None,
|
||||||
(
|
~integralCacheFn=previousIntegralCache => None,
|
||||||
~integralSumCacheFn=previousIntegralSum => None,
|
~fn,
|
||||||
~integralCacheFn=previousIntegralCache => None,
|
{pointSetDist, _} as t: t,
|
||||||
~fn,
|
): t => PointSetDist.T.mapY(~integralSumCacheFn, ~fn, pointSetDist) |> updateShape(_, t)
|
||||||
{pointSetDist, _} as t: t,
|
|
||||||
)
|
|
||||||
: t =>
|
|
||||||
PointSetDist.T.mapY(~integralSumCacheFn, ~fn, pointSetDist)
|
|
||||||
|> updateShape(_, t);
|
|
||||||
|
|
||||||
// get the total of everything
|
// get the total of everything
|
||||||
let integralEndY = (t: t) => {
|
let integralEndY = (t: t) => {
|
||||||
PointSetDist.T.Integral.sum(
|
PointSetDist.T.Integral.sum(toPointSetDist(t))
|
||||||
toPointSetDist(t),
|
}
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
// TODO: Fix this below, obviously. Adjust for limits
|
// TODO: Fix this below, obviously. Adjust for limits
|
||||||
let integralXtoY = (f, t: t) => {
|
let integralXtoY = (f, t: t) => {
|
||||||
PointSetDist.T.Integral.xToY(
|
PointSetDist.T.Integral.xToY(f, toPointSetDist(t))
|
||||||
f,
|
}
|
||||||
toPointSetDist(t),
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
// TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
|
// TODO: This part is broken when there is a limit, if this is supposed to be taken into account.
|
||||||
let integralYtoX = (f, t: t) => {
|
let integralYtoX = (f, t: t) => {
|
||||||
PointSetDist.T.Integral.yToX(f, toPointSetDist(t));
|
PointSetDist.T.Integral.yToX(f, toPointSetDist(t))
|
||||||
};
|
}
|
||||||
|
|
||||||
let mean = (t: t) => {
|
let mean = (t: t) => {
|
||||||
PointSetDist.T.mean(t.pointSetDist);
|
PointSetDist.T.mean(t.pointSetDist)
|
||||||
};
|
}
|
||||||
let variance = (t: t) => PointSetDist.T.variance(t.pointSetDist);
|
let variance = (t: t) => PointSetDist.T.variance(t.pointSetDist)
|
||||||
});
|
})
|
||||||
|
|
|
@ -22,7 +22,7 @@ let makeSymbolicFromTwoFloats = (name, fn) =>
|
||||||
~inputTypes=[#Float, #Float],
|
~inputTypes=[#Float, #Float],
|
||||||
~run=x =>
|
~run=x =>
|
||||||
switch x {
|
switch x {
|
||||||
| [#Float(a), #Float(b)] => fn(a, b) |> E.R.fmap(r => (#SymbolicDist(r)))
|
| [#Float(a), #Float(b)] => fn(a, b) |> E.R.fmap(r => #SymbolicDist(r))
|
||||||
| e => wrongInputsError(e)
|
| e => wrongInputsError(e)
|
||||||
},
|
},
|
||||||
(),
|
(),
|
||||||
|
@ -90,7 +90,8 @@ let floatFromDist = (
|
||||||
switch t {
|
switch t {
|
||||||
| #SymbolicDist(s) =>
|
| #SymbolicDist(s) =>
|
||||||
SymbolicDist.T.operate(distToFloatOp, s) |> E.R.bind(_, v => Ok(#SymbolicDist(#Float(v))))
|
SymbolicDist.T.operate(distToFloatOp, s) |> E.R.bind(_, v => Ok(#SymbolicDist(#Float(v))))
|
||||||
| #RenderedDist(rs) => PointSetDist.operate(distToFloatOp, rs) |> (v => Ok(#SymbolicDist(#Float(v))))
|
| #RenderedDist(rs) =>
|
||||||
|
PointSetDist.operate(distToFloatOp, rs) |> (v => Ok(#SymbolicDist(#Float(v))))
|
||||||
}
|
}
|
||||||
|
|
||||||
let verticalScaling = (scaleOp, rs, scaleBy) => {
|
let verticalScaling = (scaleOp, rs, scaleBy) => {
|
||||||
|
@ -125,10 +126,15 @@ module Multimodal = {
|
||||||
->E.R.bind(TypeSystem.TypedValue.toArray)
|
->E.R.bind(TypeSystem.TypedValue.toArray)
|
||||||
->E.R.bind(r => r |> E.A.fmap(TypeSystem.TypedValue.toFloat) |> E.A.R.firstErrorOrOpen)
|
->E.R.bind(r => r |> E.A.fmap(TypeSystem.TypedValue.toFloat) |> E.A.R.firstErrorOrOpen)
|
||||||
|
|
||||||
E.R.merge(dists, weights) -> E.R.bind(((a, b)) =>
|
E.R.merge(dists, weights)->E.R.bind(((a, b)) =>
|
||||||
E.A.length(b) > E.A.length(a) ?
|
E.A.length(b) > E.A.length(a)
|
||||||
Error("Too many weights provided") :
|
? Error("Too many weights provided")
|
||||||
Ok(E.A.zipMaxLength(a, b) |> E.A.fmap(((a, b)) => (a |> E.O.toExn(""), b |> E.O.default(1.0))))
|
: Ok(
|
||||||
|
E.A.zipMaxLength(a, b) |> E.A.fmap(((a, b)) => (
|
||||||
|
a |> E.O.toExn(""),
|
||||||
|
b |> E.O.default(1.0),
|
||||||
|
)),
|
||||||
|
)
|
||||||
)
|
)
|
||||||
| _ => Error("Needs items")
|
| _ => Error("Needs items")
|
||||||
}
|
}
|
||||||
|
|
|
@ -86,11 +86,7 @@ module TypedValue = {
|
||||||
|> E.R.fmap(r => #Array(r))
|
|> E.R.fmap(r => #Array(r))
|
||||||
| (#Hash(named), #Hash(r)) =>
|
| (#Hash(named), #Hash(r)) =>
|
||||||
let keyValues =
|
let keyValues =
|
||||||
named |> E.A.fmap(((name, intendedType)) => (
|
named |> E.A.fmap(((name, intendedType)) => (name, intendedType, Hash.getByName(r, name)))
|
||||||
name,
|
|
||||||
intendedType,
|
|
||||||
Hash.getByName(r, name),
|
|
||||||
))
|
|
||||||
let typedHash =
|
let typedHash =
|
||||||
keyValues
|
keyValues
|
||||||
|> E.A.fmap(((name, intendedType, optionNode)) =>
|
|> E.A.fmap(((name, intendedType, optionNode)) =>
|
||||||
|
@ -180,11 +176,7 @@ module Function = {
|
||||||
_coerceInputNodes(evaluationParams, t.inputTypes, t.shouldCoerceTypes),
|
_coerceInputNodes(evaluationParams, t.inputTypes, t.shouldCoerceTypes),
|
||||||
)
|
)
|
||||||
|
|
||||||
let run = (
|
let run = (evaluationParams: ASTTypes.evaluationParams, inputNodes: inputNodes, t: t) =>
|
||||||
evaluationParams: ASTTypes.evaluationParams,
|
|
||||||
inputNodes: inputNodes,
|
|
||||||
t: t,
|
|
||||||
) =>
|
|
||||||
inputsToTypedValues(evaluationParams, inputNodes, t)->E.R.bind(t.run)
|
inputsToTypedValues(evaluationParams, inputNodes, t)->E.R.bind(t.run)
|
||||||
|> (
|
|> (
|
||||||
x =>
|
x =>
|
||||||
|
|
|
@ -121,17 +121,14 @@ module MathAdtToDistDst = {
|
||||||
| (_, _, Ok(mu), Ok(sigma)) => Ok(#FunctionCall("lognormal", [mu, sigma]))
|
| (_, _, Ok(mu), Ok(sigma)) => Ok(#FunctionCall("lognormal", [mu, sigma]))
|
||||||
| _ => Error("Lognormal distribution needs either mean and stdev or mu and sigma")
|
| _ => Error("Lognormal distribution needs either mean and stdev or mu and sigma")
|
||||||
}
|
}
|
||||||
| _ =>
|
| _ => parseArgs() |> E.R.fmap((args: array<ASTTypes.node>) => #FunctionCall("lognormal", args))
|
||||||
parseArgs() |> E.R.fmap((args: array<ASTTypes.node>) =>
|
|
||||||
#FunctionCall("lognormal", args)
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Error("Dotwise exponentiation needs two operands")
|
// Error("Dotwise exponentiation needs two operands")
|
||||||
let operationParser = (
|
let operationParser = (name: string, args: result<array<ASTTypes.node>, string>): result<
|
||||||
name: string,
|
ASTTypes.node,
|
||||||
args: result<array<ASTTypes.node>, string>,
|
string,
|
||||||
): result<ASTTypes.node, string> => {
|
> => {
|
||||||
let toOkAlgebraic = r => Ok(#AlgebraicCombination(r))
|
let toOkAlgebraic = r => Ok(#AlgebraicCombination(r))
|
||||||
let toOkPointwise = r => Ok(#PointwiseCombination(r))
|
let toOkPointwise = r => Ok(#PointwiseCombination(r))
|
||||||
let toOkTruncate = r => Ok(#Truncate(r))
|
let toOkTruncate = r => Ok(#Truncate(r))
|
||||||
|
@ -169,10 +166,7 @@ module MathAdtToDistDst = {
|
||||||
}
|
}
|
||||||
|
|
||||||
let functionParser = (
|
let functionParser = (
|
||||||
nodeParser: MathJsonToMathJsAdt.arg => Belt.Result.t<
|
nodeParser: MathJsonToMathJsAdt.arg => Belt.Result.t<ASTTypes.node, string>,
|
||||||
ASTTypes.node,
|
|
||||||
string,
|
|
||||||
>,
|
|
||||||
name: string,
|
name: string,
|
||||||
args: array<MathJsonToMathJsAdt.arg>,
|
args: array<MathJsonToMathJsAdt.arg>,
|
||||||
): result<ASTTypes.node, string> => {
|
): result<ASTTypes.node, string> => {
|
||||||
|
@ -224,17 +218,11 @@ module MathAdtToDistDst = {
|
||||||
)
|
)
|
||||||
Ok(hash)
|
Ok(hash)
|
||||||
}
|
}
|
||||||
| name =>
|
| name => parseArgs() |> E.R.fmap((args: array<ASTTypes.node>) => #FunctionCall(name, args))
|
||||||
parseArgs() |> E.R.fmap((args: array<ASTTypes.node>) =>
|
|
||||||
#FunctionCall(name, args)
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let rec nodeParser: MathJsonToMathJsAdt.arg => result<
|
let rec nodeParser: MathJsonToMathJsAdt.arg => result<ASTTypes.node, string> = x =>
|
||||||
ASTTypes.node,
|
|
||||||
string,
|
|
||||||
> = x =>
|
|
||||||
switch x {
|
switch x {
|
||||||
| Value(f) => Ok(#SymbolicDist(#Float(f)))
|
| Value(f) => Ok(#SymbolicDist(#Float(f)))
|
||||||
| Symbol(sym) => Ok(#Symbol(sym))
|
| Symbol(sym) => Ok(#Symbol(sym))
|
||||||
|
@ -267,8 +255,7 @@ module MathAdtToDistDst = {
|
||||||
blocks |> E.A.fmap(b => topLevel(b)) |> E.A.R.firstErrorOrOpen |> E.R.fmap(E.A.concatMany)
|
blocks |> E.A.fmap(b => topLevel(b)) |> E.A.R.firstErrorOrOpen |> E.R.fmap(E.A.concatMany)
|
||||||
}
|
}
|
||||||
|
|
||||||
let run = (r): result<ASTTypes.program, string> =>
|
let run = (r): result<ASTTypes.program, string> => r |> MathAdtCleaner.run |> topLevel
|
||||||
r |> MathAdtCleaner.run |> topLevel
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* The MathJs parser doesn't support '.+' syntax, but we want it because it
|
/* The MathJs parser doesn't support '.+' syntax, but we want it because it
|
||||||
|
|
|
@ -39,17 +39,16 @@ module Inputs = {
|
||||||
type exportDistribution = [
|
type exportDistribution = [
|
||||||
| #DistPlus(DistPlus.t)
|
| #DistPlus(DistPlus.t)
|
||||||
| #Float(float)
|
| #Float(float)
|
||||||
| #Function((float) => Belt.Result.t<DistPlus.t,string>)
|
| #Function(float => Belt.Result.t<DistPlus.t, string>)
|
||||||
]
|
]
|
||||||
|
|
||||||
type exportEnv = array<(string, ASTTypes.node)>
|
type exportEnv = array<(string, ASTTypes.node)>
|
||||||
|
|
||||||
type exportType = {
|
type exportType = {
|
||||||
environment : exportEnv,
|
environment: exportEnv,
|
||||||
exports: array<exportDistribution>
|
exports: array<exportDistribution>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
module Internals = {
|
module Internals = {
|
||||||
let addVariable = (
|
let addVariable = (
|
||||||
{samplingInputs, squiggleString, environment}: Inputs.inputs,
|
{samplingInputs, squiggleString, environment}: Inputs.inputs,
|
||||||
|
@ -58,9 +57,7 @@ module Internals = {
|
||||||
): Inputs.inputs => {
|
): Inputs.inputs => {
|
||||||
samplingInputs: samplingInputs,
|
samplingInputs: samplingInputs,
|
||||||
squiggleString: squiggleString,
|
squiggleString: squiggleString,
|
||||||
environment: ASTTypes.Environment.update(environment, str, _ => Some(
|
environment: ASTTypes.Environment.update(environment, str, _ => Some(node)),
|
||||||
node,
|
|
||||||
)),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type outputs = {
|
type outputs = {
|
||||||
|
@ -76,8 +73,7 @@ module Internals = {
|
||||||
pointSetDistLength: inputs.samplingInputs.pointDistLength |> E.O.default(10000),
|
pointSetDistLength: inputs.samplingInputs.pointDistLength |> E.O.default(10000),
|
||||||
}
|
}
|
||||||
|
|
||||||
let runNode = (inputs, node) =>
|
let runNode = (inputs, node) => AST.toLeaf(makeInputs(inputs), inputs.environment, node)
|
||||||
AST.toLeaf(makeInputs(inputs), inputs.environment, node)
|
|
||||||
|
|
||||||
let renderIfNeeded = (inputs: Inputs.inputs, node: ASTTypes.node): result<
|
let renderIfNeeded = (inputs: Inputs.inputs, node: ASTTypes.node): result<
|
||||||
ASTTypes.node,
|
ASTTypes.node,
|
||||||
|
@ -106,16 +102,14 @@ module Internals = {
|
||||||
let outputToDistPlus = (inputs: Inputs.inputs, pointSetDist: PointSetTypes.pointSetDist) =>
|
let outputToDistPlus = (inputs: Inputs.inputs, pointSetDist: PointSetTypes.pointSetDist) =>
|
||||||
DistPlus.make(~pointSetDist, ~squiggleString=Some(inputs.squiggleString), ())
|
DistPlus.make(~pointSetDist, ~squiggleString=Some(inputs.squiggleString), ())
|
||||||
|
|
||||||
let rec returnDist = (functionInfo : (array<string>, ASTTypes.node),
|
let rec returnDist = (
|
||||||
inputs : Inputs.inputs,
|
functionInfo: (array<string>, ASTTypes.node),
|
||||||
env : ASTTypes.environment) => {
|
inputs: Inputs.inputs,
|
||||||
(input : float) => {
|
env: ASTTypes.environment,
|
||||||
let foo: Inputs.inputs = {...inputs, environment: env};
|
) => {
|
||||||
evaluateFunction(
|
(input: float) => {
|
||||||
foo,
|
let foo: Inputs.inputs = {...inputs, environment: env}
|
||||||
functionInfo,
|
evaluateFunction(foo, functionInfo, [#SymbolicDist(#Float(input))]) |> E.R.bind(_, a =>
|
||||||
[#SymbolicDist(#Float(input))],
|
|
||||||
) |> E.R.bind(_, a =>
|
|
||||||
switch a {
|
switch a {
|
||||||
| #DistPlus(d) => Ok(DistPlus.T.normalize(d))
|
| #DistPlus(d) => Ok(DistPlus.T.normalize(d))
|
||||||
| n =>
|
| n =>
|
||||||
|
@ -126,11 +120,10 @@ module Internals = {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// TODO: Consider using ExpressionTypes.ExpressionTree.getFloat or similar in this function
|
// TODO: Consider using ExpressionTypes.ExpressionTree.getFloat or similar in this function
|
||||||
and coersionToExportedTypes = (
|
and coersionToExportedTypes = (inputs, env: ASTTypes.environment, ex: ASTTypes.node): result<
|
||||||
inputs,
|
exportDistribution,
|
||||||
env: ASTTypes.environment,
|
string,
|
||||||
ex: ASTTypes.node,
|
> =>
|
||||||
): result<exportDistribution, string> =>
|
|
||||||
ex
|
ex
|
||||||
|> renderIfNeeded(inputs)
|
|> renderIfNeeded(inputs)
|
||||||
|> E.R.bind(_, x =>
|
|> E.R.bind(_, x =>
|
||||||
|
@ -143,56 +136,45 @@ module Internals = {
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
and evaluateFunction = (
|
and evaluateFunction = (inputs: Inputs.inputs, fn: (array<string>, ASTTypes.node), fnInputs) => {
|
||||||
inputs: Inputs.inputs,
|
let output = AST.runFunction(makeInputs(inputs), inputs.environment, fnInputs, fn)
|
||||||
fn: (array<string>, ASTTypes.node),
|
|
||||||
fnInputs,
|
|
||||||
) => {
|
|
||||||
let output = AST.runFunction(
|
|
||||||
makeInputs(inputs),
|
|
||||||
inputs.environment,
|
|
||||||
fnInputs,
|
|
||||||
fn,
|
|
||||||
)
|
|
||||||
output |> E.R.bind(_, coersionToExportedTypes(inputs, inputs.environment))
|
output |> E.R.bind(_, coersionToExportedTypes(inputs, inputs.environment))
|
||||||
}
|
}
|
||||||
|
|
||||||
let runProgram = (inputs: Inputs.inputs, p: ASTTypes.program) => {
|
let runProgram = (inputs: Inputs.inputs, p: ASTTypes.program) => {
|
||||||
let ins = ref(inputs)
|
let ins = ref(inputs)
|
||||||
p
|
p
|
||||||
|> E.A.fmap(x =>
|
|> E.A.fmap(x =>
|
||||||
switch x {
|
switch x {
|
||||||
| #Assignment(name, node) =>
|
| #Assignment(name, node) =>
|
||||||
ins := addVariable(ins.contents, name, node)
|
ins := addVariable(ins.contents, name, node)
|
||||||
None
|
None
|
||||||
| #Expression(node) =>
|
| #Expression(node) => Some(runNode(ins.contents, node))
|
||||||
Some(runNode(ins.contents, node))
|
}
|
||||||
}
|
)
|
||||||
)
|
|> E.A.O.concatSomes
|
||||||
|> E.A.O.concatSomes
|
|> E.A.R.firstErrorOrOpen
|
||||||
|
|> E.R.bind(_, d =>
|
||||||
|
d
|
||||||
|
|> E.A.fmap(x => coersionToExportedTypes(inputs, ins.contents.environment, x))
|
||||||
|> E.A.R.firstErrorOrOpen
|
|> E.A.R.firstErrorOrOpen
|
||||||
|> E.R.bind(_, d =>
|
)
|
||||||
d
|
|> E.R.fmap(ex => {
|
||||||
|> E.A.fmap(x => coersionToExportedTypes(inputs, ins.contents.environment, x))
|
environment: Belt.Map.String.toArray(ins.contents.environment),
|
||||||
|> E.A.R.firstErrorOrOpen
|
exports: ex,
|
||||||
)
|
})
|
||||||
|> E.R.fmap(ex =>
|
|
||||||
{
|
|
||||||
environment: Belt.Map.String.toArray(ins.contents.environment),
|
|
||||||
exports: ex
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let inputsToLeaf = (inputs: Inputs.inputs) =>
|
let inputsToLeaf = (inputs: Inputs.inputs) =>
|
||||||
Parser.fromString(inputs.squiggleString) |> E.R.bind(_, g => runProgram(inputs, g))
|
Parser.fromString(inputs.squiggleString) |> E.R.bind(_, g => runProgram(inputs, g))
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@genType
|
@genType
|
||||||
let runAll : (string, Inputs.SamplingInputs.t, exportEnv) => result<exportType,string> =
|
let runAll: (string, Inputs.SamplingInputs.t, exportEnv) => result<exportType, string> = (
|
||||||
(squiggleString, samplingInputs, environment) => {
|
squiggleString,
|
||||||
|
samplingInputs,
|
||||||
|
environment,
|
||||||
|
) => {
|
||||||
let inputs = Inputs.make(
|
let inputs = Inputs.make(
|
||||||
~samplingInputs,
|
~samplingInputs,
|
||||||
~squiggleString,
|
~squiggleString,
|
||||||
|
|
|
@@ -5,5 +5,6 @@ module Extra = Reducer_Extra
 module Js = Reducer_Js
 module MathJs = Reducer_MathJs

-let eval = Expression.eval
+type expressionValue = Reducer_Expression.expressionValue
+let evaluate = Expression.eval
 let parse = Expression.parse

@@ -4,5 +4,10 @@ module Expression = Reducer_Expression
 module Extra = Reducer_Extra
 module Js = Reducer_Js
 module MathJs = Reducer_MathJs
-let eval: string => result<Expression.expressionValue, ErrorValue.errorValue>
+
+@genType
+type expressionValue = ReducerInterface_ExpressionValue.expressionValue
+
+@genType
+let evaluate: string => result<expressionValue, Reducer_ErrorValue.errorValue>
 let parse: string => result<Expression.expression, ErrorValue.errorValue>

@@ -14,8 +14,8 @@ exception TestRescriptException
 let callInternal = (call: functionCall): result<'b, errorValue> => {
   let callMathJs = (call: functionCall): result<'b, errorValue> =>
     switch call {
-    | ("jsraise", [msg]) => Js.Exn.raiseError(toString(msg)) // For Tests
-    | ("resraise", _) => raise(TestRescriptException) // For Tests
+    | ("javascriptraise", [msg]) => Js.Exn.raiseError(toString(msg)) // For Tests
+    | ("rescriptraise", _) => raise(TestRescriptException) // For Tests
    | call => call->toStringFunctionCall->MathJs.Eval.eval
    }

@@ -58,7 +58,7 @@ let callInternal = (call: functionCall): result<'b, errorValue> => {
 }

 /*
-  Lisp engine uses Result monad while reducing expressions
+  Reducer uses Result monad while reducing expressions
 */
 let dispatch = (call: functionCall): result<expressionValue, errorValue> =>
   try {

@@ -1,15 +1,24 @@
+@genType
 type errorValue =
   | REArrayIndexNotFound(string, int)
+  | REAssignmentExpected
+  | REExpressionExpected
   | REFunctionExpected(string)
   | REJavaScriptExn(option<string>, option<string>) // Javascript Exception
+  | REMacroNotFound(string)
   | RERecordPropertyNotFound(string, string)
+  | RESymbolNotFound(string)
+  | RESyntaxError(string)
   | RETodo(string) // To do

 type t = errorValue

+@genType
 let errorToString = err =>
   switch err {
   | REArrayIndexNotFound(msg, index) => `${msg}: ${Js.String.make(index)}`
+  | REAssignmentExpected => "Assignment expected"
+  | REExpressionExpected => "Expression expected"
   | REFunctionExpected(msg) => `Function expected: ${msg}`
   | REJavaScriptExn(omsg, oname) => {
       let answer = "JS Exception:"
@@ -23,6 +32,9 @@ let errorToString = err =>
     }
     answer
   }
+  | REMacroNotFound(macro) => `Macro not found: ${macro}`
   | RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
+  | RESymbolNotFound(symbolName) => `${symbolName} is not defined`
+  | RESyntaxError(desc) => `Syntax Error: ${desc}`
   | RETodo(msg) => `TODO: ${msg}`
 }

@ -11,10 +11,11 @@ type expressionValue = ExpressionValue.expressionValue
|
||||||
type t = expression
|
type t = expression
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Shows the Lisp Code as text lisp code
|
Shows the expression as text of expression
|
||||||
*/
|
*/
|
||||||
let rec toString = expression =>
|
let rec toString = expression =>
|
||||||
switch expression {
|
switch expression {
|
||||||
|
| T.EBindings(bindings) => "$$bound"
|
||||||
| T.EList(aList) =>
|
| T.EList(aList) =>
|
||||||
`(${Belt.List.map(aList, aValue => toString(aValue))
|
`(${Belt.List.map(aList, aValue => toString(aValue))
|
||||||
->Extra.List.interperse(" ")
|
->Extra.List.interperse(" ")
|
||||||
|
@ -30,7 +31,7 @@ let toStringResult = codeResult =>
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Converts a MathJs code to Lisp Code
|
Converts a MathJs code to expression
|
||||||
*/
|
*/
|
||||||
let parse_ = (expr: string, parser, converter): result<t, errorValue> =>
|
let parse_ = (expr: string, parser, converter): result<t, errorValue> =>
|
||||||
expr->parser->Result.flatMap(node => converter(node))
|
expr->parser->Result.flatMap(node => converter(node))
|
||||||
|
@ -38,54 +39,141 @@ let parse_ = (expr: string, parser, converter): result<t, errorValue> =>
|
||||||
let parse = (mathJsCode: string): result<t, errorValue> =>
|
let parse = (mathJsCode: string): result<t, errorValue> =>
|
||||||
mathJsCode->parse_(MathJs.Parse.parse, MathJs.ToExpression.fromNode)
|
mathJsCode->parse_(MathJs.Parse.parse, MathJs.ToExpression.fromNode)
|
||||||
|
|
||||||
module MapString = Belt.Map.String
|
let defaultBindings: T.bindings = Belt.Map.String.empty
|
||||||
type bindings = MapString.t<unit>
|
|
||||||
let defaultBindings: bindings = MapString.fromArray([])
|
|
||||||
// TODO Define bindings for function execution context
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
After reducing each level of code tree, we have a value list to evaluate
|
Recursively evaluate/reduce the expression (Lisp AST)
|
||||||
*/
|
*/
|
||||||
let reduceValueList = (valueList: list<expressionValue>): result<expressionValue, 'e> =>
|
let rec reduceExpression = (expression: t, bindings: T.bindings): result<expressionValue, 'e> => {
|
||||||
switch valueList {
|
/*
|
||||||
| list{EvSymbol(fName), ...args} => (fName, args->Belt.List.toArray)->BuiltIn.dispatch
|
After reducing each level of expression(Lisp AST), we have a value list to evaluate
|
||||||
| _ => valueList->Belt.List.toArray->ExpressionValue.EvArray->Ok
|
*/
|
||||||
}
|
let reduceValueList = (valueList: list<expressionValue>): result<expressionValue, 'e> =>
|
||||||
|
switch valueList {
|
||||||
/*
|
| list{EvCall(fName), ...args} => (fName, args->Belt.List.toArray)->BuiltIn.dispatch
|
||||||
Recursively evaluate/reduce the code tree
|
| _ => valueList->Belt.List.toArray->ExpressionValue.EvArray->Ok
|
||||||
*/
|
|
||||||
let rec reduceExpression = (expression: t, bindings): result<expressionValue, 'e> =>
|
|
||||||
switch expression {
|
|
||||||
| T.EValue(value) => value->Ok
|
|
||||||
| T.EList(list) => {
|
|
||||||
let racc: result<list<expressionValue>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
|
|
||||||
racc,
|
|
||||||
each: expression,
|
|
||||||
) =>
|
|
||||||
racc->Result.flatMap(acc => {
|
|
||||||
each
|
|
||||||
->reduceExpression(bindings)
|
|
||||||
->Result.flatMap(newNode => {
|
|
||||||
acc->Belt.List.add(newNode)->Ok
|
|
||||||
})
|
|
||||||
})
|
|
||||||
)
|
|
||||||
racc->Result.flatMap(acc => acc->reduceValueList)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
Macros are like functions but instead of taking values as parameters,
|
||||||
|
they take expressions as parameters and return a new expression.
|
||||||
|
Macros are used to define language building blocks. They are like Lisp macros.
|
||||||
|
*/
|
||||||
|
let doMacroCall = (list: list<t>, bindings: T.bindings): result<t, 'e> => {
|
||||||
|
let dispatchMacroCall = (list: list<t>, bindings: T.bindings): result<t, 'e> => {
|
||||||
|
let rec replaceSymbols = (expression: t, bindings: T.bindings): result<t, errorValue> =>
|
||||||
|
switch expression {
|
||||||
|
| T.EValue(EvSymbol(aSymbol)) =>
|
||||||
|
switch bindings->Belt.Map.String.get(aSymbol) {
|
||||||
|
| Some(boundExpression) => boundExpression->Ok
|
||||||
|
| None => RESymbolNotFound(aSymbol)->Error
|
||||||
|
}
|
||||||
|
| T.EValue(_) => expression->Ok
|
||||||
|
| T.EBindings(_) => expression->Ok
|
||||||
|
| T.EList(list) => {
|
||||||
|
let racc = list->Belt.List.reduceReverse(Ok(list{}), (racc, each: expression) =>
|
||||||
|
racc->Result.flatMap(acc => {
|
||||||
|
each
|
||||||
|
->replaceSymbols(bindings)
|
||||||
|
->Result.flatMap(newNode => {
|
||||||
|
acc->Belt.List.add(newNode)->Ok
|
||||||
|
})
|
||||||
|
})
|
||||||
|
)
|
||||||
|
racc->Result.map(acc => acc->T.EList)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let doBindStatement = (statement: t, bindings: T.bindings) => {
|
||||||
|
switch statement {
|
||||||
|
| T.EList(list{T.EValue(EvCall("$let")), T.EValue(EvSymbol(aSymbol)), expression}) => {
|
||||||
|
let rNewExpression = replaceSymbols(expression, bindings)
|
||||||
|
rNewExpression->Result.map(newExpression =>
|
||||||
|
Belt.Map.String.set(bindings, aSymbol, newExpression)->T.EBindings
|
||||||
|
)
|
||||||
|
}
|
||||||
|
| _ => REAssignmentExpected->Error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let doBindExpression = (expression: t, bindings: T.bindings) => {
|
||||||
|
switch expression {
|
||||||
|
| T.EList(list{T.EValue(EvCall("$let")), ..._}) => REExpressionExpected->Error
|
||||||
|
| _ => replaceSymbols(expression, bindings)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch list {
|
||||||
|
| list{T.EValue(EvCall("$$bindings"))} => bindings->T.EBindings->Ok
|
||||||
|
|
||||||
|
| list{T.EValue(EvCall("$$bindStatement")), T.EBindings(bindings), statement} =>
|
||||||
|
doBindStatement(statement, bindings)
|
||||||
|
| list{T.EValue(EvCall("$$bindExpression")), T.EBindings(bindings), expression} =>
|
||||||
|
doBindExpression(expression, bindings)
|
||||||
|
| _ => list->T.EList->Ok
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
list->dispatchMacroCall(bindings)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let rec seekMacros = (expression: t, bindings: T.bindings): result<t, 'e> =>
|
||||||
|
switch expression {
|
||||||
|
| T.EValue(value) => expression->Ok
|
||||||
|
| T.EList(list) => {
|
||||||
|
let racc: result<list<t>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
|
||||||
|
racc,
|
||||||
|
each: expression,
|
||||||
|
) =>
|
||||||
|
racc->Result.flatMap(acc => {
|
||||||
|
each
|
||||||
|
->seekMacros(bindings)
|
||||||
|
->Result.flatMap(newNode => {
|
||||||
|
acc->Belt.List.add(newNode)->Ok
|
||||||
|
})
|
||||||
|
})
|
||||||
|
)
|
||||||
|
racc->Result.flatMap(acc => acc->doMacroCall(bindings))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let rec reduceExpandedExpression = (expression: t): result<expressionValue, 'e> =>
|
||||||
|
switch expression {
|
||||||
|
| T.EValue(value) => value->Ok
|
||||||
|
| T.EList(list) => {
|
||||||
|
let racc: result<list<expressionValue>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
|
||||||
|
racc,
|
||||||
|
each: expression,
|
||||||
|
) =>
|
||||||
|
racc->Result.flatMap(acc => {
|
||||||
|
each
|
||||||
|
->reduceExpandedExpression
|
||||||
|
->Result.flatMap(newNode => {
|
||||||
|
acc->Belt.List.add(newNode)->Ok
|
||||||
|
})
|
||||||
|
})
|
||||||
|
)
|
||||||
|
racc->Result.flatMap(acc => acc->reduceValueList)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let rExpandedExpression: result<t, 'e> = expression->seekMacros(bindings)
|
||||||
|
rExpandedExpression->Result.flatMap(expandedExpression =>
|
||||||
|
expandedExpression->reduceExpandedExpression
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
let evalWBindingsExpression = (aExpression, bindings): result<expressionValue, 'e> =>
|
let evalWBindingsExpression = (aExpression, bindings): result<expressionValue, 'e> =>
|
||||||
reduceExpression(aExpression, bindings)
|
reduceExpression(aExpression, bindings)
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Evaluates MathJs code via Lisp using bindings and answers the result
|
Evaluates MathJs code via Reducer using bindings and answers the result
|
||||||
*/
|
*/
|
||||||
let evalWBindings = (codeText: string, bindings: bindings) => {
|
let evalWBindings = (codeText: string, bindings: T.bindings) => {
|
||||||
parse(codeText)->Result.flatMap(code => code->evalWBindingsExpression(bindings))
|
parse(codeText)->Result.flatMap(code => code->evalWBindingsExpression(bindings))
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Evaluates MathJs code via Lisp and answers the result
|
Evaluates MathJs code via Reducer and answers the result
|
||||||
*/
|
*/
|
||||||
let eval = (code: string) => evalWBindings(code, defaultBindings)
|
let eval = (code: string) => evalWBindings(code, defaultBindings)
|
||||||
|
|
|
@ -1,28 +0,0 @@
|
||||||
module Result = Belt.Result
|
|
||||||
module T = Reducer_Expression_T
|
|
||||||
type expression = T.expression
|
|
||||||
type expressionValue = ReducerInterface.ExpressionValue.expressionValue
|
|
||||||
type t = expression
|
|
||||||
let toString: T.expression => Js.String.t
|
|
||||||
let toStringResult: result<T.expression, 'a> => string
|
|
||||||
let parse: string => result<expression, Reducer_ErrorValue.t>
|
|
||||||
module MapString = Belt.Map.String
|
|
||||||
type bindings = MapString.t<unit>
|
|
||||||
let defaultBindings: bindings
|
|
||||||
let reduceValueList: list<expressionValue> => result<
|
|
||||||
expressionValue,
|
|
||||||
Reducer_ErrorValue.t,
|
|
||||||
>
|
|
||||||
let reduceExpression: (expression, 'a) => result<
|
|
||||||
expressionValue,
|
|
||||||
Reducer_ErrorValue.t,
|
|
||||||
>
|
|
||||||
let evalWBindingsExpression: (expression, 'a) => result<
|
|
||||||
expressionValue,
|
|
||||||
Reducer_ErrorValue.t,
|
|
||||||
>
|
|
||||||
let evalWBindings: (string, bindings) => Result.t<
|
|
||||||
expressionValue,
|
|
||||||
Reducer_ErrorValue.t,
|
|
||||||
>
|
|
||||||
let eval: string => Result.t<expressionValue, Reducer_ErrorValue.t>
|
|
|
@@ -1,5 +1,15 @@
 open ReducerInterface.ExpressionValue
 
+/*
+  An expression is a Lisp AST. An expression is either a primitive value or a list of expressions.
+  In the case of a list of expressions (e1, e2, e3, ...eN), the semantics are
+    apply e1, e2 -> apply e3 -> ... -> apply eN
+  This is Lisp semantics. It holds true in both eager and lazy evaluation.
+  A Lisp AST contains only expressions/primitive values to apply to their left.
+  The act of defining the semantics of a functional language is to write it in terms of a Lisp AST.
+*/
 type rec expression =
   | EList(list<expression>) // A list to map-reduce
   | EValue(expressionValue) // Irreducible built-in value. Reducer should not know the internals. External libraries are responsible
+  | EBindings(bindings) // let/def kind of statements return bindings
+and bindings = Belt.Map.String.t<expression>
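For orientation (editor's addition, not part of the diff), here is how a single call maps onto the expression type above; EvCall is used later in this PR, while EvNumber is assumed to be the numeric constructor in ReducerInterface.ExpressionValue:

// Editor's sketch, not part of the diff: the call `add(1, 2)` as a Lisp AST.
let example: expression = EList(list{
  EValue(EvCall("add")), // e1: what to apply
  EValue(EvNumber(1.)), // e2
  EValue(EvNumber(2.)), // e3
})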
@@ -7,25 +7,31 @@ open Reducer_ErrorValue
 
 type node = {"type": string, "isNode": bool, "comment": string}
 type arrayNode = {...node, "items": array<node>}
-//assignmentNode
-//blockNode
+type block = {"node": node}
+type blockNode = {...node, "blocks": array<block>}
 //conditionalNode
 type constantNode = {...node, "value": unit}
 //functionAssignmentNode
-type functionNode = {...node, "fn": string, "args": array<node>}
 type indexNode = {...node, "dimensions": array<node>}
 type objectNode = {...node, "properties": Js.Dict.t<node>}
-type accessorNode = {...node, "object": node, "index": indexNode}
+type accessorNode = {...node, "object": node, "index": indexNode, "name": string}
-type operatorNode = {...functionNode, "op": string}
 
-//parenthesisNode
 type parenthesisNode = {...node, "content": node}
 //rangeNode
 //relationalNode
 type symbolNode = {...node, "name": string}
+type functionNode = {...node, "fn": unit, "args": array<node>}
+type operatorNode = {...functionNode, "op": string}
+type assignmentNode = {...node, "object": symbolNode, "value": node}
+type assignmentNodeWAccessor = {...node, "object": accessorNode, "value": node}
+type assignmentNodeWIndex = {...assignmentNodeWAccessor, "index": Js.null<indexNode>}
 
 external castAccessorNode: node => accessorNode = "%identity"
 external castArrayNode: node => arrayNode = "%identity"
+external castAssignmentNode: node => assignmentNode = "%identity"
+external castAssignmentNodeWAccessor: node => assignmentNodeWAccessor = "%identity"
+external castAssignmentNodeWIndex: node => assignmentNodeWIndex = "%identity"
+external castBlockNode: node => blockNode = "%identity"
 external castConstantNode: node => constantNode = "%identity"
 external castFunctionNode: node => functionNode = "%identity"
 external castIndexNode: node => indexNode = "%identity"
@@ -50,6 +56,8 @@ let parse = (expr: string): result<node, errorValue> =>
 type mathJsNode =
   | MjAccessorNode(accessorNode)
   | MjArrayNode(arrayNode)
+  | MjAssignmentNode(assignmentNode)
+  | MjBlockNode(blockNode)
   | MjConstantNode(constantNode)
   | MjFunctionNode(functionNode)
   | MjIndexNode(indexNode)
@@ -58,10 +66,21 @@ type mathJsNode =
   | MjParenthesisNode(parenthesisNode)
   | MjSymbolNode(symbolNode)
 
-let castNodeType = (node: node) =>
+let castNodeType = (node: node) => {
+  let decideAssignmentNode = node => {
+    let iNode = node->castAssignmentNodeWIndex
+    if Js.null == iNode["index"] && iNode["object"]["type"] == "SymbolNode" {
+      node->castAssignmentNode->MjAssignmentNode->Ok
+    } else {
+      RESyntaxError("Assignment to index or property not supported")->Error
+    }
+  }
+
   switch node["type"] {
   | "AccessorNode" => node->castAccessorNode->MjAccessorNode->Ok
   | "ArrayNode" => node->castArrayNode->MjArrayNode->Ok
+  | "AssignmentNode" => node->decideAssignmentNode
+  | "BlockNode" => node->castBlockNode->MjBlockNode->Ok
   | "ConstantNode" => node->castConstantNode->MjConstantNode->Ok
   | "FunctionNode" => node->castFunctionNode->MjFunctionNode->Ok
   | "IndexNode" => node->castIndexNode->MjIndexNode->Ok
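A quick reading of decideAssignmentNode above (editor's addition, not part of the diff): only assignment to a bare symbol is admitted.

// Editor's sketch, not part of the diff: expected behaviour, assuming MathJs parses
// these statements into AssignmentNode as usual.
//   x = 1      -> Ok(MjAssignmentNode(...)): no index, object is a SymbolNode
//   a[0] = 1   -> Error(RESyntaxError(...)): an index is present
//   a.b = 1    -> Error(RESyntaxError(...)): property access on the left-hand side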
@@ -71,6 +90,19 @@ let castNodeType = (node: node) =>
   | "SymbolNode" => node->castSymbolNode->MjSymbolNode->Ok
   | _ => RETodo(`Argg, unhandled MathJsNode: ${node["type"]}`)->Error
   }
+}
+
+external unitAsSymbolNode: unit => symbolNode = "%identity"
+external unitAsString: unit => string = "%identity"
+
+let nameOfFunctionNode = (fNode: functionNode): string => {
+  let name = fNode["fn"]
+  if Js.typeof(name) == "string" {
+    name->unitAsString
+  } else {
+    (name->unitAsSymbolNode)["name"]
+  }
+}
 
 let rec toString = (mathJsNode: mathJsNode): string => {
   let toStringValue = (a: 'a): string =>
@@ -87,9 +119,10 @@ let rec toString = (mathJsNode: mathJsNode): string => {
     ->Js.String.concatMany("")
 
   let toStringFunctionNode = (fnode: functionNode): string =>
-    `${fnode["fn"]}(${fnode["args"]->toStringNodeArray})`
+    `${fnode->nameOfFunctionNode}(${fnode["args"]->toStringNodeArray})`
 
-  let toStringObjectEntry = ((key: string, value: node)): string => `${key}: ${value->toStringMathJsNode}`
+  let toStringObjectEntry = ((key: string, value: node)): string =>
+    `${key}: ${value->toStringMathJsNode}`
 
   let toStringObjectNode = (oNode: objectNode): string =>
     `{${oNode["properties"]
@@ -103,16 +136,28 @@ let rec toString = (mathJsNode: mathJsNode): string => {
     ->Belt.Array.map(each => toStringResult(each->castNodeType))
     ->Js.String.concatMany("")
 
+  let toStringSymbolNode = (sNode: symbolNode): string => sNode["name"]
+
+  let toStringBlocks = (blocks: array<block>): string =>
+    blocks
+    ->Belt.Array.map(each => each["node"]->castNodeType->toStringResult)
+    ->Extra.Array.interperse("; ")
+    ->Js.String.concatMany("")
+
   switch mathJsNode {
-  | MjAccessorNode(aNode) => `${aNode["object"]->toStringMathJsNode}[${aNode["index"]->toStringIndexNode}]`
+  | MjAccessorNode(aNode) =>
+    `${aNode["object"]->toStringMathJsNode}[${aNode["index"]->toStringIndexNode}]`
   | MjArrayNode(aNode) => `[${aNode["items"]->toStringNodeArray}]`
+  | MjAssignmentNode(aNode) =>
+    `${aNode["object"]->toStringSymbolNode} = ${aNode["value"]->toStringMathJsNode}`
+  | MjBlockNode(bNode) => `{${bNode["blocks"]->toStringBlocks}}`
   | MjConstantNode(cNode) => cNode["value"]->toStringValue
   | MjFunctionNode(fNode) => fNode->toStringFunctionNode
   | MjIndexNode(iNode) => iNode->toStringIndexNode
   | MjObjectNode(oNode) => oNode->toStringObjectNode
   | MjOperatorNode(opNode) => opNode->castOperatorNodeToFunctionNode->toStringFunctionNode
   | MjParenthesisNode(pNode) => `(${toStringMathJsNode(pNode["content"])})`
-  | MjSymbolNode(sNode) => sNode["name"]
+  | MjSymbolNode(sNode) => sNode->toStringSymbolNode
   }
 }
 and toStringResult = (rMathJsNode: result<mathJsNode, errorValue>): string =>
@@ -1,11 +1,11 @@
 module ErrorValue = Reducer_ErrorValue
 module ExpressionValue = ReducerInterface.ExpressionValue
-module ExtressionT = Reducer_Expression_T
+module ExpressionT = Reducer_Expression_T
 module JavaScript = Reducer_Js
 module Parse = Reducer_MathJs_Parse
 module Result = Belt.Result
 
-type expression = ExtressionT.expression
+type expression = ExpressionT.expression
 type expressionValue = ExpressionValue.expressionValue
 type errorValue = ErrorValue.errorValue
 
@@ -18,10 +18,19 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
       )
     )
 
-  let castFunctionNode = fNode => {
-    let fn = fNode["fn"]->ExpressionValue.EvSymbol->ExtressionT.EValue
+  let toEvCallValue = (name: string): expression =>
+    name->ExpressionValue.EvCall->ExpressionT.EValue
+  let toEvSymbolValue = (name: string): expression =>
+    name->ExpressionValue.EvSymbol->ExpressionT.EValue
+
+  let passToFunction = (fName: string, rLispArgs): result<expression, errorValue> => {
+    let fn = fName->toEvCallValue
+    rLispArgs->Result.flatMap(lispArgs => list{fn, ...lispArgs}->ExpressionT.EList->Ok)
+  }
+
+  let caseFunctionNode = fNode => {
     let lispArgs = fNode["args"]->Belt.List.fromArray->fromNodeList
-    lispArgs->Result.map(argsCode => list{fn, ...argsCode}->ExtressionT.EList)
+    passToFunction(fNode->Parse.nameOfFunctionNode, lispArgs)
   }
 
   let caseObjectNode = oNode => {
@@ -34,15 +43,16 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
         fromNode(value)->Result.map(valueExpression => {
           let entryCode =
             list{
-              key->ExpressionValue.EvString->ExtressionT.EValue,
+              key->ExpressionValue.EvString->ExpressionT.EValue,
               valueExpression,
-            }->ExtressionT.EList
+            }->ExpressionT.EList
           list{entryCode, ...acc}
         })
       )
     )
-    let lispName = "$constructRecord"->ExpressionValue.EvSymbol->ExtressionT.EValue
-    rargs->Result.map(args => list{lispName, ExtressionT.EList(args)}->ExtressionT.EList)
+    rargs->Result.flatMap(args =>
+      passToFunction("$constructRecord", list{ExpressionT.EList(args)}->Ok)
+    ) // $constructRecord gets a single argument: a list of key-value pairs
   }
 
   oNode["properties"]->Js.Dict.entries->Belt.List.fromArray->fromObjectEntries
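A short illustration of the $constructRecord convention above (editor's addition, not part of the diff):

// Editor's sketch, not part of the diff: conceptually, `{a: 1, b: 2}` is rewritten as
//   $constructRecord( [ ["a", 1], ["b", 2] ] )
// i.e. a single EList argument whose elements are key/value pair ELists.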
@@ -54,33 +64,75 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
       Ok(list{}),
       (racc, currentPropertyMathJsNode) =>
         racc->Result.flatMap(acc =>
-          fromNode(currentPropertyMathJsNode)->Result.map(propertyCode => list{propertyCode, ...acc})
+          fromNode(currentPropertyMathJsNode)->Result.map(propertyCode => list{
+            propertyCode,
+            ...acc,
+          })
        ),
     )
-    rpropertyCodeList->Result.map(propertyCodeList => ExtressionT.EList(propertyCodeList))
+    rpropertyCodeList->Result.map(propertyCodeList => ExpressionT.EList(propertyCodeList))
   }
 
   let caseAccessorNode = (objectNode, indexNode) => {
-    let fn = "$atIndex"->ExpressionValue.EvSymbol->ExtressionT.EValue
-
     caseIndexNode(indexNode)->Result.flatMap(indexCode => {
-      fromNode(objectNode)->Result.map(objectCode =>
-        list{fn, objectCode, indexCode}->ExtressionT.EList
+      fromNode(objectNode)->Result.flatMap(objectCode =>
+        passToFunction("$atIndex", list{objectCode, indexCode}->Ok)
       )
     })
   }
 
-  switch typedMathJsNode {
-  | MjArrayNode(aNode) =>
-    aNode["items"]->Belt.List.fromArray->fromNodeList->Result.map(list => ExtressionT.EList(list))
-  | MjConstantNode(cNode) =>
-    cNode["value"]->JavaScript.Gate.jsToEv->Result.map(v => v->ExtressionT.EValue)
-  | MjFunctionNode(fNode) => fNode->castFunctionNode
-  | MjOperatorNode(opNode) => opNode->Parse.castOperatorNodeToFunctionNode->castFunctionNode
-  | MjParenthesisNode(pNode) => pNode["content"]->fromNode
-  | MjAccessorNode(aNode) => caseAccessorNode(aNode["object"], aNode["index"])
-  | MjObjectNode(oNode) => caseObjectNode(oNode)
-  | MjSymbolNode(sNode) => sNode["name"]->ExpressionValue.EvSymbol->ExtressionT.EValue->Ok
-  | MjIndexNode(iNode) => caseIndexNode(iNode)
+  let caseAssignmentNode = aNode => {
+    let symbol = aNode["object"]["name"]->toEvSymbolValue
+    let rValueExpression = fromNode(aNode["value"])
+    rValueExpression->Result.flatMap(valueExpression => {
+      let lispArgs = list{symbol, valueExpression}->Ok
+      passToFunction("$let", lispArgs)
+    })
   }
+
+  let caseArrayNode = aNode => {
+    aNode["items"]->Belt.List.fromArray->fromNodeList->Result.map(list => ExpressionT.EList(list))
+  }
+
+  let caseBlockNode = (bNode): result<expression, errorValue> => {
+    let blocks = bNode["blocks"]
+    let initialBindings = passToFunction("$$bindings", list{}->Ok)
+    let lastIndex = Belt.Array.length(blocks) - 1
+    blocks->Belt.Array.reduceWithIndex(initialBindings, (rPreviousBindings, block, i) => {
+      rPreviousBindings->Result.flatMap(previousBindings => {
+        let node = block["node"]
+        let rStatement: result<expression, errorValue> = node->fromNode
+        let bindName = if i == lastIndex {
+          "$$bindExpression"
+        } else {
+          "$$bindStatement"
+        }
+        rStatement->Result.flatMap((statement: expression) => {
+          let lispArgs = list{previousBindings, statement}->Ok
+          passToFunction(bindName, lispArgs)
+        })
+      })
+    })
+  }
+
+  let rFinalExpression: result<expression, errorValue> = switch typedMathJsNode {
+  | MjAccessorNode(aNode) => caseAccessorNode(aNode["object"], aNode["index"])
+  | MjArrayNode(aNode) => caseArrayNode(aNode)
+  | MjAssignmentNode(aNode) => caseAssignmentNode(aNode)
+  | MjSymbolNode(sNode) => {
+      let expr: expression = toEvSymbolValue(sNode["name"])
+      let rExpr: result<expression, errorValue> = expr->Ok
+      rExpr
+    }
+  | MjBlockNode(bNode) => caseBlockNode(bNode)
+  // | MjBlockNode(bNode) => "statement"->toEvSymbolValue->Ok
+  | MjConstantNode(cNode) =>
+    cNode["value"]->JavaScript.Gate.jsToEv->Result.flatMap(v => v->ExpressionT.EValue->Ok)
+  | MjFunctionNode(fNode) => fNode->caseFunctionNode
+  | MjIndexNode(iNode) => caseIndexNode(iNode)
+  | MjObjectNode(oNode) => caseObjectNode(oNode)
+  | MjOperatorNode(opNode) => opNode->Parse.castOperatorNodeToFunctionNode->caseFunctionNode
+  | MjParenthesisNode(pNode) => pNode["content"]->fromNode
+  }
+  rFinalExpression
 })
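For orientation (editor's addition, not part of the diff), caseBlockNode above threads bindings through a block one statement at a time:

// Editor's sketch, not part of the diff: assuming the macro names above keep their
// meaning, a two-statement block such as `x = 1; x + 1` would be wired up roughly as
//   $$bindExpression(
//     $$bindStatement(
//       $$bindings(),
//       $let(x, 1)),
//     add(x, 1))
// i.e. every statement except the last is bound with $$bindStatement, and the last
// one with $$bindExpression so the block yields a value.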
Some files were not shown because too many files have changed in this diff.