Merge branch 'develop' into mix-distributions

commit 38135f0c81

.github/CODEOWNERS (vendored): 14 changed lines
@@ -9,21 +9,21 @@
# This also holds true for GitHub teams.

# Rescript
*.res @Hazelfire @OAGr @quinn-dougherty
*.resi @Hazelfire @OAGr @quinn-dougherty
*.res @OAGr @quinn-dougherty
*.resi @OAGr @quinn-dougherty

# Typescript
*.tsx @Hazelfire @OAGr
*.ts @Hazelfire @OAGr

# Javascript
*.js @Hazelfire
*.js @Hazelfire @OAGr

# Any opsy files
.github/* @quinn-dougherty
*.json @quinn-dougherty @Hazelfire
*.y*ml @quinn-dougherty
*.config.js @Hazelfire
.github/** @quinn-dougherty @OAGr
*.json @quinn-dougherty @Hazelfire @OAGr
*.y*ml @quinn-dougherty @OAGr
*.config.js @Hazelfire @OAGr

# Documentation
*.md @quinn-dougherty @OAGr @Hazelfire
.github/workflows/ci.yml (vendored): 12 changed lines
@@ -25,17 +25,17 @@ jobs:
steps:
- id: skip_lang_check
name: Check if the changes are about squiggle-lang src files
uses: fkirc/skip-duplicate-actions@master
uses: fkirc/skip-duplicate-actions@v3.4.1
with:
paths: '["packages/squiggle-lang/**"]'
- id: skip_components_check
name: Check if the changes are about components src files
uses: fkirc/skip-duplicate-actions@master
uses: fkirc/skip-duplicate-actions@v3.4.1
with:
paths: '["packages/components/**"]'
- id: skip_website_check
name: Check if the changes are about website src files
uses: fkirc/skip-duplicate-actions@master
uses: fkirc/skip-duplicate-actions@v3.4.1
with:
paths: '["packages/website/**"]'

@@ -58,7 +58,7 @@ jobs:
uses: creyD/prettier_action@v4.2
with:
dry: true
prettier_options: --check .
prettier_options: --check packages/squiggle-lang

lang-build-test-bundle:
name: Language build, test, and bundle

@@ -97,7 +97,7 @@ jobs:
uses: creyD/prettier_action@v4.2
with:
dry: true
prettier_options: --check .
prettier_options: --check packages/components

components-bundle-build:
name: Components bundle and build

@@ -134,7 +134,7 @@ jobs:
uses: creyD/prettier_action@v4.2
with:
dry: true
prettier_options: --check .
prettier_options: --check packages/website

website-build:
name: Website build
@@ -51,4 +51,91 @@ See [here](https://github.com/NixOS/nixpkgs/issues/107375)

# Pull request protocol

Please work against `staging` branch. **Do not** work against `master`. Please do not merge without approval from some subset of Quinn, Sam, and Ozzie; they will be auto-pinged.
Please work against `develop` branch. **Do not** work against `master`.

- For rescript code: Quinn and Ozzie are reviewers
- For js or typescript code: Sam and Ozzie are reviewers
- For ops code (i.e. yaml, package.json): Quinn and Sam are reviewers

Autopings are set up: if you are not autopinged, you are welcome to comment, but please do not use the formal review feature to send approvals, rejections, or merges.

# Code Quality

- Aim for at least 8/10\* quality in `/packages/squiggle-lang`, and 7/10 quality in `/packages/components`.
- If for some reason you submit a PR that is under a 7, explain the reasoning in the PR.

\* This quality score is subjective.
# Rescript Style

**Use `->` instead of `|>`**
Note: Our codebase used to use `|>`, so there's a lot of that in the system. We'll gradually change it.

**Use `x -> y -> z` instead of `let foo = y(x); let bar = z(foo)`**
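For instance, a minimal sketch of the difference (the `parse`, `double`, and `show` helpers here are invented purely for illustration):

```rescript
// Hypothetical helpers, for illustration only.
let parse = (s: string): int => Belt.Int.fromString(s)->Belt.Option.getWithDefault(0)
let double = (x: int): int => x * 2
let show = (x: int): string => Belt.Int.toString(x)

// Discouraged: intermediate lets (or |> pipelines).
let parsed = parse("21")
let doubled = double(parsed)
let result1 = show(doubled)

// Preferred: a single -> pipeline.
let result2 = "21"->parse->double->show
```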
**Don't use anonymous functions with over three lines**
Bad:

```rescript
foo
-> E.O.fmap(r => {
  let a = 34;
  let b = 35;
  let c = 48;
  r + a + b + c
})
```

Good:

```rescript
let addingFn = r => {
  let a = 34;
  let b = 35;
  let c = 48;
  r + a + b + c
}
foo -> addingFn
```
**Write out types for everything, even if there's an interface file**
We'll try this for one month (ending May 5, 2022), then revisit.
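As a hypothetical illustration of the level of annotation intended, even for a trivial helper:

```rescript
// Spell out argument and return types, even where inference would succeed.
let meanOfPair = (x: float, y: float): float => (x +. y) /. 2.0
```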
**Use the Rescript optional default syntax**
Rescript is clever about function inputs. There's custom syntax for default and optional arguments; in the cases where this applies, use it.

From https://rescript-lang.org/docs/manual/latest/function:

```rescript
// radius can be omitted
let drawCircle = (~color, ~radius=?, ()) => {
  setColor(color)
  switch radius {
  | None => startAt(1, 1)
  | Some(r_) => startAt(r_, r_)
  }
}
```
**Use named arguments**
Use named arguments if a function is called externally (in a different file) and has either:

1. two arguments of the same type, or
2. three parameters or more.
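A minimal sketch of the intent, using a made-up function (not from the codebase):

```rescript
// Labeled arguments keep call sites unambiguous when both parameters are floats.
let clamp = (~low: float, ~high: float, x: float): float =>
  Js.Math.min_float(high, Js.Math.max_float(low, x))

let clamped = clamp(~low=0.0, ~high=1.0, 1.7) // 1.0
```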
**Module naming: Use x_y as module names**
For example: `Myname_Myproject_Add.res`. Rescript/Ocaml both require files to have unique names, so long names are needed to keep different parts separate from each other.

See [this page](https://dev.to/yawaramin/a-modular-ocaml-project-structure-1ikd) for more information. (Though note that they use two underscores, and we use one. We might refactor that later.)
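A tiny sketch of the convention (file and function names invented for illustration):

```rescript
// File: Myname_Myproject_Add.res (the module name is the file name)
let add = (a: float, b: float): float => a +. b

// In another file, refer to it by the full, prefixed module name:
// let three = Myname_Myproject_Add.add(1.0, 2.0)
```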
**Module naming: Don't rename modules**
We have some of this in the Reducer code, but generally discourage it.

**Use interface files (.resi) for files with very public interfaces**

### Recommended Rescript resources

- https://dev.to/yawaramin/a-modular-ocaml-project-structure-1ikd
- https://github.com/avohq/reasonml-code-style-guide
- https://cs.brown.edu/courses/cs017/content/docs/reasonml-style.pdf
- https://github.com/ostera/reason-design-patterns/
@@ -2,7 +2,8 @@
"private": true,
"name": "squiggle",
"scripts": {
"nodeclean": "rm -r node_modules && rm -r packages/*/node_modules"
"nodeclean": "rm -r node_modules && rm -r packages/*/node_modules",
"format:all": "prettier --write . && cd packages/squiggle-lang && yarn format"
},
"devDependencies": {
"prettier": "^2.6.2"
@@ -75,7 +75,7 @@
"@storybook/preset-create-react-app": "^4.1.0",
"@storybook/react": "^6.4.20",
"@types/styled-components": "^5.1.24",
"@types/webpack": "^4.41.32",
"@types/webpack": "^5.28.0",
"react-codejar": "^1.1.2",
"style-loader": "^3.3.1",
"ts-loader": "^9.2.8",
@@ -4,10 +4,10 @@ open Expect
describe("Bandwidth", () => {
test("nrd0()", () => {
let data = [1., 4., 3., 2.]
expect(SampleSetDist_Bandwidth.nrd0(data)) -> toEqual(0.7625801874014622)
expect(SampleSetDist_Bandwidth.nrd0(data))->toEqual(0.7625801874014622)
})
test("nrd()", () => {
let data = [1., 4., 3., 2.]
expect(SampleSetDist_Bandwidth.nrd(data)) -> toEqual(0.8981499984950554)
expect(SampleSetDist_Bandwidth.nrd(data))->toEqual(0.8981499984950554)
})
})
@@ -6,6 +6,8 @@ let normalDist: GenericDist_Types.genericDist = normalDist5
let betaDist: GenericDist_Types.genericDist = Symbolic(#Beta({alpha: 2.0, beta: 5.0}))
let lognormalDist: GenericDist_Types.genericDist = Symbolic(#Lognormal({mu: 0.0, sigma: 1.0}))
let cauchyDist: GenericDist_Types.genericDist = Symbolic(#Cauchy({local: 1.0, scale: 1.0}))
let triangularDist: GenericDist_Types.genericDist = Symbolic(#Triangular({low: 1.0, medium: 2.0, high: 3.0}))
let triangularDist: GenericDist_Types.genericDist = Symbolic(
#Triangular({low: 1.0, medium: 2.0, high: 3.0}),
)
let exponentialDist: GenericDist_Types.genericDist = Symbolic(#Exponential({rate: 2.0}))
let uniformDist: GenericDist_Types.genericDist = Symbolic(#Uniform({low: 9.0, high: 10.0}))
@@ -1,70 +1,73 @@
open Jest
open Expect
open TestHelpers
open Expect
open TestHelpers

// TODO: use Normal.make (etc.), but preferably after the new validation dispatch is in.
// TODO: use Normal.make (etc.), but preferably after the new validation dispatch is in.
let mkNormal = (mean, stdev) => GenericDist_Types.Symbolic(#Normal({mean: mean, stdev: stdev}))
let mkBeta = (alpha, beta) => GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
let mkExponential = rate => GenericDist_Types.Symbolic(#Exponential({rate: rate}))
let mkUniform = (low, high) => GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
let mkUniform = (low, high) => GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
let mkCauchy = (local, scale) => GenericDist_Types.Symbolic(#Cauchy({local: local, scale: scale}))
let mkLognormal = (mu, sigma) => GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))

describe("mixture", () => {
testAll("fair mean of two normal distributions", list{(0.0, 1e2), (-1e1, -1e-4), (-1e1, 1e2), (-1e1, 1e1)}, tup => { // should be property
let (mean1, mean2) = tup
let meanValue = {
run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))
-> outputMap(FromDist(ToFloat(#Mean)))
}
meanValue -> unpackFloat -> expect -> toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
})
testAll(
"weighted mean of a beta and an exponential",
// This would not survive property testing, it was easy for me to find cases that NaN'd out.
list{((128.0, 1.0), 2.0), ((2e-1, 64.0), 16.0), ((1e0, 1e0), 64.0)},
tup => {
let ((alpha, beta), rate) = tup
let betaWeight = 0.25
let exponentialWeight = 0.75
let meanValue = {
run(Mixture(
[
(mkBeta(alpha, beta), betaWeight),
(mkExponential(rate), exponentialWeight)
]
)) -> outputMap(FromDist(ToFloat(#Mean)))
}
let betaMean = 1.0 /. (1.0 +. beta /. alpha)
let exponentialMean = 1.0 /. rate
meanValue
-> unpackFloat
-> expect
-> toBeSoCloseTo(
betaWeight *. betaMean +. exponentialWeight *. exponentialMean,
~digits=-1
"fair mean of two normal distributions",
list{(0.0, 1e2), (-1e1, -1e-4), (-1e1, 1e2), (-1e1, 1e1)},
tup => {
// should be property
let (mean1, mean2) = tup
let meanValue = {
run(Mixture([(mkNormal(mean1, 9e-1), 0.5), (mkNormal(mean2, 9e-1), 0.5)]))->outputMap(
FromDist(ToFloat(#Mean)),
)
}
meanValue->unpackFloat->expect->toBeSoCloseTo((mean1 +. mean2) /. 2.0, ~digits=-1)
},
)
testAll(
"weighted mean of lognormal and uniform",
// Would not survive property tests: very easy to find cases that NaN out.
list{((-1e2,1e1), (2e0,1e0)), ((-1e-16,1e-16), (1e-8,1e0)), ((0.0,1e0), (1e0,1e-2))},
tup => {
let ((low, high), (mu, sigma)) = tup
let uniformWeight = 0.6
let lognormalWeight = 0.4
let meanValue = {
run(Mixture([(mkUniform(low, high), uniformWeight), (mkLognormal(mu, sigma), lognormalWeight)]))
-> outputMap(FromDist(ToFloat(#Mean)))
}
let uniformMean = (low +. high) /. 2.0
let lognormalMean = mu +. sigma ** 2.0 /. 2.0
meanValue
-> unpackFloat
-> expect
-> toBeSoCloseTo(uniformWeight *. uniformMean +. lognormalWeight *. lognormalMean, ~digits=-1)
"weighted mean of a beta and an exponential",
// This would not survive property testing, it was easy for me to find cases that NaN'd out.
list{((128.0, 1.0), 2.0), ((2e-1, 64.0), 16.0), ((1e0, 1e0), 64.0)},
tup => {
let ((alpha, beta), rate) = tup
let betaWeight = 0.25
let exponentialWeight = 0.75
let meanValue = {
run(
Mixture([(mkBeta(alpha, beta), betaWeight), (mkExponential(rate), exponentialWeight)]),
)->outputMap(FromDist(ToFloat(#Mean)))
}
let betaMean = 1.0 /. (1.0 +. beta /. alpha)
let exponentialMean = 1.0 /. rate
meanValue
->unpackFloat
->expect
->toBeSoCloseTo(betaWeight *. betaMean +. exponentialWeight *. exponentialMean, ~digits=-1)
},
)
testAll(
"weighted mean of lognormal and uniform",
// Would not survive property tests: very easy to find cases that NaN out.
list{((-1e2, 1e1), (2e0, 1e0)), ((-1e-16, 1e-16), (1e-8, 1e0)), ((0.0, 1e0), (1e0, 1e-2))},
tup => {
let ((low, high), (mu, sigma)) = tup
let uniformWeight = 0.6
let lognormalWeight = 0.4
let meanValue = {
run(
Mixture([
(mkUniform(low, high), uniformWeight),
(mkLognormal(mu, sigma), lognormalWeight),
]),
)->outputMap(FromDist(ToFloat(#Mean)))
}
let uniformMean = (low +. high) /. 2.0
let lognormalMean = mu +. sigma ** 2.0 /. 2.0
meanValue
->unpackFloat
->expect
->toBeSoCloseTo(uniformWeight *. uniformMean +. lognormalWeight *. lognormalMean, ~digits=-1)
},
)
})
@@ -38,4 +38,3 @@ describe("Continuous and discrete splits", () => {
let toArr2 = discrete2 |> E.FloatFloatMap.toArray
makeTest("splitMedium at count=500", toArr2 |> Belt.Array.length, 500)
})
@@ -9,125 +9,109 @@ describe("(Symbolic) normalize", () => {
testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
let normalValue = mkNormal(mean, 2.0)
let normalizedValue = run(FromDist(ToDist(Normalize), normalValue))
normalizedValue
-> unpackDist
-> expect
-> toEqual(normalValue)
normalizedValue->unpackDist->expect->toEqual(normalValue)
})
})

describe("(Symbolic) mean", () => {
testAll("of normal distributions", list{-1e8, -16.0, -1e-2, 0.0, 1e-4, 32.0, 1e16}, mean => {
run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))
-> unpackFloat
-> expect
-> toBeCloseTo(mean)
run(FromDist(ToFloat(#Mean), mkNormal(mean, 4.0)))->unpackFloat->expect->toBeCloseTo(mean)
})

Skip.test("of normal(0, -1) (it NaNs out)", () => {
run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))
-> unpackFloat
-> expect
-> ExpectJs.toBeFalsy
run(FromDist(ToFloat(#Mean), mkNormal(1e1, -1e0)))->unpackFloat->expect->ExpectJs.toBeFalsy
})

test("of normal(0, 1e-8) (it doesn't freak out at tiny stdev)", () => {
run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))
-> unpackFloat
-> expect
-> toBeCloseTo(0.0)
run(FromDist(ToFloat(#Mean), mkNormal(0.0, 1e-8)))->unpackFloat->expect->toBeCloseTo(0.0)
})

testAll("of exponential distributions", list{1e-7, 2.0, 10.0, 100.0}, rate => {
let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Exponential({rate: rate}))))
meanValue -> unpackFloat -> expect -> toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
let meanValue = run(
FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Exponential({rate: rate}))),
)
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. rate) // https://en.wikipedia.org/wiki/Exponential_distribution#Mean,_variance,_moments,_and_median
})

test("of a cauchy distribution", () => {
let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo(2.01868297874546)
let meanValue = run(
FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Cauchy({local: 1.0, scale: 1.0}))),
)
meanValue->unpackFloat->expect->toBeCloseTo(2.01868297874546)
//-> toBe(GenDistError(Other("Cauchy distributions may have no mean value.")))
})

testAll("of triangular distributions", list{(1.0,2.0,3.0), (-1e7,-1e-7,1e-7), (-1e-7,1e0,1e7), (-1e-16,0.0,1e-16)}, tup => {
let (low, medium, high) = tup
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Triangular({low: low, medium: medium, high: high}))
))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
})
testAll(
"of triangular distributions",
list{(1.0, 2.0, 3.0), (-1e7, -1e-7, 1e-7), (-1e-7, 1e0, 1e7), (-1e-16, 0.0, 1e-16)},
tup => {
let (low, medium, high) = tup
let meanValue = run(
FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Triangular({low: low, medium: medium, high: high})),
),
)
meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
},
)

// TODO: nonpositive inputs are SUPPOSED to crash.
testAll("of beta distributions", list{(1e-4, 6.4e1), (1.28e2, 1e0), (1e-16, 1e-16), (1e16, 1e16), (-1e4, 1e1), (1e1, -1e4)}, tup => {
let (alpha, beta) = tup
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))
))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo(1.0 /. (1.0 +. (beta /. alpha))) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
})
// TODO: nonpositive inputs are SUPPOSED to crash.
testAll(
"of beta distributions",
list{(1e-4, 6.4e1), (1.28e2, 1e0), (1e-16, 1e-16), (1e16, 1e16), (-1e4, 1e1), (1e1, -1e4)},
tup => {
let (alpha, beta) = tup
let meanValue = run(
FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Beta({alpha: alpha, beta: beta}))),
)
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
},
)

// TODO: When we have our theory of validators we won't want this to be NaN but to be an error.
test("of beta(0, 0)", () => {
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))
))
meanValue
-> unpackFloat
-> expect
-> ExpectJs.toBeFalsy
let meanValue = run(
FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Beta({alpha: 0.0, beta: 0.0}))),
)
meanValue->unpackFloat->expect->ExpectJs.toBeFalsy
})

testAll("of lognormal distributions", list{(2.0, 4.0), (1e-7, 1e-2), (-1e6, 10.0), (1e3, -1e2), (-1e8, -1e4), (1e2, 1e-5)}, tup => {
let (mu, sigma) = tup
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0 )) // https://brilliant.org/wiki/log-normal-distribution/
})
testAll(
"of lognormal distributions",
list{(2.0, 4.0), (1e-7, 1e-2), (-1e6, 10.0), (1e3, -1e2), (-1e8, -1e4), (1e2, 1e-5)},
tup => {
let (mu, sigma) = tup
let meanValue = run(
FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
)
meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
},
)

testAll("of uniform distributions", list{(1e-5, 12.345), (-1e4, 1e4), (-1e16, -1e2), (5.3e3, 9e9)}, tup => {
let (low, high) = tup
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))
))
meanValue
-> unpackFloat
-> expect
-> toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
})
testAll(
"of uniform distributions",
list{(1e-5, 12.345), (-1e4, 1e4), (-1e16, -1e2), (5.3e3, 9e9)},
tup => {
let (low, high) = tup
let meanValue = run(
FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Uniform({low: low, high: high}))),
)
meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
},
)

test("of a float", () => {
let meanValue = run(FromDist(
ToFloat(#Mean),
GenericDist_Types.Symbolic(#Float(7.7))
))
meanValue -> unpackFloat -> expect -> toBeCloseTo(7.7)
let meanValue = run(FromDist(ToFloat(#Mean), GenericDist_Types.Symbolic(#Float(7.7))))
meanValue->unpackFloat->expect->toBeCloseTo(7.7)
})
})

describe("Normal distribution with sparklines", () => {

let parameterWiseAdditionPdf = (n1: SymbolicDistTypes.normal, n2: SymbolicDistTypes.normal) => {
let normalDistAtSumMeanConstr = SymbolicDist.Normal.add(n1, n2)
let normalDistAtSumMean: SymbolicDistTypes.normal = switch normalDistAtSumMeanConstr {
| #Normal(params) => params
| #Normal(params) => params
}
x => SymbolicDist.Normal.pdf(x, normalDistAtSumMean)
}
@@ -138,24 +122,25 @@ describe("Normal distribution with sparklines", () => {
test("mean=5 pdf", () => {
let pdfNormalDistAtMean5 = x => SymbolicDist.Normal.pdf(x, normalDistAtMean5)
let sparklineMean5 = fnImage(pdfNormalDistAtMean5, range20Float)
Sparklines.create(sparklineMean5, ())
-> expect
-> toEqual(`▁▂▃▆██▇▅▂▁▁▁▁▁▁▁▁▁▁▁`)
let sparklineMean5 = fnImage(pdfNormalDistAtMean5, range20Float)
Sparklines.create(sparklineMean5, ())
->expect
->toEqual(`▁▂▃▆██▇▅▂▁▁▁▁▁▁▁▁▁▁▁`)
})

test("parameter-wise addition of two normal distributions", () => {
let sparklineMean15 = normalDistAtMean5 -> parameterWiseAdditionPdf(normalDistAtMean10) -> fnImage(range20Float)

test("parameter-wise addition of two normal distributions", () => {
let sparklineMean15 =
normalDistAtMean5->parameterWiseAdditionPdf(normalDistAtMean10)->fnImage(range20Float)
Sparklines.create(sparklineMean15, ())
-> expect
-> toEqual(`▁▁▁▁▁▁▁▁▁▂▃▄▆███▇▅▄▂`)
->expect
->toEqual(`▁▁▁▁▁▁▁▁▁▂▃▄▆███▇▅▄▂`)
})

test("mean=10 cdf", () => {
let cdfNormalDistAtMean10 = x => SymbolicDist.Normal.cdf(x, normalDistAtMean10)
let sparklineMean10 = fnImage(cdfNormalDistAtMean10, range20Float)
Sparklines.create(sparklineMean10, ())
-> expect
-> toEqual(`▁▁▁▁▁▁▁▁▂▄▅▇████████`)
->expect
->toEqual(`▁▁▁▁▁▁▁▁▂▄▅▇████████`)
})
})
@@ -3,8 +3,8 @@ open Expect
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () => expect(item1) -> toEqual(item2))
: test(str, () => expect(item1) -> toEqual(item2))
? Only.test(str, () => expect(item1)->toEqual(item2))
: test(str, () => expect(item1)->toEqual(item2))

describe("Lodash", () =>
describe("Lodash", () => {
@@ -6,15 +6,17 @@ open Expect
let expectEvalToBe = (expr: string, answer: string) =>
Reducer.evaluate(expr)->ExpressionValue.toStringResult->expect->toBe(answer)

let testEval = (expr, answer) => test(expr, () => expectEvalToBe(expr, answer))

describe("builtin", () => {
// All MathJs operators and functions are available for string, number and boolean
// .e.g + - / * > >= < <= == /= not and or
// See https://mathjs.org/docs/expressions/syntax.html
// See https://mathjs.org/docs/reference/functions.html
test("-1", () => expectEvalToBe("-1", "Ok(-1)"))
test("1-1", () => expectEvalToBe("1-1", "Ok(0)"))
test("2>1", () => expectEvalToBe("2>1", "Ok(true)"))
test("concat('a','b')", () => expectEvalToBe("concat('a','b')", "Ok('ab')"))
testEval("-1", "Ok(-1)")
testEval("1-1", "Ok(0)")
testEval("2>1", "Ok(true)")
testEval("concat('a','b')", "Ok('ab')")
})

describe("builtin exception", () => {
@@ -7,45 +7,62 @@ open Expect
let expectParseToBe = (expr, answer) =>
Parse.parse(expr)->Result.flatMap(Parse.castNodeType)->Parse.toStringResult->expect->toBe(answer)

let testParse = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))

let testDescriptionParse = (desc, expr, answer) => test(desc, () => expectParseToBe(expr, answer))

module MySkip = {
let testParse = (expr, answer) => Skip.test(expr, () => expectParseToBe(expr, answer))

let testDescriptionParse = (desc, expr, answer) =>
Skip.test(desc, () => expectParseToBe(expr, answer))
}

describe("MathJs parse", () => {
describe("literals operators paranthesis", () => {
test("1", () => expectParseToBe("1", "1"))
test("'hello'", () => expectParseToBe("'hello'", "'hello'"))
test("true", () => expectParseToBe("true", "true"))
test("1+2", () => expectParseToBe("1+2", "add(1, 2)"))
test("add(1,2)", () => expectParseToBe("add(1,2)", "add(1, 2)"))
test("(1)", () => expectParseToBe("(1)", "(1)"))
test("(1+2)", () => expectParseToBe("(1+2)", "(add(1, 2))"))
testParse("1", "1")
testParse("'hello'", "'hello'")
testParse("true", "true")
testParse("1+2", "add(1, 2)")
testParse("add(1,2)", "add(1, 2)")
testParse("(1)", "(1)")
testParse("(1+2)", "(add(1, 2))")
})

describe("multi-line", () => {
testParse("1; 2", "{1; 2}")
})

describe("variables", () => {
Skip.test("define", () => expectParseToBe("x = 1", "???"))
Skip.test("use", () => expectParseToBe("x", "???"))
testParse("x = 1", "x = 1")
testParse("x", "x")
testParse("x = 1; x", "{x = 1; x}")
})

describe("functions", () => {
Skip.test("define", () => expectParseToBe("identity(x) = x", "???"))
Skip.test("use", () => expectParseToBe("identity(x)", "???"))
MySkip.testParse("identity(x) = x", "???")
MySkip.testParse("identity(x)", "???")
})

describe("arrays", () => {
test("empty", () => expectParseToBe("[]", "[]"))
test("define", () => expectParseToBe("[0, 1, 2]", "[0, 1, 2]"))
test("define with strings", () => expectParseToBe("['hello', 'world']", "['hello', 'world']"))
Skip.test("range", () => expectParseToBe("range(0, 4)", "range(0, 4)"))
test("index", () => expectParseToBe("([0,1,2])[1]", "([0, 1, 2])[1]"))
testDescriptionParse("empty", "[]", "[]")
testDescriptionParse("define", "[0, 1, 2]", "[0, 1, 2]")
testDescriptionParse("define with strings", "['hello', 'world']", "['hello', 'world']")
MySkip.testParse("range(0, 4)", "range(0, 4)")
testDescriptionParse("index", "([0,1,2])[1]", "([0, 1, 2])[1]")
})

describe("records", () => {
test("define", () => expectParseToBe("{a: 1, b: 2}", "{a: 1, b: 2}"))
test("use", () => expectParseToBe("record.property", "record['property']"))
testDescriptionParse("define", "{a: 1, b: 2}", "{a: 1, b: 2}")
testDescriptionParse("use", "record.property", "record['property']")
})

describe("comments", () => {
Skip.test("define", () => expectParseToBe("# This is a comment", "???"))
MySkip.testDescriptionParse("define", "# This is a comment", "???")
})

describe("if statement", () => {
Skip.test("define", () => expectParseToBe("if (true) { 1 } else { 0 }", "???"))
// TODO Tertiary operator instead
MySkip.testDescriptionParse("define", "if (true) { 1 } else { 0 }", "???")
})
})
@@ -1,6 +1,15 @@
open Jest
open Reducer_TestHelpers

let testParseToBe = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))

let testDescriptionParseToBe = (desc, expr, answer) =>
test(desc, () => expectParseToBe(expr, answer))

let testEvalToBe = (expr, answer) => test(expr, () => expectEvalToBe(expr, answer))

let testDescriptionEvalToBe = (desc, expr, answer) => test(desc, () => expectEvalToBe(expr, answer))

describe("reducer using mathjs parse", () => {
// Test the MathJs parser compatibility
// Those tests toString that there is a semantic mapping from MathJs to Expression
@@ -10,31 +19,45 @@ describe("reducer using mathjs parse", () => {
// Those tests toString that we are converting mathjs parse tree to what we need

describe("expressions", () => {
test("1", () => expectParseToBe("1", "Ok(1)"))
test("(1)", () => expectParseToBe("(1)", "Ok(1)"))
test("1+2", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
test("(1+2)", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
test("add(1,2)", () => expectParseToBe("1+2", "Ok((:add 1 2))"))
test("1+2*3", () => expectParseToBe("1+2*3", "Ok((:add 1 (:multiply 2 3)))"))
testParseToBe("1", "Ok(1)")
testParseToBe("(1)", "Ok(1)")
testParseToBe("1+2", "Ok((:add 1 2))")
testParseToBe("1+2", "Ok((:add 1 2))")
testParseToBe("1+2", "Ok((:add 1 2))")
testParseToBe("1+2*3", "Ok((:add 1 (:multiply 2 3)))")
})
describe("arrays", () => {
//Note. () is a empty list in Lisp
// The only builtin structure in Lisp is list. There are no arrays
// [1,2,3] becomes (1 2 3)
test("empty", () => expectParseToBe("[]", "Ok(())"))
test("[1, 2, 3]", () => expectParseToBe("[1, 2, 3]", "Ok((1 2 3))"))
test("['hello', 'world']", () => expectParseToBe("['hello', 'world']", "Ok(('hello' 'world'))"))
test("index", () => expectParseToBe("([0,1,2])[1]", "Ok((:$atIndex (0 1 2) (1)))"))
testDescriptionParseToBe("empty", "[]", "Ok(())")
testParseToBe("[1, 2, 3]", "Ok((1 2 3))")
testParseToBe("['hello', 'world']", "Ok(('hello' 'world'))")
testDescriptionParseToBe("index", "([0,1,2])[1]", "Ok((:$atIndex (0 1 2) (1)))")
})
describe("records", () => {
test("define", () =>
expectParseToBe("{a: 1, b: 2}", "Ok((:$constructRecord (('a' 1) ('b' 2))))")
testDescriptionParseToBe("define", "{a: 1, b: 2}", "Ok((:$constructRecord (('a' 1) ('b' 2))))")
testDescriptionParseToBe(
"use",
"{a: 1, b: 2}.a",
"Ok((:$atIndex (:$constructRecord (('a' 1) ('b' 2))) ('a')))",
)
test("use", () =>
expectParseToBe(
"{a: 1, b: 2}.a",
"Ok((:$atIndex (:$constructRecord (('a' 1) ('b' 2))) ('a')))",
)
})
describe("multi-line", () => {
testParseToBe("1; 2", "Ok((:$$bindExpression (:$$bindStatement (:$$bindings) 1) 2))")
testParseToBe(
"1+1; 2+1",
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:add 1 1)) (:add 2 1)))",
)
})
describe("assignment", () => {
testParseToBe(
"x=1; x",
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :x 1)) :x))",
)
testParseToBe(
"x=1+1; x+1",
"Ok((:$$bindExpression (:$$bindStatement (:$$bindings) (:$let :x (:add 1 1))) (:add :x 1)))",
)
})
})
@@ -45,37 +68,51 @@ describe("eval", () => {
// See https://mathjs.org/docs/expressions/syntax.html
// See https://mathjs.org/docs/reference/functions.html
describe("expressions", () => {
test("1", () => expectEvalToBe("1", "Ok(1)"))
test("1+2", () => expectEvalToBe("1+2", "Ok(3)"))
test("(1+2)*3", () => expectEvalToBe("(1+2)*3", "Ok(9)"))
test("2>1", () => expectEvalToBe("2>1", "Ok(true)"))
test("concat('a ', 'b')", () => expectEvalToBe("concat('a ', 'b')", "Ok('a b')"))
test("log(10)", () => expectEvalToBe("log(10)", "Ok(2.302585092994046)"))
test("cos(10)", () => expectEvalToBe("cos(10)", "Ok(-0.8390715290764524)"))
testEvalToBe("1", "Ok(1)")
testEvalToBe("1+2", "Ok(3)")
testEvalToBe("(1+2)*3", "Ok(9)")
testEvalToBe("2>1", "Ok(true)")
testEvalToBe("concat('a ', 'b')", "Ok('a b')")
testEvalToBe("log(10)", "Ok(2.302585092994046)")
testEvalToBe("cos(10)", "Ok(-0.8390715290764524)")
// TODO more built ins
})
describe("arrays", () => {
test("empty array", () => expectEvalToBe("[]", "Ok([])"))
test("[1, 2, 3]", () => expectEvalToBe("[1, 2, 3]", "Ok([1, 2, 3])"))
test("['hello', 'world']", () => expectEvalToBe("['hello', 'world']", "Ok(['hello', 'world'])"))
test("index", () => expectEvalToBe("([0,1,2])[1]", "Ok(1)"))
test("index not found", () =>
expectEvalToBe("([0,1,2])[10]", "Error(Array index not found: 10)")
)
testEvalToBe("[1, 2, 3]", "Ok([1, 2, 3])")
testEvalToBe("['hello', 'world']", "Ok(['hello', 'world'])")
testEvalToBe("([0,1,2])[1]", "Ok(1)")
testDescriptionEvalToBe("index not found", "([0,1,2])[10]", "Error(Array index not found: 10)")
})
describe("records", () => {
test("define", () => expectEvalToBe("{a: 1, b: 2}", "Ok({a: 1, b: 2})"))
test("index", () => expectEvalToBe("{a: 1}.a", "Ok(1)"))
test("index not found", () => expectEvalToBe("{a: 1}.b", "Error(Record property not found: b)"))
})

describe("multi-line", () => {
testEvalToBe("1; 2", "Error(Assignment expected)")
testEvalToBe("1+1; 2+1", "Error(Assignment expected)")
})
describe("assignment", () => {
testEvalToBe("x=1; x", "Ok(1)")
testEvalToBe("x=1+1; x+1", "Ok(3)")
testEvalToBe("x=1; y=x+1; y+1", "Ok(3)")
testEvalToBe("1; x=1", "Error(Assignment expected)")
testEvalToBe("1; 1", "Error(Assignment expected)")
testEvalToBe("x=1; x=1", "Error(Expression expected)")
})
})

describe("test exceptions", () => {
test("javascript exception", () =>
expectEvalToBe("jsraise('div by 0')", "Error(JS Exception: Error: 'div by 0')")
testDescriptionEvalToBe(
"javascript exception",
"javascriptraise('div by 0')",
"Error(JS Exception: Error: 'div by 0')",
)

test("rescript exception", () =>
expectEvalToBe("resraise()", "Error(TODO: unhandled rescript exception)")
testDescriptionEvalToBe(
"rescript exception",
"rescriptraise()",
"Error(TODO: unhandled rescript exception)",
)
})
@@ -111,7 +111,11 @@ describe("parse on distribution functions", () => {
})
describe("pointwise arithmetic expressions", () => {
testParse(~skip=true, "normal(5,2) .+ normal(5,1)", "Ok((:dotAdd (:normal 5 2) (:normal 5 1)))")
testParse(~skip=true, "normal(5,2) .- normal(5,1)", "Ok((:dotSubtract (:normal 5 2) (:normal 5 1)))")
testParse(
~skip=true,
"normal(5,2) .- normal(5,1)",
"Ok((:dotSubtract (:normal 5 2) (:normal 5 1)))",
)
testParse("normal(5,2) .* normal(5,1)", "Ok((:dotMultiply (:normal 5 2) (:normal 5 1)))")
testParse("normal(5,2) ./ normal(5,1)", "Ok((:dotDivide (:normal 5 2) (:normal 5 1)))")
testParse("normal(5,2) .^ normal(5,1)", "Ok((:dotPow (:normal 5 2) (:normal 5 1)))")
@@ -3,9 +3,8 @@ open Expect
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () => expect(item1) -> toEqual(item2))
: test(str, () => expect(item1) -> toEqual(item2))

? Only.test(str, () => expect(item1)->toEqual(item2))
: test(str, () => expect(item1)->toEqual(item2))

let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Output)
@@ -20,7 +19,9 @@ let run = DistributionOperation.run(~env)
let outputMap = fmap(~env)
let unreachableInTestFileMessage = "Should be impossible to reach (This error is in test file)"
let toExtFloat: option<float> => float = E.O.toExt(unreachableInTestFileMessage)
let toExtDist: option<GenericDist_Types.genericDist> => GenericDist_Types.genericDist = E.O.toExt(unreachableInTestFileMessage)
let toExtDist: option<GenericDist_Types.genericDist> => GenericDist_Types.genericDist = E.O.toExt(
unreachableInTestFileMessage,
)
// let toExt: option<'a> => 'a = E.O.toExt(unreachableInTestFileMessage)
let unpackFloat = x => x -> toFloat -> toExtFloat
let unpackDist = y => y -> toDist -> toExtDist
let unpackFloat = x => x->toFloat->toExtFloat
let unpackDist = y => y->toDist->toExtDist
@@ -3,8 +3,8 @@ open Expect
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () => expect(item1) -> toEqual(item2))
: test(str, () => expect(item1) -> toEqual(item2))
? Only.test(str, () => expect(item1)->toEqual(item2))
: test(str, () => expect(item1)->toEqual(item2))

let pointSetDist1: PointSetTypes.xyShape = {xs: [1., 4., 8.], ys: [0.2, 0.4, 0.8]}
@@ -21,7 +21,11 @@ let pointSetDist3: PointSetTypes.xyShape = {
describe("XYShapes", () => {
describe("logScorePoint", () => {
makeTest("When identical", XYShape.logScorePoint(30, pointSetDist1, pointSetDist1), Some(0.0))
makeTest("When similar", XYShape.logScorePoint(30, pointSetDist1, pointSetDist2), Some(1.658971191043856))
makeTest(
"When similar",
XYShape.logScorePoint(30, pointSetDist1, pointSetDist2),
Some(1.658971191043856),
)
makeTest(
"When very different",
XYShape.logScorePoint(30, pointSetDist1, pointSetDist3),
@@ -88,6 +88,7 @@ function tag<a, b>(x: a, y: b): tagged<a, b> {
export type squiggleExpression =
| tagged<"symbol", string>
| tagged<"string", string>
| tagged<"call", string>
| tagged<"array", squiggleExpression[]>
| tagged<"boolean", boolean>
| tagged<"distribution", Distribution>
@@ -117,6 +118,8 @@ function createTsExport(
);
case "EvBool":
return tag("boolean", x.value);
case "EvCall":
return tag("call", x.value);
case "EvDistribution":
return tag("distribution", new Distribution(x.value, sampEnv));
case "EvNumber":
@@ -39,57 +39,52 @@ module Output: {
}

module Constructors: {
@genType
let mean: (~env: env, genericDist) => result<float, error>
@genType
let sample: (~env: env, genericDist) => result<float, error>
@genType
let cdf: (~env: env, genericDist, float) => result<float, error>
@genType
let inv: (~env: env, genericDist, float) => result<float, error>
@genType
let pdf: (~env: env, genericDist, float) => result<float, error>
@genType
let normalize: (~env: env, genericDist) => result<genericDist, error>
@genType
let toPointSet: (~env: env, genericDist) => result<genericDist, error>
@genType
let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
@genType
let truncate: (
~env: env,
genericDist,
option<float>,
option<float>,
) => result<genericDist, error>
@genType
let inspect: (~env: env, genericDist) => result<genericDist, error>
@genType
let toString: (~env: env, genericDist) => result<string, error>
@genType
let toSparkline: (~env: env, genericDist, int) => result<string, error>
@genType
let algebraicAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwisePower: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let mean: (~env: env, genericDist) => result<float, error>
@genType
let sample: (~env: env, genericDist) => result<float, error>
@genType
let cdf: (~env: env, genericDist, float) => result<float, error>
@genType
let inv: (~env: env, genericDist, float) => result<float, error>
@genType
let pdf: (~env: env, genericDist, float) => result<float, error>
@genType
let normalize: (~env: env, genericDist) => result<genericDist, error>
@genType
let toPointSet: (~env: env, genericDist) => result<genericDist, error>
@genType
let toSampleSet: (~env: env, genericDist, int) => result<genericDist, error>
@genType
let truncate: (~env: env, genericDist, option<float>, option<float>) => result<genericDist, error>
@genType
let inspect: (~env: env, genericDist) => result<genericDist, error>
@genType
let toString: (~env: env, genericDist) => result<string, error>
@genType
let toSparkline: (~env: env, genericDist, int) => result<string, error>
@genType
let algebraicAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let algebraicPower: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseAdd: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseMultiply: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseDivide: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseSubtract: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwiseLogarithm: (~env: env, genericDist, genericDist) => result<genericDist, error>
@genType
let pointwisePower: (~env: env, genericDist, genericDist) => result<genericDist, error>
}
@@ -55,7 +55,11 @@ module DistributionOperation = {
type fromDist =
| ToFloat(Operation.toFloat)
| ToDist(toDist)
| ToDistCombination(Operation.direction, Operation.arithmeticOperation, [#Dist(genericDist) | #Float(float)])
| ToDistCombination(
Operation.direction,
Operation.arithmeticOperation,
[#Dist(genericDist) | #Float(float)],
)
| ToString

type singleParamaterFunction =
@@ -16,7 +16,7 @@ module Error = {
type t = error

let fromString = (s: string): t => Other(s)

@genType
let toString = (x: t) => {
switch x {
@@ -100,7 +100,6 @@ let combineShapesContinuousContinuous = (
s1: PointSetTypes.xyShape,
s2: PointSetTypes.xyShape,
): PointSetTypes.xyShape => {

// if we add the two distributions, we should probably use normal filters.
// if we multiply the two distributions, we should probably use lognormal filters.
let t1m = toDiscretePointMassesFromTriangulars(s1)
@@ -235,18 +235,10 @@ module T = Dist({
let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0
let indefiniteIntegralLinear = (p, a, b) => a *. p ** 2.0 /. 2.0 +. b *. p ** 3.0 /. 3.0

Analysis.integrate(
~indefiniteIntegralStepwise,
~indefiniteIntegralLinear,
t,
)
Analysis.integrate(~indefiniteIntegralStepwise, ~indefiniteIntegralLinear, t)
}
let variance = (t: t): float =>
XYShape.Analysis.getVarianceDangerously(
t,
mean,
Analysis.getMeanOfSquares,
)
XYShape.Analysis.getVarianceDangerously(t, mean, Analysis.getMeanOfSquares)
})

let downsampleEquallyOverX = (length, t): t =>
@@ -212,8 +212,7 @@ module T = Dist({
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum

let getMeanOfSquares = ({discrete, continuous}: t) => {
let discreteMean =
discrete |> Discrete.shapeMap(XYShape.T.square) |> Discrete.T.mean
let discreteMean = discrete |> Discrete.shapeMap(XYShape.T.square) |> Discrete.T.mean
let continuousMean = continuous |> Continuous.Analysis.getMeanOfSquares
(discreteMean *. discreteIntegralSum +. continuousMean *. continuousIntegralSum) /.
totalIntegralSum
@@ -207,4 +207,4 @@ let toSparkline = (t: t, bucketCount) =>
T.toContinuous(t)
->E.O2.fmap(Continuous.downsampleEquallyOverX(bucketCount))
->E.O2.toResult("toContinous Error: Could not convert into continuous distribution")
->E.R2.fmap(r => Continuous.getShape(r).ys->Sparklines.create())
->E.R2.fmap(r => Continuous.getShape(r).ys->Sparklines.create())
@@ -14,10 +14,10 @@ type distributionType = [
| #CDF
]

type xyShape = XYShape.xyShape;
type interpolationStrategy = XYShape.interpolationStrategy;
type extrapolationStrategy = XYShape.extrapolationStrategy;
type interpolator = XYShape.extrapolationStrategy;
type xyShape = XYShape.xyShape
type interpolationStrategy = XYShape.interpolationStrategy
type extrapolationStrategy = XYShape.extrapolationStrategy
type interpolator = XYShape.extrapolationStrategy

@genType
type rec continuousShape = {
@@ -81,7 +81,7 @@ module Triangular = {
low < medium && medium < high
? Ok(#Triangular({low: low, medium: medium, high: high}))
: Error("Triangular values must be increasing order.")
let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
let cdf = (x, t: t) => Jstat.Triangular.cdf(x, t.low, t.high, t.medium)
let inv = (p, t: t) => Jstat.Triangular.inv(p, t.low, t.high, t.medium)
let sample = (t: t) => Jstat.Triangular.sample(t.low, t.high, t.medium)

@@ -346,7 +346,11 @@ module T = {
| _ => #NoSolution
}

let toPointSetDist = (~xSelection=#ByWeight, sampleCount, d: symbolicDist): PointSetTypes.pointSetDist =>
let toPointSetDist = (
~xSelection=#ByWeight,
sampleCount,
d: symbolicDist,
): PointSetTypes.pointSetDist =>
switch d {
| #Float(v) => Discrete(Discrete.make(~integralSumCache=Some(1.0), {xs: [v], ys: [1.0]}))
| _ =>
@@ -21,4 +21,4 @@ let toPointSetDist = (samplingInputs, environment, node: node) =>
let runFunction = (samplingInputs, environment, inputs, fn: ASTTypes.Function.t) => {
let params = envs(samplingInputs, environment)
ASTTypes.Function.run(params, inputs, fn)
}
}
@@ -22,7 +22,7 @@ let makeSymbolicFromTwoFloats = (name, fn) =>
~inputTypes=[#Float, #Float],
~run=x =>
switch x {
| [#Float(a), #Float(b)] => fn(a, b) |> E.R.fmap(r => (#SymbolicDist(r)))
| [#Float(a), #Float(b)] => fn(a, b) |> E.R.fmap(r => #SymbolicDist(r))
| e => wrongInputsError(e)
},
(),

@@ -90,7 +90,8 @@ let floatFromDist = (
switch t {
| #SymbolicDist(s) =>
SymbolicDist.T.operate(distToFloatOp, s) |> E.R.bind(_, v => Ok(#SymbolicDist(#Float(v))))
| #RenderedDist(rs) => PointSetDist.operate(distToFloatOp, rs) |> (v => Ok(#SymbolicDist(#Float(v))))
| #RenderedDist(rs) =>
PointSetDist.operate(distToFloatOp, rs) |> (v => Ok(#SymbolicDist(#Float(v))))
}

let verticalScaling = (scaleOp, rs, scaleBy) => {

@@ -125,10 +126,15 @@ module Multimodal = {
->E.R.bind(TypeSystem.TypedValue.toArray)
->E.R.bind(r => r |> E.A.fmap(TypeSystem.TypedValue.toFloat) |> E.A.R.firstErrorOrOpen)

E.R.merge(dists, weights) -> E.R.bind(((a, b)) =>
E.A.length(b) > E.A.length(a) ?
Error("Too many weights provided") :
Ok(E.A.zipMaxLength(a, b) |> E.A.fmap(((a, b)) => (a |> E.O.toExn(""), b |> E.O.default(1.0))))
E.R.merge(dists, weights)->E.R.bind(((a, b)) =>
E.A.length(b) > E.A.length(a)
? Error("Too many weights provided")
: Ok(
E.A.zipMaxLength(a, b) |> E.A.fmap(((a, b)) => (
a |> E.O.toExn(""),
b |> E.O.default(1.0),
)),
)
)
| _ => Error("Needs items")
}
@@ -86,11 +86,7 @@ module TypedValue = {
|> E.R.fmap(r => #Array(r))
| (#Hash(named), #Hash(r)) =>
let keyValues =
named |> E.A.fmap(((name, intendedType)) => (
name,
intendedType,
Hash.getByName(r, name),
))
named |> E.A.fmap(((name, intendedType)) => (name, intendedType, Hash.getByName(r, name)))
let typedHash =
keyValues
|> E.A.fmap(((name, intendedType, optionNode)) =>

@@ -180,11 +176,7 @@ module Function = {
_coerceInputNodes(evaluationParams, t.inputTypes, t.shouldCoerceTypes),
)

let run = (
evaluationParams: ASTTypes.evaluationParams,
inputNodes: inputNodes,
t: t,
) =>
let run = (evaluationParams: ASTTypes.evaluationParams, inputNodes: inputNodes, t: t) =>
inputsToTypedValues(evaluationParams, inputNodes, t)->E.R.bind(t.run)
|> (
x =>
@@ -6,7 +6,7 @@ module Js = Reducer_Js
module MathJs = Reducer_MathJs

@genType
type expressionValue = Reducer_Expression.expressionValue
type expressionValue = ReducerInterface_ExpressionValue.expressionValue

@genType
let evaluate: string => result<expressionValue, Reducer_ErrorValue.errorValue>
@@ -14,8 +14,8 @@ exception TestRescriptException
let callInternal = (call: functionCall): result<'b, errorValue> => {
let callMathJs = (call: functionCall): result<'b, errorValue> =>
switch call {
| ("jsraise", [msg]) => Js.Exn.raiseError(toString(msg)) // For Tests
| ("resraise", _) => raise(TestRescriptException) // For Tests
| ("javascriptraise", [msg]) => Js.Exn.raiseError(toString(msg)) // For Tests
| ("rescriptraise", _) => raise(TestRescriptException) // For Tests
| call => call->toStringFunctionCall->MathJs.Eval.eval
}

@@ -58,7 +58,7 @@ let callInternal = (call: functionCall): result<'b, errorValue> => {
}

/*
Lisp engine uses Result monad while reducing expressions
Reducer uses Result monad while reducing expressions
*/
let dispatch = (call: functionCall): result<expressionValue, errorValue> =>
try {
@@ -1,9 +1,14 @@
@genType
type errorValue =
| REArrayIndexNotFound(string, int)
| REAssignmentExpected
| REExpressionExpected
| REFunctionExpected(string)
| REJavaScriptExn(option<string>, option<string>) // Javascript Exception
| REMacroNotFound(string)
| RERecordPropertyNotFound(string, string)
| RESymbolNotFound(string)
| RESyntaxError(string)
| RETodo(string) // To do

type t = errorValue

@@ -12,6 +17,8 @@ type t = errorValue
let errorToString = err =>
switch err {
| REArrayIndexNotFound(msg, index) => `${msg}: ${Js.String.make(index)}`
| REAssignmentExpected => "Assignment expected"
| REExpressionExpected => "Expression expected"
| REFunctionExpected(msg) => `Function expected: ${msg}`
| REJavaScriptExn(omsg, oname) => {
let answer = "JS Exception:"

@@ -25,6 +32,9 @@ let errorToString = err =>
}
answer
}
| REMacroNotFound(macro) => `Macro not found: ${macro}`
| RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
| RESymbolNotFound(symbolName) => `${symbolName} is not defined`
| RESyntaxError(desc) => `Syntax Error: ${desc}`
| RETodo(msg) => `TODO: ${msg}`
}
@@ -11,10 +11,11 @@ type expressionValue = ExpressionValue.expressionValue
type t = expression

/*
Shows the Lisp Code as text lisp code
Shows the expression as text of expression
*/
let rec toString = expression =>
switch expression {
| T.EBindings(bindings) => "$$bound"
| T.EList(aList) =>
`(${Belt.List.map(aList, aValue => toString(aValue))
->Extra.List.interperse(" ")
@@ -30,7 +31,7 @@ let toStringResult = codeResult =>
}

/*
Converts a MathJs code to Lisp Code
Converts a MathJs code to expression
*/
let parse_ = (expr: string, parser, converter): result<t, errorValue> =>
expr->parser->Result.flatMap(node => converter(node))
@@ -38,54 +39,141 @@ let parse_ = (expr: string, parser, converter): result<t, errorValue> =>
let parse = (mathJsCode: string): result<t, errorValue> =>
mathJsCode->parse_(MathJs.Parse.parse, MathJs.ToExpression.fromNode)

module MapString = Belt.Map.String
type bindings = MapString.t<unit>
let defaultBindings: bindings = MapString.fromArray([])
// TODO Define bindings for function execution context
let defaultBindings: T.bindings = Belt.Map.String.empty

/*
After reducing each level of code tree, we have a value list to evaluate
Recursively evaluate/reduce the expression (Lisp AST)
*/
let reduceValueList = (valueList: list<expressionValue>): result<expressionValue, 'e> =>
switch valueList {
| list{EvSymbol(fName), ...args} => (fName, args->Belt.List.toArray)->BuiltIn.dispatch
| _ => valueList->Belt.List.toArray->ExpressionValue.EvArray->Ok
}

/*
Recursively evaluate/reduce the code tree
*/
let rec reduceExpression = (expression: t, bindings): result<expressionValue, 'e> =>
switch expression {
| T.EValue(value) => value->Ok
| T.EList(list) => {
let racc: result<list<expressionValue>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
racc,
each: expression,
) =>
racc->Result.flatMap(acc => {
each
->reduceExpression(bindings)
->Result.flatMap(newNode => {
acc->Belt.List.add(newNode)->Ok
})
})
)
racc->Result.flatMap(acc => acc->reduceValueList)
let rec reduceExpression = (expression: t, bindings: T.bindings): result<expressionValue, 'e> => {
/*
After reducing each level of expression(Lisp AST), we have a value list to evaluate
*/
let reduceValueList = (valueList: list<expressionValue>): result<expressionValue, 'e> =>
switch valueList {
| list{EvCall(fName), ...args} => (fName, args->Belt.List.toArray)->BuiltIn.dispatch
| _ => valueList->Belt.List.toArray->ExpressionValue.EvArray->Ok
}

/*
Macros are like functions but instead of taking values as parameters,
they take expressions as parameters and return a new expression.
Macros are used to define language building blocks. They are like Lisp macros.
*/
let doMacroCall = (list: list<t>, bindings: T.bindings): result<t, 'e> => {
let dispatchMacroCall = (list: list<t>, bindings: T.bindings): result<t, 'e> => {
let rec replaceSymbols = (expression: t, bindings: T.bindings): result<t, errorValue> =>
switch expression {
| T.EValue(EvSymbol(aSymbol)) =>
switch bindings->Belt.Map.String.get(aSymbol) {
| Some(boundExpression) => boundExpression->Ok
| None => RESymbolNotFound(aSymbol)->Error
}
| T.EValue(_) => expression->Ok
| T.EBindings(_) => expression->Ok
| T.EList(list) => {
let racc = list->Belt.List.reduceReverse(Ok(list{}), (racc, each: expression) =>
racc->Result.flatMap(acc => {
each
->replaceSymbols(bindings)
->Result.flatMap(newNode => {
acc->Belt.List.add(newNode)->Ok
})
})
)
racc->Result.map(acc => acc->T.EList)
}
}

let doBindStatement = (statement: t, bindings: T.bindings) => {
switch statement {
| T.EList(list{T.EValue(EvCall("$let")), T.EValue(EvSymbol(aSymbol)), expression}) => {
let rNewExpression = replaceSymbols(expression, bindings)
rNewExpression->Result.map(newExpression =>
Belt.Map.String.set(bindings, aSymbol, newExpression)->T.EBindings
)
}
| _ => REAssignmentExpected->Error
}
}

let doBindExpression = (expression: t, bindings: T.bindings) => {
switch expression {
| T.EList(list{T.EValue(EvCall("$let")), ..._}) => REExpressionExpected->Error
| _ => replaceSymbols(expression, bindings)
}
}

switch list {
| list{T.EValue(EvCall("$$bindings"))} => bindings->T.EBindings->Ok

| list{T.EValue(EvCall("$$bindStatement")), T.EBindings(bindings), statement} =>
doBindStatement(statement, bindings)
| list{T.EValue(EvCall("$$bindExpression")), T.EBindings(bindings), expression} =>
doBindExpression(expression, bindings)
| _ => list->T.EList->Ok
}
}

list->dispatchMacroCall(bindings)
}

let rec seekMacros = (expression: t, bindings: T.bindings): result<t, 'e> =>
switch expression {
| T.EValue(value) => expression->Ok
| T.EList(list) => {
let racc: result<list<t>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
racc,
each: expression,
) =>
racc->Result.flatMap(acc => {
each
->seekMacros(bindings)
->Result.flatMap(newNode => {
acc->Belt.List.add(newNode)->Ok
})
})
)
racc->Result.flatMap(acc => acc->doMacroCall(bindings))
}
}

let rec reduceExpandedExpression = (expression: t): result<expressionValue, 'e> =>
switch expression {
| T.EValue(value) => value->Ok
| T.EList(list) => {
let racc: result<list<expressionValue>, 'e> = list->Belt.List.reduceReverse(Ok(list{}), (
racc,
each: expression,
) =>
racc->Result.flatMap(acc => {
each
->reduceExpandedExpression
->Result.flatMap(newNode => {
acc->Belt.List.add(newNode)->Ok
})
})
)
racc->Result.flatMap(acc => acc->reduceValueList)
}
}

let rExpandedExpression: result<t, 'e> = expression->seekMacros(bindings)
rExpandedExpression->Result.flatMap(expandedExpression =>
expandedExpression->reduceExpandedExpression
)
}

let evalWBindingsExpression = (aExpression, bindings): result<expressionValue, 'e> =>
reduceExpression(aExpression, bindings)

/*
Evaluates MathJs code via Lisp using bindings and answers the result
Evaluates MathJs code via Reducer using bindings and answers the result
*/
let evalWBindings = (codeText: string, bindings: bindings) => {
let evalWBindings = (codeText: string, bindings: T.bindings) => {
parse(codeText)->Result.flatMap(code => code->evalWBindingsExpression(bindings))
}

/*
Evaluates MathJs code via Lisp and answers the result
Evaluates MathJs code via Reducer and answers the result
*/
let eval = (code: string) => evalWBindings(code, defaultBindings)
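The reducer above threads a result through every recursion step: each child is reduced, and the fold short-circuits on the first Error. A standalone sketch of that Belt.List.reduceReverse plus Result.flatMap pattern, with illustrative names (not this package's API):

// Parse every token, keeping order, and stop at the first failure.
let parseAll = (tokens: list<string>): result<list<float>, string> =>
  tokens->Belt.List.reduceReverse(Ok(list{}), (racc, tok) =>
    racc->Belt.Result.flatMap(acc =>
      switch Belt.Float.fromString(tok) {
      | Some(x) => Ok(list{x, ...acc})
      | None => Error(`not a number: ${tok}`)
      }
    )
  )

// parseAll(list{"1", "2.5"}) == Ok(list{1., 2.5})
// parseAll(list{"1", "oops"}) == Error("not a number: oops")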
@@ -1,29 +0,0 @@
module Result = Belt.Result
module T = Reducer_Expression_T
type expression = T.expression
@genType
type expressionValue = ReducerInterface_ExpressionValue.expressionValue
type t = expression
let toString: T.expression => Js.String.t
let toStringResult: result<T.expression, 'a> => string
let parse: string => result<expression, Reducer_ErrorValue.t>
module MapString = Belt.Map.String
type bindings = MapString.t<unit>
let defaultBindings: bindings
let reduceValueList: list<expressionValue> => result<
expressionValue,
Reducer_ErrorValue.t,
>
let reduceExpression: (expression, 'a) => result<
expressionValue,
Reducer_ErrorValue.t,
>
let evalWBindingsExpression: (expression, 'a) => result<
expressionValue,
Reducer_ErrorValue.t,
>
let evalWBindings: (string, bindings) => Result.t<
expressionValue,
Reducer_ErrorValue.t,
>
let eval: string => Result.t<expressionValue, Reducer_ErrorValue.t>
@@ -1,5 +1,15 @@
open ReducerInterface.ExpressionValue

/*
An expression is a Lisp AST. An expression is either a primitive value or a list of expressions.
In the case of a list of expressions (e1, e2, e3, ...eN), the semantic is
apply e1, e2 -> apply e3 -> ... -> apply eN
This is Lisp semantics. It holds true in both eager and lazy evaluations.
A Lisp AST contains only expressions/primitive values to apply to their left.
The act of defining the semantics of a functional language is to write it in terms of Lisp AST.
*/
type rec expression =
| EList(list<expression>) // A list to map-reduce
| EValue(expressionValue) // Irreducible built-in value. Reducer should not know the internals. External libraries are responsible
| EBindings(bindings) // let/def kind of statements return bindings
and bindings = Belt.Map.String.t<expression>
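To make the comment above concrete, here is a cut-down, standalone illustration of the same idea (the type and names are illustrative, not the ones in this commit):

// A value, a callable name, or a list whose head is applied to the rest.
type rec expr =
  | EValue(float)
  | ECall(string)
  | EList(list<expr>)

// The source "add(1, 2)" corresponds to the Lisp-style list (add 1 2):
let example: expr = EList(list{ECall("add"), EValue(1.), EValue(2.)})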
@@ -7,25 +7,31 @@ open Reducer_ErrorValue

type node = {"type": string, "isNode": bool, "comment": string}
type arrayNode = {...node, "items": array<node>}
//assignmentNode
//blockNode
type block = {"node": node}
type blockNode = {...node, "blocks": array<block>}
//conditionalNode
type constantNode = {...node, "value": unit}
//functionAssignmentNode
type functionNode = {...node, "fn": string, "args": array<node>}
type indexNode = {...node, "dimensions": array<node>}
type objectNode = {...node, "properties": Js.Dict.t<node>}
type accessorNode = {...node, "object": node, "index": indexNode}
type operatorNode = {...functionNode, "op": string}
type accessorNode = {...node, "object": node, "index": indexNode, "name": string}

//parenthesisNode
type parenthesisNode = {...node, "content": node}
//rangeNode
//relationalNode
type symbolNode = {...node, "name": string}
type functionNode = {...node, "fn": unit, "args": array<node>}
type operatorNode = {...functionNode, "op": string}
type assignmentNode = {...node, "object": symbolNode, "value": node}
type assignmentNodeWAccessor = {...node, "object": accessorNode, "value": node}
type assignmentNodeWIndex = {...assignmentNodeWAccessor, "index": Js.null<indexNode>}

external castAccessorNode: node => accessorNode = "%identity"
external castArrayNode: node => arrayNode = "%identity"
external castAssignmentNode: node => assignmentNode = "%identity"
external castAssignmentNodeWAccessor: node => assignmentNodeWAccessor = "%identity"
external castAssignmentNodeWIndex: node => assignmentNodeWIndex = "%identity"
external castBlockNode: node => blockNode = "%identity"
external castConstantNode: node => constantNode = "%identity"
external castFunctionNode: node => functionNode = "%identity"
external castIndexNode: node => indexNode = "%identity"
@@ -50,6 +56,8 @@ let parse = (expr: string): result<node, errorValue> =>
type mathJsNode =
| MjAccessorNode(accessorNode)
| MjArrayNode(arrayNode)
| MjAssignmentNode(assignmentNode)
| MjBlockNode(blockNode)
| MjConstantNode(constantNode)
| MjFunctionNode(functionNode)
| MjIndexNode(indexNode)
@@ -58,10 +66,21 @@ type mathJsNode =
| MjParenthesisNode(parenthesisNode)
| MjSymbolNode(symbolNode)

let castNodeType = (node: node) =>
let castNodeType = (node: node) => {
let decideAssignmentNode = node => {
let iNode = node->castAssignmentNodeWIndex
if Js.null == iNode["index"] && iNode["object"]["type"] == "SymbolNode" {
node->castAssignmentNode->MjAssignmentNode->Ok
} else {
RESyntaxError("Assignment to index or property not supported")->Error
}
}

switch node["type"] {
| "AccessorNode" => node->castAccessorNode->MjAccessorNode->Ok
| "ArrayNode" => node->castArrayNode->MjArrayNode->Ok
| "AssignmentNode" => node->decideAssignmentNode
| "BlockNode" => node->castBlockNode->MjBlockNode->Ok
| "ConstantNode" => node->castConstantNode->MjConstantNode->Ok
| "FunctionNode" => node->castFunctionNode->MjFunctionNode->Ok
| "IndexNode" => node->castIndexNode->MjIndexNode->Ok
@@ -71,6 +90,19 @@ let castNodeType = (node: node) =>
| "SymbolNode" => node->castSymbolNode->MjSymbolNode->Ok
| _ => RETodo(`Argg, unhandled MathJsNode: ${node["type"]}`)->Error
}
}

external unitAsSymbolNode: unit => symbolNode = "%identity"
external unitAsString: unit => string = "%identity"

let nameOfFunctionNode = (fNode: functionNode): string => {
let name = fNode["fn"]
if Js.typeof(name) == "string" {
name->unitAsString
} else {
(name->unitAsSymbolNode)["name"]
}
}

let rec toString = (mathJsNode: mathJsNode): string => {
let toStringValue = (a: 'a): string =>
@@ -87,9 +119,10 @@ let rec toString = (mathJsNode: mathJsNode): string => {
->Js.String.concatMany("")

let toStringFunctionNode = (fnode: functionNode): string =>
`${fnode["fn"]}(${fnode["args"]->toStringNodeArray})`
`${fnode->nameOfFunctionNode}(${fnode["args"]->toStringNodeArray})`

let toStringObjectEntry = ((key: string, value: node)): string => `${key}: ${value->toStringMathJsNode}`
let toStringObjectEntry = ((key: string, value: node)): string =>
`${key}: ${value->toStringMathJsNode}`

let toStringObjectNode = (oNode: objectNode): string =>
`{${oNode["properties"]
@@ -103,16 +136,28 @@ let rec toString = (mathJsNode: mathJsNode): string => {
->Belt.Array.map(each => toStringResult(each->castNodeType))
->Js.String.concatMany("")

let toStringSymbolNode = (sNode: symbolNode): string => sNode["name"]

let toStringBlocks = (blocks: array<block>): string =>
blocks
->Belt.Array.map(each => each["node"]->castNodeType->toStringResult)
->Extra.Array.interperse("; ")
->Js.String.concatMany("")

switch mathJsNode {
| MjAccessorNode(aNode) => `${aNode["object"]->toStringMathJsNode}[${aNode["index"]->toStringIndexNode}]`
| MjAccessorNode(aNode) =>
`${aNode["object"]->toStringMathJsNode}[${aNode["index"]->toStringIndexNode}]`
| MjArrayNode(aNode) => `[${aNode["items"]->toStringNodeArray}]`
| MjAssignmentNode(aNode) =>
`${aNode["object"]->toStringSymbolNode} = ${aNode["value"]->toStringMathJsNode}`
| MjBlockNode(bNode) => `{${bNode["blocks"]->toStringBlocks}}`
| MjConstantNode(cNode) => cNode["value"]->toStringValue
| MjFunctionNode(fNode) => fNode->toStringFunctionNode
| MjIndexNode(iNode) => iNode->toStringIndexNode
| MjObjectNode(oNode) => oNode->toStringObjectNode
| MjOperatorNode(opNode) => opNode->castOperatorNodeToFunctionNode->toStringFunctionNode
| MjParenthesisNode(pNode) => `(${toStringMathJsNode(pNode["content"])})`
| MjSymbolNode(sNode) => sNode["name"]
| MjSymbolNode(sNode) => sNode->toStringSymbolNode
}
}
and toStringResult = (rMathJsNode: result<mathJsNode, errorValue>): string =>
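The file above types raw MathJs nodes by reading their "type" tag and then re-labelling them with zero-cost %identity casts. A standalone sketch of that pattern (the types and names here are illustrative only, not the ones in this commit):

// Narrow an untyped JS object by inspecting a tag field, then cast for free.
type node = {"kind": string}
type numberNode = {"kind": string, "value": float}
external castNumberNode: node => numberNode = "%identity"

let describe = (n: node): string =>
  switch n["kind"] {
  | "NumberNode" => `number ${Js.Float.toString(castNumberNode(n)["value"])}`
  | other => `unhandled node kind: ${other}`
  }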
@@ -1,11 +1,11 @@
module ErrorValue = Reducer_ErrorValue
module ExpressionValue = ReducerInterface.ExpressionValue
module ExtressionT = Reducer_Expression_T
module ExpressionT = Reducer_Expression_T
module JavaScript = Reducer_Js
module Parse = Reducer_MathJs_Parse
module Result = Belt.Result

type expression = ExtressionT.expression
type expression = ExpressionT.expression
type expressionValue = ExpressionValue.expressionValue
type errorValue = ErrorValue.errorValue

@@ -18,10 +18,19 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
)
)

let castFunctionNode = fNode => {
let fn = fNode["fn"]->ExpressionValue.EvSymbol->ExtressionT.EValue
let toEvCallValue = (name: string): expression =>
name->ExpressionValue.EvCall->ExpressionT.EValue
let toEvSymbolValue = (name: string): expression =>
name->ExpressionValue.EvSymbol->ExpressionT.EValue

let passToFunction = (fName: string, rLispArgs): result<expression, errorValue> => {
let fn = fName->toEvCallValue
rLispArgs->Result.flatMap(lispArgs => list{fn, ...lispArgs}->ExpressionT.EList->Ok)
}

let caseFunctionNode = fNode => {
let lispArgs = fNode["args"]->Belt.List.fromArray->fromNodeList
lispArgs->Result.map(argsCode => list{fn, ...argsCode}->ExtressionT.EList)
passToFunction(fNode->Parse.nameOfFunctionNode, lispArgs)
}

let caseObjectNode = oNode => {
@@ -34,15 +43,16 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
fromNode(value)->Result.map(valueExpression => {
let entryCode =
list{
key->ExpressionValue.EvString->ExtressionT.EValue,
key->ExpressionValue.EvString->ExpressionT.EValue,
valueExpression,
}->ExtressionT.EList
}->ExpressionT.EList
list{entryCode, ...acc}
})
)
)
let lispName = "$constructRecord"->ExpressionValue.EvSymbol->ExtressionT.EValue
rargs->Result.map(args => list{lispName, ExtressionT.EList(args)}->ExtressionT.EList)
rargs->Result.flatMap(args =>
passToFunction("$constructRecord", list{ExpressionT.EList(args)}->Ok)
) // $constructRecord gets a single argument: List of key-value pairs
}

oNode["properties"]->Js.Dict.entries->Belt.List.fromArray->fromObjectEntries
@@ -54,33 +64,75 @@ let rec fromNode = (mathJsNode: Parse.node): result<expression, errorValue> =>
Ok(list{}),
(racc, currentPropertyMathJsNode) =>
racc->Result.flatMap(acc =>
fromNode(currentPropertyMathJsNode)->Result.map(propertyCode => list{propertyCode, ...acc})
fromNode(currentPropertyMathJsNode)->Result.map(propertyCode => list{
propertyCode,
...acc,
})
),
)
rpropertyCodeList->Result.map(propertyCodeList => ExtressionT.EList(propertyCodeList))
rpropertyCodeList->Result.map(propertyCodeList => ExpressionT.EList(propertyCodeList))
}

let caseAccessorNode = (objectNode, indexNode) => {
let fn = "$atIndex"->ExpressionValue.EvSymbol->ExtressionT.EValue

caseIndexNode(indexNode)->Result.flatMap(indexCode => {
fromNode(objectNode)->Result.map(objectCode =>
list{fn, objectCode, indexCode}->ExtressionT.EList
fromNode(objectNode)->Result.flatMap(objectCode =>
passToFunction("$atIndex", list{objectCode, indexCode}->Ok)
)
})
}

switch typedMathJsNode {
| MjArrayNode(aNode) =>
aNode["items"]->Belt.List.fromArray->fromNodeList->Result.map(list => ExtressionT.EList(list))
| MjConstantNode(cNode) =>
cNode["value"]->JavaScript.Gate.jsToEv->Result.map(v => v->ExtressionT.EValue)
| MjFunctionNode(fNode) => fNode->castFunctionNode
| MjOperatorNode(opNode) => opNode->Parse.castOperatorNodeToFunctionNode->castFunctionNode
| MjParenthesisNode(pNode) => pNode["content"]->fromNode
| MjAccessorNode(aNode) => caseAccessorNode(aNode["object"], aNode["index"])
| MjObjectNode(oNode) => caseObjectNode(oNode)
| MjSymbolNode(sNode) => sNode["name"]->ExpressionValue.EvSymbol->ExtressionT.EValue->Ok
| MjIndexNode(iNode) => caseIndexNode(iNode)
let caseAssignmentNode = aNode => {
let symbol = aNode["object"]["name"]->toEvSymbolValue
let rValueExpression = fromNode(aNode["value"])
rValueExpression->Result.flatMap(valueExpression => {
let lispArgs = list{symbol, valueExpression}->Ok
passToFunction("$let", lispArgs)
})
}

let caseArrayNode = aNode => {
aNode["items"]->Belt.List.fromArray->fromNodeList->Result.map(list => ExpressionT.EList(list))
}

let caseBlockNode = (bNode): result<expression, errorValue> => {
let blocks = bNode["blocks"]
let initialBindings = passToFunction("$$bindings", list{}->Ok)
let lastIndex = Belt.Array.length(blocks) - 1
blocks->Belt.Array.reduceWithIndex(initialBindings, (rPreviousBindings, block, i) => {
rPreviousBindings->Result.flatMap(previousBindings => {
let node = block["node"]
let rStatement: result<expression, errorValue> = node->fromNode
let bindName = if i == lastIndex {
"$$bindExpression"
} else {
"$$bindStatement"
}
rStatement->Result.flatMap((statement: expression) => {
let lispArgs = list{previousBindings, statement}->Ok
passToFunction(bindName, lispArgs)
})
})
})
}

let rFinalExpression: result<expression, errorValue> = switch typedMathJsNode {
| MjAccessorNode(aNode) => caseAccessorNode(aNode["object"], aNode["index"])
| MjArrayNode(aNode) => caseArrayNode(aNode)
| MjAssignmentNode(aNode) => caseAssignmentNode(aNode)
| MjSymbolNode(sNode) => {
let expr: expression = toEvSymbolValue(sNode["name"])
let rExpr: result<expression, errorValue> = expr->Ok
rExpr
}
| MjBlockNode(bNode) => caseBlockNode(bNode)
// | MjBlockNode(bNode) => "statement"->toEvSymbolValue->Ok
| MjConstantNode(cNode) =>
cNode["value"]->JavaScript.Gate.jsToEv->Result.flatMap(v => v->ExpressionT.EValue->Ok)
| MjFunctionNode(fNode) => fNode->caseFunctionNode
| MjIndexNode(iNode) => caseIndexNode(iNode)
| MjObjectNode(oNode) => caseObjectNode(oNode)
| MjOperatorNode(opNode) => opNode->Parse.castOperatorNodeToFunctionNode->caseFunctionNode
| MjParenthesisNode(pNode) => pNode["content"]->fromNode
}
rFinalExpression
})
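caseBlockNode above folds a block's statements into a chain of $$bindStatement calls that ends in a $$bindExpression. Under that reading, a two-statement block such as "x = 1; x + 1" would expand roughly as in this standalone sketch (the type and constructor names are illustrative, not this package's):

type rec sketch =
  | Call(string)
  | Symbol(string)
  | Number(float)
  | List(list<sketch>)

// "x = 1; x + 1" — the inner call threads bindings, the outer one yields the value.
let block: sketch = List(list{
  Call("$$bindExpression"),
  List(list{
    Call("$$bindStatement"),
    List(list{Call("$$bindings")}), // start from the default bindings
    List(list{Call("$let"), Symbol("x"), Number(1.)}), // x = 1
  }),
  List(list{Call("add"), Symbol("x"), Number(1.)}), // x + 1
})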
@@ -7,25 +7,30 @@ module ErrorValue = Reducer_ErrorValue

@genType
type rec expressionValue =
| EvArray(array<expressionValue>)
| EvBool(bool)
| EvCall(string) // External function call
| EvDistribution(GenericDist_Types.genericDist)
| EvNumber(float)
| EvRecord(Js.Dict.t<expressionValue>)
| EvString(string)
| EvSymbol(string)
| EvArray(array<expressionValue>)
| EvRecord(Js.Dict.t<expressionValue>)
| EvDistribution(GenericDist_Types.genericDist)

type functionCall = (string, array<expressionValue>)

let rec toString = aValue =>
switch aValue {
| EvBool(aBool) => Js.String.make(aBool)
| EvCall(fName) => `:${fName}`
| EvNumber(aNumber) => Js.String.make(aNumber)
| EvString(aString) => `'${aString}'`
| EvSymbol(aString) => `:${aString}`
| EvArray(anArray) => {
let args =
anArray->Belt.Array.map(each => toString(each))->Extra_Array.interperse(", ")->Js.String.concatMany("")
anArray
->Belt.Array.map(each => toString(each))
->Extra_Array.interperse(", ")
->Js.String.concatMany("")
`[${args}]`
}
| EvRecord(aRecord) => {
@@ -37,12 +42,13 @@ let rec toString = aValue =>
->Js.String.concatMany("")
`{${pairs}}`
}
| EvDistribution(dist) => `${GenericDist.toString(dist)}`
| EvDistribution(dist) => GenericDist.toString(dist)
}

let toStringWithType = aValue =>
switch aValue {
| EvBool(_) => `Bool::${toString(aValue)}`
| EvCall(_) => `Call::${toString(aValue)}`
| EvNumber(_) => `Number::${toString(aValue)}`
| EvString(_) => `String::${toString(aValue)}`
| EvSymbol(_) => `Symbol::${toString(aValue)}`
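The toString above is the usual recursive fold over a variant. A cut-down standalone version of the same pattern (illustrative type, not the one in this commit):

type rec value =
  | VNumber(float)
  | VString(string)
  | VArray(array<value>)

// Render a value, recursing into arrays and joining the parts with ", ".
let rec toString = (v: value): string =>
  switch v {
  | VNumber(x) => Js.Float.toString(x)
  | VString(s) => `'${s}'`
  | VArray(items) => `[${items->Belt.Array.map(toString)->Js.Array2.joinWith(", ")}]`
  }

// toString(VArray([VNumber(1.), VString("a")])) == "[1, 'a']"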
@@ -160,7 +160,8 @@ let dispatchToGenericOutput = (call: ExpressionValue.functionCall): option<
| ("mean", [EvDistribution(dist)]) => Helpers.toFloatFn(#Mean, dist)
| ("toString", [EvDistribution(dist)]) => Helpers.toStringFn(ToString, dist)
| ("toSparkline", [EvDistribution(dist)]) => Helpers.toStringFn(ToSparkline(20), dist)
| ("toSparkline", [EvDistribution(dist), EvNumber(n)]) => Helpers.toStringFn(ToSparkline(Belt.Float.toInt(n)), dist)
| ("toSparkline", [EvDistribution(dist), EvNumber(n)]) =>
Helpers.toStringFn(ToSparkline(Belt.Float.toInt(n)), dist)
| ("exp", [EvDistribution(a)]) =>
// https://mathjs.org/docs/reference/functions/exp.html
Helpers.twoDiststoDistFn(Algebraic, "pow", GenericDist.fromFloat(Math.e), a)->Some
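dispatchToGenericOutput routes calls by pattern matching on the (functionName, arguments) pair, arity included. A minimal standalone sketch of that dispatch style (hypothetical functions, not the library's):

type value = Number(float) | Text(string)

// Match on both the name and the shape of the argument array; fall through to None.
let dispatch = ((fname, args): (string, array<value>)): option<value> =>
  switch (fname, args) {
  | ("double", [Number(x)]) => Some(Number(x *. 2.))
  | ("greet", [Text(name)]) => Some(Text(`hello ${name}`))
  | _ => None
  }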
@@ -32,7 +32,7 @@ let makeSampleSetDist = SampleSetDist.make
let evaluate = Reducer.evaluate

@genType
type expressionValue = Reducer_Expression.expressionValue
type expressionValue = ReducerInterface_ExpressionValue.expressionValue

@genType
type errorValue = Reducer_ErrorValue.errorValue
@@ -179,14 +179,15 @@ module R = {
}

module R2 = {
let fmap = (a,b) => R.fmap(b,a)
let bind = (a, b) => R.bind(b, a)
let fmap = (a, b) => R.fmap(b, a)
let bind = (a, b) => R.bind(b, a)

//Converts result type to change error type only
let errMap = (a, map) => switch(a){
let errMap = (a, map) =>
switch a {
| Ok(r) => Ok(r)
| Error(e) => map(e)
}
}
}

let safe_fn_of_string = (fn, s: string): option<'a> =>
@@ -300,7 +301,6 @@ module A = {
|> Rationale.Result.return
}


// This zips while taking the longest elements of each array.
let zipMaxLength = (array1, array2) => {
let maxLength = Int.max(length(array1), length(array2))
@@ -456,7 +456,6 @@ module A = {
let diff = (arr: array<float>): array<float> =>
Belt.Array.zipBy(arr, Belt.Array.sliceToEnd(arr, 1), (left, right) => right -. left)


exception RangeError(string)
let range = (min: float, max: float, n: int): array<float> =>
switch n {
@@ -474,7 +473,7 @@ module A = {
}

module A2 = {
let fmap = (a,b) => A.fmap(b,a)
let fmap = (a, b) => A.fmap(b, a)
let joinWith = (a, b) => A.joinWith(b, a)
}
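errMap above keeps Ok values and lets the caller rewrite only the error channel. A hedged, standalone usage sketch (a local copy of the helper, since the surrounding module is not importable here):

let errMap = (r, f) =>
  switch r {
  | Ok(x) => Ok(x)
  | Error(e) => f(e)
  }

// Turn a numeric error code into a friendlier message, leaving Ok values untouched.
let withNicerError: result<int, string> =
  errMap(Error(404), code => Error(`HTTP error ${Belt.Int.toString(code)}`))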
@@ -36,8 +36,8 @@ module Exponential = {
@module("jstat") @scope("exponential") external pdf: (float, float) => float = "pdf"
@module("jstat") @scope("exponential") external cdf: (float, float) => float = "cdf"
@module("jstat") @scope("exponential") external inv: (float, float) => float = "inv"
@module("jstat") @scope("exponential") external sample: (float) => float = "sample"
@module("jstat") @scope("exponential") external mean: (float) => float = "mean"
@module("jstat") @scope("exponential") external sample: float => float = "sample"
@module("jstat") @scope("exponential") external mean: float => float = "mean"
}

module Cauchy = {
@@ -56,7 +56,6 @@ module Triangular = {
@module("jstat") @scope("triangular") external mean: (float, float, float) => float = "mean"
}


module Pareto = {
@module("jstat") @scope("pareto") external pdf: (float, float, float) => float = "pdf"
@module("jstat") @scope("pareto") external cdf: (float, float, float) => float = "cdf"
@@ -66,20 +65,20 @@
module Poisson = {
@module("jstat") @scope("poisson") external pdf: (float, float) => float = "pdf"
@module("jstat") @scope("poisson") external cdf: (float, float) => float = "cdf"
@module("jstat") @scope("poisson") external sample: (float) => float = "sample"
@module("jstat") @scope("poisson") external mean: (float) => float = "mean"
@module("jstat") @scope("poisson") external sample: float => float = "sample"
@module("jstat") @scope("poisson") external mean: float => float = "mean"
}

module Weibull = {
@module("jstat") @scope("weibull") external pdf: (float, float, float) => float = "pdf"
@module("jstat") @scope("weibull") external cdf: (float, float,float ) => float = "cdf"
@module("jstat") @scope("weibull") external sample: (float,float) => float = "sample"
@module("jstat") @scope("weibull") external mean: (float,float) => float = "mean"
@module("jstat") @scope("weibull") external cdf: (float, float, float) => float = "cdf"
@module("jstat") @scope("weibull") external sample: (float, float) => float = "sample"
@module("jstat") @scope("weibull") external mean: (float, float) => float = "mean"
}

module Binomial = {
@module("jstat") @scope("binomial") external pdf: (float, float, float) => float = "pdf"
@module("jstat") @scope("binomial") external cdf: (float, float,float ) => float = "cdf"
@module("jstat") @scope("binomial") external cdf: (float, float, float) => float = "cdf"
}

@module("jstat") external sum: array<float> => float = "sum"
yarn.lock
@@ -4142,7 +4142,7 @@
"@types/source-list-map" "*"
source-map "^0.7.3"

"@types/webpack@^4.41.26", "@types/webpack@^4.41.32", "@types/webpack@^4.41.8":
"@types/webpack@^4.41.26", "@types/webpack@^4.41.8":
version "4.41.32"
resolved "https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.32.tgz#a7bab03b72904070162b2f169415492209e94212"
integrity sha512-cb+0ioil/7oz5//7tZUSwbrSAN/NWHrQylz5cW8G0dWTcF/g+/dSdMlKVZspBYuMAN1+WnwHrkxiRrLcwd0Heg==
@@ -4154,6 +4154,15 @@
anymatch "^3.0.0"
source-map "^0.6.0"

"@types/webpack@^5.28.0":
version "5.28.0"
resolved "https://registry.yarnpkg.com/@types/webpack/-/webpack-5.28.0.tgz#78dde06212f038d77e54116cfe69e88ae9ed2c03"
integrity sha512-8cP0CzcxUiFuA9xGJkfeVpqmWTk9nx6CWwamRGCj95ph1SmlRRk9KlCZ6avhCbZd4L68LvYT6l1kpdEnQXrF8w==
dependencies:
"@types/node" "*"
tapable "^2.2.0"
webpack "^5"

"@types/ws@^8.5.1":
version "8.5.3"
resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d"
@@ -17499,7 +17508,7 @@ webpack@4:
watchpack "^1.7.4"
webpack-sources "^1.4.1"

webpack@^5.64.4, webpack@^5.70.0, webpack@^5.72.0, webpack@^5.9.0:
webpack@^5, webpack@^5.64.4, webpack@^5.70.0, webpack@^5.72.0, webpack@^5.9.0:
version "5.72.0"
resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.72.0.tgz#f8bc40d9c6bb489a4b7a8a685101d6022b8b6e28"
integrity sha512-qmSmbspI0Qo5ld49htys8GY9XhS9CGqFoHTsOVAnjBdg0Zn79y135R+k4IR4rKK6+eKaabMhJwiVB7xw0SJu5w==