Merge pull request #965 from quantified-uncertainty/experiment-10.0rc1

bump `rescript` and `@rescript/std` to `10.0.1`

Commit 56771820aa
@ -3,7 +3,7 @@ lib
*.bs.js
*.gen.tsx
.nyc_output/
_coverage/
coverage/
.cache/
Reducer_Peggy_GeneratedParser.js
ReducerProject_IncludeParser.js

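Almost everything below is mechanical reformatting from the formatter that ships with ReScript 10.0.1: call expressions that no longer fit on one line are broken onto one argument per line, so `test`, `describe`, and `testAll` callbacks get rewrapped. A minimal before/after sketch of the pattern (hypothetical test, not taken from the diff):

// before (old formatter output)
test("example", () => {
  (1 + 2)->expect->toBe(3)
})

// after (ReScript 10.0.1 formatter output)
test(
  "example",
  () => {
    (1 + 2)->expect->toBe(3)
  },
)
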
@ -32,25 +32,29 @@ describe("dotSubtract", () => {
*/
Skip.test("mean of normal minus exponential (property)", () => {
assert_(
property2(float_(), floatRange(1e-5, 1e5), (mean, rate) => {
// We limit ourselves to stdev=1 so that the integral is trivial
let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
~env,
mkNormal(mean, 1.0),
mkExponential(rate),
)
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
// according to algebra of random variables,
let meanAnalytical =
mean -.
SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
"On trusted input this should never happen",
property2(
float_(),
floatRange(1e-5, 1e5),
(mean, rate) => {
// We limit ourselves to stdev=1 so that the integral is trivial
let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
~env,
mkNormal(mean, 1.0),
mkExponential(rate),
)
switch meanResult {
| Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
| Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
}
}),
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
// according to algebra of random variables,
let meanAnalytical =
mean -.
SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
"On trusted input this should never happen",
)
switch meanResult {
| Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
| Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
}
},
),
)
pass
})

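The property above leans on one line of distribution algebra; spelled out as a sketch (names are illustrative, not from the codebase), the mean of normal(mean, 1) pointwise-minus exponential(rate) is mean - 1/rate:

// E[X - Y] = E[X] - E[Y]; for Y ~ Exponential(rate), E[Y] = 1 / rate
let analyticalMean = (normalMean, rate) => normalMean -. 1.0 /. rate
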
@ -40,51 +40,60 @@ let algebraicPower = algebraicPower(~env)
|
|||
|
||||
describe("(Algebraic) addition of distributions", () => {
|
||||
describe("mean", () => {
|
||||
test("normal(mean=5) + normal(mean=20)", () => {
|
||||
normalDist5
|
||||
->algebraicAdd(normalDist20)
|
||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
->expect
|
||||
->toBe(Some(2.5e1))
|
||||
})
|
||||
test(
|
||||
"normal(mean=5) + normal(mean=20)",
|
||||
() => {
|
||||
normalDist5
|
||||
->algebraicAdd(normalDist20)
|
||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
->expect
|
||||
->toBe(Some(2.5e1))
|
||||
},
|
||||
)
|
||||
|
||||
test("uniform(low=9, high=10) + beta(alpha=2, beta=5)", () => {
|
||||
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
||||
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
||||
let received =
|
||||
uniformDist
|
||||
->algebraicAdd(betaDist)
|
||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=2.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean)
|
||||
}
|
||||
})
|
||||
test("beta(alpha=2, beta=5) + uniform(low=9, high=10)", () => {
|
||||
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
||||
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
||||
let received =
|
||||
betaDist
|
||||
->algebraicAdd(uniformDist)
|
||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=2.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean)
|
||||
}
|
||||
})
|
||||
test(
|
||||
"uniform(low=9, high=10) + beta(alpha=2, beta=5)",
|
||||
() => {
|
||||
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
||||
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
||||
let received =
|
||||
uniformDist
|
||||
->algebraicAdd(betaDist)
|
||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=2.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean)
|
||||
}
|
||||
},
|
||||
)
|
||||
test(
|
||||
"beta(alpha=2, beta=5) + uniform(low=9, high=10)",
|
||||
() => {
|
||||
// let uniformMean = (9.0 +. 10.0) /. 2.0
|
||||
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
|
||||
let received =
|
||||
betaDist
|
||||
->algebraicAdd(uniformDist)
|
||||
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=2.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean)
|
||||
}
|
||||
},
|
||||
)
|
||||
})
|
||||
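The mean assertions above all instantiate the same linearity fact, E[X + Y] = E[X] + E[Y], so normal(mean=5) + normal(mean=20) must have mean 25. A quick stand-alone Monte Carlo check of that case (sketch only, assuming nothing beyond Js.Math and Belt from the standard library):

// Box-Muller sample from normal(mean, stdev); illustrative helper, not project code
let sampleNormal = (mean, stdev) => {
  let u1 = 1.0 -. Js.Math.random() // keep u1 > 0 so the log stays finite
  let u2 = Js.Math.random()
  mean +. stdev *. Js.Math.sqrt(-2.0 *. Js.Math.log(u1)) *. Js.Math.cos(2.0 *. Js.Math._PI *. u2)
}

let n = 10_000
let empiricalMean =
  Belt.Array.makeBy(n, _ => sampleNormal(5.0, 1.0) +. sampleNormal(20.0, 1.0))
  ->Belt.Array.reduce(0.0, (acc, x) => acc +. x) /. Belt.Int.toFloat(n)
// empiricalMean lands near 25.0, the value the toBe(Some(2.5e1)) assertion expects
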
describe("pdf", () => {
|
||||
// TEST IS WRONG. SEE STDEV ADDITION EXPRESSION.
|
||||
|
@ -122,247 +131,282 @@ describe("(Algebraic) addition of distributions", () => {
|
|||
}
|
||||
},
|
||||
)
|
||||
test("(normal(mean=10) + normal(mean=10)).pdf(1.9e1)", () => {
|
||||
let received =
|
||||
normalDist20
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist10
|
||||
->algebraicAdd(normalDist10)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
|
||||
test(
|
||||
"(normal(mean=10) + normal(mean=10)).pdf(1.9e1)",
|
||||
() => {
|
||||
let received =
|
||||
normalDist20
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist10
|
||||
->algebraicAdd(normalDist10)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)", () => {
|
||||
let received =
|
||||
uniformDist
|
||||
->algebraicAdd(betaDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=4.
|
||||
// This value was calculated by a python script
|
||||
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
|
||||
}
|
||||
})
|
||||
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)", () => {
|
||||
let received =
|
||||
betaDist
|
||||
->algebraicAdd(uniformDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
test(
|
||||
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)",
|
||||
() => {
|
||||
let received =
|
||||
uniformDist
|
||||
->algebraicAdd(betaDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=4.
|
||||
// This value was calculated by a python script
|
||||
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
|
||||
}
|
||||
},
|
||||
)
|
||||
test(
|
||||
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)",
|
||||
() => {
|
||||
let received =
|
||||
betaDist
|
||||
->algebraicAdd(uniformDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
|
||||
}
|
||||
},
|
||||
)
|
||||
})
|
||||
describe("cdf", () => {
|
||||
testAll("(normal(mean=5) + normal(mean=5)).cdf (imprecise)", list{6e0, 8e0, 1e1, 1.2e1}, x => {
|
||||
let received =
|
||||
normalDist10
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist5
|
||||
->algebraicAdd(normalDist5)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
testAll(
|
||||
"(normal(mean=5) + normal(mean=5)).cdf (imprecise)",
|
||||
list{6e0, 8e0, 1e1, 1.2e1},
|
||||
x => {
|
||||
let received =
|
||||
normalDist10
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist5
|
||||
->algebraicAdd(normalDist5)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
test("(normal(mean=10) + normal(mean=10)).cdf(1.25e1)", () => {
|
||||
let received =
|
||||
normalDist20
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist10
|
||||
->algebraicAdd(normalDist10)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
|
||||
},
|
||||
)
|
||||
test(
|
||||
"(normal(mean=10) + normal(mean=10)).cdf(1.25e1)",
|
||||
() => {
|
||||
let received =
|
||||
normalDist20
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist10
|
||||
->algebraicAdd(normalDist10)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)", () => {
|
||||
let received =
|
||||
uniformDist
|
||||
->algebraicAdd(betaDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// The value was calculated externally using a python script
|
||||
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
|
||||
}
|
||||
})
|
||||
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)", () => {
|
||||
let received =
|
||||
betaDist
|
||||
->algebraicAdd(uniformDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// The value was calculated externally using a python script
|
||||
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
test(
|
||||
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)",
|
||||
() => {
|
||||
let received =
|
||||
uniformDist
|
||||
->algebraicAdd(betaDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// The value was calculated externally using a python script
|
||||
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
|
||||
}
|
||||
},
|
||||
)
|
||||
test(
|
||||
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)",
|
||||
() => {
|
||||
let received =
|
||||
betaDist
|
||||
->algebraicAdd(uniformDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// The value was calculated externally using a python script
|
||||
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
|
||||
}
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
describe("inv", () => {
|
||||
testAll("(normal(mean=5) + normal(mean=5)).inv (imprecise)", list{5e-2, 4.2e-3, 9e-3}, x => {
|
||||
let received =
|
||||
normalDist10
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist5
|
||||
->algebraicAdd(normalDist5)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
testAll(
|
||||
"(normal(mean=5) + normal(mean=5)).inv (imprecise)",
|
||||
list{5e-2, 4.2e-3, 9e-3},
|
||||
x => {
|
||||
let received =
|
||||
normalDist10
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist5
|
||||
->algebraicAdd(normalDist5)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
test("(normal(mean=10) + normal(mean=10)).inv(1e-1)", () => {
|
||||
let received =
|
||||
normalDist20
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist10
|
||||
->algebraicAdd(normalDist10)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
||||
},
|
||||
)
|
||||
test(
|
||||
"(normal(mean=10) + normal(mean=10)).inv(1e-1)",
|
||||
() => {
|
||||
let received =
|
||||
normalDist20
|
||||
->Ok
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
let calculated =
|
||||
normalDist10
|
||||
->algebraicAdd(normalDist10)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toOption
|
||||
->E.O.flatten
|
||||
switch received {
|
||||
| None =>
|
||||
"this branch occurs when the dispatch to Jstat on trusted input fails."
|
||||
->expect
|
||||
->toBe("never")
|
||||
| Some(x) =>
|
||||
switch calculated {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)", () => {
|
||||
let received =
|
||||
uniformDist
|
||||
->algebraicAdd(betaDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=2.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0)
|
||||
}
|
||||
})
|
||||
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)", () => {
|
||||
let received =
|
||||
betaDist
|
||||
->algebraicAdd(uniformDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=2.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0)
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
test(
|
||||
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)",
|
||||
() => {
|
||||
let received =
|
||||
uniformDist
|
||||
->algebraicAdd(betaDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=2.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0)
|
||||
}
|
||||
},
|
||||
)
|
||||
test(
|
||||
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)",
|
||||
() => {
|
||||
let received =
|
||||
betaDist
|
||||
->algebraicAdd(uniformDist)
|
||||
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
|
||||
->E.R2.fmap(run)
|
||||
->E.R2.fmap(toFloat)
|
||||
->E.R.toExn("Expected float", _)
|
||||
switch received {
|
||||
| None => "algebraicAdd has"->expect->toBe("failed")
|
||||
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
|
||||
// sometimes it works with ~digits=2.
|
||||
| Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0)
|
||||
}
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
|
|
|
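A fact worth keeping in mind when reading the pdf/cdf/inv comparisons above: for independent normals the algebraic sum is again normal, with means added and standard deviations combined in quadrature, which is what the earlier "TEST IS WRONG. SEE STDEV ADDITION EXPRESSION." note points at. As a sketch (illustrative helper, not project code):

// normal(m1, s1) + normal(m2, s2) = normal(m1 + m2, sqrt(s1^2 + s2^2)) for independent operands
let sumOfIndependentNormals = ((m1, s1), (m2, s2)) =>
  (m1 +. m2, Js.Math.sqrt(s1 ** 2.0 +. s2 ** 2.0))
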
@ -87,14 +87,22 @@ describe("Means are invariant", () => {
|
|||
let testAddInvariant = (t1, t2) =>
|
||||
E.R.liftM2(testAdditionMean, t1, t2)->E.R.toExn("Means were not invariant", _)
|
||||
|
||||
testAll("with two of the same distribution", distributions, dist => {
|
||||
testAddInvariant(dist, dist)
|
||||
})
|
||||
testAll(
|
||||
"with two of the same distribution",
|
||||
distributions,
|
||||
dist => {
|
||||
testAddInvariant(dist, dist)
|
||||
},
|
||||
)
|
||||
|
||||
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
|
||||
let (dist1, dist2) = dists
|
||||
testAddInvariant(dist1, dist2)
|
||||
})
|
||||
testAll(
|
||||
"with two different distributions",
|
||||
pairsOfDifferentDistributions,
|
||||
dists => {
|
||||
let (dist1, dist2) = dists
|
||||
testAddInvariant(dist1, dist2)
|
||||
},
|
||||
)
|
||||
|
||||
testAll(
|
||||
"with two different distributions in swapped order",
|
||||
|
@ -116,14 +124,22 @@ describe("Means are invariant", () => {
|
|||
let testSubtractInvariant = (t1, t2) =>
|
||||
E.R.liftM2(testSubtractionMean, t1, t2)->E.R.toExn("Means were not invariant", _)
|
||||
|
||||
testAll("with two of the same distribution", distributions, dist => {
|
||||
testSubtractInvariant(dist, dist)
|
||||
})
|
||||
testAll(
|
||||
"with two of the same distribution",
|
||||
distributions,
|
||||
dist => {
|
||||
testSubtractInvariant(dist, dist)
|
||||
},
|
||||
)
|
||||
|
||||
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
|
||||
let (dist1, dist2) = dists
|
||||
testSubtractInvariant(dist1, dist2)
|
||||
})
|
||||
testAll(
|
||||
"with two different distributions",
|
||||
pairsOfDifferentDistributions,
|
||||
dists => {
|
||||
let (dist1, dist2) = dists
|
||||
testSubtractInvariant(dist1, dist2)
|
||||
},
|
||||
)
|
||||
|
||||
testAll(
|
||||
"with two different distributions in swapped order",
|
||||
|
@ -145,14 +161,22 @@ describe("Means are invariant", () => {
|
|||
let testMultiplicationInvariant = (t1, t2) =>
|
||||
E.R.liftM2(testMultiplicationMean, t1, t2)->E.R.toExn("Means were not invariant", _)
|
||||
|
||||
testAll("with two of the same distribution", distributions, dist => {
|
||||
testMultiplicationInvariant(dist, dist)
|
||||
})
|
||||
testAll(
|
||||
"with two of the same distribution",
|
||||
distributions,
|
||||
dist => {
|
||||
testMultiplicationInvariant(dist, dist)
|
||||
},
|
||||
)
|
||||
|
||||
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
|
||||
let (dist1, dist2) = dists
|
||||
testMultiplicationInvariant(dist1, dist2)
|
||||
})
|
||||
testAll(
|
||||
"with two different distributions",
|
||||
pairsOfDifferentDistributions,
|
||||
dists => {
|
||||
let (dist1, dist2) = dists
|
||||
testMultiplicationInvariant(dist1, dist2)
|
||||
},
|
||||
)
|
||||
|
||||
testAll(
|
||||
"with two different distributions in swapped order",
|
||||
|
|
|
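For reference, the invariants that the three "Means are invariant" blocks above keep re-checking, written out as a single helper (sketch only; independence matters only for the product case):

let expectedMean = (op, meanX, meanY) =>
  switch op {
  | #Add => meanX +. meanY
  | #Subtract => meanX -. meanY
  | #Multiply => meanX *. meanY
  }
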
@ -17,10 +17,9 @@ describe("klDivergence: continuous -> continuous -> float", () => {
let answer =
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
let prediction =
uniformMakeR(
lowPrediction,
highPrediction,
)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(
s => DistributionTypes.ArgumentError(s),
)
// integral along the support of the answer of answer.pdf(x) times log of prediction.pdf(x) divided by answer.pdf(x) dx
let analyticalKl = Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))
let kl = E.R.liftJoin2(klDivergence, prediction, answer)

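The analyticalKl line above is the closed form for the KL divergence between two uniforms when the answer's support sits inside the prediction's support; as a standalone sketch:

// KL(answer || prediction) = log((highPrediction - lowPrediction) / (highAnswer - lowAnswer)),
// valid when [lowAnswer, highAnswer] is contained in [lowPrediction, highPrediction]
let klOfUniforms = (~lowAnswer, ~highAnswer, ~lowPrediction, ~highPrediction) =>
  Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))
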
@ -183,9 +182,9 @@ describe("combineAlongSupportOfSecondArgument0", () => {
let answer =
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
let prediction =
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
s,
))
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(
s => DistributionTypes.ArgumentError(s),
)
let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)

@ -3,7 +3,7 @@ open Expect
open TestHelpers

// TODO: use Normal.make (but preferably after the new validation dispatch is in)
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean, stdev}))

describe("(Symbolic) normalize", () => {
testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {

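The mkNormal change above (and several like it below) is ReScript record punning: when a field name and the variable holding its value coincide, {mean: mean, stdev: stdev} can be shortened to {mean, stdev} with no change in meaning. A tiny self-contained sketch:

type normalParams = {mean: float, stdev: float}
let make = (mean, stdev) => {mean, stdev} // identical to {mean: mean, stdev: stdev}
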
@ -47,10 +47,9 @@ describe("(Symbolic) mean", () => {
tup => {
let (low, medium, high) = tup
let meanValue = run(
FromDist(
#ToFloat(#Mean),
DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
),
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Triangular({low, medium, high}))),
)
meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
},

@ -63,7 +60,7 @@ describe("(Symbolic) mean", () => {
tup => {
let (alpha, beta) = tup
let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha, beta}))),
)
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
},

@ -84,8 +81,8 @@ describe("(Symbolic) mean", () => {
let (mean, stdev) = tup
let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
let meanValue =
betaDistribution->E.R2.fmap(d =>
run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic))
betaDistribution->E.R2.fmap(
d => run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic)),
)
switch meanValue {
| Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)

@ -100,7 +97,7 @@ describe("(Symbolic) mean", () => {
tup => {
let (mu, sigma) = tup
let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu, sigma}))),
)
meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
},

@ -112,7 +109,7 @@ describe("(Symbolic) mean", () => {
tup => {
let (low, high) = tup
let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low, high}))),
)
meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
},

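For quick reference, the closed-form means that the (Symbolic) mean tests above assert, written out as plain helpers (sketch only):

let triangularMean = (low, medium, high) => (low +. medium +. high) /. 3.0
let betaMean = (alpha, beta) => 1.0 /. (1.0 +. beta /. alpha) // equals alpha / (alpha + beta)
let lognormalMean = (mu, sigma) => Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)
let uniformMean = (low, high) => (low +. high) /. 2.0
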
@ -33,12 +33,18 @@ describe("Bindings", () => {
let value2 = Reducer_T.IEvNumber(5.)
let extendedBindings = bindings->Bindings.extend->Bindings.set("value", value2)

test("get on extended", () => {
expect(extendedBindings->Bindings.get("value")) == Some(value2)
})
test(
"get on extended",
() => {
expect(extendedBindings->Bindings.get("value")) == Some(value2)
},
)

test("get on original", () => {
expect(bindings->Bindings.get("value")) == Some(value)
})
test(
"get on original",
() => {
expect(bindings->Bindings.get("value")) == Some(value)
},
)
})
})

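The pair of tests above pins down the scoping rule: setting a name on an extended scope shadows it there while leaving the parent untouched. Restated with only the calls that already appear in this hunk:

let extended = bindings->Bindings.extend->Bindings.set("value", value2)
// extended->Bindings.get("value") == Some(value2)  (the shadowing value)
// bindings->Bindings.get("value") == Some(value)   (the original is unchanged)
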
@ -40,14 +40,23 @@ describe("Namespace", () => {
|
|||
|
||||
let nsMerged = Namespace.mergeMany([ns, ns1, ns2])
|
||||
|
||||
test("merge many 1", () => {
|
||||
expect(nsMerged->Namespace.get("x1")) == Some(x1)
|
||||
})
|
||||
test("merge many 2", () => {
|
||||
expect(nsMerged->Namespace.get("x4")) == Some(x4)
|
||||
})
|
||||
test("merge many 3", () => {
|
||||
expect(nsMerged->Namespace.get("value")) == Some(value)
|
||||
})
|
||||
test(
|
||||
"merge many 1",
|
||||
() => {
|
||||
expect(nsMerged->Namespace.get("x1")) == Some(x1)
|
||||
},
|
||||
)
|
||||
test(
|
||||
"merge many 2",
|
||||
() => {
|
||||
expect(nsMerged->Namespace.get("x4")) == Some(x4)
|
||||
},
|
||||
)
|
||||
test(
|
||||
"merge many 3",
|
||||
() => {
|
||||
expect(nsMerged->Namespace.get("value")) == Some(value)
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -75,29 +75,32 @@ describe("Peggy to Expression", () => {
|
|||
testToExpression("false ? 1 : 0", "false ? (1) : (0)", ~v="0", ())
|
||||
testToExpression("true ? 1 : false ? 2 : 0", "true ? (1) : (false ? (2) : (0))", ~v="1", ()) // nested ternary
|
||||
testToExpression("false ? 1 : false ? 2 : 0", "false ? (1) : (false ? (2) : (0))", ~v="0", ()) // nested ternary
|
||||
describe("ternary bindings", () => {
|
||||
testToExpression(
|
||||
// expression binding
|
||||
"f(a) = a > 5 ? 1 : 0; f(6)",
|
||||
"f = {|a| {(larger)(a, 5) ? (1) : (0)}}; (f)(6)",
|
||||
~v="1",
|
||||
(),
|
||||
)
|
||||
testToExpression(
|
||||
// when true binding
|
||||
"f(a) = a > 5 ? a : 0; f(6)",
|
||||
"f = {|a| {(larger)(a, 5) ? (a) : (0)}}; (f)(6)",
|
||||
~v="6",
|
||||
(),
|
||||
)
|
||||
testToExpression(
|
||||
// when false binding
|
||||
"f(a) = a < 5 ? 1 : a; f(6)",
|
||||
"f = {|a| {(smaller)(a, 5) ? (1) : (a)}}; (f)(6)",
|
||||
~v="6",
|
||||
(),
|
||||
)
|
||||
})
|
||||
describe(
|
||||
"ternary bindings",
|
||||
() => {
|
||||
testToExpression(
|
||||
// expression binding
|
||||
"f(a) = a > 5 ? 1 : 0; f(6)",
|
||||
"f = {|a| {(larger)(a, 5) ? (1) : (0)}}; (f)(6)",
|
||||
~v="1",
|
||||
(),
|
||||
)
|
||||
testToExpression(
|
||||
// when true binding
|
||||
"f(a) = a > 5 ? a : 0; f(6)",
|
||||
"f = {|a| {(larger)(a, 5) ? (a) : (0)}}; (f)(6)",
|
||||
~v="6",
|
||||
(),
|
||||
)
|
||||
testToExpression(
|
||||
// when false binding
|
||||
"f(a) = a < 5 ? 1 : a; f(6)",
|
||||
"f = {|a| {(smaller)(a, 5) ? (1) : (a)}}; (f)(6)",
|
||||
~v="6",
|
||||
(),
|
||||
)
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
describe("if then else", () => {
|
||||
|
|
|
@ -22,7 +22,7 @@ let expectEvalError = (code: string) =>
Expression.BackCompatible.evaluateString(code)
->Reducer_Value.toStringResult
->expect
->toMatch("Error\(")
->toMatch("Error\\(")

let testParseToBe = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))
let testDescriptionParseToBe = (desc, expr, answer) =>

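The substantive change in the hunk above is the doubled backslash: toMatch takes a regular-expression pattern, so matching a literal "(" requires the two-character sequence \( to reach the matcher, and inside a ReScript string literal that sequence is written "\\(". A small sketch (illustrative binding name):

let errorPrefixPattern = "Error\\(" // runtime value is Error\( , i.e. "Error" followed by an escaped parenthesis
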
@ -37,14 +37,16 @@ describe("eval", () => {
test("index", () => expectEvalToBe("r = {a: 1}; r.a", "Ok(1)"))
test("index", () => expectEvalToBe("r = {a: 1}; r.b", "Error(Record property not found: b)"))
testEvalError("{a: 1}.b") // invalid syntax
test("always the same property ending", () =>
expectEvalToBe(
`{
test(
"always the same property ending",
() =>
expectEvalToBe(
`{
a: 1,
b: 2,
}`,
"Ok({a: 1,b: 2})",
)
"Ok({a: 1,b: 2})",
),
)
})
|
||||
|
||||
|
|
|
@ -11,32 +11,34 @@ describe("ReducerProject Tutorial", () => {
|
|||
/*
|
||||
Case "Running a single source".
|
||||
*/
|
||||
test("run", () => {
|
||||
/* Let's start with running a single source and getting Result as well as the Bindings
|
||||
test(
|
||||
"run",
|
||||
() => {
|
||||
/* Let's start with running a single source and getting Result as well as the Bindings
|
||||
First you need to create a project. A project is a collection of sources.
|
||||
Project takes care of the dependencies between the sources, correct compilation and run order.
|
||||
You can run any source in the project. It will be compiled and run if it hasn't happened already; otherwise already existing results will be presented.
|
||||
The dependencies will be automatically compiled and run. So you don't need to worry about that in a multi source project.
|
||||
In summary you issue a run command on the whole project or on a specific source to ensure that there is a result for that source.
|
||||
*/
|
||||
let project = Project.createProject()
|
||||
/* Every source has a name. This is used for debugging, dependencies and error messages. */
|
||||
project->Project.setSource("main", "1 + 2")
|
||||
/* Let's run "main" source. */
|
||||
project->Project.run("main")
|
||||
/* Now you have a result for "main" source.
|
||||
let project = Project.createProject()
|
||||
/* Every source has a name. This is used for debugging, dependencies and error messages. */
|
||||
project->Project.setSource("main", "1 + 2")
|
||||
/* Let's run "main" source. */
|
||||
project->Project.run("main")
|
||||
/* Now you have a result for "main" source.
|
||||
Running one by one is necessary for UI to navigate among the sources and to see the results by source.
|
||||
And you're free to run any source you want.
|
||||
You will look at the results of this source and you don't want to run the others if not required.
|
||||
*/
|
||||
|
||||
/* However, you could also run the whole project.
|
||||
/* However, you could also run the whole project.
|
||||
If you have all the sources, you can always run the whole project.
|
||||
Dependencies and recompiling on demand will be taken care of by the project.
|
||||
*/
|
||||
project->Project.runAll
|
||||
project->Project.runAll
|
||||
|
||||
/* Either with run or runAll you executed the project.
|
||||
/* Either with run or runAll you executed the project.
|
||||
You can get the result of a specific source by calling getResult for that source.
|
||||
You can get the bindings of a specific source by calling getBindings for that source.
|
||||
If there is any runtime error, getResult will return the error.
|
||||
|
@ -44,49 +46,59 @@ Case "Running a single source".
|
|||
Note that getResult returns None if the source has not been run.
|
||||
Getting None means you have forgotten to run the source.
|
||||
*/
|
||||
let result = project->Project.getResult("main")
|
||||
let bindings = project->Project.getBindings("main")
|
||||
let result = project->Project.getResult("main")
|
||||
let bindings = project->Project.getBindings("main")
|
||||
|
||||
/* Let's display the result and bindings */
|
||||
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(3)", "{}")
|
||||
/* You've got 3 with empty bindings. */
|
||||
})
|
||||
/* Let's display the result and bindings */
|
||||
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(3)", "{}")
|
||||
/* You've got 3 with empty bindings. */
|
||||
},
|
||||
)
|
||||
|
||||
test("run summary", () => {
|
||||
let project = Project.createProject()
|
||||
project->Project.setSource("main", "1 + 2")
|
||||
project->Project.runAll
|
||||
let result = project->Project.getResult("main")
|
||||
let bindings = project->Project.getBindings("main")
|
||||
/* Now you have external bindings and external result. */
|
||||
(
|
||||
result->Reducer_Value.toStringResult,
|
||||
bindings->Reducer_T.IEvRecord->Reducer_Value.toString,
|
||||
)->expect == ("Ok(3)", "{}")
|
||||
})
|
||||
test(
|
||||
"run summary",
|
||||
() => {
|
||||
let project = Project.createProject()
|
||||
project->Project.setSource("main", "1 + 2")
|
||||
project->Project.runAll
|
||||
let result = project->Project.getResult("main")
|
||||
let bindings = project->Project.getBindings("main")
|
||||
/* Now you have external bindings and external result. */
|
||||
(
|
||||
result->Reducer_Value.toStringResult,
|
||||
bindings->Reducer_T.IEvRecord->Reducer_Value.toString,
|
||||
)->expect == ("Ok(3)", "{}")
|
||||
},
|
||||
)
|
||||
|
||||
test("run with an environment", () => {
|
||||
/* Running the source code like above allows you to set a custom environment */
|
||||
let project = Project.createProject()
|
||||
test(
|
||||
"run with an environment",
|
||||
() => {
|
||||
/* Running the source code like above allows you to set a custom environment */
|
||||
let project = Project.createProject()
|
||||
|
||||
/* Optional. Set your custom environment anytime before running */
|
||||
project->Project.setEnvironment(Reducer_Context.defaultEnvironment)
|
||||
/* Optional. Set your custom environment anytime before running */
|
||||
project->Project.setEnvironment(Reducer_Context.defaultEnvironment)
|
||||
|
||||
project->Project.setSource("main", "1 + 2")
|
||||
project->Project.runAll
|
||||
let result = project->Project.getResult("main")
|
||||
let _bindings = project->Project.getBindings("main")
|
||||
result->Reducer_Value.toStringResult->expect == "Ok(3)"
|
||||
})
|
||||
project->Project.setSource("main", "1 + 2")
|
||||
project->Project.runAll
|
||||
let result = project->Project.getResult("main")
|
||||
let _bindings = project->Project.getBindings("main")
|
||||
result->Reducer_Value.toStringResult->expect == "Ok(3)"
|
||||
},
|
||||
)
|
||||
|
||||
test("shortcut", () => {
|
||||
/* If you are running single source without includes and you don't need a custom environment, you can use the shortcut. */
|
||||
/* The examples above were meant to prepare you for the multi-source tutorial. */
|
||||
let (result, bindings) = Project.evaluate("1+2")
|
||||
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(3)", "{}")
|
||||
})
|
||||
test(
|
||||
"shortcut",
|
||||
() => {
|
||||
/* If you are running single source without includes and you don't need a custom environment, you can use the shortcut. */
|
||||
/* The examples above were meant to prepare you for the multi-source tutorial. */
|
||||
let (result, bindings) = Project.evaluate("1+2")
|
||||
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(3)", "{}")
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
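Condensed from the tutorial hunk above, the minimal single-source workflow looks like this (every call below appears verbatim in the diff; the shortcut variant skips the project object entirely):

let project = Project.createProject()
project->Project.setSource("main", "1 + 2")
project->Project.run("main")
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
// (result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord) == ("Ok(3)", "{}")

// or, without a project:
let (result, bindings) = Project.evaluate("1+2")
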
@ -10,95 +10,104 @@ describe("ReducerProject Tutorial", () => {
|
|||
describe("Multi source", () => {
|
||||
/*
|
||||
Case "Running multiple sources" */
|
||||
test("Chaining", () => {
|
||||
let project = Project.createProject()
|
||||
/* This time let's add 3 sources and chain them together */
|
||||
project->Project.setSource("source1", "x=1")
|
||||
test(
|
||||
"Chaining",
|
||||
() => {
|
||||
let project = Project.createProject()
|
||||
/* This time let's add 3 sources and chain them together */
|
||||
project->Project.setSource("source1", "x=1")
|
||||
|
||||
project->Project.setSource("source2", "y=x+1")
|
||||
/* To run, source2 depends on source1 */
|
||||
project->Project.setContinues("source2", ["source1"])
|
||||
project->Project.setSource("source2", "y=x+1")
|
||||
/* To run, source2 depends on source1 */
|
||||
project->Project.setContinues("source2", ["source1"])
|
||||
|
||||
project->Project.setSource("source3", "z=y+1")
|
||||
/* To run, source3 depends on source2 */
|
||||
project->Project.setContinues("source3", ["source2"])
|
||||
project->Project.setSource("source3", "z=y+1")
|
||||
/* To run, source3 depends on source2 */
|
||||
project->Project.setContinues("source3", ["source2"])
|
||||
|
||||
/* Now we can run the project */
|
||||
project->Project.runAll
|
||||
/* Now we can run the project */
|
||||
project->Project.runAll
|
||||
|
||||
/* And let's check the result and bindings of source3 */
|
||||
let result3 = project->Project.getResult("source3")
|
||||
let bindings3 = project->Project.getBindings("source3")
|
||||
/* And let's check the result and bindings of source3 */
|
||||
let result3 = project->Project.getResult("source3")
|
||||
let bindings3 = project->Project.getBindings("source3")
|
||||
|
||||
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(())", "{z: 3}")
|
||||
})
|
||||
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(())", "{z: 3}")
|
||||
},
|
||||
)
|
||||
|
||||
test("Depending", () => {
|
||||
/* Instead of chaining the sources, we could have a dependency tree */
|
||||
/* The point here is that any source can depend on multiple sources */
|
||||
let project = Project.createProject()
|
||||
test(
|
||||
"Depending",
|
||||
() => {
|
||||
/* Instead of chaining the sources, we could have a dependency tree */
|
||||
/* The point here is that any source can depend on multiple sources */
|
||||
let project = Project.createProject()
|
||||
|
||||
/* This time source1 and source2 are not depending on anything */
|
||||
project->Project.setSource("source1", "x=1")
|
||||
project->Project.setSource("source2", "y=2")
|
||||
/* This time source1 and source2 are not depending on anything */
|
||||
project->Project.setSource("source1", "x=1")
|
||||
project->Project.setSource("source2", "y=2")
|
||||
|
||||
project->Project.setSource("source3", "z=x+y")
|
||||
/* To run, source3 depends on source1 and source3 together */
|
||||
project->Project.setContinues("source3", ["source1", "source2"])
|
||||
project->Project.setSource("source3", "z=x+y")
|
||||
/* To run, source3 depends on source1 and source3 together */
|
||||
project->Project.setContinues("source3", ["source1", "source2"])
|
||||
|
||||
/* Now we can run the project */
|
||||
project->Project.runAll
|
||||
/* Now we can run the project */
|
||||
project->Project.runAll
|
||||
|
||||
/* And let's check the result and bindings of source3 */
|
||||
let result3 = project->Project.getResult("source3")
|
||||
let bindings3 = project->Project.getBindings("source3")
|
||||
/* And let's check the result and bindings of source3 */
|
||||
let result3 = project->Project.getResult("source3")
|
||||
let bindings3 = project->Project.getBindings("source3")
|
||||
|
||||
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(())", "{z: 3}")
|
||||
})
|
||||
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(())", "{z: 3}")
|
||||
},
|
||||
)
|
||||
|
||||
test("Intro to including", () => {
|
||||
/* Though it would not be practical for a storybook,
|
||||
test(
|
||||
"Intro to including",
|
||||
() => {
|
||||
/* Though it would not be practical for a storybook,
|
||||
let's write the same project above with includes.
|
||||
You will see that parsing includes is setting the dependencies the same way as before. */
|
||||
let project = Project.createProject()
|
||||
let project = Project.createProject()
|
||||
|
||||
/* This time source1 and source2 are not depending on anything */
|
||||
project->Project.setSource("source1", "x=1")
|
||||
project->Project.setSource("source2", "y=2")
|
||||
/* This time source1 and source2 are not depending on anything */
|
||||
project->Project.setSource("source1", "x=1")
|
||||
project->Project.setSource("source2", "y=2")
|
||||
|
||||
project->Project.setSource(
|
||||
"source3",
|
||||
`
|
||||
project->Project.setSource(
|
||||
"source3",
|
||||
`
|
||||
#include "source1"
|
||||
#include "source2"
|
||||
z=x+y`,
|
||||
)
|
||||
/* We need to parse the includes to set the dependencies */
|
||||
project->Project.parseIncludes("source3")
|
||||
)
|
||||
/* We need to parse the includes to set the dependencies */
|
||||
project->Project.parseIncludes("source3")
|
||||
|
||||
/* Now we can run the project */
|
||||
project->Project.runAll
|
||||
/* Now we can run the project */
|
||||
project->Project.runAll
|
||||
|
||||
/* And let's check the result and bindings of source3
|
||||
/* And let's check the result and bindings of source3
|
||||
This time you are getting all the variables because we are including the other sources
|
||||
Behind the scenes parseIncludes is setting the dependencies */
|
||||
let result3 = project->Project.getResult("source3")
|
||||
let bindings3 = project->Project.getBindings("source3")
|
||||
let result3 = project->Project.getResult("source3")
|
||||
let bindings3 = project->Project.getBindings("source3")
|
||||
|
||||
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(())", "{z: 3}")
|
||||
/*
|
||||
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
|
||||
("Ok(())", "{z: 3}")
|
||||
/*
|
||||
Doing it like this is too verbose for a storybook
|
||||
But I hope you have seen the relation of setContinues and parseIncludes */
|
||||
/*
|
||||
/*
|
||||
Dealing with includes needs more.
|
||||
- There are parse errors
|
||||
- There are cyclic includes
|
||||
- And the depended-upon source1 and source2 are not already in the project
|
||||
- If you knew the includes beforehand, there would be no point in the include directive.
|
||||
More on those on the next section. */
|
||||
})
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
|
|
|
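The three multi-source variants above (setContinues chaining, a dependency fan-in, and #include) all reduce to the same wiring; condensed, using only calls taken from this hunk:

let project = Project.createProject()
project->Project.setSource("source1", "x=1")
project->Project.setSource("source2", "y=x+1")
project->Project.setContinues("source2", ["source1"]) // source2 needs source1's bindings
project->Project.setSource("source3", "z=y+1")
project->Project.setContinues("source3", ["source2"])
project->Project.runAll
// project->Project.getBindings("source3")->Reducer_Value.toStringRecord == "{z: 3}"
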
@ -24,93 +24,106 @@ Here we will finally proceed to a real life scenario. */
|
|||
)
|
||||
/* We need to parse includes after changing the source */
|
||||
project->Project.parseIncludes("main")
|
||||
test("getDependencies", () => {
|
||||
/* Parse includes has set the dependencies */
|
||||
project->Project.getDependencies("main")->expect == ["common"]
|
||||
/* If there were no includes than there would be no dependencies */
|
||||
/* However if there was a syntax error at includes then would be no dependencies also */
|
||||
/* Therefore looking at dependencies is not the right way to load includes */
|
||||
/* getDependencies does not distinguish between setContinues or parseIncludes */
|
||||
})
|
||||
test("getIncludes", () => {
|
||||
/* Parse includes has set the includes */
|
||||
switch project->Project.getIncludes("main") {
|
||||
| Ok(includes) => includes->expect == ["common"]
|
||||
| Error(err) => err->SqError.toString->fail
|
||||
}
|
||||
/* If the includes cannot be parsed then you get a syntax error.
|
||||
test(
|
||||
"getDependencies",
|
||||
() => {
|
||||
/* Parse includes has set the dependencies */
|
||||
project->Project.getDependencies("main")->expect == ["common"]
|
||||
/* If there were no includes than there would be no dependencies */
|
||||
/* However if there was a syntax error at includes then would be no dependencies also */
|
||||
/* Therefore looking at dependencies is not the right way to load includes */
|
||||
/* getDependencies does not distinguish between setContinues or parseIncludes */
|
||||
},
|
||||
)
|
||||
test(
|
||||
"getIncludes",
|
||||
() => {
|
||||
/* Parse includes has set the includes */
|
||||
switch project->Project.getIncludes("main") {
|
||||
| Ok(includes) => includes->expect == ["common"]
|
||||
| Error(err) => err->SqError.toString->fail
|
||||
}
|
||||
/* If the includes cannot be parsed then you get a syntax error.
|
||||
Otherwise you get the includes.
|
||||
If there is no syntax error then you can load that file and use setSource to add it to the project.
|
||||
And so on recursively... */
|
||||
})
|
||||
test("getDependents", () => {
|
||||
/* For any reason, you are able to query what other sources
|
||||
},
|
||||
)
|
||||
test(
|
||||
"getDependents",
|
||||
() => {
|
||||
/* For any reason, you are able to query what other sources
|
||||
include or depend on the current source.
|
||||
But you don't need to use this to execute the projects.
|
||||
It is provided for completeness of information. */
|
||||
project->Project.getDependents("main")->expect == []
|
||||
/* Nothing is depending on or including main */
|
||||
})
|
||||
project->Project.getDependents("main")->expect == []
|
||||
/* Nothing is depending on or including main */
|
||||
},
|
||||
)
|
||||
|
||||
describe("Real Like", () => {
|
||||
/* Now let's look at recursive and possibly cyclic includes */
|
||||
/* There is no function provided to load the include files.
|
||||
describe(
|
||||
"Real Like",
|
||||
() => {
|
||||
/* Now let's look at recursive and possibly cyclic includes */
|
||||
/* There is no function provided to load the include files.
|
||||
Because we have no idea whether it will be an ordinary function or one that uses promises.
Therefore one has to write a function to load sources recursively and call setSource
|
||||
while checking for dependencies */
|
||||
|
||||
/* Let's make a dummy loader */
|
||||
let loadSource = (sourceName: string) =>
|
||||
switch sourceName {
|
||||
| "source1" => "x=1"
|
||||
| "source2" => `
|
||||
/* Let's make a dummy loader */
|
||||
let loadSource = (sourceName: string) =>
|
||||
switch sourceName {
|
||||
| "source1" => "x=1"
|
||||
| "source2" => `
|
||||
#include "source1"
|
||||
y=2`
|
||||
| "source3" => `
|
||||
| "source3" => `
|
||||
#include "source2"
|
||||
z=3`
|
||||
| _ => `source ${sourceName} not found`->Js.Exn.raiseError
|
||||
}
|
||||
| _ => `source ${sourceName} not found`->Js.Exn.raiseError
|
||||
}
|
||||
|
||||
/* let's recursively load the sources */
|
||||
let rec loadIncludesRecursively = (project, sourceName, visited) => {
|
||||
if visited->Js.Array2.includes(sourceName) {
|
||||
/* Oh we have already visited this source. There is an include cycle */
|
||||
"Cyclic include ${sourceName}"->Js.Exn.raiseError
|
||||
} else {
|
||||
let newVisited = Js.Array2.copy(visited)
|
||||
let _ = newVisited->Js.Array2.push(sourceName)
|
||||
/* Let's parse the includes and dive into them */
|
||||
Project.parseIncludes(project, sourceName)
|
||||
let rIncludes = project->Project.getIncludes(sourceName)
|
||||
switch rIncludes {
|
||||
/* Maybe there is an include syntax error */
|
||||
| Error(err) => err->SqError.toString->Js.Exn.raiseError
|
||||
/* let's recursively load the sources */
|
||||
let rec loadIncludesRecursively = (project, sourceName, visited) => {
|
||||
if visited->Js.Array2.includes(sourceName) {
|
||||
/* Oh we have already visited this source. There is an include cycle */
|
||||
"Cyclic include ${sourceName}"->Js.Exn.raiseError
|
||||
} else {
|
||||
let newVisited = Js.Array2.copy(visited)
|
||||
let _ = newVisited->Js.Array2.push(sourceName)
|
||||
/* Let's parse the includes and dive into them */
|
||||
Project.parseIncludes(project, sourceName)
|
||||
let rIncludes = project->Project.getIncludes(sourceName)
|
||||
switch rIncludes {
|
||||
/* Maybe there is an include syntax error */
|
||||
| Error(err) => err->SqError.toString->Js.Exn.raiseError
|
||||
|
||||
| Ok(includes) =>
|
||||
includes->Belt.Array.forEach(newIncludeName => {
|
||||
/* We have got one of the new includes.
|
||||
Let's load it and add it to the project */
|
||||
let newSource = loadSource(newIncludeName)
|
||||
project->Project.setSource(newIncludeName, newSource)
|
||||
/* The new source is loaded and added to the project. */
|
||||
/* Of course the new source might have includes too. */
|
||||
/* Let's recursively load them */
|
||||
project->loadIncludesRecursively(newIncludeName, newVisited)
|
||||
})
|
||||
| Ok(includes) =>
|
||||
includes->Belt.Array.forEach(
|
||||
newIncludeName => {
|
||||
/* We have got one of the new includes.
|
||||
Let's load it and add it to the project */
|
||||
let newSource = loadSource(newIncludeName)
|
||||
project->Project.setSource(newIncludeName, newSource)
|
||||
/* The new source is loaded and added to the project. */
|
||||
/* Of course the new source might have includes too. */
|
||||
/* Let's recursively load them */
|
||||
project->loadIncludesRecursively(newIncludeName, newVisited)
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/* As we have a fake source loader and a recursive include handler,
|
||||
we can now set up a real project */
|
||||
/* As we have a fake source loader and a recursive include handler,
|
||||
we can now set up a real project */
|
||||
|
||||
/* * Here starts our real life project! * */
|
||||
/* * Here starts our real life project! * */
|
||||
|
||||
let project = Project.createProject()
|
||||
let project = Project.createProject()
|
||||
|
||||
project->Project.setSource(
|
||||
"main",
|
||||
`
|
||||
project->Project.setSource(
|
||||
"main",
|
||||
`
|
||||
#include "source1"
|
||||
#include "source2"
|
||||
#include "source3"
|
||||
|
@ -118,37 +131,43 @@ Here we will finally proceed to a real life scenario. */
b = doubleX
a
`,
)
/* Setting source requires parsing and loading the includes recursively */
project->loadIncludesRecursively("main", []) // Not visited yet
)
/* Setting source requires parsing and loading the includes recursively */
project->loadIncludesRecursively("main", []) // Not visited yet

/* Let's salt it more. Let's have another source in the project which also has includes */
/* doubleX includes source1 which is eventually included by main as well */
project->Project.setSource(
"doubleX",
`
/* Let's salt it more. Let's have another source in the project which also has includes */
/* doubleX includes source1 which is eventually included by main as well */
project->Project.setSource(
"doubleX",
`
#include "source1"
doubleX = x * 2
`,
)
project->loadIncludesRecursively("doubleX", [])
/* Remember, any time you set a source, you need to load includes recursively */
)
project->loadIncludesRecursively("doubleX", [])
/* Remember, any time you set a source, you need to load includes recursively */

/* As doubleX is not included by main, it is not loaded recursively.
So we link it to the project as a dependency */
project->Project.setContinues("main", ["doubleX"])
/* As doubleX is not included by main, it is not loaded recursively.
So we link it to the project as a dependency */
project->Project.setContinues("main", ["doubleX"])

/* Let's run the project */
project->Project.runAll
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
/* And see the result and bindings. */
test("recursive includes", () => {
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
("Ok(6)", "{a: 6,b: 2}")
/* Everything as expected */
})
})
/* Let's run the project */
project->Project.runAll
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
/* And see the result and bindings. */
test(
"recursive includes",
() => {
(
result->Reducer_Value.toStringResult,
bindings->Reducer_Value.toStringRecord,
)->expect == ("Ok(6)", "{a: 6,b: 2}")
/* Everything as expected */
},
)
},
)
})
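/* A condensed sketch of the include workflow exercised above, assuming the same fake
   loadSource / loadIncludesRecursively helpers defined earlier in this test; the
   "entry" source name is hypothetical, and only Project calls already shown in this
   diff are used. */
let sketchProject = Project.createProject()
sketchProject->Project.setSource("entry", `#include "source1"
y = x + 1
y`)
/* setting a source never pulls in its includes automatically... */
sketchProject->loadIncludesRecursively("entry", [])
/* ...and sources that are not included anywhere must be linked explicitly */
sketchProject->Project.setContinues("entry", ["doubleX"])
sketchProject->Project.runAll
let sketchResult = sketchProject->Project.getResult("entry")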
describe("Includes myFile as myVariable", () => {
|
||||
|
@ -163,14 +182,20 @@ Here we will finally proceed to a real life scenario. */
|
|||
`,
|
||||
)
|
||||
Project.parseIncludes(project, "main")
|
||||
test("getDependencies", () => {
|
||||
Project.getDependencies(project, "main")->expect == ["common"]
|
||||
})
|
||||
test("getIncludes", () => {
|
||||
switch Project.getIncludes(project, "main") {
|
||||
| Ok(includes) => includes->expect == ["common"]
|
||||
| Error(err) => err->SqError.toString->fail
|
||||
}
|
||||
})
|
||||
test(
|
||||
"getDependencies",
|
||||
() => {
|
||||
Project.getDependencies(project, "main")->expect == ["common"]
|
||||
},
|
||||
)
|
||||
test(
|
||||
"getIncludes",
|
||||
() => {
|
||||
switch Project.getIncludes(project, "main") {
|
||||
| Ok(includes) => includes->expect == ["common"]
|
||||
| Error(err) => err->SqError.toString->fail
|
||||
}
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -30,8 +30,9 @@ describe("ReducerProject Tutorial", () => {
})

test("userResults", () => {
let userResultsAsString = Belt.Array.map(userResults, aResult =>
aResult->Reducer_Value.toStringResult
let userResultsAsString = Belt.Array.map(
userResults,
aResult => aResult->Reducer_Value.toStringResult,
)
userResultsAsString->expect == ["Ok(2)", "Ok(4)", "Ok(6)", "Ok(8)", "Ok(10)"]
})
@ -99,15 +99,19 @@ describe("FunctionRegistry Library", () => {
})

describe("Fn auto-testing", () => {
testAll("tests of validity", examples, r => {
expectEvalToBeOk(r)
})
testAll(
"tests of validity",
examples,
r => {
expectEvalToBeOk(r)
},
)

testAll(
"tests of type",
E.A.to_list(
FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(((fn, _)) =>
E.O.isSome(fn.output)
FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(
((fn, _)) => E.O.isSome(fn.output),
),
),
((fn, example)) => {
@ -45,12 +45,12 @@ let toExtDist: option<DistributionTypes.genericDist> => DistributionTypes.generi
let unpackFloat = x => x->toFloat->toExtFloat
let unpackDist = y => y->toDist->toExtDist

let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean, stdev}))
let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha, beta}))
let mkExponential = rate => DistributionTypes.Symbolic(#Exponential({rate: rate}))
let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low: low, high: high}))
let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local: local, scale: scale}))
let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low, high}))
let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local, scale}))
let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu, sigma}))
let mkDelta = x => DistributionTypes.Symbolic(#Float(x))

let normalMake = SymbolicDist.Normal.make
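/* Most of the mechanical churn in this commit is record-field punning: when the field
   name and the variable bound to it coincide, ReScript lets `{mean: mean, stdev: stdev}`
   be written as `{mean, stdev}`, and the 10.x formatter emits the short form. A tiny
   illustration with a hypothetical record (not part of the diff): */
type sketchPoint = {x: float, y: float}
let x = 1.0
let y = 2.0
let longForm = {x: x, y: y}
let punned = {x, y} // same value, shorter to read and write
/* Single-field literals such as {rate: rate} above appear to be kept in the explicit
   form, since a bare {rate} would read as a braced expression rather than a record. */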
@ -39,7 +39,7 @@
],
"author": "Quantified Uncertainty Research Institute",
"dependencies": {
"@rescript/std": "^9.1.4",
"@rescript/std": "^10.0.0",
"@stdlib/stats": "^0.0.13",
"jstat": "^1.9.5",
"lodash": "^4.17.21",

@ -58,7 +58,7 @@
"peggy": "^2.0.1",
"prettier": "^2.7.1",
"reanalyze": "^2.23.0",
"rescript": "^9.1.4",
"rescript": "^10.0.0",
"rescript-fast-check": "^1.1.1",
"rescript-js-map": "^1.1.0",
"ts-jest": "^29.0.3",
@ -141,6 +141,7 @@ let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
|
|||
Js.log2("Console log requested: ", dist)
|
||||
Dist(dist)
|
||||
}
|
||||
|
||||
| #ToDist(Normalize) => dist->GenericDist.normalize->Dist
|
||||
| #ToScore(LogScore(answer, prior)) =>
|
||||
GenericDist.Score.logScore(~estimate=dist, ~answer, ~prior, ~env)
|
||||
|
|
|
@ -99,6 +99,7 @@ let toFloatOperation = (
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
| (#Stdev | #Variance | #Mode) as op =>
|
||||
switch t {
|
||||
| SampleSet(s) =>
|
||||
|
@ -129,7 +130,7 @@ let toPointSet = (
|
|||
SampleSetDist.toPointSetDist(
|
||||
~samples=r,
|
||||
~samplingInputs={
|
||||
sampleCount: sampleCount,
|
||||
sampleCount,
|
||||
outputXYPoints: xyPointLength,
|
||||
pointSetDistLength: xyPointLength,
|
||||
kernelWidth: None,
|
||||
|
@ -427,6 +428,7 @@ module AlgebraicCombination = {
|
|||
~toSampleSetFn,
|
||||
)
|
||||
}
|
||||
|
||||
| (None, AsMonteCarlo) =>
|
||||
StrategyCallOnValidatedInputs.monteCarlo(toSampleSetFn, arithmeticOperation, t1, t2)
|
||||
| (None, AsSymbolic) =>
|
||||
|
@ -443,6 +445,7 @@ module AlgebraicCombination = {
|
|||
)}`
|
||||
Error(RequestedStrategyInvalidError(errString))
|
||||
}
|
||||
|
||||
| Some(convOp) => StrategyCallOnValidatedInputs.convolution(toPointSetFn, convOp, t1, t2)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -69,7 +69,7 @@ let toDiscretePointMassesFromTriangulars = (
|
|||
()
|
||||
}
|
||||
|
||||
{n: n - 2, masses: masses, means: means, variances: variances}
|
||||
{n: n - 2, masses, means, variances}
|
||||
} else {
|
||||
for i in 1 to n - 2 {
|
||||
// area of triangle = width * height / 2
|
||||
|
@ -91,7 +91,7 @@ let toDiscretePointMassesFromTriangulars = (
|
|||
) |> ignore
|
||||
()
|
||||
}
|
||||
{n: n - 2, masses: masses, means: means, variances: variances}
|
||||
{n: n - 2, masses, means, variances}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -184,7 +184,7 @@ let toDiscretePointMassesFromDiscrete = (s: PointSetTypes.xyShape): pointMassesW
|
|||
let means: array<float> = Belt.Array.makeBy(n, i => xs[i])
|
||||
let variances: array<float> = Belt.Array.makeBy(n, _ => 0.0)
|
||||
|
||||
{n: n, masses: masses, means: means, variances: variances}
|
||||
{n, masses, means, variances}
|
||||
}
|
||||
|
||||
type argumentPosition = First | Second
|
||||
|
|
|
@ -45,16 +45,16 @@ module Analysis = {
|
|||
let getShape = (t: t) => t.xyShape
|
||||
let interpolation = (t: t) => t.interpolation
|
||||
let make = (~interpolation=#Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
|
||||
xyShape: xyShape,
|
||||
interpolation: interpolation,
|
||||
integralSumCache: integralSumCache,
|
||||
integralCache: integralCache,
|
||||
xyShape,
|
||||
interpolation,
|
||||
integralSumCache,
|
||||
integralCache,
|
||||
}
|
||||
let shapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): t => {
|
||||
xyShape: fn(xyShape),
|
||||
interpolation: interpolation,
|
||||
integralSumCache: integralSumCache,
|
||||
integralCache: integralCache,
|
||||
interpolation,
|
||||
integralSumCache,
|
||||
integralCache,
|
||||
}
|
||||
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
|
||||
let oShapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): option<
|
||||
|
@ -135,10 +135,10 @@ let shapeFn = (fn, t: t) => t |> getShape |> fn
|
|||
|
||||
let updateIntegralSumCache = (integralSumCache, t: t): t => {
|
||||
...t,
|
||||
integralSumCache: integralSumCache,
|
||||
integralSumCache,
|
||||
}
|
||||
|
||||
let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache: integralCache}
|
||||
let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache}
|
||||
|
||||
let sum = (
|
||||
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
|
||||
|
|
|
@ -4,14 +4,14 @@ open Distributions
|
|||
type t = PointSetTypes.discreteShape
|
||||
|
||||
let make = (~integralSumCache=None, ~integralCache=None, xyShape): t => {
|
||||
xyShape: xyShape,
|
||||
integralSumCache: integralSumCache,
|
||||
integralCache: integralCache,
|
||||
xyShape,
|
||||
integralSumCache,
|
||||
integralCache,
|
||||
}
|
||||
let shapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): t => {
|
||||
xyShape: fn(xyShape),
|
||||
integralSumCache: integralSumCache,
|
||||
integralCache: integralCache,
|
||||
integralSumCache,
|
||||
integralCache,
|
||||
}
|
||||
let getShape = (t: t) => t.xyShape
|
||||
let oShapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): option<t> =>
|
||||
|
@ -63,12 +63,12 @@ let reduce = (
|
|||
|
||||
let updateIntegralSumCache = (integralSumCache, t: t): t => {
|
||||
...t,
|
||||
integralSumCache: integralSumCache,
|
||||
integralSumCache,
|
||||
}
|
||||
|
||||
let updateIntegralCache = (integralCache, t: t): t => {
|
||||
...t,
|
||||
integralCache: integralCache,
|
||||
integralCache,
|
||||
}
|
||||
|
||||
/* This multiplies all of the data points together and creates a new discrete distribution from the results.
|
||||
|
|
|
@ -4,10 +4,10 @@ open Distributions
|
|||
|
||||
type t = PointSetTypes.mixedShape
|
||||
let make = (~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete): t => {
|
||||
continuous: continuous,
|
||||
discrete: discrete,
|
||||
integralSumCache: integralSumCache,
|
||||
integralCache: integralCache,
|
||||
continuous,
|
||||
discrete,
|
||||
integralSumCache,
|
||||
integralCache,
|
||||
}
|
||||
|
||||
let totalLength = (t: t): int => {
|
||||
|
@ -35,7 +35,7 @@ let toDiscrete = ({discrete}: t) => Some(discrete)
|
|||
|
||||
let updateIntegralCache = (integralCache, t: t): t => {
|
||||
...t,
|
||||
integralCache: integralCache,
|
||||
integralCache,
|
||||
}
|
||||
|
||||
let combinePointwise = (
|
||||
|
|
|
@ -79,8 +79,8 @@ module MixedPoint = {
|
|||
type t = mixedPoint
|
||||
let toContinuousValue = (t: t) => t.continuous
|
||||
let toDiscreteValue = (t: t) => t.discrete
|
||||
let makeContinuous = (continuous: float): t => {continuous: continuous, discrete: 0.0}
|
||||
let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete: discrete}
|
||||
let makeContinuous = (continuous: float): t => {continuous, discrete: 0.0}
|
||||
let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete}
|
||||
|
||||
let fmap = (fn: float => float, t: t) => {
|
||||
continuous: fn(t.continuous),
|
||||
|
|
|
@ -7,7 +7,7 @@ module Normal = {
|
|||
type t = normal
|
||||
let make = (mean: float, stdev: float): result<symbolicDist, string> =>
|
||||
stdev > 0.0
|
||||
? Ok(#Normal({mean: mean, stdev: stdev}))
|
||||
? Ok(#Normal({mean, stdev}))
|
||||
: Error("Standard deviation of normal distribution must be larger than 0")
|
||||
let pdf = (x, t: t) => Jstat.Normal.pdf(x, t.mean, t.stdev)
|
||||
let cdf = (x, t: t) => Jstat.Normal.cdf(x, t.mean, t.stdev)
|
||||
|
@ -15,7 +15,7 @@ module Normal = {
|
|||
let from90PercentCI = (low, high) => {
|
||||
let mean = E.A.Floats.mean([low, high])
|
||||
let stdev = (high -. low) /. (2. *. normal95confidencePoint)
|
||||
#Normal({mean: mean, stdev: stdev})
|
||||
#Normal({mean, stdev})
|
||||
}
|
||||
let inv = (p, t: t) => Jstat.Normal.inv(p, t.mean, t.stdev)
|
||||
let sample = (t: t) => Jstat.Normal.sample(t.mean, t.stdev)
|
||||
|
@ -25,12 +25,12 @@ module Normal = {
|
|||
let add = (n1: t, n2: t) => {
|
||||
let mean = n1.mean +. n2.mean
|
||||
let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
|
||||
#Normal({mean: mean, stdev: stdev})
|
||||
#Normal({mean, stdev})
|
||||
}
|
||||
let subtract = (n1: t, n2: t) => {
|
||||
let mean = n1.mean -. n2.mean
|
||||
let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
|
||||
#Normal({mean: mean, stdev: stdev})
|
||||
#Normal({mean, stdev})
|
||||
}
|
||||
|
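/* A worked check of the closed forms above: for independent normals the means add and
   the variances add, so normal(mean=5, stdev=1) + normal(mean=20, stdev=2) has
   mean = 5 + 20 = 25 and stdev = sqrt(1^2 + 2^2) = sqrt(5) ≈ 2.236; i.e.
   add({mean: 5.0, stdev: 1.0}, {mean: 20.0, stdev: 2.0}) evaluates to
   #Normal({mean: 25.0, stdev: 2.236...}). The numbers here are illustrative only. */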
||||
// TODO: is this useful here at all? would need the integral as well ...
|
||||
|
@ -38,7 +38,7 @@ module Normal = {
|
|||
let mean =
|
||||
(n1.mean *. n2.stdev ** 2. +. n2.mean *. n1.stdev ** 2.) /. (n1.stdev ** 2. +. n2.stdev ** 2.)
|
||||
let stdev = 1. /. (1. /. n1.stdev ** 2. +. 1. /. n2.stdev ** 2.)
|
||||
#Normal({mean: mean, stdev: stdev})
|
||||
#Normal({mean, stdev})
|
||||
}
|
||||
|
||||
let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
|
||||
|
@ -88,7 +88,7 @@ module Cauchy = {
|
|||
type t = cauchy
|
||||
let make = (local, scale): result<symbolicDist, string> =>
|
||||
scale > 0.0
|
||||
? Ok(#Cauchy({local: local, scale: scale}))
|
||||
? Ok(#Cauchy({local, scale}))
|
||||
: Error("Cauchy distribution scale parameter must larger than 0.")
|
||||
let pdf = (x, t: t) => Jstat.Cauchy.pdf(x, t.local, t.scale)
|
||||
let cdf = (x, t: t) => Jstat.Cauchy.cdf(x, t.local, t.scale)
|
||||
|
@ -102,7 +102,7 @@ module Triangular = {
|
|||
type t = triangular
|
||||
let make = (low, medium, high): result<symbolicDist, string> =>
|
||||
low < medium && medium < high
|
||||
? Ok(#Triangular({low: low, medium: medium, high: high}))
|
||||
? Ok(#Triangular({low, medium, high}))
|
||||
: Error("Triangular values must be increasing order.")
|
||||
let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
|
||||
let cdf = (x, t: t) => Jstat.Triangular.cdf(x, t.low, t.high, t.medium)
|
||||
|
@ -116,7 +116,7 @@ module Beta = {
|
|||
type t = beta
|
||||
let make = (alpha, beta) =>
|
||||
alpha > 0.0 && beta > 0.0
|
||||
? Ok(#Beta({alpha: alpha, beta: beta}))
|
||||
? Ok(#Beta({alpha, beta}))
|
||||
: Error("Beta distribution parameters must be positive")
|
||||
let pdf = (x, t: t) => Jstat.Beta.pdf(x, t.alpha, t.beta)
|
||||
let cdf = (x, t: t) => Jstat.Beta.cdf(x, t.alpha, t.beta)
|
||||
|
@ -150,7 +150,7 @@ module Lognormal = {
|
|||
type t = lognormal
|
||||
let make = (mu, sigma) =>
|
||||
sigma > 0.0
|
||||
? Ok(#Lognormal({mu: mu, sigma: sigma}))
|
||||
? Ok(#Lognormal({mu, sigma}))
|
||||
: Error("Lognormal standard deviation must be larger than 0")
|
||||
let pdf = (x, t: t) => Jstat.Lognormal.pdf(x, t.mu, t.sigma)
|
||||
let cdf = (x, t: t) => Jstat.Lognormal.cdf(x, t.mu, t.sigma)
|
||||
|
@ -164,7 +164,7 @@ module Lognormal = {
|
|||
let logHigh = Js.Math.log(high)
|
||||
let mu = E.A.Floats.mean([logLow, logHigh])
|
||||
let sigma = (logHigh -. logLow) /. (2.0 *. normal95confidencePoint)
|
||||
#Lognormal({mu: mu, sigma: sigma})
|
||||
#Lognormal({mu, sigma})
|
||||
}
|
||||
let fromMeanAndStdev = (mean, stdev) => {
|
||||
// https://math.stackexchange.com/questions/2501783/parameters-of-a-lognormal-distribution
|
||||
|
@ -174,7 +174,7 @@ module Lognormal = {
|
|||
let meanSquared = mean ** 2.
|
||||
let mu = 2. *. Js.Math.log(mean) -. 0.5 *. Js.Math.log(variance +. meanSquared)
|
||||
let sigma = Js.Math.sqrt(Js.Math.log(variance /. meanSquared +. 1.))
|
||||
Ok(#Lognormal({mu: mu, sigma: sigma}))
|
||||
Ok(#Lognormal({mu, sigma}))
|
||||
} else {
|
||||
Error("Lognormal standard deviation must be larger than 0")
|
||||
}
|
||||
|
@ -184,14 +184,14 @@ module Lognormal = {
|
|||
// https://wikiless.org/wiki/Log-normal_distribution?lang=en#Multiplication_and_division_of_independent,_log-normal_random_variables
|
||||
let mu = l1.mu +. l2.mu
|
||||
let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
|
||||
#Lognormal({mu: mu, sigma: sigma})
|
||||
#Lognormal({mu, sigma})
|
||||
}
|
||||
let divide = (l1, l2) => {
|
||||
let mu = l1.mu -. l2.mu
|
||||
// We assume the numerator and denominator of the ratio have covariance zero.
|
||||
// See here https://stats.stackexchange.com/questions/21735/what-are-the-mean-and-variance-of-the-ratio-of-two-lognormal-variables for details
|
||||
let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
|
||||
#Lognormal({mu: mu, sigma: sigma})
|
||||
#Lognormal({mu, sigma})
|
||||
}
|
||||
let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
|
||||
switch operation {
|
||||
|
@ -220,7 +220,7 @@ module Lognormal = {
|
|||
module Uniform = {
|
||||
type t = uniform
|
||||
let make = (low, high) =>
|
||||
high > low ? Ok(#Uniform({low: low, high: high})) : Error("High must be larger than low")
|
||||
high > low ? Ok(#Uniform({low, high})) : Error("High must be larger than low")
|
||||
|
||||
let pdf = (x, t: t) => Jstat.Uniform.pdf(x, t.low, t.high)
|
||||
let cdf = (x, t: t) => Jstat.Uniform.cdf(x, t.low, t.high)
|
||||
|
@ -239,9 +239,7 @@ module Uniform = {
|
|||
module Logistic = {
|
||||
type t = logistic
|
||||
let make = (location, scale) =>
|
||||
scale > 0.0
|
||||
? Ok(#Logistic({location: location, scale: scale}))
|
||||
: Error("Scale must be positive")
|
||||
scale > 0.0 ? Ok(#Logistic({location, scale})) : Error("Scale must be positive")
|
||||
|
||||
let pdf = (x, t: t) => Stdlib.Logistic.pdf(x, t.location, t.scale)
|
||||
let cdf = (x, t: t) => Stdlib.Logistic.cdf(x, t.location, t.scale)
|
||||
|
@ -285,7 +283,7 @@ module Gamma = {
|
|||
let make = (shape: float, scale: float) => {
|
||||
if shape > 0. {
|
||||
if scale > 0. {
|
||||
Ok(#Gamma({shape: shape, scale: scale}))
|
||||
Ok(#Gamma({shape, scale}))
|
||||
} else {
|
||||
Error("scale must be larger than 0")
|
||||
}
|
||||
|
@ -543,6 +541,6 @@ module T = {
|
|||
| _ =>
|
||||
let xs = interpolateXs(~xSelection, d, sampleCount)
|
||||
let ys = xs |> E.A.fmap(x => pdf(x, d))
|
||||
Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs: xs, ys: ys}))
|
||||
Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs, ys}))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,7 +23,7 @@ let makeFn = (
|
|||
name: string,
|
||||
inputs: array<frType>,
|
||||
fn: array<Reducer_T.value> => result<Reducer_T.value, errorMessage>,
|
||||
) => makeFnMany(name, [{inputs: inputs, fn: fn}])
|
||||
) => makeFnMany(name, [{inputs, fn}])
|
||||
|
||||
let library = [
|
||||
Make.ff2f(~name="add", ~fn=(x, y) => x +. y, ()), // infix + (see Reducer/Reducer_Peggy/helpers.ts)
|
||||
|
@ -62,6 +62,7 @@ let library = [
|
|||
let answer = Js.String2.concat(a, b)
|
||||
answer->Reducer_T.IEvString->Ok
|
||||
}
|
||||
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
}),
|
||||
|
@ -72,6 +73,7 @@ let library = [
|
|||
let _ = Js.Array2.pushMany(a, b)
|
||||
a->Reducer_T.IEvArray->Ok
|
||||
}
|
||||
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
}),
|
||||
|
@ -81,6 +83,7 @@ let library = [
|
|||
Js.log(value->Reducer_Value.toString)
|
||||
value->Ok
|
||||
}
|
||||
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
}),
|
||||
|
@ -90,6 +93,7 @@ let library = [
|
|||
Js.log(`${label}: ${value->Reducer_Value.toString}`)
|
||||
value->Ok
|
||||
}
|
||||
|
||||
| _ => Error(impossibleError)
|
||||
}
|
||||
}),
|
||||
|
|
|
@ -135,11 +135,13 @@ module Integration = {
|
|||
let wrappedResult = result->Reducer_T.IEvNumber->Ok
|
||||
wrappedResult
|
||||
}
|
||||
|
||||
| (Error(b), _) => Error(b)
|
||||
| (_, Error(b)) => Error(b)
|
||||
}
|
||||
resultWithOuterPoints
|
||||
}
|
||||
|
||||
| Error(b) =>
|
||||
("Integration error 2 in Danger.integrate. It's possible that your function doesn't return a number, try definining auxiliaryFunction(x) = mean(yourFunction(x)) and integrate auxiliaryFunction instead." ++
|
||||
"Original error: " ++
|
||||
|
@ -362,6 +364,7 @@ module DiminishingReturns = {
|
|||
result[indexOfBiggestDMR] = value
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
| Error(b) => Error(b)
|
||||
}
|
||||
|
||||
|
@ -371,10 +374,12 @@ module DiminishingReturns = {
|
|||
}
|
||||
Ok(newAcc)
|
||||
}
|
||||
|
||||
| Error(b) => Error(b)
|
||||
}
|
||||
newAccWrapped
|
||||
}
|
||||
|
||||
| Error(b) => Error(b)
|
||||
}
|
||||
})
|
||||
|
@ -427,10 +432,12 @@ module DiminishingReturns = {
|
|||
)
|
||||
result
|
||||
}
|
||||
|
||||
| Error(b) => Error(b)
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
| _ =>
|
||||
"Error in Danger.diminishingMarginalReturnsForTwoFunctions"
|
||||
->SqError.Message.REOther
|
||||
|
|
|
@ -20,6 +20,7 @@ module Declaration = {
|
|||
->E.A.R.firstErrorOrOpen
|
||||
->E.R2.fmap(args => Reducer_T.IEvDeclaration(Declaration.make(lambda, args)))
|
||||
}
|
||||
|
||||
| Error(r) => Error(r)
|
||||
| Ok(_) => Error(impossibleErrorString)
|
||||
}
|
||||
|
|
|
@ -140,6 +140,7 @@ module Old = {
|
|||
| Error(err) => error(err)
|
||||
}
|
||||
}
|
||||
|
||||
| Some(IEvNumber(_))
|
||||
| Some(IEvDistribution(_)) =>
|
||||
switch parseDistributionArray(args) {
|
||||
|
@ -192,6 +193,7 @@ module Old = {
|
|||
}
|
||||
Helpers.toFloatFn(fn, dist, ~env)
|
||||
}
|
||||
|
||||
| ("integralSum", [IEvDistribution(dist)]) => Helpers.toFloatFn(#IntegralSum, dist, ~env)
|
||||
| ("toString", [IEvDistribution(dist)]) => Helpers.toStringFn(ToString, dist, ~env)
|
||||
| ("sparkline", [IEvDistribution(dist)]) =>
|
||||
|
|
|
@ -19,6 +19,7 @@ let inputsToDist = (inputs: array<Reducer_T.value>, xyShapeToPointSetDist) => {
|
|||
| _ => impossibleError->SqError.Message.throw
|
||||
}
|
||||
}
|
||||
|
||||
| _ => impossibleError->SqError.Message.throw
|
||||
}
|
||||
)
|
||||
|
|
|
@ -61,6 +61,7 @@ module FRType = {
|
|||
let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
|
||||
`{${r->E.A2.fmap(input)->E.A2.joinWith(", ")}}`
|
||||
}
|
||||
|
||||
| FRTypeArray(r) => `list(${toString(r)})`
|
||||
| FRTypeLambda => `lambda`
|
||||
| FRTypeString => `string`
|
||||
|
@ -132,9 +133,9 @@ module FnDefinition = {
|
|||
}
|
||||
|
||||
let make = (~name, ~inputs, ~run, ()): t => {
|
||||
name: name,
|
||||
inputs: inputs,
|
||||
run: run,
|
||||
name,
|
||||
inputs,
|
||||
run,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -160,14 +161,14 @@ module Function = {
|
|||
~isExperimental=false,
|
||||
(),
|
||||
): t => {
|
||||
name: name,
|
||||
nameSpace: nameSpace,
|
||||
definitions: definitions,
|
||||
output: output,
|
||||
name,
|
||||
nameSpace,
|
||||
definitions,
|
||||
output,
|
||||
examples: examples->E.O2.default([]),
|
||||
isExperimental: isExperimental,
|
||||
requiresNamespace: requiresNamespace,
|
||||
description: description,
|
||||
isExperimental,
|
||||
requiresNamespace,
|
||||
description,
|
||||
}
|
||||
|
||||
let toJson = (t: t): functionJson => {
|
||||
|
@ -203,15 +204,19 @@ module Registry = {
|
|||
fn.requiresNamespace ? [] : [def.name],
|
||||
]->E.A.concatMany
|
||||
|
||||
names->Belt.Array.reduce(acc, (acc, name) => {
|
||||
switch acc->Belt.Map.String.get(name) {
|
||||
| Some(fns) => {
|
||||
let _ = fns->Js.Array2.push(def) // mutates the array, no need to update acc
|
||||
acc
|
||||
names->Belt.Array.reduce(
|
||||
acc,
|
||||
(acc, name) => {
|
||||
switch acc->Belt.Map.String.get(name) {
|
||||
| Some(fns) => {
|
||||
let _ = fns->Js.Array2.push(def) // mutates the array, no need to update acc
|
||||
acc
|
||||
}
|
||||
|
||||
| None => acc->Belt.Map.String.set(name, [def])
|
||||
}
|
||||
| None => acc->Belt.Map.String.set(name, [def])
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
@ -245,6 +250,7 @@ module Registry = {
|
|||
| None => REOther(showNameMatchDefinitions())->Error
|
||||
}
|
||||
}
|
||||
|
||||
| None => RESymbolNotFound(fnName)->Error
|
||||
}
|
||||
}
|
||||
|
|
|
@ -34,6 +34,7 @@ module Prepare = {
|
|||
let n2 = map->Belt.Map.String.getExn(arg2)
|
||||
Ok([n1, n2])
|
||||
}
|
||||
|
||||
| _ => Error(impossibleErrorString)
|
||||
}
|
||||
|
||||
|
@ -45,6 +46,7 @@ module Prepare = {
|
|||
let n3 = map->Belt.Map.String.getExn(arg3)
|
||||
Ok([n1, n2, n3])
|
||||
}
|
||||
|
||||
| _ => Error(impossibleErrorString)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -44,4 +44,4 @@ let removeResult = ({namespace} as bindings: t): t => {
|
|||
|
||||
let locals = ({namespace}: t): Reducer_T.namespace => namespace
|
||||
|
||||
let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace: namespace, parent: None}
|
||||
let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace, parent: None}
|
||||
|
|
|
@ -6,7 +6,7 @@ let createContext = (stdLib: Reducer_Namespace.t, environment: Reducer_T.environ
|
|||
{
|
||||
frameStack: list{},
|
||||
bindings: stdLib->Reducer_Bindings.fromNamespace->Reducer_Bindings.extend,
|
||||
environment: environment,
|
||||
environment,
|
||||
inFunction: None,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -123,6 +123,7 @@ let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
|
|||
)
|
||||
(result, context)
|
||||
}
|
||||
|
||||
| _ => RENotAFunction(lambda->Reducer_Value.toString)->throwFrom(expression, context)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,8 +23,8 @@ let make = (): t => list{}
|
|||
|
||||
let extend = (t: t, name: string, location: option<Reducer_Peggy_Parse.location>) =>
|
||||
t->Belt.List.add({
|
||||
name: name,
|
||||
location: location,
|
||||
name,
|
||||
location,
|
||||
})
|
||||
|
||||
// this is useful for SyntaxErrors
|
||||
|
|
|
@ -43,10 +43,10 @@ let makeLambda = (
|
|||
|
||||
FnLambda({
|
||||
// context: bindings,
|
||||
name: name,
|
||||
name,
|
||||
body: lambda,
|
||||
parameters: parameters,
|
||||
location: location,
|
||||
parameters,
|
||||
location,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -54,8 +54,8 @@ let makeLambda = (
|
|||
let makeFFILambda = (name: string, body: Reducer_T.lambdaBody): t => FnBuiltin({
|
||||
// Note: current bindings could be accidentally exposed here through context (compare with native lambda implementation above, where we override them with local bindings).
|
||||
// But FunctionRegistry API is too limited for that to matter. Please take care not to violate that in the future by accident.
|
||||
body: body,
|
||||
name: name,
|
||||
body,
|
||||
name,
|
||||
})
|
||||
|
||||
// this function doesn't scale to FunctionRegistry's polymorphic functions
|
||||
|
|
|
@ -113,7 +113,7 @@ let nodeToAST = (node: node) => {
|
|||
| _ => raise(UnsupportedPeggyNodeType(node["type"]))
|
||||
}
|
||||
|
||||
{location: node["location"], content: content}
|
||||
{location: node["location"], content}
|
||||
}
|
||||
|
||||
let nodeIdentifierToAST = (node: nodeIdentifier) => {
|
||||
|
|
|
@ -68,7 +68,7 @@ let rec fromNode = (node: Parse.node): expression => {
|
|||
}
|
||||
|
||||
{
|
||||
ast: ast,
|
||||
content: content,
|
||||
ast,
|
||||
content,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -216,6 +216,7 @@ let tryRunWithResult = (
|
|||
project->setResult(sourceId, Error(error))
|
||||
Error(error)
|
||||
}
|
||||
|
||||
| Ok(_prevResult) => {
|
||||
project->doLinkAndRun(sourceId)
|
||||
project->getResultOption(sourceId)->Belt.Option.getWithDefault(rPrevResult)
|
||||
|
|
|
@ -6,7 +6,7 @@ type t = T.t
|
|||
|
||||
let emptyItem = (sourceId: string): projectItem => {
|
||||
source: "",
|
||||
sourceId: sourceId,
|
||||
sourceId,
|
||||
rawParse: None,
|
||||
expression: None,
|
||||
continuation: Reducer_Namespace.make(),
|
||||
|
@ -76,7 +76,7 @@ let resetIncludes = (r: t): t => {
|
|||
}
|
||||
|
||||
let setSource = (r: t, source: T.sourceArgumentType): t =>
|
||||
{...r, source: source}->resetIncludes->touchSource
|
||||
{...r, source}->resetIncludes->touchSource
|
||||
|
||||
let setRawParse = (r: t, rawParse: T.rawParseArgumentType): t =>
|
||||
{...r, rawParse: Some(rawParse)}->touchRawParse
|
||||
|
@ -86,7 +86,7 @@ let setExpression = (r: t, expression: T.expressionArgumentType): t =>
|
|||
|
||||
let setContinuation = (r: t, continuation: T.continuationArgumentType): t => {
|
||||
...r,
|
||||
continuation: continuation,
|
||||
continuation,
|
||||
}
|
||||
|
||||
let setResult = (r: t, result: T.resultArgumentType): t => {
|
||||
|
@ -110,24 +110,23 @@ let getPastChain = (this: t): array<string> => {
|
|||
Js.Array2.concat(getDirectIncludes(this), getContinues(this))
|
||||
}
|
||||
|
||||
let setContinues = (this: t, continues: array<string>): t =>
|
||||
{...this, continues: continues}->touchSource
|
||||
let setContinues = (this: t, continues: array<string>): t => {...this, continues}->touchSource
|
||||
|
||||
let removeContinues = (this: t): t => {...this, continues: []}->touchSource
|
||||
|
||||
let setIncludes = (this: t, includes: T.includesType): t => {
|
||||
...this,
|
||||
includes: includes,
|
||||
includes,
|
||||
}
|
||||
|
||||
let setImportAsVariables = (this: t, includeAsVariables: T.importAsVariablesType): t => {
|
||||
...this,
|
||||
includeAsVariables: includeAsVariables,
|
||||
includeAsVariables,
|
||||
}
|
||||
|
||||
let setDirectImports = (this: t, directIncludes: array<string>): t => {
|
||||
...this,
|
||||
directIncludes: directIncludes,
|
||||
directIncludes,
|
||||
}
|
||||
|
||||
let parseIncludes = (this: t): t => {
|
||||
|
@ -144,9 +143,9 @@ let parseIncludes = (this: t): t => {
|
|||
->Belt.Array.map(((_variable, file)) => file)
|
||||
{
|
||||
...this,
|
||||
includes: includes,
|
||||
includeAsVariables: includeAsVariables,
|
||||
directIncludes: directIncludes,
|
||||
includes,
|
||||
includeAsVariables,
|
||||
directIncludes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -54,6 +54,7 @@ module Message = {
|
|||
}
|
||||
answer
|
||||
}
|
||||
|
||||
| REMacroNotFound(macro) => `Macro not found: ${macro}`
|
||||
| RENotAFunction(valueString) => `${valueString} is not a function`
|
||||
| RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
|
||||
|
@ -93,8 +94,8 @@ type t = {
|
|||
exception SqException(t)
|
||||
|
||||
let fromMessageWithFrameStack = (message: Message.t, frameStack: Reducer_FrameStack.t): t => {
|
||||
message: message,
|
||||
frameStack: frameStack,
|
||||
message,
|
||||
frameStack,
|
||||
}
|
||||
|
||||
// this shouldn't be used much, since frame stack will be empty
|
||||
|
|
|
@ -18,6 +18,7 @@ let stdLib: Reducer_T.namespace = {
|
|||
| None => REArrayIndexNotFound("Array index not found", index)->SqError.Message.throw
|
||||
}
|
||||
}
|
||||
|
||||
| [IEvRecord(dict), IEvString(sIndex)] =>
|
||||
switch Belt.Map.String.get(dict, sIndex) {
|
||||
| Some(value) => value
|
||||
|
|
|
@ -9,13 +9,13 @@ type declaration<'a> = {
|
|||
|
||||
module ContinuousFloatArg = {
|
||||
let make = (min: float, max: float): arg => {
|
||||
Float({min: min, max: max})
|
||||
Float({min, max})
|
||||
}
|
||||
}
|
||||
|
||||
module ContinuousTimeArg = {
|
||||
let make = (min: Js.Date.t, max: Js.Date.t): arg => {
|
||||
Date({min: min, max: max})
|
||||
Date({min, max})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -33,7 +33,7 @@ module Arg = {
|
|||
}
|
||||
|
||||
let make = (fn: 'a, args: array<arg>): declaration<'a> => {
|
||||
{fn: fn, args: args}
|
||||
{fn, args}
|
||||
}
|
||||
|
||||
let toString = (r: declaration<'a>, fnToString): string => {
|
||||
|
|
|
@ -85,8 +85,8 @@ module T = {
|
|||
}
|
||||
let square = mapX(x => x ** 2.0)
|
||||
let zip = ({xs, ys}: t) => Belt.Array.zip(xs, ys)
|
||||
let fromArray = ((xs, ys)): t => {xs: xs, ys: ys}
|
||||
let fromArrays = (xs, ys): t => {xs: xs, ys: ys}
|
||||
let fromArray = ((xs, ys)): t => {xs, ys}
|
||||
let fromArrays = (xs, ys): t => {xs, ys}
|
||||
let accumulateYs = (fn, p: t) => fromArray((p.xs, E.A.accumulate(fn, p.ys)))
|
||||
let concat = (t1: t, t2: t) => {
|
||||
let cxs = Array.concat(list{t1.xs, t2.xs})
|
||||
|
@ -142,7 +142,7 @@ module T = {
|
|||
}
|
||||
|
||||
let make = (~xs: array<float>, ~ys: array<float>) => {
|
||||
let attempt: t = {xs: xs, ys: ys}
|
||||
let attempt: t = {xs, ys}
|
||||
switch Validator.validate(attempt) {
|
||||
| Some(error) => Error(error)
|
||||
| None => Ok(attempt)
|
||||
|
@ -452,6 +452,7 @@ module PointwiseCombination = {
|
|||
let _ = Js.Array.push(fn(y1, y2), newYs)
|
||||
let _ = Js.Array.push(x, newXs)
|
||||
}
|
||||
|
||||
| None => ()
|
||||
}
|
||||
}
|
||||
|
@ -558,7 +559,7 @@ module Range = {
(xs[x + 1] -. xs[x]) *. ((ys[x] +. ys[x + 1]) /. 2.) +. cumulativeY[x], // dx // (1/2) * (avgY)
)
}
Some({xs: xs, ys: cumulativeY})
Some({xs, ys: cumulativeY})
}

let derivative = mapYsBasedOnRanges(delta_y_over_delta_x)
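/* The cumulative integral above is the trapezoid rule over an XYShape: each step adds
   dx * (yLeft + yRight) / 2 to a running total. A standalone sketch of the same
   recurrence (hypothetical helper name, plain Belt arrays, not part of the commit): */
let cumulativeTrapezoid = (xs: array<float>, ys: array<float>): array<float> => {
  let n = Js.Array2.length(xs)
  let out = Belt.Array.make(n, 0.0)
  for i in 1 to n - 1 {
    let dx = Belt.Array.getUnsafe(xs, i) -. Belt.Array.getUnsafe(xs, i - 1)
    let avgY = (Belt.Array.getUnsafe(ys, i - 1) +. Belt.Array.getUnsafe(ys, i)) /. 2.0
    Belt.Array.setUnsafe(out, i, Belt.Array.getUnsafe(out, i - 1) +. dx *. avgY)
  }
  out
}
// cumulativeTrapezoid([0., 1., 2.], [0., 1., 1.]) == [0., 0.5, 1.5]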
yarn.lock
@ -3037,6 +3037,11 @@
"@react-hook/passive-layout-effect" "^1.2.0"
"@react-hook/resize-observer" "^1.2.1"

"@rescript/std@^10.0.0":
version "10.0.0"
resolved "https://registry.yarnpkg.com/@rescript/std/-/std-10.0.0.tgz#11996296739d7f0d2949283c93b4d14e9ed4589d"
integrity sha512-DFwX5vWASZtvjFdqar2VIadvmy2ZBPTnPI2A9EKEkvNR93OUoZygOfvhRaueIQtlS4f9X50E3v2awI9JJG+JsQ==

"@rescript/std@^9.1.4":
version "9.1.4"
resolved "https://registry.yarnpkg.com/@rescript/std/-/std-9.1.4.tgz#94971cb504b10d36d470618fa1c6f0a2d03a6b9b"

@ -16340,10 +16345,10 @@ rescript-js-map@^1.1.0:
dependencies:
rescript-js-iterator "^1.1.0"

rescript@^9.1.4:
version "9.1.4"
resolved "https://registry.yarnpkg.com/rescript/-/rescript-9.1.4.tgz#1eb126f98d6c16942c0bf0df67c050198e580515"
integrity sha512-aXANK4IqecJzdnDpJUsU6pxMViCR5ogAxzuqS0mOr8TloMnzAjJFu63fjD6LCkWrKAhlMkFFzQvVQYaAaVkFXw==
rescript@^10.0.0:
version "10.0.0"
resolved "https://registry.yarnpkg.com/rescript/-/rescript-10.0.0.tgz#8460bc6f7d94bc580eac02d7c7efdf0a470916b8"
integrity sha512-LhNg/4+0j8NvoFeslgAeYLlzUwkq6kR6l6v8BnZ61VDTxopK2l96uT1lq5lv1aMxzMDynvE2qnX0zalre+6XxA==

resize-observer-polyfill@^1.5.1:
version "1.5.1"