Compare commits

...

3 Commits

Author     SHA1        Message                                        Date
Sam Nolan  b44bf0cc38  Fix failing tests due to environment change    2022-07-12 17:25:11 +10:00
Sam Nolan  ee14a47bd6  Fix failing floating point test                2022-07-12 15:29:50 +10:00
Sam Nolan  fe5a42353e  Add percentile graphed as environment option   2022-07-12 15:16:13 +10:00
19 changed files with 120 additions and 102 deletions

View File

@ -219,6 +219,7 @@ export const SquiggleItem: React.FC<SquiggleItemProps> = ({
distributionPlotSettings={distributionPlotSettings}
height={height}
environment={{
...environment,
sampleCount: environment.sampleCount / 10,
xyPointLength: environment.xyPointLength / 10,
}}
@ -234,6 +235,7 @@ export const SquiggleItem: React.FC<SquiggleItemProps> = ({
distributionPlotSettings={distributionPlotSettings}
height={height}
environment={{
...environment,
sampleCount: environment.sampleCount / 10,
xyPointLength: environment.xyPointLength / 10,
}}
@ -246,7 +248,7 @@ export const SquiggleItem: React.FC<SquiggleItemProps> = ({
<VariableBox heading="Module" showTypes={showTypes}>
<div className="space-y-3">
{Object.entries(expression.value)
.filter(([key, r]) => key !== "Math")
.filter(([key, _]) => key !== "Math")
.map(([key, r]) => (
<div key={key} className="flex space-x-2">
<div className="flex-none">

View File

@ -55,6 +55,7 @@ const schema = yup.object({}).shape({
.default(1000)
.min(10)
.max(10000),
percentile: yup.number().required().positive().default(0.9998).min(0).max(1),
chartHeight: yup.number().required().positive().integer().default(350),
leftSizePercent: yup
.number()
@ -155,6 +156,20 @@ const SamplingSettings: React.FC<{ register: UseFormRegister<FormFields> }> = ({
</Text>
</div>
</div>
<div>
<InputItem
name="percentile"
type="number"
label="Symbolic Distribution Percentile"
register={register}
/>
<div className="mt-2">
<Text>
The percentile range to sample points within when converting symbolic
distributions to PointSet distributions.
</Text>
</div>
</div>
</div>
);
@ -436,6 +451,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
const { register, control } = useForm({
resolver: yupResolver(schema),
defaultValues: {
percentile: 0.9998,
sampleCount: 1000,
xyPointLength: 1000,
chartHeight: 150,
@ -468,6 +484,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
const env: environment = useMemo(
() => ({
percentile: Number(vars.percentile),
sampleCount: Number(vars.sampleCount),
xyPointLength: Number(vars.xyPointLength),
}),
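
To make the new "Symbolic Distribution Percentile" setting above concrete, a quick arithmetic sketch (TypeScript for convenience): the SymbolicDist changes later in this diff convert the percentile into symmetric CDF bounds, and the default of 0.9998 reproduces the constants that used to be hard-coded (minCdfValue = 0.0001, maxCdfValue = 0.9999).

const percentile = 0.9998;               // default from the schema and form above
const minCdf = (1 - percentile) / 2;     // ≈ 0.0001, the old minCdfValue
const maxCdf = 1 - (1 - percentile) / 2; // ≈ 0.9999, the old maxCdfValue
// Symbolic distributions are sampled between their inverse CDF at minCdf and at maxCdf.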

View File

@ -42,7 +42,7 @@ export let linearYScale: LinearScale = {
name: "yscale",
type: "linear",
range: "height",
zero: false,
zero: true,
domain: {
fields: [
{
@ -84,7 +84,7 @@ export let expYScale: PowScale = {
type: "pow",
exponent: 0.1,
range: "height",
zero: false,
zero: true,
nice: false,
domain: {
fields: [

View File

@ -1,10 +1,6 @@
open Jest
open Expect
let env: DistributionOperation.env = {
sampleCount: 100,
xyPointLength: 100,
}
open TestHelpers
let {
normalDist5,

View File

@ -58,7 +58,7 @@ describe("eval on distribution functions", () => {
describe("subtract", () => {
testEval("10 - normal(5, 1)", "Ok(Normal(5,1))")
testEval("normal(5, 1) - 10", "Ok(Normal(-5,1))")
testEval("mean(1 - toPointSet(normal(5, 2)))", "Ok(-4.002309896304692)")
testEval("mean(1 - toPointSet(normal(5, 2)))", "Ok(-4.002309896304693)")
})
describe("multiply", () => {
testEval("normal(10, 2) * 2", "Ok(Normal(20,4))")

View File

@ -3,6 +3,7 @@ import {
resultMap,
defaultBindings,
mergeBindings,
defaultEnvironment,
} from "../../src/js/index";
import { testRun, testRunPartial } from "./TestHelpers";
@ -109,7 +110,7 @@ describe("JS Imports", () => {
describe("Distribution", () => {
//It's important that sampleCount is less than 9. If it's more, then that will create randomness.
//Also note: the value should be created using makeSampleSetDist() later on.
let env = { sampleCount: 8, xyPointLength: 100 };
let env = { ...defaultEnvironment, sampleCount: 8, xyPointLength: 100 };
let dist1Samples = [3, 4, 5, 6, 6, 7, 10, 15, 30];
let dist1SampleCount = dist1Samples.length;
let dist = new Distribution(

View File

@ -1,4 +1,4 @@
import { Distribution } from "../../src/js/index";
import { Distribution, defaultEnvironment } from "../../src/js/index";
import { expectErrorToBeBounded, failDefault, testRun } from "./TestHelpers";
import * as fc from "fast-check";
@ -16,6 +16,7 @@ let arrayGen = () =>
);
describe("cumulative density function", () => {
let n = 10000;
let env = { ...defaultEnvironment, sampleCount: n, xyPointLength: 100 };
// We should fix this.
test.skip("'s codomain is bounded above", () => {
@ -23,10 +24,7 @@ describe("cumulative density function", () => {
fc.property(arrayGen(), fc.float(), (xs_, x) => {
let xs = Array.from(xs_);
// Should compute with squiggle strings once interpreter has `sample`
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: n, xyPointLength: 100 }
);
let dist = new Distribution({ tag: "SampleSet", value: xs }, env);
let cdfValue = dist.cdf(x).value;
let epsilon = 5e-7;
expect(cdfValue).toBeLessThanOrEqual(1 + epsilon);
@ -39,10 +37,7 @@ describe("cumulative density function", () => {
fc.property(arrayGen(), fc.float(), (xs_, x) => {
let xs = Array.from(xs_);
// Should compute with squiggle strings once interpreter has `sample`
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: n, xyPointLength: 100 }
);
let dist = new Distribution({ tag: "SampleSet", value: xs }, env);
let cdfValue = dist.cdf(x).value;
expect(cdfValue).toBeGreaterThanOrEqual(0);
})
@ -57,10 +52,7 @@ describe("cumulative density function", () => {
let xs = Array.from(xs_);
let max = Math.max(...xs);
// Should compute with squiggle strings once interpreter has `sample`
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: n, xyPointLength: 100 }
);
let dist = new Distribution({ tag: "SampleSet", value: xs }, env);
let cdfValue = dist.cdf(max).value;
expect(cdfValue).toBeCloseTo(1.0, 2);
})
@ -74,10 +66,7 @@ describe("cumulative density function", () => {
let xs = Array.from(xs_);
let min = Math.min(...xs);
// Should compute with squiggle strings once interpreter has `sample`
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: n, xyPointLength: 100 }
);
let dist = new Distribution({ tag: "SampleSet", value: xs }, env);
let cdfValue = dist.cdf(min).value;
let max = Math.max(...xs);
let epsilon = 5e-3;
@ -95,10 +84,7 @@ describe("cumulative density function", () => {
fc.assert(
fc.property(arrayGen(), fc.float(), (xs_, x) => {
let xs = Array.from(xs_);
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: n, xyPointLength: 100 }
);
let dist = new Distribution({ tag: "SampleSet", value: xs }, env);
let cdfValue = dist.cdf(x).value;
let max = Math.max(...xs);
if (x > max) {
@ -117,10 +103,7 @@ describe("cumulative density function", () => {
fc.assert(
fc.property(arrayGen(), fc.float(), (xs_, x) => {
let xs = Array.from(xs_);
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: n, xyPointLength: 100 }
);
let dist = new Distribution({ tag: "SampleSet", value: xs }, env);
let cdfValue = dist.cdf(x).value;
expect(cdfValue).toBeGreaterThanOrEqual(0);
})
@ -131,6 +114,7 @@ describe("cumulative density function", () => {
// I no longer believe this is true.
describe("probability density function", () => {
let n = 1000;
let env = { ...defaultEnvironment, sampleCount: n, xyPointLength: 100 };
test.skip("assigns to the max at most the weight of the mean", () => {
fc.assert(
@ -139,10 +123,7 @@ describe("probability density function", () => {
let max = Math.max(...xs);
let mean = xs.reduce((a, b) => a + b, 0.0) / xs.length;
// Should be from squiggleString once interpreter exposes sampleset
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: n, xyPointLength: 100 }
);
let dist = new Distribution({ tag: "SampleSet", value: xs }, env);
let pdfValueMean = dist.pdf(mean).value;
let pdfValueMax = dist.pdf(max).value;
if (typeof pdfValueMean == "number" && typeof pdfValueMax == "number") {
@ -166,7 +147,7 @@ describe("mean is mean", () => {
let n = xs.length;
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: 2 * n, xyPointLength: 4 * n }
{ ...defaultEnvironment, sampleCount: 2 * n, xyPointLength: 4 * n }
);
let mean = dist.mean();
if (typeof mean.value == "number") {
@ -193,7 +174,11 @@ describe("mean is mean", () => {
let n = xs.length;
let dist = new Distribution(
{ tag: "SampleSet", value: xs },
{ sampleCount: Math.floor(n / 2), xyPointLength: 4 * n }
{
...defaultEnvironment,
sampleCount: Math.floor(n / 2),
xyPointLength: 4 * n,
}
);
let mean = dist.mean();
if (typeof mean.value == "number") {

View File

@ -30,6 +30,7 @@ let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Ou
let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)
let env: DistributionOperation.env = {
percentile: 0.9998,
sampleCount: MagicNumbers.Environment.defaultSampleCount,
xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
}

View File

@ -105,14 +105,7 @@ export class Distribution {
}
pointSet(): result<shape, distributionError> {
let pointSet = toPointSet(
this.t,
{
xyPointLength: this.env.xyPointLength,
sampleCount: this.env.sampleCount,
},
undefined
);
let pointSet = toPointSet(this.t, this.env, undefined);
if (pointSet.tag === "Ok") {
let distribution = pointSet.value;
if (distribution.tag === "Continuous") {
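
For reference, a minimal TypeScript usage sketch of the simplified pointSet() above; the import path mirrors the test files earlier in this diff, and the { tag, value } result handling is assumed, so treat both as illustrative.

import { Distribution, defaultEnvironment } from "../../src/js/index"; // path as used in the tests above

// The env now carries `percentile` via defaultEnvironment and is passed to toPointSet unchanged.
const env = { ...defaultEnvironment, sampleCount: 1000, xyPointLength: 1000 };
const dist = new Distribution({ tag: "SampleSet", value: [3, 4, 5, 6, 7] }, env);
const shape = dist.pointSet(); // result<shape, distributionError>
if (shape.tag === "Ok") {
  console.log(shape.value); // discretized point-set representation of the samples
}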

View File

@ -39,20 +39,17 @@ export type { result, shape, environment, lambdaValue, squiggleExpression };
export { parse } from "./parse";
export let defaultSamplingInputs: environment = {
sampleCount: 10000,
xyPointLength: 10000,
};
export function run(
squiggleString: string,
bindings?: externalBindings,
environment?: environment,
environment?: Partial<environment>,
imports?: jsImports
): result<squiggleExpression, errorValue> {
let b = bindings ? bindings : defaultBindings;
let i = imports ? imports : defaultImports;
let e = environment ? environment : defaultEnvironment;
let e = environment
? _.merge({}, defaultEnvironment, environment) // merge into a copy so defaultEnvironment is not mutated
: defaultEnvironment;
let res: result<expressionValue, errorValue> = evaluateUsingOptions(
{ externalBindings: mergeImportsWithBindings(b, i), environment: e },
squiggleString
@ -64,12 +61,14 @@ export function run(
export function runPartial(
squiggleString: string,
bindings?: externalBindings,
environment?: environment,
environment?: Partial<environment>,
imports?: jsImports
): result<externalBindings, errorValue> {
let b = bindings ? bindings : defaultBindings;
let i = imports ? imports : defaultImports;
let e = environment ? environment : defaultEnvironment;
let e = environment
? _.merge({}, defaultEnvironment, environment) // merge into a copy so defaultEnvironment is not mutated
: defaultEnvironment;
return evaluatePartialUsingExternalBindings(
squiggleString,
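
A minimal sketch of what the Partial<environment> signature enables for callers of run(): only the overridden fields need to be supplied, and the rest (including the new percentile) fall back to defaultEnvironment. Program string and result handling are illustrative.

import { run } from "../../src/js/index"; // path as used in the tests above

// Override sampleCount only; percentile and xyPointLength come from defaultEnvironment.
const result = run("mean(normal(5, 2))", undefined, { sampleCount: 500 });
if (result.tag === "Ok") {
  console.log(result.value); // a squiggleExpression whose value is roughly 5
}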

View File

@ -5,11 +5,13 @@ type error = DistributionTypes.error
// TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.
type env = {
percentile: float,
sampleCount: int,
xyPointLength: int,
}
let defaultEnv = {
percentile: 0.9998,
sampleCount: MagicNumbers.Environment.defaultSampleCount,
xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
}
@ -137,7 +139,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
->OutputLocal.fromResult
| ToString(ToString) => dist->GenericDist.toString->String
| ToString(ToSparkline(bucketCount)) =>
GenericDist.toSparkline(dist, ~sampleCount, ~bucketCount, ())
GenericDist.toSparkline(dist, ~percentile=env.percentile, ~sampleCount, ~bucketCount, ())
->E.R2.fmap(r => String(r))
->OutputLocal.fromResult
| ToDist(Inspect) => {
@ -170,7 +172,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
->OutputLocal.fromResult
| ToDist(ToPointSet) =>
dist
->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
->GenericDist.toPointSet(~percentile=env.percentile, ~xyPointLength, ~sampleCount, ())
->E.R2.fmap(r => Dist(PointSet(r)))
->OutputLocal.fromResult
| ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>

View File

@ -1,5 +1,6 @@
@genType
type env = {
percentile: float,
sampleCount: int,
xyPointLength: int,
}

View File

@ -70,8 +70,8 @@ module DistributionOperation = {
| #IntegralSum
| #Mode
| #Stdev
| #Min
| #Max
| #Min(float)
| #Max(float)
| #Variance
]
@ -123,8 +123,8 @@ module DistributionOperation = {
| ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
| ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
| ToFloat(#Mean) => `mean`
| ToFloat(#Min) => `min`
| ToFloat(#Max) => `max`
| ToFloat(#Min(_)) => `min`
| ToFloat(#Max(_)) => `max`
| ToFloat(#Stdev) => `stdev`
| ToFloat(#Variance) => `variance`
| ToFloat(#Mode) => `mode`

View File

@ -108,7 +108,7 @@ let toFloatOperation = (
) => {
switch distToFloatOperation {
| #IntegralSum => Ok(integralEndY(t))
| (#Pdf(_) | #Cdf(_) | #Inv(_) | #Mean | #Sample | #Min | #Max) as op => {
| (#Pdf(_) | #Cdf(_) | #Inv(_) | #Mean | #Sample | #Min(_) | #Max(_)) as op => {
let trySymbolicSolution = switch (t: t) {
| Symbolic(r) => SymbolicDist.T.operate(op, r)->E.R.toOption
| _ => None
@ -118,8 +118,8 @@ let toFloatOperation = (
| (SampleSet(sampleSet), #Mean) => SampleSetDist.mean(sampleSet)->Some
| (SampleSet(sampleSet), #Sample) => SampleSetDist.sample(sampleSet)->Some
| (SampleSet(sampleSet), #Inv(r)) => SampleSetDist.percentile(sampleSet, r)->Some
| (SampleSet(sampleSet), #Min) => SampleSetDist.min(sampleSet)->Some
| (SampleSet(sampleSet), #Max) => SampleSetDist.max(sampleSet)->Some
| (SampleSet(sampleSet), #Min(_)) => SampleSetDist.min(sampleSet)->Some
| (SampleSet(sampleSet), #Max(_)) => SampleSetDist.max(sampleSet)->Some
| _ => None
}
@ -150,6 +150,7 @@ let toFloatOperation = (
// Also, change the outputXYPoints/pointSetDistLength details
let toPointSet = (
t,
~percentile: float,
~xyPointLength,
~sampleCount,
~xSelection: DistributionTypes.DistributionOperation.pointsetXSelection=#ByWeight,
@ -157,7 +158,7 @@ let toPointSet = (
): result<PointSetTypes.pointSetDist, error> => {
switch (t: t) {
| PointSet(pointSet) => Ok(pointSet)
| Symbolic(r) => Ok(SymbolicDist.T.toPointSetDist(~xSelection, xyPointLength, r))
| Symbolic(r) => Ok(SymbolicDist.T.toPointSetDist(~percentile, ~xSelection, xyPointLength, r))
| SampleSet(r) =>
SampleSetDist.toPointSetDist(
~samples=r,
@ -177,9 +178,12 @@ let toPointSet = (
xyPointLength to be a bit longer than the eventual toSparkline downsampling. I chose 3
fairly arbitrarily.
*/
let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, ()): result<string, error> =>
let toSparkline = (t: t, ~percentile: float, ~sampleCount: int, ~bucketCount: int=20, ()): result<
string,
error,
> =>
t
->toPointSet(~xSelection=#Linear, ~xyPointLength=bucketCount * 3, ~sampleCount, ())
->toPointSet(~percentile, ~xSelection=#Linear, ~xyPointLength=bucketCount * 3, ~sampleCount, ())
->E.R.bind(r =>
r->PointSetDist.toSparkline(bucketCount)->E.R2.errMap(x => DistributionTypes.SparklineError(x))
)

View File

@ -37,12 +37,19 @@ module Score: {
@genType
let toPointSet: (
t,
~percentile: float,
~xyPointLength: int,
~sampleCount: int,
~xSelection: DistributionTypes.DistributionOperation.pointsetXSelection=?,
unit,
) => result<PointSetTypes.pointSetDist, error>
let toSparkline: (t, ~sampleCount: int, ~bucketCount: int=?, unit) => result<string, error>
let toSparkline: (
t,
~percentile: float,
~sampleCount: int,
~bucketCount: int=?,
unit,
) => result<string, error>
let truncate: (
t,

View File

@ -254,8 +254,8 @@ let operate = (distToFloatOp: Operation.distToFloatOperation, s): float =>
| #Inv(f) => inv(f, s)
| #Sample => sample(s)
| #Mean => T.mean(s)
| #Min => T.minX(s)
| #Max => T.maxX(s)
| #Min(_) => T.minX(s)
| #Max(_) => T.maxX(s)
}
let toSparkline = (t: t, bucketCount): result<string, PointSetTypes.sparklineError> =>

View File

@ -331,9 +331,6 @@ module From90thPercentile = {
}
module T = {
let minCdfValue = 0.0001
let maxCdfValue = 0.9999
let pdf = (x, dist) =>
switch dist {
| #Normal(n) => Normal.pdf(x, n)
@ -419,35 +416,39 @@ module T = {
| #Bernoulli(n) => Bernoulli.toString(n)
}
let min: symbolicDist => float = x =>
let min = (~percentile: float, x: symbolicDist): float => {
let minCdf = (1. -. percentile) /. 2.
switch x {
| #Triangular({low}) => low
| #Exponential(n) => Exponential.inv(minCdfValue, n)
| #Cauchy(n) => Cauchy.inv(minCdfValue, n)
| #Normal(n) => Normal.inv(minCdfValue, n)
| #Lognormal(n) => Lognormal.inv(minCdfValue, n)
| #Logistic(n) => Logistic.inv(minCdfValue, n)
| #Gamma(n) => Gamma.inv(minCdfValue, n)
| #Exponential(n) => Exponential.inv(minCdf, n)
| #Cauchy(n) => Cauchy.inv(minCdf, n)
| #Normal(n) => Normal.inv(minCdf, n)
| #Lognormal(n) => Lognormal.inv(minCdf, n)
| #Logistic(n) => Logistic.inv(minCdf, n)
| #Gamma(n) => Gamma.inv(minCdf, n)
| #Uniform({low}) => low
| #Bernoulli(n) => Bernoulli.min(n)
| #Beta(n) => Beta.inv(minCdfValue, n)
| #Beta(n) => Beta.inv(minCdf, n)
| #Float(n) => n
}
}
let max: symbolicDist => float = x =>
let max = (~percentile: float, x: symbolicDist): float => {
let maxCdf = 1. -. (1. -. percentile) /. 2.
switch x {
| #Triangular(n) => n.high
| #Exponential(n) => Exponential.inv(maxCdfValue, n)
| #Cauchy(n) => Cauchy.inv(maxCdfValue, n)
| #Normal(n) => Normal.inv(maxCdfValue, n)
| #Gamma(n) => Gamma.inv(maxCdfValue, n)
| #Lognormal(n) => Lognormal.inv(maxCdfValue, n)
| #Logistic(n) => Logistic.inv(maxCdfValue, n)
| #Beta(n) => Beta.inv(maxCdfValue, n)
| #Exponential(n) => Exponential.inv(maxCdf, n)
| #Cauchy(n) => Cauchy.inv(maxCdf, n)
| #Normal(n) => Normal.inv(maxCdf, n)
| #Gamma(n) => Gamma.inv(maxCdf, n)
| #Lognormal(n) => Lognormal.inv(maxCdf, n)
| #Logistic(n) => Logistic.inv(maxCdf, n)
| #Beta(n) => Beta.inv(maxCdf, n)
| #Bernoulli(n) => Bernoulli.max(n)
| #Uniform({high}) => high
| #Float(n) => n
}
}
let mean: symbolicDist => result<float, string> = x =>
switch x {
@ -469,15 +470,20 @@ module T = {
| #Cdf(f) => Ok(cdf(f, s))
| #Pdf(f) => Ok(pdf(f, s))
| #Inv(f) => Ok(inv(f, s))
| #Min => Ok(min(s))
| #Max => Ok(max(s))
| #Min(percentile) => Ok(min(~percentile, s))
| #Max(percentile) => Ok(max(~percentile, s))
| #Sample => Ok(sample(s))
| #Mean => mean(s)
}
let interpolateXs = (~xSelection: [#Linear | #ByWeight]=#Linear, dist: symbolicDist, n) =>
let interpolateXs = (
~percentile: float,
~xSelection: [#Linear | #ByWeight]=#Linear,
dist: symbolicDist,
n,
) => {
switch (xSelection, dist) {
| (#Linear, _) => E.A.Floats.range(min(dist), max(dist), n)
| (#Linear, _) => E.A.Floats.range(min(~percentile, dist), max(~percentile, dist), n)
| (#ByWeight, #Uniform(n)) =>
// In `ByWeight mode, uniform distributions get special treatment because we need two x's
// on either side for proper rendering (just left and right of the discontinuities).
@ -485,9 +491,12 @@ module T = {
let dx = MagicNumbers.Epsilon.ten *. distance
[n.low -. dx, n.low, n.low +. dx, n.high -. dx, n.high, n.high +. dx]
| (#ByWeight, _) =>
let ys = E.A.Floats.range(minCdfValue, maxCdfValue, n)
let minCdf = (1. -. percentile) /. 2.
let maxCdf = 1. -. minCdf
let ys = E.A.Floats.range(minCdf, maxCdf, n)
ys |> E.A.fmap(y => inv(y, dist))
}
}
/* Calling e.g. "Normal.operate" returns an optional that wraps a result.
If the optional is None, there is no valid analytic solution. If it Some, it
@ -533,6 +542,7 @@ module T = {
}
let toPointSetDist = (
~percentile: float,
~xSelection=#ByWeight,
sampleCount,
d: symbolicDist,
@ -541,7 +551,7 @@ module T = {
| #Float(v) => Float.toPointSetDist(v)
| #Bernoulli(v) => Bernoulli.toPointSetDist(v)
| _ =>
let xs = interpolateXs(~xSelection, d, sampleCount)
let xs = interpolateXs(~percentile, ~xSelection, d, sampleCount)
let ys = xs |> E.A.fmap(x => pdf(x, d))
Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs: xs, ys: ys}))
}

View File

@ -216,8 +216,8 @@ let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperatio
| "mean" => #Mean
| "stdev" => #Stdev
| "variance" => #Variance
| "min" => #Min
| "max" => #Max
| "min" => #Min(env.percentile)
| "max" => #Max(env.percentile)
| "mode" => #Mode
| _ => #Mean
}
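
A hedged sketch of the downstream effect, assuming min and max are exposed to Squiggle programs exactly as this dispatch suggests: min and max of a symbolic distribution are now bounded by env.percentile rather than by fixed constants, so the bound can be tuned per run.

import { run } from "../../src/js/index"; // path as used in the tests above

// The reported minimum corresponds to the inverse CDF at (1 - percentile) / 2,
// so the wider percentile should give a more extreme (smaller) value.
const narrow = run("min(normal(0, 1))", undefined, { percentile: 0.99 });
const wide = run("min(normal(0, 1))", undefined, { percentile: 0.9998 });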

View File

@ -26,8 +26,8 @@ type distToFloatOperation = [
| #Inv(float)
| #Mean
| #Sample
| #Min
| #Max
| #Min(float)
| #Max(float)
]
module Convolution = {