Add graphed percentile as an environment option

Sam Nolan 2022-07-12 15:16:13 +10:00
parent af4423cc2e
commit fe5a42353e
16 changed files with 101 additions and 69 deletions
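
This commit threads a new `percentile` field through the evaluation environment: the playground exposes it as a setting, the JS API accepts it, and the ReScript distribution code uses it to decide how far into the tails a symbolic distribution is sampled when it is converted to a point set (and when `min`/`max` are taken). As a minimal orientation sketch, here is what the extended environment looks like from TypeScript; the field names follow the diffs below, but the `sampleCount`/`xyPointLength` values are only illustrative, not the library defaults:

```typescript
// Sketch only: field names follow this commit; numeric values are illustrative.
type environment = {
  percentile: number;    // share of probability mass covered when discretizing a symbolic dist
  sampleCount: number;   // samples drawn for sample-set operations
  xyPointLength: number; // points used when converting to a PointSet
};

const env: environment = {
  percentile: 0.9998, // the default introduced by this commit
  sampleCount: 1000,
  xyPointLength: 1000,
};
```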

View File

@@ -219,6 +219,7 @@ export const SquiggleItem: React.FC<SquiggleItemProps> = ({
           distributionPlotSettings={distributionPlotSettings}
           height={height}
           environment={{
+            ...environment,
             sampleCount: environment.sampleCount / 10,
             xyPointLength: environment.xyPointLength / 10,
           }}
@@ -234,6 +235,7 @@ export const SquiggleItem: React.FC<SquiggleItemProps> = ({
           distributionPlotSettings={distributionPlotSettings}
           height={height}
           environment={{
+            ...environment,
             sampleCount: environment.sampleCount / 10,
             xyPointLength: environment.xyPointLength / 10,
           }}
@@ -246,7 +248,7 @@ export const SquiggleItem: React.FC<SquiggleItemProps> = ({
         <VariableBox heading="Module" showTypes={showTypes}>
           <div className="space-y-3">
             {Object.entries(expression.value)
-              .filter(([key, r]) => key !== "Math")
+              .filter(([key, _]) => key !== "Math")
              .map(([key, r]) => (
                <div key={key} className="flex space-x-2">
                  <div className="flex-none">

View File

@@ -55,6 +55,7 @@ const schema = yup.object({}).shape({
     .default(1000)
     .min(10)
     .max(10000),
+  percentile: yup.number().required().positive().default(0.9998).min(0).max(1),
   chartHeight: yup.number().required().positive().integer().default(350),
   leftSizePercent: yup
     .number()
@@ -155,6 +156,20 @@ const SamplingSettings: React.FC<{ register: UseFormRegister<FormFields> }> = ({
         </Text>
       </div>
     </div>
+    <div>
+      <InputItem
+        name="percentile"
+        type="number"
+        label="Symbolic Distribution Percentile"
+        register={register}
+      />
+      <div className="mt-2">
+        <Text>
+          When converting symbolic distributions to PointSet distributions, what
+          percentile to sample the points within.
+        </Text>
+      </div>
+    </div>
   </div>
 );
@@ -436,6 +451,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
   const { register, control } = useForm({
     resolver: yupResolver(schema),
     defaultValues: {
+      percentile: 0.9998,
       sampleCount: 1000,
       xyPointLength: 1000,
       chartHeight: 150,
@@ -468,6 +484,7 @@ export const SquigglePlayground: FC<PlaygroundProps> = ({
   const env: environment = useMemo(
     () => ({
+      percentile: Number(vars.percentile),
       sampleCount: Number(vars.sampleCount),
       xyPointLength: Number(vars.xyPointLength),
     }),
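
The new "Symbolic Distribution Percentile" setting is validated the same way as the other sampling settings. A self-contained sketch of just that rule, using yup directly (the schema shape is copied from the diff above; everything around it is omitted):

```typescript
import * as yup from "yup";

// Same rule as in the playground schema above: a number in (0, 1], defaulting to 0.9998.
const settings = yup.object().shape({
  percentile: yup.number().required().positive().default(0.9998).min(0).max(1),
});

settings.validateSync({ percentile: 0.99 }); // ok
settings.validateSync({});                   // ok, default applied: { percentile: 0.9998 }
// settings.validateSync({ percentile: 1.5 }); // throws a ValidationError (max is 1)
```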

View File

@@ -42,7 +42,7 @@ export let linearYScale: LinearScale = {
   name: "yscale",
   type: "linear",
   range: "height",
-  zero: false,
+  zero: true,
   domain: {
     fields: [
       {
@@ -84,7 +84,7 @@ export let expYScale: PowScale = {
   type: "pow",
   exponent: 0.1,
   range: "height",
-  zero: false,
+  zero: true,
   nice: false,
   domain: {
     fields: [

View File

@@ -1,10 +1,6 @@
 open Jest
 open Expect
+open TestHelpers
-let env: DistributionOperation.env = {
-  sampleCount: 100,
-  xyPointLength: 100,
-}
 let {
   normalDist5,

View File

@@ -30,6 +30,7 @@ let {toFloat, toDist, toString, toError, fmap} = module(DistributionOperation.Ou
 let fnImage = (theFn, inps) => Js.Array.map(theFn, inps)
 let env: DistributionOperation.env = {
+  percentile: 0.9998,
   sampleCount: MagicNumbers.Environment.defaultSampleCount,
   xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
 }

View File

@@ -105,14 +105,7 @@ export class Distribution {
   }

   pointSet(): result<shape, distributionError> {
-    let pointSet = toPointSet(
-      this.t,
-      {
-        xyPointLength: this.env.xyPointLength,
-        sampleCount: this.env.sampleCount,
-      },
-      undefined
-    );
+    let pointSet = toPointSet(this.t, this.env, undefined);
     if (pointSet.tag === "Ok") {
       let distribution = pointSet.value;
       if (distribution.tag === "Continuous") {

View File

@@ -39,20 +39,17 @@ export type { result, shape, environment, lambdaValue, squiggleExpression };
 export { parse } from "./parse";

-export let defaultSamplingInputs: environment = {
-  sampleCount: 10000,
-  xyPointLength: 10000,
-};
-
 export function run(
   squiggleString: string,
   bindings?: externalBindings,
-  environment?: environment,
+  environment?: Partial<environment>,
   imports?: jsImports
 ): result<squiggleExpression, errorValue> {
   let b = bindings ? bindings : defaultBindings;
   let i = imports ? imports : defaultImports;
-  let e = environment ? environment : defaultEnvironment;
+  let e = environment
+    ? _.merge(defaultEnvironment, environment)
+    : defaultEnvironment;
   let res: result<expressionValue, errorValue> = evaluateUsingOptions(
     { externalBindings: mergeImportsWithBindings(b, i), environment: e },
     squiggleString
@@ -64,12 +61,14 @@ export function run(
 export function runPartial(
   squiggleString: string,
   bindings?: externalBindings,
-  environment?: environment,
+  environment?: Partial<environment>,
   imports?: jsImports
 ): result<externalBindings, errorValue> {
   let b = bindings ? bindings : defaultBindings;
   let i = imports ? imports : defaultImports;
-  let e = environment ? environment : defaultEnvironment;
+  let e = environment
+    ? _.merge(defaultEnvironment, environment)
+    : defaultEnvironment;
   return evaluatePartialUsingExternalBindings(
     squiggleString,
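
With the `environment` parameter now typed as `Partial<environment>`, callers can override a single field and let the rest fall back to `defaultEnvironment`. A hedged usage sketch; the package name in the import is an assumption and the runtime values are not copied from the library:

```typescript
import { run } from "@quri/squiggle-lang"; // assumed package name

// Only the overridden field needs to be supplied; the others fall back to defaultEnvironment.
const res = run("normal(0, 1)", undefined, { percentile: 0.99 });

if (res.tag === "Ok") {
  console.log(res.value);   // the evaluated squiggleExpression
} else {
  console.error(res.value); // an errorValue
}
```

One thing to watch if `_` here is lodash: `_.merge` writes into its first argument, so each call with an override also mutates the shared `defaultEnvironment`; `_.merge({}, defaultEnvironment, environment)` would leave the defaults intact.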

View File

@@ -5,11 +5,13 @@ type error = DistributionTypes.error
 // TODO: It could be great to use a cache for some calculations (basically, do memoization). Also, better analytics/tracking could go a long way.

 type env = {
+  percentile: float,
   sampleCount: int,
   xyPointLength: int,
 }

 let defaultEnv = {
+  percentile: 0.9998,
   sampleCount: MagicNumbers.Environment.defaultSampleCount,
   xyPointLength: MagicNumbers.Environment.defaultXYPointLength,
 }
@@ -137,7 +139,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
     ->OutputLocal.fromResult
   | ToString(ToString) => dist->GenericDist.toString->String
   | ToString(ToSparkline(bucketCount)) =>
-    GenericDist.toSparkline(dist, ~sampleCount, ~bucketCount, ())
+    GenericDist.toSparkline(dist, ~percentile=env.percentile, ~sampleCount, ~bucketCount, ())
     ->E.R2.fmap(r => String(r))
     ->OutputLocal.fromResult
   | ToDist(Inspect) => {
@@ -170,7 +172,7 @@ let rec run = (~env, functionCallInfo: functionCallInfo): outputType => {
     ->OutputLocal.fromResult
   | ToDist(ToPointSet) =>
     dist
-    ->GenericDist.toPointSet(~xyPointLength, ~sampleCount, ())
+    ->GenericDist.toPointSet(~percentile=env.percentile, ~xyPointLength, ~sampleCount, ())
     ->E.R2.fmap(r => Dist(PointSet(r)))
     ->OutputLocal.fromResult
   | ToDist(Scale(#LogarithmWithThreshold(eps), f)) =>

View File

@@ -1,5 +1,6 @@
 @genType
 type env = {
+  percentile: float,
   sampleCount: int,
   xyPointLength: int,
 }

View File

@@ -70,8 +70,8 @@ module DistributionOperation = {
     | #IntegralSum
     | #Mode
     | #Stdev
-    | #Min
-    | #Max
+    | #Min(float)
+    | #Max(float)
     | #Variance
   ]
@@ -123,8 +123,8 @@ module DistributionOperation = {
     | ToFloat(#Cdf(r)) => `cdf(${E.Float.toFixed(r)})`
     | ToFloat(#Inv(r)) => `inv(${E.Float.toFixed(r)})`
     | ToFloat(#Mean) => `mean`
-    | ToFloat(#Min) => `min`
-    | ToFloat(#Max) => `max`
+    | ToFloat(#Min(_)) => `min`
+    | ToFloat(#Max(_)) => `max`
     | ToFloat(#Stdev) => `stdev`
     | ToFloat(#Variance) => `variance`
     | ToFloat(#Mode) => `mode`

View File

@@ -108,7 +108,7 @@ let toFloatOperation = (
 ) => {
   switch distToFloatOperation {
   | #IntegralSum => Ok(integralEndY(t))
-  | (#Pdf(_) | #Cdf(_) | #Inv(_) | #Mean | #Sample | #Min | #Max) as op => {
+  | (#Pdf(_) | #Cdf(_) | #Inv(_) | #Mean | #Sample | #Min(_) | #Max(_)) as op => {
      let trySymbolicSolution = switch (t: t) {
      | Symbolic(r) => SymbolicDist.T.operate(op, r)->E.R.toOption
      | _ => None
@@ -118,8 +118,8 @@ let toFloatOperation = (
      | (SampleSet(sampleSet), #Mean) => SampleSetDist.mean(sampleSet)->Some
      | (SampleSet(sampleSet), #Sample) => SampleSetDist.sample(sampleSet)->Some
      | (SampleSet(sampleSet), #Inv(r)) => SampleSetDist.percentile(sampleSet, r)->Some
-      | (SampleSet(sampleSet), #Min) => SampleSetDist.min(sampleSet)->Some
-      | (SampleSet(sampleSet), #Max) => SampleSetDist.max(sampleSet)->Some
+      | (SampleSet(sampleSet), #Min(_)) => SampleSetDist.min(sampleSet)->Some
+      | (SampleSet(sampleSet), #Max(_)) => SampleSetDist.max(sampleSet)->Some
      | _ => None
      }
@@ -150,6 +150,7 @@ let toFloatOperation = (
 // Also, change the outputXYPoints/pointSetDistLength details
 let toPointSet = (
   t,
+  ~percentile: float,
   ~xyPointLength,
   ~sampleCount,
   ~xSelection: DistributionTypes.DistributionOperation.pointsetXSelection=#ByWeight,
@@ -157,7 +158,7 @@ let toPointSet = (
 ): result<PointSetTypes.pointSetDist, error> => {
   switch (t: t) {
   | PointSet(pointSet) => Ok(pointSet)
-  | Symbolic(r) => Ok(SymbolicDist.T.toPointSetDist(~xSelection, xyPointLength, r))
+  | Symbolic(r) => Ok(SymbolicDist.T.toPointSetDist(~percentile, ~xSelection, xyPointLength, r))
   | SampleSet(r) =>
     SampleSetDist.toPointSetDist(
       ~samples=r,
@@ -177,9 +178,12 @@ let toPointSet = (
   xyPointLength to be a bit longer than the eventual toSparkline downsampling. I chose 3
   fairly arbitrarily.
 */
-let toSparkline = (t: t, ~sampleCount: int, ~bucketCount: int=20, ()): result<string, error> =>
+let toSparkline = (t: t, ~percentile: float, ~sampleCount: int, ~bucketCount: int=20, ()): result<
+  string,
+  error,
+> =>
   t
-  ->toPointSet(~xSelection=#Linear, ~xyPointLength=bucketCount * 3, ~sampleCount, ())
+  ->toPointSet(~percentile, ~xSelection=#Linear, ~xyPointLength=bucketCount * 3, ~sampleCount, ())
   ->E.R.bind(r =>
     r->PointSetDist.toSparkline(bucketCount)->E.R2.errMap(x => DistributionTypes.SparklineError(x))
   )

View File

@@ -37,12 +37,19 @@ module Score: {
 @genType
 let toPointSet: (
   t,
+  ~percentile: float,
   ~xyPointLength: int,
   ~sampleCount: int,
   ~xSelection: DistributionTypes.DistributionOperation.pointsetXSelection=?,
   unit,
 ) => result<PointSetTypes.pointSetDist, error>

-let toSparkline: (t, ~sampleCount: int, ~bucketCount: int=?, unit) => result<string, error>
+let toSparkline: (
+  t,
+  ~percentile: float,
+  ~sampleCount: int,
+  ~bucketCount: int=?,
+  unit,
+) => result<string, error>

 let truncate: (
   t,

View File

@@ -254,8 +254,8 @@ let operate = (distToFloatOp: Operation.distToFloatOperation, s): float =>
   | #Inv(f) => inv(f, s)
   | #Sample => sample(s)
   | #Mean => T.mean(s)
-  | #Min => T.minX(s)
-  | #Max => T.maxX(s)
+  | #Min(_) => T.minX(s)
+  | #Max(_) => T.maxX(s)
   }

 let toSparkline = (t: t, bucketCount): result<string, PointSetTypes.sparklineError> =>

View File

@@ -331,9 +331,6 @@ module From90thPercentile = {
 }

 module T = {
-  let minCdfValue = 0.0001
-  let maxCdfValue = 0.9999
-
   let pdf = (x, dist) =>
     switch dist {
     | #Normal(n) => Normal.pdf(x, n)
@@ -419,35 +416,39 @@ module T = {
     | #Bernoulli(n) => Bernoulli.toString(n)
     }

-  let min: symbolicDist => float = x =>
+  let min = (~percentile: float, x: symbolicDist): float => {
+    let minCdf = (1. -. percentile) /. 2.
     switch x {
     | #Triangular({low}) => low
-    | #Exponential(n) => Exponential.inv(minCdfValue, n)
-    | #Cauchy(n) => Cauchy.inv(minCdfValue, n)
-    | #Normal(n) => Normal.inv(minCdfValue, n)
-    | #Lognormal(n) => Lognormal.inv(minCdfValue, n)
-    | #Logistic(n) => Logistic.inv(minCdfValue, n)
-    | #Gamma(n) => Gamma.inv(minCdfValue, n)
+    | #Exponential(n) => Exponential.inv(minCdf, n)
+    | #Cauchy(n) => Cauchy.inv(minCdf, n)
+    | #Normal(n) => Normal.inv(minCdf, n)
+    | #Lognormal(n) => Lognormal.inv(minCdf, n)
+    | #Logistic(n) => Logistic.inv(minCdf, n)
+    | #Gamma(n) => Gamma.inv(minCdf, n)
     | #Uniform({low}) => low
     | #Bernoulli(n) => Bernoulli.min(n)
-    | #Beta(n) => Beta.inv(minCdfValue, n)
+    | #Beta(n) => Beta.inv(minCdf, n)
     | #Float(n) => n
     }
+  }

-  let max: symbolicDist => float = x =>
+  let max = (~percentile: float, x: symbolicDist): float => {
+    let maxCdf = 1. -. (1. -. percentile) /. 2.
     switch x {
     | #Triangular(n) => n.high
-    | #Exponential(n) => Exponential.inv(maxCdfValue, n)
-    | #Cauchy(n) => Cauchy.inv(maxCdfValue, n)
-    | #Normal(n) => Normal.inv(maxCdfValue, n)
-    | #Gamma(n) => Gamma.inv(maxCdfValue, n)
-    | #Lognormal(n) => Lognormal.inv(maxCdfValue, n)
-    | #Logistic(n) => Logistic.inv(maxCdfValue, n)
-    | #Beta(n) => Beta.inv(maxCdfValue, n)
+    | #Exponential(n) => Exponential.inv(maxCdf, n)
+    | #Cauchy(n) => Cauchy.inv(maxCdf, n)
+    | #Normal(n) => Normal.inv(maxCdf, n)
+    | #Gamma(n) => Gamma.inv(maxCdf, n)
+    | #Lognormal(n) => Lognormal.inv(maxCdf, n)
+    | #Logistic(n) => Logistic.inv(maxCdf, n)
+    | #Beta(n) => Beta.inv(maxCdf, n)
     | #Bernoulli(n) => Bernoulli.max(n)
     | #Uniform({high}) => high
     | #Float(n) => n
     }
+  }

   let mean: symbolicDist => result<float, string> = x =>
     switch x {
@@ -469,15 +470,20 @@ module T = {
     | #Cdf(f) => Ok(cdf(f, s))
     | #Pdf(f) => Ok(pdf(f, s))
     | #Inv(f) => Ok(inv(f, s))
-    | #Min => Ok(min(s))
-    | #Max => Ok(max(s))
+    | #Min(percentile) => Ok(min(~percentile, s))
+    | #Max(percentile) => Ok(max(~percentile, s))
     | #Sample => Ok(sample(s))
     | #Mean => mean(s)
     }
-  let interpolateXs = (~xSelection: [#Linear | #ByWeight]=#Linear, dist: symbolicDist, n) =>
+  let interpolateXs = (
+    ~percentile: float,
+    ~xSelection: [#Linear | #ByWeight]=#Linear,
+    dist: symbolicDist,
+    n,
+  ) => {
     switch (xSelection, dist) {
-    | (#Linear, _) => E.A.Floats.range(min(dist), max(dist), n)
+    | (#Linear, _) => E.A.Floats.range(min(~percentile, dist), max(~percentile, dist), n)
     | (#ByWeight, #Uniform(n)) =>
       // In `ByWeight mode, uniform distributions get special treatment because we need two x's
       // on either side for proper rendering (just left and right of the discontinuities).
@@ -485,9 +491,12 @@ module T = {
       let dx = MagicNumbers.Epsilon.ten *. distance
       [n.low -. dx, n.low, n.low +. dx, n.high -. dx, n.high, n.high +. dx]
     | (#ByWeight, _) =>
-      let ys = E.A.Floats.range(minCdfValue, maxCdfValue, n)
+      let minCdf = (1. -. percentile) /. 2.
+      let maxCdf = 1. -. minCdf
+      let ys = E.A.Floats.range(minCdf, maxCdf, n)
       ys |> E.A.fmap(y => inv(y, dist))
     }
+  }

 /* Calling e.g. "Normal.operate" returns an optional that wraps a result.
    If the optional is None, there is no valid analytic solution. If it Some, it
@@ -533,6 +542,7 @@ module T = {
   }

   let toPointSetDist = (
+    ~percentile: float,
     ~xSelection=#ByWeight,
     sampleCount,
     d: symbolicDist,
@@ -541,7 +551,7 @@ module T = {
     | #Float(v) => Float.toPointSetDist(v)
     | #Bernoulli(v) => Bernoulli.toPointSetDist(v)
     | _ =>
-      let xs = interpolateXs(~xSelection, d, sampleCount)
+      let xs = interpolateXs(~percentile, ~xSelection, d, sampleCount)
       let ys = xs |> E.A.fmap(x => pdf(x, d))
       Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs: xs, ys: ys}))
     }
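
The tail bounds are derived symmetrically from the configured percentile: the probability mass left outside the covered range is split evenly between the two tails. A small sketch of that arithmetic (mirroring the ReScript above), which also shows that the new default of 0.9998 reproduces the previously hard-coded `minCdfValue = 0.0001` / `maxCdfValue = 0.9999`:

```typescript
// Mirrors the ReScript above: the uncovered mass (1 - percentile) is split evenly between the tails.
function cdfBounds(percentile: number): { minCdf: number; maxCdf: number } {
  const minCdf = (1 - percentile) / 2;
  return { minCdf, maxCdf: 1 - minCdf };
}

cdfBounds(0.9998); // approx. { minCdf: 0.0001, maxCdf: 0.9999 }, the old hard-coded constants
cdfBounds(0.9);    // { minCdf: 0.05, maxCdf: 0.95 }, a much narrower plotting window
```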

View File

@@ -216,8 +216,8 @@ let dispatchToGenericOutput = (call: IEV.functionCall, env: DistributionOperatio
   | "mean" => #Mean
   | "stdev" => #Stdev
   | "variance" => #Variance
-  | "min" => #Min
-  | "max" => #Max
+  | "min" => #Min(env.percentile)
+  | "max" => #Max(env.percentile)
   | "mode" => #Mode
   | _ => #Mean
   }
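
Because `min`/`max` now carry the environment's percentile, their value on a symbolic distribution depends on that setting. A hedged end-to-end sketch using the JS `run` API (the import name is an assumption, as in the earlier sketch, and the numbers in the comments are approximate standard-normal quantiles, not output copied from the library):

```typescript
import { run } from "@quri/squiggle-lang"; // assumed package name

// For a symbolic dist, min(...) resolves to the inverse CDF at (1 - percentile) / 2.
const wide = run("min(normal(0, 1))", undefined, { percentile: 0.9998 }); // near inv(0.0001), roughly -3.72
const narrow = run("min(normal(0, 1))", undefined, { percentile: 0.9 });  // near inv(0.05), roughly -1.64
```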

View File

@@ -26,8 +26,8 @@ type distToFloatOperation = [
   | #Inv(float)
   | #Mean
   | #Sample
-  | #Min
-  | #Max
+  | #Min(float)
+  | #Max(float)
 ]

 module Convolution = {