Compare commits

...

16 Commits

Author | SHA1 | Message | Date
Vyacheslav Matyukhin | 9e2eace05e | Merge pull request #1231 from quantified-uncertainty/project-in-editors: Project in editors and remove warnings | 2022-10-14 18:06:49 +03:00
Vyacheslav Matyukhin | 56771820aa | Merge pull request #965 from quantified-uncertainty/experiment-10.0rc1: bump `rescript` and `@rescript/std` to `10.0.1` | 2022-10-13 02:25:23 +03:00
Vyacheslav Matyukhin | 33f0647be8 | Merge pull request #1260 from quantified-uncertainty/drop-bisect-ppx: remove bisect_ppx | 2022-10-13 02:24:53 +03:00
cab | 878c6f3d4b | Removed nixos.sh; also added a note in README.md for NixOS users | 2022-10-13 01:03:16 +04:00
Vyacheslav Matyukhin | 4cd045b9c8 | format rescript | 2022-10-12 20:11:28 +04:00
Vyacheslav Matyukhin | a617ec0436 | update coverage name in prettierignore | 2022-10-12 20:00:27 +04:00
Vyacheslav Matyukhin | 80cc20ac72 | fix tests | 2022-10-12 19:49:14 +04:00
Vyacheslav Matyukhin | 666524a36a | Merge branch 'drop-bisect-ppx' into experiment-10.0rc1 | 2022-10-12 19:36:23 +04:00
Vyacheslav Matyukhin | 2ed3633fe5 | coverage:local echo note | 2022-10-12 19:31:44 +04:00
Vyacheslav Matyukhin | deb88c60fb | Merge branch 'develop' into drop-bisect-ppx | 2022-10-12 19:12:25 +04:00
Vyacheslav Matyukhin | 838d13086a | remove nyc dependency, jest is enough for coverage | 2022-10-12 19:09:17 +04:00
cab | 11e80941bc | nix: remove bisect_ppx from build | 2022-10-12 18:32:08 +04:00
Vyacheslav Matyukhin | 8dac70082c | remove bisect_ppx | 2022-10-12 16:21:07 +04:00
Quinn Dougherty | 36c3a93d08 | 10.0.0 | 2022-08-27 10:37:01 +08:00
Quinn Dougherty | f67abe55a8 | Merge remote-tracking branch 'origin/develop' into experiment-10.0rc1 | 2022-08-27 10:33:48 +08:00
Quinn | 07af79adc8 | bump rescript and @rescript/std to 10.0.0-rc.1 | 2022-08-08 14:54:57 -04:00
53 changed files with 890 additions and 1113 deletions

View File

@ -65,6 +65,12 @@ turbo run build --filter=@quri/squiggle-components
You can also run specific npm scripts for the package you're working on. See `packages/*/README.md` for the details.
# NixOS users
This repository requires the use of bundled binaries from node_modules, which
are not linked statically. The easiest way to get them working is to enable
[nix-ld](https://github.com/Mic92/nix-ld).
# Contributing
See `CONTRIBUTING.md`.
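
For readers on NixOS, a minimal sketch of what the new README note suggests: enabling nix-ld system-wide so that the dynamically linked binaries shipped in node_modules (rescript, gentype, and friends) can find a link loader. This assumes the NixOS module that ships with nix-ld; the option name below comes from that module, not from this repository.

{ ... }:
{
  # Provide an ELF interpreter shim so dynamically linked binaries that were
  # not built for NixOS can run without patchelf'ing them by hand.
  programs.nix-ld.enable = true;
}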

View File

@ -30,16 +30,6 @@ rec {
patchelf --replace-needed libstdc++.so.6 $THE_SO linux/ninja.exe && echo "- replaced needed for linux/ninja.exe"
'';
};
bisect_ppx = {
buildInputs = common.which;
postInstall = ''
echo "PATCHELF'ING BISECT_PPX EXECUTABLE"
THE_LD=$(patchelf --print-interpreter $(which mkdir))
patchelf --set-interpreter $THE_LD bin/linux/ppx
patchelf --set-interpreter $THE_LD bin/linux/bisect-ppx-report
cp bin/linux/ppx ppx
'';
};
gentype = {
postInstall = ''
mv gentype.exe ELFLESS-gentype.exe

View File

@ -1,18 +0,0 @@
#!/usr/bin/env bash
# This script is only relevant if you're rolling nixos.
# Esy (a bisect_ppx dependency/build tool) is borked on nixos without using an FHS shell. https://github.com/esy/esy/issues/858
# We need to patchelf rescript executables. https://github.com/NixOS/nixpkgs/issues/107375
set -x
fhsShellName="squiggle-fhs-development"
fhsShellDotNix="{pkgs ? import <nixpkgs> {} }: (pkgs.buildFHSUserEnv { name = \"${fhsShellName}\"; targetPkgs = pkgs: [pkgs.yarn pkgs.glibc]; runScript = \"yarn\"; }).env"
nix-shell - <<<"$fhsShellDotNix"
theLd=$(patchelf --print-interpreter $(which mkdir))
patchelf --set-interpreter $theLd ./node_modules/gentype/gentype.exe
patchelf --set-interpreter $theLd ./node_modules/rescript/linux/*.exe
patchelf --set-interpreter $theLd ./node_modules/bisect_ppx/ppx
patchelf --set-interpreter $theLd ./node_modules/bisect_ppx/bisect-ppx-report
theSo=$(find /nix/store/*$fhsShellName*/lib64 -name libstdc++.so.6 | head -n 1)
patchelf --replace-needed libstdc++.so.6 $theSo ./node_modules/rescript/linux/ninja.exe

View File

@ -3,7 +3,7 @@ lib
*.bs.js
*.gen.tsx
.nyc_output/
_coverage/
coverage/
.cache/
Reducer_Peggy_GeneratedParser.js
ReducerProject_IncludeParser.js

View File

@ -32,25 +32,29 @@ describe("dotSubtract", () => {
*/
Skip.test("mean of normal minus exponential (property)", () => {
assert_(
property2(float_(), floatRange(1e-5, 1e5), (mean, rate) => {
// We limit ourselves to stdev=1 so that the integral is trivial
let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
~env,
mkNormal(mean, 1.0),
mkExponential(rate),
)
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
// according to the algebra of random variables,
let meanAnalytical =
mean -.
SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
"On trusted input this should never happen",
property2(
float_(),
floatRange(1e-5, 1e5),
(mean, rate) => {
// We limit ourselves to stdev=1 so that the integral is trivial
let dotDifference = DistributionOperation.Constructors.pointwiseSubtract(
~env,
mkNormal(mean, 1.0),
mkExponential(rate),
)
switch meanResult {
| Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
| Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
}
}),
let meanResult = E.R2.bind(DistributionOperation.Constructors.mean(~env), dotDifference)
// according to the algebra of random variables,
let meanAnalytical =
mean -.
SymbolicDist.Exponential.mean({rate: rate})->E.R2.toExn(
"On trusted input this should never happen",
)
switch meanResult {
| Ok(meanValue) => abs_float(meanValue -. meanAnalytical) /. abs_float(meanValue) < 1e-2 // 1% relative error
| Error(err) => err === DistributionTypes.OperationError(DivisionByZeroError)
}
},
),
)
pass
})
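
The `meanAnalytical` value in this test follows from linearity of expectation for the difference of the two distributions, with the exponential mean given by \(\mathbb{E}[\operatorname{Exp}(\lambda)] = 1/\lambda\); this is the "algebra of random variables" the comment refers to:

\[ \mathbb{E}\bigl[\mathcal{N}(\mu, 1) - \operatorname{Exp}(\lambda)\bigr] = \mathbb{E}[\mathcal{N}(\mu, 1)] - \mathbb{E}[\operatorname{Exp}(\lambda)] = \mu - \frac{1}{\lambda} \]

The 1% relative-error bound in the switch then absorbs the sampling noise of the computed distribution.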

View File

@ -40,51 +40,60 @@ let algebraicPower = algebraicPower(~env)
describe("(Algebraic) addition of distributions", () => {
describe("mean", () => {
test("normal(mean=5) + normal(mean=20)", () => {
normalDist5
->algebraicAdd(normalDist20)
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
->expect
->toBe(Some(2.5e1))
})
test(
"normal(mean=5) + normal(mean=20)",
() => {
normalDist5
->algebraicAdd(normalDist20)
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
->expect
->toBe(Some(2.5e1))
},
)
test("uniform(low=9, high=10) + beta(alpha=2, beta=5)", () => {
// let uniformMean = (9.0 +. 10.0) /. 2.0
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
let received =
uniformDist
->algebraicAdd(betaDist)
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean)
}
})
test("beta(alpha=2, beta=5) + uniform(low=9, high=10)", () => {
// let uniformMean = (9.0 +. 10.0) /. 2.0
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
let received =
betaDist
->algebraicAdd(uniformDist)
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean)
}
})
test(
"uniform(low=9, high=10) + beta(alpha=2, beta=5)",
() => {
// let uniformMean = (9.0 +. 10.0) /. 2.0
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
let received =
uniformDist
->algebraicAdd(betaDist)
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.786831807237022, ~digits=1) // (uniformMean +. betaMean)
}
},
)
test(
"beta(alpha=2, beta=5) + uniform(low=9, high=10)",
() => {
// let uniformMean = (9.0 +. 10.0) /. 2.0
// let betaMean = 1.0 /. (1.0 +. 5.0 /. 2.0)
let received =
betaDist
->algebraicAdd(uniformDist)
->E.R2.fmap(DistributionTypes.Constructors.UsingDists.mean)
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.784290207736126, ~digits=1) // (uniformMean +. betaMean)
}
},
)
})
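
The commented-out `uniformMean` and `betaMean` lines record the closed-form means of the two summands; by linearity of expectation their sum is the target the sampled result is compared against, which is why the tests use `toBeSoCloseTo` with `~digits=1` rather than an exact match:

\[ \mathbb{E}[U(9,10)] = \frac{9+10}{2} = 9.5, \qquad \mathbb{E}[\operatorname{Beta}(2,5)] = \frac{1}{1 + \beta/\alpha} = \frac{2}{7} \approx 0.286, \]
\[ \mathbb{E}[U(9,10) + \operatorname{Beta}(2,5)] = 9.5 + \tfrac{2}{7} \approx 9.786. \]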
describe("pdf", () => {
// TEST IS WRONG. SEE STDEV ADDITION EXPRESSION.
@ -122,247 +131,282 @@ describe("(Algebraic) addition of distributions", () => {
}
},
)
test("(normal(mean=10) + normal(mean=10)).pdf(1.9e1)", () => {
let received =
normalDist20
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist10
->algebraicAdd(normalDist10)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
test(
"(normal(mean=10) + normal(mean=10)).pdf(1.9e1)",
() => {
let received =
normalDist20
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist10
->algebraicAdd(normalDist10)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1.9e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=1)
}
}
}
})
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)", () => {
let received =
uniformDist
->algebraicAdd(betaDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=4.
// This value was calculated by a python script
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
}
})
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)", () => {
let received =
betaDist
->algebraicAdd(uniformDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic.
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
}
})
},
)
test(
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).pdf(10)",
() => {
let received =
uniformDist
->algebraicAdd(betaDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=4.
// This value was calculated by a python script
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
}
},
)
test(
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).pdf(10)",
() => {
let received =
betaDist
->algebraicAdd(uniformDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.pdf(d, 1e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic.
| Some(x) => x->expect->toBeSoCloseTo(0.979023, ~digits=0)
}
},
)
})
describe("cdf", () => {
testAll("(normal(mean=5) + normal(mean=5)).cdf (imprecise)", list{6e0, 8e0, 1e1, 1.2e1}, x => {
let received =
normalDist10
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist5
->algebraicAdd(normalDist5)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
testAll(
"(normal(mean=5) + normal(mean=5)).cdf (imprecise)",
list{6e0, 8e0, 1e1, 1.2e1},
x => {
let received =
normalDist10
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist5
->algebraicAdd(normalDist5)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, x))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=0)
}
}
}
})
test("(normal(mean=10) + normal(mean=10)).cdf(1.25e1)", () => {
let received =
normalDist20
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist10
->algebraicAdd(normalDist10)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
},
)
test(
"(normal(mean=10) + normal(mean=10)).cdf(1.25e1)",
() => {
let received =
normalDist20
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist10
->algebraicAdd(normalDist10)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1.25e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=2)
}
}
}
})
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)", () => {
let received =
uniformDist
->algebraicAdd(betaDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// The value was calculated externally using a python script
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
}
})
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)", () => {
let received =
betaDist
->algebraicAdd(uniformDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// The value was calculated externally using a python script
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
}
})
},
)
test(
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).cdf(10)",
() => {
let received =
uniformDist
->algebraicAdd(betaDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// The value was calculated externally using a python script
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
}
},
)
test(
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).cdf(10)",
() => {
let received =
betaDist
->algebraicAdd(uniformDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.cdf(d, 1e1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// The value was calculated externally using a python script
| Some(x) => x->expect->toBeSoCloseTo(0.71148, ~digits=1)
}
},
)
})
describe("inv", () => {
testAll("(normal(mean=5) + normal(mean=5)).inv (imprecise)", list{5e-2, 4.2e-3, 9e-3}, x => {
let received =
normalDist10
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist5
->algebraicAdd(normalDist5)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
testAll(
"(normal(mean=5) + normal(mean=5)).inv (imprecise)",
list{5e-2, 4.2e-3, 9e-3},
x => {
let received =
normalDist10
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist5
->algebraicAdd(normalDist5)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, x))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
}
}
}
})
test("(normal(mean=10) + normal(mean=10)).inv(1e-1)", () => {
let received =
normalDist20
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist10
->algebraicAdd(normalDist10)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
},
)
test(
"(normal(mean=10) + normal(mean=10)).inv(1e-1)",
() => {
let received =
normalDist20
->Ok
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
let calculated =
normalDist10
->algebraicAdd(normalDist10)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 1e-1))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toOption
->E.O.flatten
switch received {
| None =>
"this branch occurs when the dispatch to Jstat on trusted input fails."
->expect
->toBe("never")
| Some(x) =>
switch calculated {
| None => "algebraicAdd has"->expect->toBe("failed")
| Some(y) => x->expect->toBeSoCloseTo(y, ~digits=-1)
}
}
}
})
test("(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)", () => {
let received =
uniformDist
->algebraicAdd(betaDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0)
}
})
test("(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)", () => {
let received =
betaDist
->algebraicAdd(uniformDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0)
}
})
},
)
test(
"(uniform(low=9, high=10) + beta(alpha=2, beta=5)).inv(2e-2)",
() => {
let received =
uniformDist
->algebraicAdd(betaDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.179319623146968, ~digits=0)
}
},
)
test(
"(beta(alpha=2, beta=5) + uniform(low=9, high=10)).inv(2e-2)",
() => {
let received =
betaDist
->algebraicAdd(uniformDist)
->E.R2.fmap(d => DistributionTypes.Constructors.UsingDists.inv(d, 2e-2))
->E.R2.fmap(run)
->E.R2.fmap(toFloat)
->E.R.toExn("Expected float", _)
switch received {
| None => "algebraicAdd has"->expect->toBe("failed")
// This is nondeterministic, we could be in a situation where ci fails but you click rerun and it passes, which is bad.
// sometimes it works with ~digits=2.
| Some(x) => x->expect->toBeSoCloseTo(9.190872365862756, ~digits=0)
}
},
)
})
})

View File

@ -87,14 +87,22 @@ describe("Means are invariant", () => {
let testAddInvariant = (t1, t2) =>
E.R.liftM2(testAdditionMean, t1, t2)->E.R.toExn("Means were not invariant", _)
testAll("with two of the same distribution", distributions, dist => {
testAddInvariant(dist, dist)
})
testAll(
"with two of the same distribution",
distributions,
dist => {
testAddInvariant(dist, dist)
},
)
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
let (dist1, dist2) = dists
testAddInvariant(dist1, dist2)
})
testAll(
"with two different distributions",
pairsOfDifferentDistributions,
dists => {
let (dist1, dist2) = dists
testAddInvariant(dist1, dist2)
},
)
testAll(
"with two different distributions in swapped order",
@ -116,14 +124,22 @@ describe("Means are invariant", () => {
let testSubtractInvariant = (t1, t2) =>
E.R.liftM2(testSubtractionMean, t1, t2)->E.R.toExn("Means were not invariant", _)
testAll("with two of the same distribution", distributions, dist => {
testSubtractInvariant(dist, dist)
})
testAll(
"with two of the same distribution",
distributions,
dist => {
testSubtractInvariant(dist, dist)
},
)
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
let (dist1, dist2) = dists
testSubtractInvariant(dist1, dist2)
})
testAll(
"with two different distributions",
pairsOfDifferentDistributions,
dists => {
let (dist1, dist2) = dists
testSubtractInvariant(dist1, dist2)
},
)
testAll(
"with two different distributions in swapped order",
@ -145,14 +161,22 @@ describe("Means are invariant", () => {
let testMultiplicationInvariant = (t1, t2) =>
E.R.liftM2(testMultiplicationMean, t1, t2)->E.R.toExn("Means were not invariant", _)
testAll("with two of the same distribution", distributions, dist => {
testMultiplicationInvariant(dist, dist)
})
testAll(
"with two of the same distribution",
distributions,
dist => {
testMultiplicationInvariant(dist, dist)
},
)
testAll("with two different distributions", pairsOfDifferentDistributions, dists => {
let (dist1, dist2) = dists
testMultiplicationInvariant(dist1, dist2)
})
testAll(
"with two different distributions",
pairsOfDifferentDistributions,
dists => {
let (dist1, dist2) = dists
testMultiplicationInvariant(dist1, dist2)
},
)
testAll(
"with two different distributions in swapped order",

View File

@ -17,10 +17,9 @@ describe("klDivergence: continuous -> continuous -> float", () => {
let answer =
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
let prediction =
uniformMakeR(
lowPrediction,
highPrediction,
)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(
s => DistributionTypes.ArgumentError(s),
)
// integral along the support of the answer of answer.pdf(x) times log of prediction.pdf(x) divided by answer.pdf(x) dx
let analyticalKl = Js.Math.log((highPrediction -. lowPrediction) /. (highAnswer -. lowAnswer))
let kl = E.R.liftJoin2(klDivergence, prediction, answer)
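
The `analyticalKl` expression above is the closed form of the KL divergence for two uniforms when the answer's support [lowAnswer, highAnswer] lies inside the prediction's support [lowPrediction, highPrediction]: the answer's density is constant, so the integral collapses to a single logarithm.

\[ D_{\mathrm{KL}}(\text{answer} \parallel \text{prediction}) = \int_{l_a}^{h_a} \frac{1}{h_a - l_a} \log \frac{1/(h_a - l_a)}{1/(h_p - l_p)} \, dx = \log \frac{h_p - l_p}{h_a - l_a} \]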
@ -183,9 +182,9 @@ describe("combineAlongSupportOfSecondArgument0", () => {
let answer =
uniformMakeR(lowAnswer, highAnswer)->E.R2.errMap(s => DistributionTypes.ArgumentError(s))
let prediction =
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(s => DistributionTypes.ArgumentError(
s,
))
uniformMakeR(lowPrediction, highPrediction)->E.R2.errMap(
s => DistributionTypes.ArgumentError(s),
)
let answerWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), answer)
let predictionWrapped = E.R.fmap(a => run(FromDist(#ToDist(ToPointSet), a)), prediction)

View File

@ -3,7 +3,7 @@ open Expect
open TestHelpers
// TODO: use Normal.make (but preferably after the new validation dispatch is in)
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean, stdev}))
describe("(Symbolic) normalize", () => {
testAll("has no impact on normal distributions", list{-1e8, -1e-2, 0.0, 1e-4, 1e16}, mean => {
@ -47,10 +47,7 @@ describe("(Symbolic) mean", () => {
tup => {
let (low, medium, high) = tup
let meanValue = run(
FromDist(
#ToFloat(#Mean),
DistributionTypes.Symbolic(#Triangular({low: low, medium: medium, high: high})),
),
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Triangular({low, medium, high}))),
)
meanValue->unpackFloat->expect->toBeCloseTo((low +. medium +. high) /. 3.0) // https://www.statology.org/triangular-distribution/
},
@ -63,7 +60,7 @@ describe("(Symbolic) mean", () => {
tup => {
let (alpha, beta) = tup
let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))),
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Beta({alpha, beta}))),
)
meanValue->unpackFloat->expect->toBeCloseTo(1.0 /. (1.0 +. beta /. alpha)) // https://en.wikipedia.org/wiki/Beta_distribution#Mean
},
@ -84,8 +81,8 @@ describe("(Symbolic) mean", () => {
let (mean, stdev) = tup
let betaDistribution = SymbolicDist.Beta.fromMeanAndStdev(mean, stdev)
let meanValue =
betaDistribution->E.R2.fmap(d =>
run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic))
betaDistribution->E.R2.fmap(
d => run(FromDist(#ToFloat(#Mean), d->DistributionTypes.Symbolic)),
)
switch meanValue {
| Ok(value) => value->unpackFloat->expect->toBeCloseTo(mean)
@ -100,7 +97,7 @@ describe("(Symbolic) mean", () => {
tup => {
let (mu, sigma) = tup
let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))),
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Lognormal({mu, sigma}))),
)
meanValue->unpackFloat->expect->toBeCloseTo(Js.Math.exp(mu +. sigma ** 2.0 /. 2.0)) // https://brilliant.org/wiki/log-normal-distribution/
},
@ -112,7 +109,7 @@ describe("(Symbolic) mean", () => {
tup => {
let (low, high) = tup
let meanValue = run(
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low: low, high: high}))),
FromDist(#ToFloat(#Mean), DistributionTypes.Symbolic(#Uniform({low, high}))),
)
meanValue->unpackFloat->expect->toBeCloseTo((low +. high) /. 2.0) // https://en.wikipedia.org/wiki/Continuous_uniform_distribution#Moments
},
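
For reference, the closed-form means these symbolic tests assert are all standard results, matching the sources cited in the inline comments:

\[ \mathbb{E}[\operatorname{Triangular}(l, m, h)] = \frac{l + m + h}{3}, \qquad \mathbb{E}[\operatorname{Beta}(\alpha, \beta)] = \frac{1}{1 + \beta/\alpha} = \frac{\alpha}{\alpha + \beta}, \]
\[ \mathbb{E}[\operatorname{Lognormal}(\mu, \sigma)] = e^{\mu + \sigma^2/2}, \qquad \mathbb{E}[U(l, h)] = \frac{l + h}{2}. \]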

View File

@ -33,12 +33,18 @@ describe("Bindings", () => {
let value2 = Reducer_T.IEvNumber(5.)
let extendedBindings = bindings->Bindings.extend->Bindings.set("value", value2)
test("get on extended", () => {
expect(extendedBindings->Bindings.get("value")) == Some(value2)
})
test(
"get on extended",
() => {
expect(extendedBindings->Bindings.get("value")) == Some(value2)
},
)
test("get on original", () => {
expect(bindings->Bindings.get("value")) == Some(value)
})
test(
"get on original",
() => {
expect(bindings->Bindings.get("value")) == Some(value)
},
)
})
})

View File

@ -40,14 +40,23 @@ describe("Namespace", () => {
let nsMerged = Namespace.mergeMany([ns, ns1, ns2])
test("merge many 1", () => {
expect(nsMerged->Namespace.get("x1")) == Some(x1)
})
test("merge many 2", () => {
expect(nsMerged->Namespace.get("x4")) == Some(x4)
})
test("merge many 3", () => {
expect(nsMerged->Namespace.get("value")) == Some(value)
})
test(
"merge many 1",
() => {
expect(nsMerged->Namespace.get("x1")) == Some(x1)
},
)
test(
"merge many 2",
() => {
expect(nsMerged->Namespace.get("x4")) == Some(x4)
},
)
test(
"merge many 3",
() => {
expect(nsMerged->Namespace.get("value")) == Some(value)
},
)
})
})

View File

@ -75,29 +75,32 @@ describe("Peggy to Expression", () => {
testToExpression("false ? 1 : 0", "false ? (1) : (0)", ~v="0", ())
testToExpression("true ? 1 : false ? 2 : 0", "true ? (1) : (false ? (2) : (0))", ~v="1", ()) // nested ternary
testToExpression("false ? 1 : false ? 2 : 0", "false ? (1) : (false ? (2) : (0))", ~v="0", ()) // nested ternary
describe("ternary bindings", () => {
testToExpression(
// expression binding
"f(a) = a > 5 ? 1 : 0; f(6)",
"f = {|a| {(larger)(a, 5) ? (1) : (0)}}; (f)(6)",
~v="1",
(),
)
testToExpression(
// when true binding
"f(a) = a > 5 ? a : 0; f(6)",
"f = {|a| {(larger)(a, 5) ? (a) : (0)}}; (f)(6)",
~v="6",
(),
)
testToExpression(
// when false binding
"f(a) = a < 5 ? 1 : a; f(6)",
"f = {|a| {(smaller)(a, 5) ? (1) : (a)}}; (f)(6)",
~v="6",
(),
)
})
describe(
"ternary bindings",
() => {
testToExpression(
// expression binding
"f(a) = a > 5 ? 1 : 0; f(6)",
"f = {|a| {(larger)(a, 5) ? (1) : (0)}}; (f)(6)",
~v="1",
(),
)
testToExpression(
// when true binding
"f(a) = a > 5 ? a : 0; f(6)",
"f = {|a| {(larger)(a, 5) ? (a) : (0)}}; (f)(6)",
~v="6",
(),
)
testToExpression(
// when false binding
"f(a) = a < 5 ? 1 : a; f(6)",
"f = {|a| {(smaller)(a, 5) ? (1) : (a)}}; (f)(6)",
~v="6",
(),
)
},
)
})
describe("if then else", () => {

View File

@ -22,7 +22,7 @@ let expectEvalError = (code: string) =>
Expression.BackCompatible.evaluateString(code)
->Reducer_Value.toStringResult
->expect
->toMatch("Error\(")
->toMatch("Error\\(")
let testParseToBe = (expr, answer) => test(expr, () => expectParseToBe(expr, answer))
let testDescriptionParseToBe = (desc, expr, answer) =>

View File

@ -37,14 +37,16 @@ describe("eval", () => {
test("index", () => expectEvalToBe("r = {a: 1}; r.a", "Ok(1)"))
test("index", () => expectEvalToBe("r = {a: 1}; r.b", "Error(Record property not found: b)"))
testEvalError("{a: 1}.b") // invalid syntax
test("always the same property ending", () =>
expectEvalToBe(
`{
test(
"always the same property ending",
() =>
expectEvalToBe(
`{
a: 1,
b: 2,
}`,
"Ok({a: 1,b: 2})",
)
"Ok({a: 1,b: 2})",
),
)
})

View File

@ -11,32 +11,34 @@ describe("ReducerProject Tutorial", () => {
/*
Case "Running a single source".
*/
test("run", () => {
/* Let's start with running a single source and getting Result as well as the Bindings
test(
"run",
() => {
/* Let's start with running a single source and getting Result as well as the Bindings
First you need to create a project. A project is a collection of sources.
Project takes care of the dependencies between the sources, correct compilation and run order.
You can run any source in the project. It will be compiled and run if it hasn't happened already; otherwise already existing results will be presented.
The dependencies will be automatically compiled and run. So you don't need to worry about that in a multi source project.
In summary you issue a run command on the whole project or on a specific source to ensure that there is a result for that source.
*/
let project = Project.createProject()
/* Every source has a name. This is used for debugging, dependencies and error messages. */
project->Project.setSource("main", "1 + 2")
/* Let's run "main" source. */
project->Project.run("main")
/* Now you have a result for "main" source.
let project = Project.createProject()
/* Every source has a name. This is used for debugging, dependencies and error messages. */
project->Project.setSource("main", "1 + 2")
/* Let's run "main" source. */
project->Project.run("main")
/* Now you have a result for "main" source.
Running one by one is necessary for UI to navigate among the sources and to see the results by source.
And you're free to run any source you want.
You will look at the results of this source and you don't want to run the others if not required.
*/
/* However, you could also run the whole project.
/* However, you could also run the whole project.
If you have all the sources, you can always run the whole project.
Dependencies and recompiling on demand will be taken care of by the project.
*/
project->Project.runAll
project->Project.runAll
/* Either with run or runAll you executed the project.
/* Either with run or runAll you executed the project.
You can get the result of a specific source by calling getResult for that source.
You can get the bindings of a specific source by calling getBindings for that source.
If there is any runtime error, getResult will return the error.
@ -44,49 +46,59 @@ Case "Running a single source".
Note that getResult returns None if the source has not been run.
Getting None means you have forgotten to run the source.
*/
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
/* Let's display the result and bindings */
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
("Ok(3)", "{}")
/* You've got 3 with empty bindings. */
})
/* Let's display the result and bindings */
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
("Ok(3)", "{}")
/* You've got 3 with empty bindings. */
},
)
test("run summary", () => {
let project = Project.createProject()
project->Project.setSource("main", "1 + 2")
project->Project.runAll
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
/* Now you have external bindings and external result. */
(
result->Reducer_Value.toStringResult,
bindings->Reducer_T.IEvRecord->Reducer_Value.toString,
)->expect == ("Ok(3)", "{}")
})
test(
"run summary",
() => {
let project = Project.createProject()
project->Project.setSource("main", "1 + 2")
project->Project.runAll
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
/* Now you have external bindings and external result. */
(
result->Reducer_Value.toStringResult,
bindings->Reducer_T.IEvRecord->Reducer_Value.toString,
)->expect == ("Ok(3)", "{}")
},
)
test("run with an environment", () => {
/* Running the source code like above allows you to set a custom environment */
let project = Project.createProject()
test(
"run with an environment",
() => {
/* Running the source code like above allows you to set a custom environment */
let project = Project.createProject()
/* Optional. Set your custom environment anytime before running */
project->Project.setEnvironment(Reducer_Context.defaultEnvironment)
/* Optional. Set your custom environment anytime before running */
project->Project.setEnvironment(Reducer_Context.defaultEnvironment)
project->Project.setSource("main", "1 + 2")
project->Project.runAll
let result = project->Project.getResult("main")
let _bindings = project->Project.getBindings("main")
result->Reducer_Value.toStringResult->expect == "Ok(3)"
})
project->Project.setSource("main", "1 + 2")
project->Project.runAll
let result = project->Project.getResult("main")
let _bindings = project->Project.getBindings("main")
result->Reducer_Value.toStringResult->expect == "Ok(3)"
},
)
test("shortcut", () => {
/* If you are running single source without includes and you don't need a custom environment, you can use the shortcut. */
/* Examples above was to prepare you for the multi source tutorial. */
let (result, bindings) = Project.evaluate("1+2")
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
("Ok(3)", "{}")
})
test(
"shortcut",
() => {
/* If you are running single source without includes and you don't need a custom environment, you can use the shortcut. */
/* Examples above was to prepare you for the multi source tutorial. */
let (result, bindings) = Project.evaluate("1+2")
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
("Ok(3)", "{}")
},
)
})
})

View File

@ -10,95 +10,104 @@ describe("ReducerProject Tutorial", () => {
describe("Multi source", () => {
/*
Case "Running multiple sources" */
test("Chaining", () => {
let project = Project.createProject()
/* This time let's add 3 sources and chain them together */
project->Project.setSource("source1", "x=1")
test(
"Chaining",
() => {
let project = Project.createProject()
/* This time let's add 3 sources and chain them together */
project->Project.setSource("source1", "x=1")
project->Project.setSource("source2", "y=x+1")
/* To run, source2 depends on source1 */
project->Project.setContinues("source2", ["source1"])
project->Project.setSource("source2", "y=x+1")
/* To run, source2 depends on source1 */
project->Project.setContinues("source2", ["source1"])
project->Project.setSource("source3", "z=y+1")
/* To run, source3 depends on source2 */
project->Project.setContinues("source3", ["source2"])
project->Project.setSource("source3", "z=y+1")
/* To run, source3 depends on source2 */
project->Project.setContinues("source3", ["source2"])
/* Now we can run the project */
project->Project.runAll
/* Now we can run the project */
project->Project.runAll
/* And let's check the result and bindings of source3 */
let result3 = project->Project.getResult("source3")
let bindings3 = project->Project.getBindings("source3")
/* And let's check the result and bindings of source3 */
let result3 = project->Project.getResult("source3")
let bindings3 = project->Project.getBindings("source3")
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
("Ok(())", "{z: 3}")
})
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
("Ok(())", "{z: 3}")
},
)
test("Depending", () => {
/* Instead of chaining the sources, we could have a dependency tree */
/* The point here is that any source can depend on multiple sources */
let project = Project.createProject()
test(
"Depending",
() => {
/* Instead of chaining the sources, we could have a dependency tree */
/* The point here is that any source can depend on multiple sources */
let project = Project.createProject()
/* This time source1 and source2 are not depending on anything */
project->Project.setSource("source1", "x=1")
project->Project.setSource("source2", "y=2")
/* This time source1 and source2 are not depending on anything */
project->Project.setSource("source1", "x=1")
project->Project.setSource("source2", "y=2")
project->Project.setSource("source3", "z=x+y")
/* To run, source3 depends on source1 and source3 together */
project->Project.setContinues("source3", ["source1", "source2"])
project->Project.setSource("source3", "z=x+y")
/* To run, source3 depends on source1 and source3 together */
project->Project.setContinues("source3", ["source1", "source2"])
/* Now we can run the project */
project->Project.runAll
/* Now we can run the project */
project->Project.runAll
/* And let's check the result and bindings of source3 */
let result3 = project->Project.getResult("source3")
let bindings3 = project->Project.getBindings("source3")
/* And let's check the result and bindings of source3 */
let result3 = project->Project.getResult("source3")
let bindings3 = project->Project.getBindings("source3")
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
("Ok(())", "{z: 3}")
})
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
("Ok(())", "{z: 3}")
},
)
test("Intro to including", () => {
/* Though it would not be practical for a storybook,
test(
"Intro to including",
() => {
/* Though it would not be practical for a storybook,
let's write the same project above with includes.
You will see that parsing includes is setting the dependencies the same way as before. */
let project = Project.createProject()
let project = Project.createProject()
/* This time source1 and source2 are not depending on anything */
project->Project.setSource("source1", "x=1")
project->Project.setSource("source2", "y=2")
/* This time source1 and source2 are not depending on anything */
project->Project.setSource("source1", "x=1")
project->Project.setSource("source2", "y=2")
project->Project.setSource(
"source3",
`
project->Project.setSource(
"source3",
`
#include "source1"
#include "source2"
z=x+y`,
)
/* We need to parse the includes to set the dependencies */
project->Project.parseIncludes("source3")
)
/* We need to parse the includes to set the dependencies */
project->Project.parseIncludes("source3")
/* Now we can run the project */
project->Project.runAll
/* Now we can run the project */
project->Project.runAll
/* And let's check the result and bindings of source3
/* And let's check the result and bindings of source3
This time you are getting all the variables because we are including the other sources
Behind the scenes parseIncludes is setting the dependencies */
let result3 = project->Project.getResult("source3")
let bindings3 = project->Project.getBindings("source3")
let result3 = project->Project.getResult("source3")
let bindings3 = project->Project.getBindings("source3")
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
("Ok(())", "{z: 3}")
/*
(result3->Reducer_Value.toStringResult, bindings3->Reducer_Value.toStringRecord)->expect ==
("Ok(())", "{z: 3}")
/*
Doing it like this is too verbose for a storybook
But I hope you have seen the relation of setContinues and parseIncludes */
/*
/*
Dealing with includes needs more.
- There are parse errors
- There are cyclic includes
- And the depended source1 and source2 is not already there in the project
- If you knew the includes before hand there would not be point of the include directive.
More on those on the next section. */
})
},
)
})
})

View File

@ -24,93 +24,106 @@ Here we will finally proceed to a real life scenario. */
)
/* We need to parse includes after changing the source */
project->Project.parseIncludes("main")
test("getDependencies", () => {
/* Parse includes has set the dependencies */
project->Project.getDependencies("main")->expect == ["common"]
/* If there were no includes then there would be no dependencies */
/* However if there was a syntax error at includes then would be no dependencies also */
/* Therefore looking at dependencies is not the right way to load includes */
/* getDependencies does not distinguish between setContinues or parseIncludes */
})
test("getIncludes", () => {
/* Parse includes has set the includes */
switch project->Project.getIncludes("main") {
| Ok(includes) => includes->expect == ["common"]
| Error(err) => err->SqError.toString->fail
}
/* If the includes cannot be parsed then you get a syntax error.
test(
"getDependencies",
() => {
/* Parse includes has set the dependencies */
project->Project.getDependencies("main")->expect == ["common"]
/* If there were no includes then there would be no dependencies */
/* However if there was a syntax error at includes then would be no dependencies also */
/* Therefore looking at dependencies is not the right way to load includes */
/* getDependencies does not distinguish between setContinues or parseIncludes */
},
)
test(
"getIncludes",
() => {
/* Parse includes has set the includes */
switch project->Project.getIncludes("main") {
| Ok(includes) => includes->expect == ["common"]
| Error(err) => err->SqError.toString->fail
}
/* If the includes cannot be parsed then you get a syntax error.
Otherwise you get the includes.
If there is no syntax error then you can load that file and use setSource to add it to the project.
And so on recursively... */
})
test("getDependents", () => {
/* For any reason, you are able to query what other sources
},
)
test(
"getDependents",
() => {
/* For any reason, you are able to query what other sources
include or depend on the current source.
But you don't need to use this to execute the projects.
It is provided for completeness of information. */
project->Project.getDependents("main")->expect == []
/* Nothing is depending on or including main */
})
project->Project.getDependents("main")->expect == []
/* Nothing is depending on or including main */
},
)
describe("Real Like", () => {
/* Now let's look at recursive and possibly cyclic includes */
/* There is no function provided to load the include files.
describe(
"Real Like",
() => {
/* Now let's look at recursive and possibly cyclic includes */
/* There is no function provided to load the include files.
Because we have no idea if will it be an ordinary function or will it use promises.
Therefore one has to write a function to load sources recursively and setSources
while checking for dependencies */
/* Let's make a dummy loader */
let loadSource = (sourceName: string) =>
switch sourceName {
| "source1" => "x=1"
| "source2" => `
/* Let's make a dummy loader */
let loadSource = (sourceName: string) =>
switch sourceName {
| "source1" => "x=1"
| "source2" => `
#include "source1"
y=2`
| "source3" => `
| "source3" => `
#include "source2"
z=3`
| _ => `source ${sourceName} not found`->Js.Exn.raiseError
}
| _ => `source ${sourceName} not found`->Js.Exn.raiseError
}
/* let's recursively load the sources */
let rec loadIncludesRecursively = (project, sourceName, visited) => {
if visited->Js.Array2.includes(sourceName) {
/* Oh we have already visited this source. There is an include cycle */
"Cyclic include ${sourceName}"->Js.Exn.raiseError
} else {
let newVisited = Js.Array2.copy(visited)
let _ = newVisited->Js.Array2.push(sourceName)
/* Let's parse the includes and dive into them */
Project.parseIncludes(project, sourceName)
let rIncludes = project->Project.getIncludes(sourceName)
switch rIncludes {
/* Maybe there is an include syntax error */
| Error(err) => err->SqError.toString->Js.Exn.raiseError
/* let's recursively load the sources */
let rec loadIncludesRecursively = (project, sourceName, visited) => {
if visited->Js.Array2.includes(sourceName) {
/* Oh we have already visited this source. There is an include cycle */
"Cyclic include ${sourceName}"->Js.Exn.raiseError
} else {
let newVisited = Js.Array2.copy(visited)
let _ = newVisited->Js.Array2.push(sourceName)
/* Let's parse the includes and dive into them */
Project.parseIncludes(project, sourceName)
let rIncludes = project->Project.getIncludes(sourceName)
switch rIncludes {
/* Maybe there is an include syntax error */
| Error(err) => err->SqError.toString->Js.Exn.raiseError
| Ok(includes) =>
includes->Belt.Array.forEach(newIncludeName => {
/* We have got one of the new includes.
Let's load it and add it to the project */
let newSource = loadSource(newIncludeName)
project->Project.setSource(newIncludeName, newSource)
/* The new source is loaded and added to the project. */
/* Of course the new source might have includes too. */
/* Let's recursively load them */
project->loadIncludesRecursively(newIncludeName, newVisited)
})
| Ok(includes) =>
includes->Belt.Array.forEach(
newIncludeName => {
/* We have got one of the new includes.
Let's load it and add it to the project */
let newSource = loadSource(newIncludeName)
project->Project.setSource(newIncludeName, newSource)
/* The new source is loaded and added to the project. */
/* Of course the new source might have includes too. */
/* Let's recursively load them */
project->loadIncludesRecursively(newIncludeName, newVisited)
},
)
}
}
}
}
/* As we have a fake source loader and a recursive include handler,
We can not set up a real project */
/* As we have a fake source loader and a recursive include handler,
We can not set up a real project */
/* * Here starts our real life project! * */
/* * Here starts our real life project! * */
let project = Project.createProject()
let project = Project.createProject()
project->Project.setSource(
"main",
`
project->Project.setSource(
"main",
`
#include "source1"
#include "source2"
#include "source3"
@ -118,37 +131,43 @@ Here we will finally proceed to a real life scenario. */
b = doubleX
a
`,
)
/* Setting source requires parsing and loading the includes recursively */
project->loadIncludesRecursively("main", []) // Not visited yet
)
/* Setting source requires parsing and loading the includes recursively */
project->loadIncludesRecursively("main", []) // Not visited yet
/* Let's salt it more. Let's have another source in the project which also has includes */
/* doubleX includes source1 which is eventually included by main as well */
project->Project.setSource(
"doubleX",
`
/* Let's salt it more. Let's have another source in the project which also has includes */
/* doubleX includes source1 which is eventually included by main as well */
project->Project.setSource(
"doubleX",
`
#include "source1"
doubleX = x * 2
`,
)
project->loadIncludesRecursively("doubleX", [])
/* Remember, any time you set a source, you need to load includes recursively */
)
project->loadIncludesRecursively("doubleX", [])
/* Remember, any time you set a source, you need to load includes recursively */
/* As doubleX is not included by main, it is not loaded recursively.
So we link it to the project as a dependency */
project->Project.setContinues("main", ["doubleX"])
/* As doubleX is not included by main, it is not loaded recursively.
So we link it to the project as a dependency */
project->Project.setContinues("main", ["doubleX"])
/* Let's run the project */
project->Project.runAll
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
/* And see the result and bindings.. */
test("recursive includes", () => {
(result->Reducer_Value.toStringResult, bindings->Reducer_Value.toStringRecord)->expect ==
("Ok(6)", "{a: 6,b: 2}")
/* Everything as expected */
})
})
/* Let's run the project */
project->Project.runAll
let result = project->Project.getResult("main")
let bindings = project->Project.getBindings("main")
/* And see the result and bindings.. */
test(
"recursive includes",
() => {
(
result->Reducer_Value.toStringResult,
bindings->Reducer_Value.toStringRecord,
)->expect == ("Ok(6)", "{a: 6,b: 2}")
/* Everything as expected */
},
)
},
)
})
describe("Includes myFile as myVariable", () => {
@ -163,14 +182,20 @@ Here we will finally proceed to a real life scenario. */
`,
)
Project.parseIncludes(project, "main")
test("getDependencies", () => {
Project.getDependencies(project, "main")->expect == ["common"]
})
test("getIncludes", () => {
switch Project.getIncludes(project, "main") {
| Ok(includes) => includes->expect == ["common"]
| Error(err) => err->SqError.toString->fail
}
})
test(
"getDependencies",
() => {
Project.getDependencies(project, "main")->expect == ["common"]
},
)
test(
"getIncludes",
() => {
switch Project.getIncludes(project, "main") {
| Ok(includes) => includes->expect == ["common"]
| Error(err) => err->SqError.toString->fail
}
},
)
})
})

View File

@ -30,8 +30,9 @@ describe("ReducerProject Tutorial", () => {
})
test("userResults", () => {
let userResultsAsString = Belt.Array.map(userResults, aResult =>
aResult->Reducer_Value.toStringResult
let userResultsAsString = Belt.Array.map(
userResults,
aResult => aResult->Reducer_Value.toStringResult,
)
userResultsAsString->expect == ["Ok(2)", "Ok(4)", "Ok(6)", "Ok(8)", "Ok(10)"]
})

View File

@ -99,15 +99,19 @@ describe("FunctionRegistry Library", () => {
})
describe("Fn auto-testing", () => {
testAll("tests of validity", examples, r => {
expectEvalToBeOk(r)
})
testAll(
"tests of validity",
examples,
r => {
expectEvalToBeOk(r)
},
)
testAll(
"tests of type",
E.A.to_list(
FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(((fn, _)) =>
E.O.isSome(fn.output)
FunctionRegistry_Core.Registry.allExamplesWithFns(registry)->E.A2.filter(
((fn, _)) => E.O.isSome(fn.output),
),
),
((fn, example)) => {

View File

@ -45,12 +45,12 @@ let toExtDist: option<DistributionTypes.genericDist> => DistributionTypes.generi
let unpackFloat = x => x->toFloat->toExtFloat
let unpackDist = y => y->toDist->toExtDist
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean: mean, stdev: stdev}))
let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha: alpha, beta: beta}))
let mkNormal = (mean, stdev) => DistributionTypes.Symbolic(#Normal({mean, stdev}))
let mkBeta = (alpha, beta) => DistributionTypes.Symbolic(#Beta({alpha, beta}))
let mkExponential = rate => DistributionTypes.Symbolic(#Exponential({rate: rate}))
let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low: low, high: high}))
let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local: local, scale: scale}))
let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu: mu, sigma: sigma}))
let mkUniform = (low, high) => DistributionTypes.Symbolic(#Uniform({low, high}))
let mkCauchy = (local, scale) => DistributionTypes.Symbolic(#Cauchy({local, scale}))
let mkLognormal = (mu, sigma) => DistributionTypes.Symbolic(#Lognormal({mu, sigma}))
let mkDelta = x => DistributionTypes.Symbolic(#Float(x))
let normalMake = SymbolicDist.Normal.make

View File

@ -25,7 +25,6 @@
],
"suffix": ".bs.js",
"namespace": true,
"bs-dependencies": ["bisect_ppx"],
"bs-dev-dependencies": [
"@glennsl/rescript-jest",
"rescript-fast-check",
@ -45,8 +44,5 @@
"refmt": 3,
"warnings": {
"number": "+A-42-48-9-30-4"
},
"ppx-flags": [
["../../node_modules/bisect_ppx/ppx", "--exclude-files", ".*_test\\.res$$"]
]
}
}

View File

@ -2,9 +2,6 @@
module.exports = {
preset: "ts-jest",
testEnvironment: "node",
setupFilesAfterEnv: [
"<rootdir>/../../node_modules/bisect_ppx/src/runtime/js/jest.bs.js",
],
testPathIgnorePatterns: [
".*Fixtures.bs.js",
"/node_modules/",

View File

@ -22,12 +22,8 @@
"test:rescript": "jest --modulePathIgnorePatterns=__tests__/TS/*",
"test:watch": "jest --watchAll",
"test:fnRegistry": "jest __tests__/SquiggleLibrary/SquiggleLibrary_FunctionRegistryLibrary_test.bs.js",
"coverage:rescript:local": "rm -f *.coverage && yarn clean && BISECT_ENABLE=yes yarn build && yarn test:rescript && bisect-ppx-report html",
"coverage:ts:local": "yarn clean && yarn build && nyc --reporter=lcov yarn test:ts",
"coverage:rescript": "yarn clean && BISECT_ENABLE=yes yarn build:rescript && yarn test:rescript && bisect-ppx-report send-to Codecov",
"coverage:ts": "yarn coverage:ts:local && codecov",
"coverage": "yarn coverage:ts && yarn coverage:rescript",
"coverage:local": "yarn coverage:ts:local && yarn coverage:rescript:local",
"coverage:local": "jest --coverage && echo && echo 'Open ./coverage/lcov-report/index.html to see the detailed report.'",
"coverage": "jest --coverage && codecov",
"lint:rescript": "./lint.sh",
"lint:prettier": "prettier --check .",
"lint": "yarn lint:rescript && yarn lint:prettier",
@ -43,7 +39,7 @@
],
"author": "Quantified Uncertainty Research Institute",
"dependencies": {
"@rescript/std": "^9.1.4",
"@rescript/std": "^10.0.0",
"@stdlib/stats": "^0.0.13",
"jstat": "^1.9.5",
"lodash": "^4.17.21",
@ -52,21 +48,17 @@
},
"devDependencies": {
"@glennsl/rescript-jest": "^0.9.2",
"@istanbuljs/nyc-config-typescript": "^1.0.2",
"@types/jest": "^27.5.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"bisect_ppx": "^2.7.1",
"chalk": "^5.1.0",
"codecov": "^3.8.3",
"fast-check": "^3.1.4",
"gentype": "^4.5.0",
"jest": "^27.5.1",
"moduleserve": "^0.9.1",
"nyc": "^15.1.0",
"peggy": "^2.0.1",
"prettier": "^2.7.1",
"reanalyze": "^2.23.0",
"rescript": "^9.1.4",
"rescript": "^10.0.0",
"rescript-fast-check": "^1.1.1",
"rescript-js-map": "^1.1.0",
"ts-jest": "^29.0.3",

View File

@ -141,6 +141,7 @@ let rec run = (~env: env, functionCallInfo: functionCallInfo): outputType => {
Js.log2("Console log requested: ", dist)
Dist(dist)
}
| #ToDist(Normalize) => dist->GenericDist.normalize->Dist
| #ToScore(LogScore(answer, prior)) =>
GenericDist.Score.logScore(~estimate=dist, ~answer, ~prior, ~env)

View File

@ -99,6 +99,7 @@ let toFloatOperation = (
}
}
}
| (#Stdev | #Variance | #Mode) as op =>
switch t {
| SampleSet(s) =>
@ -129,7 +130,7 @@ let toPointSet = (
SampleSetDist.toPointSetDist(
~samples=r,
~samplingInputs={
sampleCount: sampleCount,
sampleCount,
outputXYPoints: xyPointLength,
pointSetDistLength: xyPointLength,
kernelWidth: None,
@ -427,6 +428,7 @@ module AlgebraicCombination = {
~toSampleSetFn,
)
}
| (None, AsMonteCarlo) =>
StrategyCallOnValidatedInputs.monteCarlo(toSampleSetFn, arithmeticOperation, t1, t2)
| (None, AsSymbolic) =>
@ -443,6 +445,7 @@ module AlgebraicCombination = {
)}`
Error(RequestedStrategyInvalidError(errString))
}
| Some(convOp) => StrategyCallOnValidatedInputs.convolution(toPointSetFn, convOp, t1, t2)
}
}

View File

@ -69,7 +69,7 @@ let toDiscretePointMassesFromTriangulars = (
()
}
{n: n - 2, masses: masses, means: means, variances: variances}
{n: n - 2, masses, means, variances}
} else {
for i in 1 to n - 2 {
// area of triangle = width * height / 2
@ -91,7 +91,7 @@ let toDiscretePointMassesFromTriangulars = (
) |> ignore
()
}
{n: n - 2, masses: masses, means: means, variances: variances}
{n: n - 2, masses, means, variances}
}
}
@ -184,7 +184,7 @@ let toDiscretePointMassesFromDiscrete = (s: PointSetTypes.xyShape): pointMassesW
let means: array<float> = Belt.Array.makeBy(n, i => xs[i])
let variances: array<float> = Belt.Array.makeBy(n, _ => 0.0)
{n: n, masses: masses, means: means, variances: variances}
{n, masses, means, variances}
}
type argumentPosition = First | Second

View File

@ -45,16 +45,16 @@ module Analysis = {
let getShape = (t: t) => t.xyShape
let interpolation = (t: t) => t.interpolation
let make = (~interpolation=#Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
xyShape: xyShape,
interpolation: interpolation,
integralSumCache: integralSumCache,
integralCache: integralCache,
xyShape,
interpolation,
integralSumCache,
integralCache,
}
let shapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): t => {
xyShape: fn(xyShape),
interpolation: interpolation,
integralSumCache: integralSumCache,
integralCache: integralCache,
interpolation,
integralSumCache,
integralCache,
}
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
let oShapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): option<
@ -135,10 +135,10 @@ let shapeFn = (fn, t: t) => t |> getShape |> fn
let updateIntegralSumCache = (integralSumCache, t: t): t => {
...t,
integralSumCache: integralSumCache,
integralSumCache,
}
let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache: integralCache}
let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache}
let sum = (
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,

View File

@ -4,14 +4,14 @@ open Distributions
type t = PointSetTypes.discreteShape
let make = (~integralSumCache=None, ~integralCache=None, xyShape): t => {
xyShape: xyShape,
integralSumCache: integralSumCache,
integralCache: integralCache,
xyShape,
integralSumCache,
integralCache,
}
let shapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): t => {
xyShape: fn(xyShape),
integralSumCache: integralSumCache,
integralCache: integralCache,
integralSumCache,
integralCache,
}
let getShape = (t: t) => t.xyShape
let oShapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): option<t> =>
@ -63,12 +63,12 @@ let reduce = (
let updateIntegralSumCache = (integralSumCache, t: t): t => {
...t,
integralSumCache: integralSumCache,
integralSumCache,
}
let updateIntegralCache = (integralCache, t: t): t => {
...t,
integralCache: integralCache,
integralCache,
}
/* This multiples all of the data points together and creates a new discrete distribution from the results.

View File

@ -4,10 +4,10 @@ open Distributions
type t = PointSetTypes.mixedShape
let make = (~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete): t => {
continuous: continuous,
discrete: discrete,
integralSumCache: integralSumCache,
integralCache: integralCache,
continuous,
discrete,
integralSumCache,
integralCache,
}
let totalLength = (t: t): int => {
@ -35,7 +35,7 @@ let toDiscrete = ({discrete}: t) => Some(discrete)
let updateIntegralCache = (integralCache, t: t): t => {
...t,
integralCache: integralCache,
integralCache,
}
let combinePointwise = (

View File

@ -79,8 +79,8 @@ module MixedPoint = {
type t = mixedPoint
let toContinuousValue = (t: t) => t.continuous
let toDiscreteValue = (t: t) => t.discrete
let makeContinuous = (continuous: float): t => {continuous: continuous, discrete: 0.0}
let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete: discrete}
let makeContinuous = (continuous: float): t => {continuous, discrete: 0.0}
let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete}
let fmap = (fn: float => float, t: t) => {
continuous: fn(t.continuous),

View File

@ -7,7 +7,7 @@ module Normal = {
type t = normal
let make = (mean: float, stdev: float): result<symbolicDist, string> =>
stdev > 0.0
? Ok(#Normal({mean: mean, stdev: stdev}))
? Ok(#Normal({mean, stdev}))
: Error("Standard deviation of normal distribution must be larger than 0")
let pdf = (x, t: t) => Jstat.Normal.pdf(x, t.mean, t.stdev)
let cdf = (x, t: t) => Jstat.Normal.cdf(x, t.mean, t.stdev)
@ -15,7 +15,7 @@ module Normal = {
let from90PercentCI = (low, high) => {
let mean = E.A.Floats.mean([low, high])
let stdev = (high -. low) /. (2. *. normal95confidencePoint)
#Normal({mean: mean, stdev: stdev})
#Normal({mean, stdev})
}
let inv = (p, t: t) => Jstat.Normal.inv(p, t.mean, t.stdev)
let sample = (t: t) => Jstat.Normal.sample(t.mean, t.stdev)
@ -25,12 +25,12 @@ module Normal = {
let add = (n1: t, n2: t) => {
let mean = n1.mean +. n2.mean
let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
#Normal({mean: mean, stdev: stdev})
#Normal({mean, stdev})
}
let subtract = (n1: t, n2: t) => {
let mean = n1.mean -. n2.mean
let stdev = Js.Math.sqrt(n1.stdev ** 2. +. n2.stdev ** 2.)
#Normal({mean: mean, stdev: stdev})
#Normal({mean, stdev})
}
// TODO: is this useful here at all? would need the integral as well ...
@ -38,7 +38,7 @@ module Normal = {
let mean =
(n1.mean *. n2.stdev ** 2. +. n2.mean *. n1.stdev ** 2.) /. (n1.stdev ** 2. +. n2.stdev ** 2.)
let stdev = 1. /. (1. /. n1.stdev ** 2. +. 1. /. n2.stdev ** 2.)
#Normal({mean: mean, stdev: stdev})
#Normal({mean, stdev})
}
let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
@ -88,7 +88,7 @@ module Cauchy = {
type t = cauchy
let make = (local, scale): result<symbolicDist, string> =>
scale > 0.0
? Ok(#Cauchy({local: local, scale: scale}))
? Ok(#Cauchy({local, scale}))
: Error("Cauchy distribution scale parameter must larger than 0.")
let pdf = (x, t: t) => Jstat.Cauchy.pdf(x, t.local, t.scale)
let cdf = (x, t: t) => Jstat.Cauchy.cdf(x, t.local, t.scale)
@ -102,7 +102,7 @@ module Triangular = {
type t = triangular
let make = (low, medium, high): result<symbolicDist, string> =>
low < medium && medium < high
? Ok(#Triangular({low: low, medium: medium, high: high}))
? Ok(#Triangular({low, medium, high}))
: Error("Triangular values must be increasing order.")
let pdf = (x, t: t) => Jstat.Triangular.pdf(x, t.low, t.high, t.medium) // not obvious in jstat docs that high comes before medium?
let cdf = (x, t: t) => Jstat.Triangular.cdf(x, t.low, t.high, t.medium)
@ -116,7 +116,7 @@ module Beta = {
type t = beta
let make = (alpha, beta) =>
alpha > 0.0 && beta > 0.0
? Ok(#Beta({alpha: alpha, beta: beta}))
? Ok(#Beta({alpha, beta}))
: Error("Beta distribution parameters must be positive")
let pdf = (x, t: t) => Jstat.Beta.pdf(x, t.alpha, t.beta)
let cdf = (x, t: t) => Jstat.Beta.cdf(x, t.alpha, t.beta)
@ -150,7 +150,7 @@ module Lognormal = {
type t = lognormal
let make = (mu, sigma) =>
sigma > 0.0
? Ok(#Lognormal({mu: mu, sigma: sigma}))
? Ok(#Lognormal({mu, sigma}))
: Error("Lognormal standard deviation must be larger than 0")
let pdf = (x, t: t) => Jstat.Lognormal.pdf(x, t.mu, t.sigma)
let cdf = (x, t: t) => Jstat.Lognormal.cdf(x, t.mu, t.sigma)
@ -164,7 +164,7 @@ module Lognormal = {
let logHigh = Js.Math.log(high)
let mu = E.A.Floats.mean([logLow, logHigh])
let sigma = (logHigh -. logLow) /. (2.0 *. normal95confidencePoint)
#Lognormal({mu: mu, sigma: sigma})
#Lognormal({mu, sigma})
}
let fromMeanAndStdev = (mean, stdev) => {
// https://math.stackexchange.com/questions/2501783/parameters-of-a-lognormal-distribution
@ -174,7 +174,7 @@ module Lognormal = {
let meanSquared = mean ** 2.
let mu = 2. *. Js.Math.log(mean) -. 0.5 *. Js.Math.log(variance +. meanSquared)
let sigma = Js.Math.sqrt(Js.Math.log(variance /. meanSquared +. 1.))
Ok(#Lognormal({mu: mu, sigma: sigma}))
Ok(#Lognormal({mu, sigma}))
} else {
Error("Lognormal standard deviation must be larger than 0")
}
@ -184,14 +184,14 @@ module Lognormal = {
// https://wikiless.org/wiki/Log-normal_distribution?lang=en#Multiplication_and_division_of_independent,_log-normal_random_variables
let mu = l1.mu +. l2.mu
let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
#Lognormal({mu: mu, sigma: sigma})
#Lognormal({mu, sigma})
}
let divide = (l1, l2) => {
let mu = l1.mu -. l2.mu
// We believe the ratiands will have covariance zero.
// See here https://stats.stackexchange.com/questions/21735/what-are-the-mean-and-variance-of-the-ratio-of-two-lognormal-variables for details
let sigma = Js.Math.sqrt(l1.sigma ** 2. +. l2.sigma ** 2.)
#Lognormal({mu: mu, sigma: sigma})
#Lognormal({mu, sigma})
}
let operate = (operation: Operation.Algebraic.t, n1: t, n2: t) =>
switch operation {
@ -220,7 +220,7 @@ module Lognormal = {
module Uniform = {
type t = uniform
let make = (low, high) =>
high > low ? Ok(#Uniform({low: low, high: high})) : Error("High must be larger than low")
high > low ? Ok(#Uniform({low, high})) : Error("High must be larger than low")
let pdf = (x, t: t) => Jstat.Uniform.pdf(x, t.low, t.high)
let cdf = (x, t: t) => Jstat.Uniform.cdf(x, t.low, t.high)
@ -239,9 +239,7 @@ module Uniform = {
module Logistic = {
type t = logistic
let make = (location, scale) =>
scale > 0.0
? Ok(#Logistic({location: location, scale: scale}))
: Error("Scale must be positive")
scale > 0.0 ? Ok(#Logistic({location, scale})) : Error("Scale must be positive")
let pdf = (x, t: t) => Stdlib.Logistic.pdf(x, t.location, t.scale)
let cdf = (x, t: t) => Stdlib.Logistic.cdf(x, t.location, t.scale)
@ -285,7 +283,7 @@ module Gamma = {
let make = (shape: float, scale: float) => {
if shape > 0. {
if scale > 0. {
Ok(#Gamma({shape: shape, scale: scale}))
Ok(#Gamma({shape, scale}))
} else {
Error("scale must be larger than 0")
}
@ -543,6 +541,6 @@ module T = {
| _ =>
let xs = interpolateXs(~xSelection, d, sampleCount)
let ys = xs |> E.A.fmap(x => pdf(x, d))
Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs: xs, ys: ys}))
Continuous(Continuous.make(~integralSumCache=Some(1.0), {xs, ys}))
}
}
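The parameter conversions touched in this file encode standard identities; a brief sketch in LaTeX notation (notation mine, matching the code above):

For Normal.from90PercentCI, a symmetric 90% interval places the 5th and 95th percentiles at \mu \pm z_{0.95}\sigma, where z_{0.95} \approx 1.645 is presumably the normal95confidencePoint constant, so

  \mu = \frac{low + high}{2}, \qquad \sigma = \frac{high - low}{2 z_{0.95}}

For Normal.add and Normal.subtract, independent normals add or subtract their means and combine standard deviations in quadrature:

  \mu = \mu_1 \pm \mu_2, \qquad \sigma = \sqrt{\sigma_1^2 + \sigma_2^2}

For Lognormal.fromMeanAndStdev, matching a lognormal's mean m and variance v gives

  \sigma_{\ln}^2 = \ln\left(1 + \frac{v}{m^2}\right), \qquad \mu_{\ln} = \ln m - \frac{1}{2}\sigma_{\ln}^2 = 2\ln m - \frac{1}{2}\ln\left(v + m^2\right)

which is exactly what the rewritten mu and sigma lines compute.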

View File

@ -23,7 +23,7 @@ let makeFn = (
name: string,
inputs: array<frType>,
fn: array<Reducer_T.value> => result<Reducer_T.value, errorMessage>,
) => makeFnMany(name, [{inputs: inputs, fn: fn}])
) => makeFnMany(name, [{inputs, fn}])
let library = [
Make.ff2f(~name="add", ~fn=(x, y) => x +. y, ()), // infix + (see Reducer/Reducer_Peggy/helpers.ts)
@ -62,6 +62,7 @@ let library = [
let answer = Js.String2.concat(a, b)
answer->Reducer_T.IEvString->Ok
}
| _ => Error(impossibleError)
}
}),
@ -72,6 +73,7 @@ let library = [
let _ = Js.Array2.pushMany(a, b)
a->Reducer_T.IEvArray->Ok
}
| _ => Error(impossibleError)
}
}),
@ -81,6 +83,7 @@ let library = [
Js.log(value->Reducer_Value.toString)
value->Ok
}
| _ => Error(impossibleError)
}
}),
@ -90,6 +93,7 @@ let library = [
Js.log(`${label}: ${value->Reducer_Value.toString}`)
value->Ok
}
| _ => Error(impossibleError)
}
}),

View File

@ -135,11 +135,13 @@ module Integration = {
let wrappedResult = result->Reducer_T.IEvNumber->Ok
wrappedResult
}
| (Error(b), _) => Error(b)
| (_, Error(b)) => Error(b)
}
resultWithOuterPoints
}
| Error(b) =>
("Integration error 2 in Danger.integrate. It's possible that your function doesn't return a number, try definining auxiliaryFunction(x) = mean(yourFunction(x)) and integrate auxiliaryFunction instead." ++
"Original error: " ++
@ -362,6 +364,7 @@ module DiminishingReturns = {
result[indexOfBiggestDMR] = value
Ok(result)
}
| Error(b) => Error(b)
}
@ -371,10 +374,12 @@ module DiminishingReturns = {
}
Ok(newAcc)
}
| Error(b) => Error(b)
}
newAccWrapped
}
| Error(b) => Error(b)
}
})
@ -427,10 +432,12 @@ module DiminishingReturns = {
)
result
}
| Error(b) => Error(b)
}
result
}
| _ =>
"Error in Danger.diminishingMarginalReturnsForTwoFunctions"
->SqError.Message.REOther

View File

@ -20,6 +20,7 @@ module Declaration = {
->E.A.R.firstErrorOrOpen
->E.R2.fmap(args => Reducer_T.IEvDeclaration(Declaration.make(lambda, args)))
}
| Error(r) => Error(r)
| Ok(_) => Error(impossibleErrorString)
}

View File

@ -140,6 +140,7 @@ module Old = {
| Error(err) => error(err)
}
}
| Some(IEvNumber(_))
| Some(IEvDistribution(_)) =>
switch parseDistributionArray(args) {
@ -192,6 +193,7 @@ module Old = {
}
Helpers.toFloatFn(fn, dist, ~env)
}
| ("integralSum", [IEvDistribution(dist)]) => Helpers.toFloatFn(#IntegralSum, dist, ~env)
| ("toString", [IEvDistribution(dist)]) => Helpers.toStringFn(ToString, dist, ~env)
| ("sparkline", [IEvDistribution(dist)]) =>

View File

@ -19,6 +19,7 @@ let inputsToDist = (inputs: array<Reducer_T.value>, xyShapeToPointSetDist) => {
| _ => impossibleError->SqError.Message.throw
}
}
| _ => impossibleError->SqError.Message.throw
}
)

View File

@ -61,6 +61,7 @@ module FRType = {
let input = ((name, frType): frTypeRecordParam) => `${name}: ${toString(frType)}`
`{${r->E.A2.fmap(input)->E.A2.joinWith(", ")}}`
}
| FRTypeArray(r) => `list(${toString(r)})`
| FRTypeLambda => `lambda`
| FRTypeString => `string`
@ -132,9 +133,9 @@ module FnDefinition = {
}
let make = (~name, ~inputs, ~run, ()): t => {
name: name,
inputs: inputs,
run: run,
name,
inputs,
run,
}
}
@ -160,14 +161,14 @@ module Function = {
~isExperimental=false,
(),
): t => {
name: name,
nameSpace: nameSpace,
definitions: definitions,
output: output,
name,
nameSpace,
definitions,
output,
examples: examples->E.O2.default([]),
isExperimental: isExperimental,
requiresNamespace: requiresNamespace,
description: description,
isExperimental,
requiresNamespace,
description,
}
let toJson = (t: t): functionJson => {
@ -203,15 +204,19 @@ module Registry = {
fn.requiresNamespace ? [] : [def.name],
]->E.A.concatMany
names->Belt.Array.reduce(acc, (acc, name) => {
switch acc->Belt.Map.String.get(name) {
| Some(fns) => {
let _ = fns->Js.Array2.push(def) // mutates the array, no need to update acc
acc
names->Belt.Array.reduce(
acc,
(acc, name) => {
switch acc->Belt.Map.String.get(name) {
| Some(fns) => {
let _ = fns->Js.Array2.push(def) // mutates the array, no need to update acc
acc
}
| None => acc->Belt.Map.String.set(name, [def])
}
| None => acc->Belt.Map.String.set(name, [def])
}
})
},
)
})
)
}
@ -245,6 +250,7 @@ module Registry = {
| None => REOther(showNameMatchDefinitions())->Error
}
}
| None => RESymbolNotFound(fnName)->Error
}
}
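The reindented Registry reduce above is a standard accumulate-into-map pattern: fold over every definition, push onto the array already stored under a name when it exists, and insert a fresh singleton array otherwise. A minimal standalone sketch of the same pattern, with hypothetical names and int payloads in place of function definitions:

let groupByName = (pairs: array<(string, int)>): Belt.Map.String.t<array<int>> =>
  pairs->Belt.Array.reduce(Belt.Map.String.empty, (acc, (name, value)) =>
    switch acc->Belt.Map.String.get(name) {
    | Some(values) => {
        // Js.Array2.push mutates the array already held by the map,
        // so the accumulator itself needs no update (same trick as above).
        let _ = values->Js.Array2.push(value)
        acc
      }
    | None => acc->Belt.Map.String.set(name, [value])
    }
  )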

View File

@ -34,6 +34,7 @@ module Prepare = {
let n2 = map->Belt.Map.String.getExn(arg2)
Ok([n1, n2])
}
| _ => Error(impossibleErrorString)
}
@ -45,6 +46,7 @@ module Prepare = {
let n3 = map->Belt.Map.String.getExn(arg3)
Ok([n1, n2, n3])
}
| _ => Error(impossibleErrorString)
}
}

View File

@ -44,4 +44,4 @@ let removeResult = ({namespace} as bindings: t): t => {
let locals = ({namespace}: t): Reducer_T.namespace => namespace
let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace: namespace, parent: None}
let fromNamespace = (namespace: Reducer_Namespace.t): t => {namespace, parent: None}

View File

@ -6,7 +6,7 @@ let createContext = (stdLib: Reducer_Namespace.t, environment: Reducer_T.environ
{
frameStack: list{},
bindings: stdLib->Reducer_Bindings.fromNamespace->Reducer_Bindings.extend,
environment: environment,
environment,
inFunction: None,
}
}

View File

@ -123,6 +123,7 @@ let rec evaluate: T.reducerFn = (expression, context): (T.value, T.context) => {
)
(result, context)
}
| _ => RENotAFunction(lambda->Reducer_Value.toString)->throwFrom(expression, context)
}
}

View File

@ -23,8 +23,8 @@ let make = (): t => list{}
let extend = (t: t, name: string, location: option<Reducer_Peggy_Parse.location>) =>
t->Belt.List.add({
name: name,
location: location,
name,
location,
})
// this is useful for SyntaxErrors

View File

@ -43,10 +43,10 @@ let makeLambda = (
FnLambda({
// context: bindings,
name: name,
name,
body: lambda,
parameters: parameters,
location: location,
parameters,
location,
})
}
@ -54,8 +54,8 @@ let makeLambda = (
let makeFFILambda = (name: string, body: Reducer_T.lambdaBody): t => FnBuiltin({
// Note: current bindings could be accidentally exposed here through context (compare with native lambda implementation above, where we override them with local bindings).
// But FunctionRegistry API is too limited for that to matter. Please take care not to violate that in the future by accident.
body: body,
name: name,
body,
name,
})
// this function doesn't scale to FunctionRegistry's polymorphic functions

View File

@ -113,7 +113,7 @@ let nodeToAST = (node: node) => {
| _ => raise(UnsupportedPeggyNodeType(node["type"]))
}
{location: node["location"], content: content}
{location: node["location"], content}
}
let nodeIdentifierToAST = (node: nodeIdentifier) => {

View File

@ -68,7 +68,7 @@ let rec fromNode = (node: Parse.node): expression => {
}
{
ast: ast,
content: content,
ast,
content,
}
}

View File

@ -216,6 +216,7 @@ let tryRunWithResult = (
project->setResult(sourceId, Error(error))
Error(error)
}
| Ok(_prevResult) => {
project->doLinkAndRun(sourceId)
project->getResultOption(sourceId)->Belt.Option.getWithDefault(rPrevResult)

View File

@ -6,7 +6,7 @@ type t = T.t
let emptyItem = (sourceId: string): projectItem => {
source: "",
sourceId: sourceId,
sourceId,
rawParse: None,
expression: None,
continuation: Reducer_Namespace.make(),
@ -76,7 +76,7 @@ let resetIncludes = (r: t): t => {
}
let setSource = (r: t, source: T.sourceArgumentType): t =>
{...r, source: source}->resetIncludes->touchSource
{...r, source}->resetIncludes->touchSource
let setRawParse = (r: t, rawParse: T.rawParseArgumentType): t =>
{...r, rawParse: Some(rawParse)}->touchRawParse
@ -86,7 +86,7 @@ let setExpression = (r: t, expression: T.expressionArgumentType): t =>
let setContinuation = (r: t, continuation: T.continuationArgumentType): t => {
...r,
continuation: continuation,
continuation,
}
let setResult = (r: t, result: T.resultArgumentType): t => {
@ -110,24 +110,23 @@ let getPastChain = (this: t): array<string> => {
Js.Array2.concat(getDirectIncludes(this), getContinues(this))
}
let setContinues = (this: t, continues: array<string>): t =>
{...this, continues: continues}->touchSource
let setContinues = (this: t, continues: array<string>): t => {...this, continues}->touchSource
let removeContinues = (this: t): t => {...this, continues: []}->touchSource
let setIncludes = (this: t, includes: T.includesType): t => {
...this,
includes: includes,
includes,
}
let setImportAsVariables = (this: t, includeAsVariables: T.importAsVariablesType): t => {
...this,
includeAsVariables: includeAsVariables,
includeAsVariables,
}
let setDirectImports = (this: t, directIncludes: array<string>): t => {
...this,
directIncludes: directIncludes,
directIncludes,
}
let parseIncludes = (this: t): t => {
@ -144,9 +143,9 @@ let parseIncludes = (this: t): t => {
->Belt.Array.map(((_variable, file)) => file)
{
...this,
includes: includes,
includeAsVariables: includeAsVariables,
directIncludes: directIncludes,
includes,
includeAsVariables,
directIncludes,
}
}
}

View File

@ -54,6 +54,7 @@ module Message = {
}
answer
}
| REMacroNotFound(macro) => `Macro not found: ${macro}`
| RENotAFunction(valueString) => `${valueString} is not a function`
| RERecordPropertyNotFound(msg, index) => `${msg}: ${index}`
@ -93,8 +94,8 @@ type t = {
exception SqException(t)
let fromMessageWithFrameStack = (message: Message.t, frameStack: Reducer_FrameStack.t): t => {
message: message,
frameStack: frameStack,
message,
frameStack,
}
// this shouldn't be used much, since frame stack will be empty

View File

@ -18,6 +18,7 @@ let stdLib: Reducer_T.namespace = {
| None => REArrayIndexNotFound("Array index not found", index)->SqError.Message.throw
}
}
| [IEvRecord(dict), IEvString(sIndex)] =>
switch Belt.Map.String.get(dict, sIndex) {
| Some(value) => value

View File

@ -9,13 +9,13 @@ type declaration<'a> = {
module ContinuousFloatArg = {
let make = (min: float, max: float): arg => {
Float({min: min, max: max})
Float({min, max})
}
}
module ContinuousTimeArg = {
let make = (min: Js.Date.t, max: Js.Date.t): arg => {
Date({min: min, max: max})
Date({min, max})
}
}
@ -33,7 +33,7 @@ module Arg = {
}
let make = (fn: 'a, args: array<arg>): declaration<'a> => {
{fn: fn, args: args}
{fn, args}
}
let toString = (r: declaration<'a>, fnToString): string => {

View File

@ -85,8 +85,8 @@ module T = {
}
let square = mapX(x => x ** 2.0)
let zip = ({xs, ys}: t) => Belt.Array.zip(xs, ys)
let fromArray = ((xs, ys)): t => {xs: xs, ys: ys}
let fromArrays = (xs, ys): t => {xs: xs, ys: ys}
let fromArray = ((xs, ys)): t => {xs, ys}
let fromArrays = (xs, ys): t => {xs, ys}
let accumulateYs = (fn, p: t) => fromArray((p.xs, E.A.accumulate(fn, p.ys)))
let concat = (t1: t, t2: t) => {
let cxs = Array.concat(list{t1.xs, t2.xs})
@ -142,7 +142,7 @@ module T = {
}
let make = (~xs: array<float>, ~ys: array<float>) => {
let attempt: t = {xs: xs, ys: ys}
let attempt: t = {xs, ys}
switch Validator.validate(attempt) {
| Some(error) => Error(error)
| None => Ok(attempt)
@ -452,6 +452,7 @@ module PointwiseCombination = {
let _ = Js.Array.push(fn(y1, y2), newYs)
let _ = Js.Array.push(x, newXs)
}
| None => ()
}
}
@ -558,7 +559,7 @@ module Range = {
(xs[x + 1] -. xs[x]) *. ((ys[x] +. ys[x + 1]) /. 2.) +. cumulativeY[x], // dx // (1/2) * (avgY)
)
}
Some({xs: xs, ys: cumulativeY})
Some({xs, ys: cumulativeY})
}
let derivative = mapYsBasedOnRanges(delta_y_over_delta_x)
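The cumulative sum above is the trapezoid rule applied piecewise, as the inline dx and (1/2) * (avgY) comments hint: each step adds the area of one trapezoid to the running total. In LaTeX notation (indices mine):

  \text{cumulativeY}_{i+1} = \text{cumulativeY}_i + (x_{i+1} - x_i)\,\frac{y_i + y_{i+1}}{2}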

View File

@ -22,7 +22,6 @@
"benchmark/**/*.bs.js",
"src/rescript/**/*.js",
"src/rescript/**/*.gen.tsx",
"../../node_modules/bisect_ppx/**/*.bs.js",
"dist/**"
]
},
@ -31,10 +30,10 @@
"outputs": []
},
"bundle": {
"dependsOn": ["^build", "build"]
"dependsOn": ["build"]
},
"coverage": {
"cache": false
"dependsOn": ["build"]
}
}
}

yarn.lock (393 changed lines)
View File

@ -2312,13 +2312,6 @@
js-yaml "^3.13.1"
resolve-from "^5.0.0"
"@istanbuljs/nyc-config-typescript@^1.0.2":
version "1.0.2"
resolved "https://registry.yarnpkg.com/@istanbuljs/nyc-config-typescript/-/nyc-config-typescript-1.0.2.tgz#1f5235b28540a07219ae0dd42014912a0b19cf89"
integrity sha512-iKGIyMoyJuFnJRSVTZ78POIRvNnwZaWIf8vG4ZS3rQq58MMDrqEX2nnzx0R28V2X8JvmKYiqY9FP2hlJsm8A0w==
dependencies:
"@istanbuljs/schema" "^0.1.2"
"@istanbuljs/schema@^0.1.2", "@istanbuljs/schema@^0.1.3":
version "0.1.3"
resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98"
@ -3044,6 +3037,11 @@
"@react-hook/passive-layout-effect" "^1.2.0"
"@react-hook/resize-observer" "^1.2.1"
"@rescript/std@^10.0.0":
version "10.0.0"
resolved "https://registry.yarnpkg.com/@rescript/std/-/std-10.0.0.tgz#11996296739d7f0d2949283c93b4d14e9ed4589d"
integrity sha512-DFwX5vWASZtvjFdqar2VIadvmy2ZBPTnPI2A9EKEkvNR93OUoZygOfvhRaueIQtlS4f9X50E3v2awI9JJG+JsQ==
"@rescript/std@^9.1.4":
version "9.1.4"
resolved "https://registry.yarnpkg.com/@rescript/std/-/std-9.1.4.tgz#94971cb504b10d36d470618fa1c6f0a2d03a6b9b"
@ -5891,11 +5889,6 @@ ansi-regex@^6.0.1:
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a"
integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==
ansi-styles@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe"
integrity sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA==
ansi-styles@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
@ -5948,13 +5941,6 @@ app-root-dir@^1.0.2:
resolved "https://registry.yarnpkg.com/app-root-dir/-/app-root-dir-1.0.2.tgz#38187ec2dea7577fff033ffcb12172692ff6e118"
integrity sha512-jlpIfsOoNoafl92Sz//64uQHGSyMrD2vYG5d8o2a4qGvyNCvXur7bzIsWtAC/6flI2RYAp3kv8rsfBtaLm7w0g==
append-transform@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-2.0.0.tgz#99d9d29c7b38391e6f428d28ce136551f0b77e12"
integrity sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==
dependencies:
default-require-extensions "^3.0.0"
"aproba@^1.0.3 || ^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc"
@ -5965,11 +5951,6 @@ aproba@^1.1.1:
resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==
archy@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40"
integrity sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==
are-we-there-yet@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz#372e0e7bd279d8e94c653aaa1f67200884bf3e1c"
@ -6255,15 +6236,6 @@ azure-devops-node-api@^11.0.1:
tunnel "0.0.6"
typed-rest-client "^1.8.4"
babel-code-frame@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b"
integrity sha512-XqYMR2dfdGMW+hd0IUZ2PwK+fGeFkOxZJ0wY+JaQAHzt1Zx8LcvpiZD2NiGkEG8qx0CfkAOr5xt76d1e8vG90g==
dependencies:
chalk "^1.1.3"
esutils "^2.0.2"
js-tokens "^3.0.2"
babel-jest@^27.4.2, babel-jest@^27.5.1:
version "27.5.1"
resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444"
@ -6301,13 +6273,6 @@ babel-loader@^8.0.0, babel-loader@^8.2.3, babel-loader@^8.2.5:
make-dir "^3.1.0"
schema-utils "^2.6.5"
babel-messages@^6.23.0:
version "6.23.0"
resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e"
integrity sha512-Bl3ZiA+LjqaMtNYopA9TYE9HP1tQ+E5dLxE0XrAzcIJeK2UqF0/EaqXwBn9esd4UmTfEab+P+UYQ1GnioFIb/w==
dependencies:
babel-runtime "^6.22.0"
babel-plugin-add-react-displayname@^0.0.5:
version "0.0.5"
resolved "https://registry.yarnpkg.com/babel-plugin-add-react-displayname/-/babel-plugin-add-react-displayname-0.0.5.tgz#339d4cddb7b65fd62d1df9db9fe04de134122bd5"
@ -6426,29 +6391,11 @@ babel-plugin-react-docgen@^4.1.0, babel-plugin-react-docgen@^4.2.1:
lodash "^4.17.15"
react-docgen "^5.0.0"
babel-plugin-transform-es2015-modules-commonjs@^6.26.2:
version "6.26.2"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.2.tgz#58a793863a9e7ca870bdc5a881117ffac27db6f3"
integrity sha512-CV9ROOHEdrjcwhIaJNBGMBCodN+1cfkwtM1SbUHmvyy35KGT7fohbpOxkE2uLz1o6odKK2Ck/tz47z+VqQfi9Q==
dependencies:
babel-plugin-transform-strict-mode "^6.24.1"
babel-runtime "^6.26.0"
babel-template "^6.26.0"
babel-types "^6.26.0"
babel-plugin-transform-react-remove-prop-types@^0.4.24:
version "0.4.24"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a"
integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA==
babel-plugin-transform-strict-mode@^6.24.1:
version "6.24.1"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz#d5faf7aa578a65bbe591cf5edae04a0c67020758"
integrity sha512-j3KtSpjyLSJxNoCDrhwiJad8kw0gJ9REGj8/CqL0HeRyLnvUNYV9zcqluL6QJSXh3nfsLEmSLvwRfGzrgR96Pw==
dependencies:
babel-runtime "^6.22.0"
babel-types "^6.24.1"
babel-preset-current-node-syntax@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b"
@ -6505,55 +6452,6 @@ babel-preset-react-app@^10.0.1:
babel-plugin-macros "^3.1.0"
babel-plugin-transform-react-remove-prop-types "^0.4.24"
babel-runtime@^6.22.0, babel-runtime@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe"
integrity sha512-ITKNuq2wKlW1fJg9sSW52eepoYgZBggvOAHC0u/CYu/qxQ9EVzThCgR69BnSXLHjy2f7SY5zaQ4yt7H9ZVxY2g==
dependencies:
core-js "^2.4.0"
regenerator-runtime "^0.11.0"
babel-template@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02"
integrity sha512-PCOcLFW7/eazGUKIoqH97sO9A2UYMahsn/yRQ7uOk37iutwjq7ODtcTNF+iFDSHNfkctqsLRjLP7URnOx0T1fg==
dependencies:
babel-runtime "^6.26.0"
babel-traverse "^6.26.0"
babel-types "^6.26.0"
babylon "^6.18.0"
lodash "^4.17.4"
babel-traverse@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee"
integrity sha512-iSxeXx7apsjCHe9c7n8VtRXGzI2Bk1rBSOJgCCjfyXb6v1aCqE1KSEpq/8SXuVN8Ka/Rh1WDTF0MDzkvTA4MIA==
dependencies:
babel-code-frame "^6.26.0"
babel-messages "^6.23.0"
babel-runtime "^6.26.0"
babel-types "^6.26.0"
babylon "^6.18.0"
debug "^2.6.8"
globals "^9.18.0"
invariant "^2.2.2"
lodash "^4.17.4"
babel-types@^6.24.1, babel-types@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497"
integrity sha512-zhe3V/26rCWsEZK8kZN+HaQj5yQ1CilTObixFzKW1UWjqG7618Twz6YEsCnjfg5gBcJh02DrpCkS9h98ZqDY+g==
dependencies:
babel-runtime "^6.26.0"
esutils "^2.0.2"
lodash "^4.17.4"
to-fast-properties "^1.0.3"
babylon@^6.18.0:
version "6.18.0"
resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3"
integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==
bail@^1.0.0:
version "1.0.5"
resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776"
@ -6636,11 +6534,6 @@ bindings@^1.5.0:
dependencies:
file-uri-to-path "1.0.0"
bisect_ppx@^2.7.1:
version "2.7.1"
resolved "https://registry.yarnpkg.com/bisect_ppx/-/bisect_ppx-2.7.1.tgz#8fde3e6f767a1a4ca190a266d0bd38800b088f66"
integrity sha512-e8gRgfhmCptiyGGov+54Acah+rc+svm0yc/26mn+M6CCNDADufVLMgRaG1uw3LAHm/PFPy+zGFAKMwd6lD2O2g==
bl@^4.0.3:
version "4.1.0"
resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a"
@ -7014,16 +6907,6 @@ cacheable-request@^6.0.0:
normalize-url "^4.1.0"
responselike "^1.0.2"
caching-transform@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/caching-transform/-/caching-transform-4.0.0.tgz#00d297a4206d71e2163c39eaffa8157ac0651f0f"
integrity sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==
dependencies:
hasha "^5.0.0"
make-dir "^3.0.0"
package-hash "^4.0.0"
write-file-atomic "^3.0.0"
call-bind@^1.0.0, call-bind@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
@ -7068,7 +6951,7 @@ camelcase@^2.0.0:
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f"
integrity sha512-DLIsRzJVBQu72meAKPkWQOLcujdXT32hwdfnkI1frSiSRMK1MofjKHf+MEx0SB6fjEFXL8fBDv1dKymBlOp4Qw==
camelcase@^5.0.0, camelcase@^5.3.1:
camelcase@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
@ -7119,17 +7002,6 @@ ccount@^1.0.0:
resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043"
integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==
chalk@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98"
integrity sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A==
dependencies:
ansi-styles "^2.2.1"
escape-string-regexp "^1.0.2"
has-ansi "^2.0.0"
strip-ansi "^3.0.0"
supports-color "^2.0.0"
chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2:
version "2.4.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
@ -7335,15 +7207,6 @@ cli-table3@^0.6.1, cli-table3@^0.6.2:
optionalDependencies:
"@colors/colors" "1.5.0"
cliui@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
dependencies:
string-width "^4.2.0"
strip-ansi "^6.0.0"
wrap-ansi "^6.2.0"
cliui@^7.0.2:
version "7.0.4"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f"
@ -7708,11 +7571,6 @@ core-js-pure@^3.20.2, core-js-pure@^3.8.1:
resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.25.0.tgz#f8d1f176ff29abbfeb610110de891d5ae5a361d4"
integrity sha512-IeHpLwk3uoci37yoI2Laty59+YqH9x5uR65/yiA0ARAJrTrN4YU0rmauLWfvqOuk77SlNJXj2rM6oT/dBD87+A==
core-js@^2.4.0:
version "2.6.12"
resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec"
integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==
core-js@^3.0.4, core-js@^3.19.2, core-js@^3.23.3, core-js@^3.6.5, core-js@^3.8.2:
version "3.25.0"
resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.25.0.tgz#be71d9e0dd648ffd70c44a7ec2319d039357eceb"
@ -8304,7 +8162,7 @@ data-urls@^3.0.2:
whatwg-mimetype "^3.0.0"
whatwg-url "^11.0.0"
debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0, debug@^2.6.8, debug@^2.6.9:
debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0, debug@^2.6.9:
version "2.6.9"
resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
@ -8325,7 +8183,7 @@ debug@^3.0.0, debug@^3.2.7:
dependencies:
ms "^2.1.1"
decamelize@^1.1.2, decamelize@^1.2.0:
decamelize@^1.1.2:
version "1.2.0"
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==
@ -8397,13 +8255,6 @@ default-gateway@^6.0.3:
dependencies:
execa "^5.0.0"
default-require-extensions@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-3.0.0.tgz#e03f93aac9b2b6443fc52e5e4a37b3ad9ad8df96"
integrity sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==
dependencies:
strip-bom "^4.0.0"
defer-to-connect@^1.0.1:
version "1.1.3"
resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591"
@ -9031,11 +8882,6 @@ es5-shim@^4.5.13:
resolved "https://registry.yarnpkg.com/es5-shim/-/es5-shim-4.6.7.tgz#bc67ae0fc3dd520636e0a1601cc73b450ad3e955"
integrity sha512-jg21/dmlrNQI7JyyA2w7n+yifSxBng0ZralnSfVZjoCawgNTCnS+yBCyVM9DL5itm7SUnDGgv7hcq2XCZX4iRQ==
es6-error@^4.0.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/es6-error/-/es6-error-4.1.1.tgz#9e3af407459deed47e9a91f9b885a84eb05c561d"
integrity sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==
es6-shim@^0.35.5:
version "0.35.6"
resolved "https://registry.yarnpkg.com/es6-shim/-/es6-shim-0.35.6.tgz#d10578301a83af2de58b9eadb7c2c9945f7388a0"
@ -9189,7 +9035,7 @@ escape-latex@^1.2.0:
resolved "https://registry.yarnpkg.com/escape-latex/-/escape-latex-1.2.0.tgz#07c03818cf7dac250cce517f4fda1b001ef2bca1"
integrity sha512-nV5aVWW1K0wEiUIEdZ4erkGGH8mDxGyxSeqPzRNtWP7ataw+/olFObw7hujFWlVjNsaDFw5VZ5NzVSIqRgfTiw==
escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5:
escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
@ -9918,7 +9764,7 @@ find-cache-dir@^2.0.0, find-cache-dir@^2.1.0:
make-dir "^2.0.0"
pkg-dir "^3.0.0"
find-cache-dir@^3.2.0, find-cache-dir@^3.3.1:
find-cache-dir@^3.3.1:
version "3.3.2"
resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b"
integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==
@ -10106,11 +9952,6 @@ from2@^2.1.0:
inherits "^2.0.1"
readable-stream "^2.0.0"
fromentries@^1.2.0:
version "1.3.2"
resolved "https://registry.yarnpkg.com/fromentries/-/fromentries-1.3.2.tgz#e4bca6808816bf8f93b52750f1127f5a6fd86e3a"
integrity sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==
fs-constants@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
@ -10230,7 +10071,7 @@ gentype@^4.5.0:
resolved "https://registry.yarnpkg.com/gentype/-/gentype-4.5.0.tgz#460152da93db783626fd25bbd38fb574d886f5f3"
integrity sha512-XqHBQPS6Qb2HSgNJAwYRXbQJ4LSvz+MgNvuWnj8bz0teSorsy2kDxA6F1eZx5ft8cnfKAls4uNEgd5uNcPbQDg==
get-caller-file@^2.0.1, get-caller-file@^2.0.5:
get-caller-file@^2.0.5:
version "2.0.5"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
@ -10411,11 +10252,6 @@ globals@^13.15.0:
dependencies:
type-fest "^0.20.2"
globals@^9.18.0:
version "9.18.0"
resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a"
integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==
globalthis@^1.0.0:
version "1.0.3"
resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf"
@ -10526,13 +10362,6 @@ harmony-reflect@^1.4.6:
resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710"
integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g==
has-ansi@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91"
integrity sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg==
dependencies:
ansi-regex "^2.0.0"
has-bigints@^1.0.1, has-bigints@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa"
@ -10639,14 +10468,6 @@ hash.js@^1.0.0, hash.js@^1.0.3:
inherits "^2.0.3"
minimalistic-assert "^1.0.1"
hasha@^5.0.0:
version "5.2.2"
resolved "https://registry.yarnpkg.com/hasha/-/hasha-5.2.2.tgz#a48477989b3b327aea3c04f53096d816d97522a1"
integrity sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==
dependencies:
is-stream "^2.0.0"
type-fest "^0.8.0"
hast-to-hyperscript@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d"
@ -11215,7 +11036,7 @@ interpret@^2.2.0:
resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9"
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
invariant@^2.2.2, invariant@^2.2.4:
invariant@^2.2.4:
version "2.2.4"
resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6"
integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==
@ -11710,23 +11531,6 @@ istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0:
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3"
integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==
istanbul-lib-hook@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz#8f84c9434888cc6b1d0a9d7092a76d239ebf0cc6"
integrity sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==
dependencies:
append-transform "^2.0.0"
istanbul-lib-instrument@^4.0.0:
version "4.0.3"
resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d"
integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==
dependencies:
"@babel/core" "^7.7.5"
"@istanbuljs/schema" "^0.1.2"
istanbul-lib-coverage "^3.0.0"
semver "^6.3.0"
istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f"
@ -11738,18 +11542,6 @@ istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0:
istanbul-lib-coverage "^3.2.0"
semver "^6.3.0"
istanbul-lib-processinfo@^2.0.2:
version "2.0.3"
resolved "https://registry.yarnpkg.com/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz#366d454cd0dcb7eb6e0e419378e60072c8626169"
integrity sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==
dependencies:
archy "^1.0.0"
cross-spawn "^7.0.3"
istanbul-lib-coverage "^3.2.0"
p-map "^3.0.0"
rimraf "^3.0.0"
uuid "^8.3.2"
istanbul-lib-report@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6"
@ -11768,7 +11560,7 @@ istanbul-lib-source-maps@^4.0.0:
istanbul-lib-coverage "^3.0.0"
source-map "^0.6.1"
istanbul-reports@^3.0.2, istanbul-reports@^3.1.3, istanbul-reports@^3.1.4:
istanbul-reports@^3.1.3, istanbul-reports@^3.1.4:
version "3.1.5"
resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae"
integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==
@ -12777,11 +12569,6 @@ js-string-escape@^1.0.1:
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
js-tokens@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b"
integrity sha512-RjTcuD4xjtthQkaWH7dFlH85L+QaVtSoOyGdZ3g6HFhS9dFNDfLyqgm2NFe2X6cQpeFmt0452FJjFG5UameExg==
js-yaml@3.14.1, js-yaml@^3.13.1:
version "3.14.1"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
@ -13161,11 +12948,6 @@ lodash.debounce@^4.0.8:
resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af"
integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==
lodash.flattendeep@^4.4.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz#fb030917f86a3134e5bc9bec0d69e0013ddfedb2"
integrity sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==
lodash.flow@^3.3.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/lodash.flow/-/lodash.flow-3.5.0.tgz#87bf40292b8cf83e4e8ce1a3ae4209e20071675a"
@ -13201,7 +12983,7 @@ lodash.uniq@4.5.0, lodash.uniq@^4.5.0:
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==
lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.7.0:
lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0:
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
@ -13945,13 +13727,6 @@ node-libs-browser@^2.2.1:
util "^0.11.0"
vm-browserify "^1.0.1"
node-preload@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/node-preload/-/node-preload-0.2.1.tgz#c03043bb327f417a18fee7ab7ee57b408a144301"
integrity sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==
dependencies:
process-on-spawn "^1.0.0"
node-releases@^2.0.6:
version "2.0.6"
resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503"
@ -14054,39 +13829,6 @@ nwsapi@^2.2.0, nwsapi@^2.2.2:
resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0"
integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw==
nyc@^15.1.0:
version "15.1.0"
resolved "https://registry.yarnpkg.com/nyc/-/nyc-15.1.0.tgz#1335dae12ddc87b6e249d5a1994ca4bdaea75f02"
integrity sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==
dependencies:
"@istanbuljs/load-nyc-config" "^1.0.0"
"@istanbuljs/schema" "^0.1.2"
caching-transform "^4.0.0"
convert-source-map "^1.7.0"
decamelize "^1.2.0"
find-cache-dir "^3.2.0"
find-up "^4.1.0"
foreground-child "^2.0.0"
get-package-type "^0.1.0"
glob "^7.1.6"
istanbul-lib-coverage "^3.0.0"
istanbul-lib-hook "^3.0.0"
istanbul-lib-instrument "^4.0.0"
istanbul-lib-processinfo "^2.0.2"
istanbul-lib-report "^3.0.0"
istanbul-lib-source-maps "^4.0.0"
istanbul-reports "^3.0.2"
make-dir "^3.0.0"
node-preload "^0.2.1"
p-map "^3.0.0"
process-on-spawn "^1.0.0"
resolve-from "^5.0.0"
rimraf "^3.0.0"
signal-exit "^3.0.2"
spawn-wrap "^2.0.0"
test-exclude "^6.0.0"
yargs "^15.0.2"
object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
@ -14394,16 +14136,6 @@ p-try@^2.0.0:
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
package-hash@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-4.0.0.tgz#3537f654665ec3cc38827387fc904c163c54f506"
integrity sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==
dependencies:
graceful-fs "^4.1.15"
hasha "^5.0.0"
lodash.flattendeep "^4.4.0"
release-zalgo "^1.0.0"
package-json@^6.3.0:
version "6.5.0"
resolved "https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0"
@ -15602,13 +15334,6 @@ process-nextick-args@~2.0.0:
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
process-on-spawn@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/process-on-spawn/-/process-on-spawn-1.0.0.tgz#95b05a23073d30a17acfdc92a440efd2baefdc93"
integrity sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==
dependencies:
fromentries "^1.2.0"
process@^0.11.10:
version "0.11.10"
resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
@ -16353,11 +16078,6 @@ regenerate@^1.4.2:
resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a"
integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==
regenerator-runtime@^0.11.0:
version "0.11.1"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9"
integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==
regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.7, regenerator-runtime@^0.13.9:
version "0.13.9"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
@ -16460,13 +16180,6 @@ relateurl@^0.2.7:
resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9"
integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==
release-zalgo@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/release-zalgo/-/release-zalgo-1.0.0.tgz#09700b7e5074329739330e535c5a90fb67851730"
integrity sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==
dependencies:
es6-error "^4.0.1"
remark-emoji@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/remark-emoji/-/remark-emoji-2.2.0.tgz#1c702090a1525da5b80e15a8f963ef2c8236cac7"
@ -16608,11 +16321,6 @@ require-from-string@^2.0.2:
resolved "https://registry.yarnpkg.com/require-like/-/require-like-0.1.2.tgz#ad6f30c13becd797010c468afa775c0c0a6b47fa"
integrity sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==
require-main-filename@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
requires-port@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
@ -16637,10 +16345,10 @@ rescript-js-map@^1.1.0:
dependencies:
rescript-js-iterator "^1.1.0"
rescript@^9.1.4:
version "9.1.4"
resolved "https://registry.yarnpkg.com/rescript/-/rescript-9.1.4.tgz#1eb126f98d6c16942c0bf0df67c050198e580515"
integrity sha512-aXANK4IqecJzdnDpJUsU6pxMViCR5ogAxzuqS0mOr8TloMnzAjJFu63fjD6LCkWrKAhlMkFFzQvVQYaAaVkFXw==
rescript@^10.0.0:
version "10.0.0"
resolved "https://registry.yarnpkg.com/rescript/-/rescript-10.0.0.tgz#8460bc6f7d94bc580eac02d7c7efdf0a470916b8"
integrity sha512-LhNg/4+0j8NvoFeslgAeYLlzUwkq6kR6l6v8BnZ61VDTxopK2l96uT1lq5lv1aMxzMDynvE2qnX0zalre+6XxA==
resize-observer-polyfill@^1.5.1:
version "1.5.1"
@ -17426,18 +17134,6 @@ space-separated-tokens@^1.0.0:
resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz#85f32c3d10d9682007e917414ddc5c26d1aa6899"
integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==
spawn-wrap@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/spawn-wrap/-/spawn-wrap-2.0.0.tgz#103685b8b8f9b79771318827aa78650a610d457e"
integrity sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==
dependencies:
foreground-child "^2.0.0"
is-windows "^1.0.2"
make-dir "^3.0.0"
rimraf "^3.0.0"
signal-exit "^3.0.2"
which "^2.0.1"
spdx-correct@^3.0.0:
version "3.1.1"
resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9"
@ -17738,7 +17434,7 @@ stringify-object@^3.3.0:
is-obj "^1.0.1"
is-regexp "^1.0.0"
strip-ansi@^3.0.0, strip-ansi@^3.0.1:
strip-ansi@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
integrity sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==
@ -17874,11 +17570,6 @@ stylis@^4.0.6:
resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.1.1.tgz#e46c6a9bbf7c58db1e65bb730be157311ae1fe12"
integrity sha512-lVrM/bNdhVX2OgBFNa2YJ9Lxj7kPzylieHd3TNjuGE0Re9JB7joL5VUKOVH1kdNNJTgGPpT8hmwIAPLaSyEVFQ==
supports-color@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
integrity sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g==
supports-color@^5.3.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
@ -18230,11 +17921,6 @@ to-arraybuffer@^1.0.0:
resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43"
integrity sha512-okFlQcoGTi4LQBG/PgSYblw9VOyptsz2KJZqc6qtgGdes8VktzUQkj4BI2blit072iS8VODNcMA+tvnS9dnuMA==
to-fast-properties@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47"
integrity sha512-lxrWP8ejsq+7E3nNjwYmUBMAgjMTZoTI+sdBOpvNyijeDLa29LUn9QaoXAHv4+Z578hbmHHJKZknzxVtvo77og==
to-fast-properties@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
@ -18566,7 +18252,7 @@ type-fest@^0.6.0:
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b"
integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==
type-fest@^0.8.0, type-fest@^0.8.1:
type-fest@^0.8.1:
version "0.8.1"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
@ -19962,11 +19648,6 @@ which-boxed-primitive@^1.0.2:
is-string "^1.0.5"
is-symbol "^1.0.3"
which-module@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==
which@^1.2.9, which@^1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
@ -20200,15 +19881,6 @@ worker-rpc@^0.1.0:
dependencies:
microevent.ts "~0.1.1"
wrap-ansi@^6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
dependencies:
ansi-styles "^4.0.0"
string-width "^4.1.0"
strip-ansi "^6.0.0"
wrap-ansi@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
@ -20342,14 +20014,6 @@ yaml@^2.1.1:
resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.1.1.tgz#1e06fb4ca46e60d9da07e4f786ea370ed3c3cfec"
integrity sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==
yargs-parser@^18.1.2:
version "18.1.3"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs-parser@^20.2.2, yargs-parser@^20.2.9:
version "20.2.9"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee"
@ -20360,23 +20024,6 @@ yargs-parser@^21.0.0, yargs-parser@^21.0.1:
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35"
integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==
yargs@^15.0.2:
version "15.4.1"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8"
integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==
dependencies:
cliui "^6.0.0"
decamelize "^1.2.0"
find-up "^4.1.0"
get-caller-file "^2.0.1"
require-directory "^2.1.1"
require-main-filename "^2.0.0"
set-blocking "^2.0.0"
string-width "^4.2.0"
which-module "^2.0.0"
y18n "^4.0.0"
yargs-parser "^18.1.2"
yargs@^16.2.0:
version "16.2.0"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66"