Merge pull request #12 from foretold-app/gentype-experiment

Begin conversion to rescript and add GenType
This commit is contained in:
Ozzie Gooen 2022-02-07 17:28:58 -05:00 committed by GitHub
commit 0f06a107bd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
126 changed files with 44702 additions and 77909 deletions

BIN
.DS_Store vendored Normal file

Binary file not shown.

@@ -1,13 +0,0 @@
open Jest;
open Expect;
describe("Bandwidth", () => {
test("nrd0()", () => {
let data = [|1., 4., 3., 2.|];
expect(Bandwidth.nrd0(data)) |> toEqual(0.7625801874014622);
});
test("nrd()", () => {
let data = [|1., 4., 3., 2.|];
expect(Bandwidth.nrd(data)) |> toEqual(0.8981499984950554);
});
});

@@ -1,51 +0,0 @@
open Jest;
open Expect;
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () =>
expect(item1) |> toEqual(item2)
)
: test(str, () =>
expect(item1) |> toEqual(item2)
);
describe("Lodash", () => {
describe("Lodash", () => {
makeTest(
"split",
SamplesToShape.Internals.T.splitContinuousAndDiscrete([|1.432, 1.33455, 2.0|]),
([|1.432, 1.33455, 2.0|], E.FloatFloatMap.empty()),
);
makeTest(
"split",
SamplesToShape.Internals.T.splitContinuousAndDiscrete([|
1.432,
1.33455,
2.0,
2.0,
2.0,
2.0,
|])
|> (((c, disc)) => (c, disc |> E.FloatFloatMap.toArray)),
([|1.432, 1.33455|], [|(2.0, 4.0)|]),
);
let makeDuplicatedArray = count => {
let arr = Belt.Array.range(1, count) |> E.A.fmap(float_of_int);
let sorted = arr |> Belt.SortArray.stableSortBy(_, compare);
E.A.concatMany([|sorted, sorted, sorted, sorted|])
|> Belt.SortArray.stableSortBy(_, compare);
};
let (_, discrete) =
SamplesToShape.Internals.T.splitContinuousAndDiscrete(makeDuplicatedArray(10));
let toArr = discrete |> E.FloatFloatMap.toArray;
makeTest("splitMedium", toArr |> Belt.Array.length, 10);
let (c, discrete) =
SamplesToShape.Internals.T.splitContinuousAndDiscrete(makeDuplicatedArray(500));
let toArr = discrete |> E.FloatFloatMap.toArray;
makeTest("splitMedium", toArr |> Belt.Array.length, 500);
})
});

@@ -1,63 +0,0 @@
open Jest;
open Expect;
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () =>
expect(item1) |> toEqual(item2)
)
: test(str, () =>
expect(item1) |> toEqual(item2)
);
let shape1: DistTypes.xyShape = {xs: [|1., 4., 8.|], ys: [|0.2, 0.4, 0.8|]};
let shape2: DistTypes.xyShape = {
xs: [|1., 5., 10.|],
ys: [|0.2, 0.5, 0.8|],
};
let shape3: DistTypes.xyShape = {
xs: [|1., 20., 50.|],
ys: [|0.2, 0.5, 0.8|],
};
describe("XYShapes", () => {
describe("logScorePoint", () => {
makeTest(
"When identical",
XYShape.logScorePoint(30, shape1, shape1),
Some(0.0),
);
makeTest(
"When similar",
XYShape.logScorePoint(30, shape1, shape2),
Some(1.658971191043856),
);
makeTest(
"When very different",
XYShape.logScorePoint(30, shape1, shape3),
Some(210.3721280423322),
);
});
// describe("transverse", () => {
// makeTest(
// "When very different",
// XYShape.Transversal._transverse(
// (aCurrent, aLast) => aCurrent +. aLast,
// [|1.0, 2.0, 3.0, 4.0|],
// ),
// [|1.0, 3.0, 6.0, 10.0|],
// )
// });
describe("integrateWithTriangles", () => {
makeTest(
"integrates correctly",
XYShape.Range.integrateWithTriangles(shape1),
Some({
xs: [|1., 4., 8.|],
ys: [|0.0, 0.9000000000000001, 3.3000000000000007|],
}),
)
});
});

20
docs/.gitignore vendored Normal file
@@ -0,0 +1,20 @@
# Dependencies
/node_modules
# Production
/build
# Generated files
.docusaurus
.cache-loader
# Misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*

41
docs/README.md Normal file
@@ -0,0 +1,41 @@
# Website
This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
### Installation
```
$ yarn
```
### Local Development
```
$ yarn start
```
This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
### Build
```
$ yarn build
```
This command generates static content into the `build` directory and can be served using any static contents hosting service.
### Deployment
Using SSH:
```
$ USE_SSH=true yarn deploy
```
Not using SSH:
```
$ GIT_USER=<Your GitHub username> yarn deploy
```
If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.

3
docs/babel.config.js Normal file
@@ -0,0 +1,3 @@
module.exports = {
presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
};

@@ -0,0 +1,131 @@
---
slug: Squiggle-Talk
title: The Squiggly language (Short Presentation)
authors: ozzie
---
# Multivariate estimation & the Squiggly language
*This post was originally published in August 2020 on [LessWrong](https://www.lesswrong.com/posts/g9QdXySpydd6p8tcN/sunday-august-16-12pm-pdt-talks-by-ozzie-gooen-habryka-ben). The name of the project has since been changed from Squiggly to Squiggle.*
*(Talk given at the LessWrong Lightning Talks in 2020, [an event on Sunday 16th of August](https://www.lesswrong.com/posts/g9QdXySpydd6p8tcN/sunday-august-16-12pm-pdt-talks-by-ozzie-gooen-habryka-ben). Ozzie Gooen is responsible for the talk; Jacob Lagerros and Justis Mills edited the transcript.)*
![](https://lh5.googleusercontent.com/ebsMFHLu-qE2ZABLhk8aWYx9SqfswabLWxpZKr0iq5PSsv1ruQnRwcDGUzryILT3GuXqL1w1WZARv6Zbjq-o8I4xS0yErm_JHihDYMScY65xLsYgy4svUzI3E6mmBoVTO9IZg4Sv)
**Ozzie:** This image is my [TLDR](https://en.wikipedia.org/wiki/Wikipedia:Too_long;_didn%27t_read) on probability distributions:
![](https://lh4.googleusercontent.com/axqy1MImst0AL-JXV3X7NJd9LFCwZljG05zBD7bQAyBppSrBacchtUXB3zvrtC3xwmqpsUPLznXP4Yfwg_uZOmTuaQ6HrcElhN1_ZgNqOHP2UvGbBAw6kDGb0qZPE1mcnAS39aFT)
Basically, distributions are kind of old school. People are used to estimating and predicting them. We don't want that. We want functions that return distributions -- those are way cooler. The future is functions, not distributions.
<!--truncate-->
What do I mean by this?
For an example, let's look at some of the existing COVID models. This is one of them, from the IHME:
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/d706215f87f158776e6829b718c24979872247a32f51748f.png/w_1332)
You can see that it made projections for total deaths, daily deaths, and a bunch of other variables. And for each indicator, you could choose a country or a location, and it gives you a forecast of what that indicator may look like.
So basically there's some function that for any parameter, which could be deaths or daily deaths or time or whatever, outputs a probability density. That's the core thing that's happening.
![](https://lh3.googleusercontent.com/U42cHcDlBq2FFQMK7OeabjmCf5Wc7INRPwu8_S9FjPXc2n3JslKM5LlBHEwHezsR34GFQ1mD9LxUR2dVBGys_2gdpoueY-2X99Bk31uMprP9ZKvfk_rS8hCxCOJ0FJkKif50FAGQ)
So if you were able to parameterize the model in that way, and format it in these terms, you could basically wrap the function in some encoding. And then do the same forecast, but now using a centralized encoding.
So right now, basically for people to make something like the COVID dashboard from before, they have to use this intense output and write some custom GUI. It's a whole custom process. Moreover, it's very difficult to write *your own* function that calls their underlying model.
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/d37e5e0b2cff20d1fe04033eda56e4762631546d108fe1a3.png/w_406)
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/4d0dd806d1cf871ac477f0a6168567283a08caf99ff3d414.png/w_501)
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/d706215f87f158776e6829b718c24979872247a32f51748f.png/w_1332)
But, hypothetically, if we had an encoding layer between the model and the output, these forecasters could basically write the results of their model into one function, or into one big file. Then that file could be interpreted and run on demand. That would be a much nicer format.
Let's take a look at Metaculus, which is about the best forecasting platform we have right now.
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/7253f0b0ee81213ddd384b9e60d3375c2fb459686acf71d8.png/w_706)
On Metaculus, everything is a point estimate, which is limiting. In general, it's great that we have good point estimates, but most people don't want to look at this. They'd rather look at the pretty dashboard from before, right?
So we need to figure out ways of getting our predictors to work together to make things that look more like the pretty graphs. And one of those questions is: how do we get predictors to write functions that return distributions?
Ultimately, I think this is something that we obviously want. But it is kind of tricky to get there.
![](https://lh6.googleusercontent.com/fJjV1pe4DFSbeWRqSvg8RlgE7_vAD1f2NGRrBaGRxi_gfDDkMqDOM7Q2K7hsc6w_KziZsc3xVkcpcjUvCNYJvqvH9u_pk1PvweYvFYfMHIFJj-IGo-Dt2D1gx_VJ7aWgnwQLcZOO)
So in Estimation Utopia, as I call it, we'd allow for people to take the results of their data science models and convert them into a unified format. But also, humans could just intuitively go ahead and write in the unified format directly. And if we have unified formats that are portable and could be run in different areas with different programming languages, then it would be very easy to autogenerate GUIs for them, including aggregates which combine multiple models at the same time. We could also do scoring, which is something that we obviously want, as well as compose models together.
So that's why I've been working on the Squiggly language.
Let's look at some quick examples!
![](https://lh6.googleusercontent.com/bis1Gdsp9jx4o36j_vBjE7NEYc5xKud9K1yjnv2K2YbFB5UhFAsR0uDjgGXMlKqg99fNVMTrIBj1YDuRVh5cxwPZ1QYum8JMujhQxnH-1JQDbH9BKtQ9mA5BdwCzx8LC_l6RtvgE)
This is a classic normal distribution, but once you have this, some of the challenge is making it as easy as possible to make functions that return distributions.
Here's a case for any *t*:
![](https://lh4.googleusercontent.com/QZr0XwYQNmilOhboJXGT3J2Gpt9X7W9aifA_E1PorGeKPjKF3XYOXrhRcn38xh4KKwA6TEDXoF5B9C78MBIAJ6mO7E9tS4_9-jwh1eKlp9wYkc_StUXsi4KRwC8nhBoVXm3lZbis)
We're going to give you a normal, with *t* as a mean and the standard deviation of 3. This is a plot where it's basically showing bars at each one of the deciles. It gets a bit wider at the end. It's very easy once you have this to just create it for any specific combination of values.
It's also cool, because once you have it in this format, it's very easy to combine multiple models. For instance, here's a lognormal.
![](https://lh6.googleusercontent.com/g4dYJBmu6ScK9CePMAu_6h9u2PRbRScQlLy_0uKWLmMAOPgJXLp4IXGCUOigCmKetxXtfcpQHLb9Rilkch4FMPV94bZ_MaNWpBSfDYsR0ll4dYdedmkdjwQ1M5FhIa891fu53Hcf)
For example, if I have an estimate and my friend Jacob has an estimate, then we could write a function that for every time *t*, basically queries each one of our estimates and gives that as a combined result.
This kind of shows you a problem with fan charts: they don't show the fact that all the probability amasses at the very top and the very bottom. That's an issue that we'll get over soon. Here's what it looks like if I aggregate my model with Jacob's.
![](https://lh3.googleusercontent.com/mG50mXS2kUXx9mhBksx39s-GgY-yBs0HT4Acl2KAUba-WZ---aSOUONzvrtxYr9q__dLbf7vKzg_TVe7rKJH4c8sHPdM2k4Wi0p_FfQJr_UYzFexee6p9tfigHicmPI0NZw9ggXC)
## Questions
**Raemon:**
I had a little bit of excitement, and then fear, and then excitement again, when you talked about a unified format. The excitement was like, “Ah, a unified format, that sounds nice.” Then I had an image of all of the giant coordination problems that result from failed attempts to create a new unified format, where the attempted unified format becomes [yet another distinct format](https://xkcd.com/927/) among all the preexisting options.
Then I got kind of excited again because to a first approximation, as far as I can tell, in the grand scheme of things currently, approximately zero people use prediction markets. You might actually be able to figure out the right format and get it right the first time. You also might run into the same problems that all the other people that tried to come up with unified formats did, which was that it was hard to figure that out right at the beginning. Maybe now I am scared again. Do you have any thoughts on this?
**Ozzie:**
Yeah, I'd say in this case, I think there's no format that does this type of thing yet. This is a pretty unexplored space. Of course, writing the first format in a space is kind of scary, right? Maybe I should spend a huge amount of time making it great, because maybe it'll lock in. Maybe I should just iterate. I'm not too sure what to do there.
And there are also a few different ways that the format could go. I don't know who it's going to be the most useful for, which will be important. But right now, I'm just experimenting and seeing what's good for small communities. Well, specifically what's good for me.
**Raemon:**
Yeah, you can build the thing that seems good for you. That seems good. If you get to a point where you want to scale it up, making sure that whatever you're scaling up is reasonably flexible or something might be nice. I don't know.
**Ozzie:**
Yeah. Right now, I'm aiming for something that's good at a bunch of things but not that great at any one of them. I'm also very curious to get outside opinions. Hopefully people could start playing with this, and I can get their thoughts.
- - - -
**habryka:**
This feels very similar to [Guesstimate](https://www.getguesstimate.com/), which you also built, just in a programming language as opposed to a visual language. How does this project differ?
**Ozzie:**
Basically, you could kind of think about this as “Guesstimate: The Language”. But it does come with a lot of advantages. The main one is that you could write functions. With Guesstimate you couldn't write functions. That was a gigantic limitation!
Really, a lot of Squiggly is me trying to make up for my sins with Guesstimate. With Guesstimate, if one person makes a model of the damage from bicycling, like the micromorts that they're taking on when they bike, that model only works for them. If you wanted to go and configure it to match your situation, you'd have to go in and modify it manually. It's actually very difficult to port these models. If one person writes a good model, it's hard for somebody else to copy and paste it, hopefully into another programming tool. It's not very portable.
So I think these new features are pretty fundamental. I think that this is a pretty big step in the right direction. In general, text-based solutions have a lot of benefits when you can use them, but it is kind of tricky to use them.
- - - -
**Johnswentworth:**
I'm getting sort of mixed vibes about what exactly the use case here is. If we're thinking of this as a sort of standard for representing models, then I should be able to convert models in other formats, right? Like, if I have a model in Excel or I have a model in [Pyro](https://pyro.ai/), then there should be some easy way to turn it into this standard format?
On the other hand, if we're trying to create a language in which people write models, then that's a whole different use case where being a standard isn't really part of it at all (instead it looks more like the actual UI you showed us).
So I'm sort of not sure what the picture is in your head for how someone is actually going to use this and what it's going to do for them, or what the value add is compared to Excel or Pyro.
**Ozzie:** Yeah, great question. So I would say that I'd ideally have both data scientists and judgemental forecasters trying to use it, and those are two very distinct types of use cases, as you mentioned. It's very possible that they both want their own ideal format, and it doesn't make sense to have one format for the two of them. I'm excited for users who don't have any way of making these methods intuitively at the moment.
Suppose, for example, that you're trying to forecast the GDP of the US for each year in the coming decades.
Step one is making sure that, basically, people on Metaculus or other existing forecasting platforms could basically be writing functions using this language and then submitting those instead of just submitting point forecasts. So you'd be able to say “given as input a specific year, and some other parameters, output this distribution” — instead of having to make a new and separate forecast for each and every year. Then having the whole rest of the forecasting pipeline work with that (e.g. scoring, visualisations, and so forth).
When you do that, though, it is pretty easy to take some results from other, more advanced tools, and put them into probably very simple functions. So, for instance, if there is a distribution over time (as in the GDP example), that may be something they could interpolate with a few different points. There could be some very simple setups where you take your different Pyro model or something that actually did some intense equations, and then basically put them into this very simple function that just interpolates based on that and then uses this new format.
**Johnswentworth:**
What would be the advantage of that?
**Ozzie:**
It's complicated. If you made your model in Pyro and you wanted to then export it and allow someone to play with it, that could be a tricky thing, because your Pyro model might be computationally expensive to run. As opposed to trying to export a representation that is basically a combination of a CSV and a light wrapper function. And then people run that, which is more convenient and facilitates more collaboration.
**Johnswentworth:**
Why would people run that though? Why do people want that compressed model?
**Ozzie:**
I mean, a lot of the COVID models are like that, where basically the *running* of the simulation was very time intensive and required one person's whole PC. But it would still be nice to be able to export the *results* of that and then make those interactable, right?
**Johnswentworth:**
Oh, I see. Okay, I buy that.
**Ozzie:**
I also don't want to have to write all of the work to do all of the Pyro stuff in this language. It's way too much.
**Johnswentworth:**
Usually, when I'm thinking about this sort of thing, and I look at someone's model, I really want to know what the underlying gears were behind it. Which is exactly the opposite of what you're talking about. So it's just a use case that I'm not used to thinking through. But I agree, it does make sense.
- - - -
**habryka:**
Why call the language Squiggly? There was a surprising lack of squiggles in the language. I was like, “Ah, it makes sense, you just use the squiggles as the primary abstraction” — but then you showed me your code editor and there were no squiggles, and I was very disappointed.
**Ozzie:**
Yeah, so I haven't written my own parser yet. I've been using the one from math.js. When I write my own, it's possible I'll add it. I also am just really unsure about the name.

@@ -0,0 +1,148 @@
---
slug: technical-overview
title: Technical Overview
authors: ozzie
---
# Squiggle Technical Overview
This piece is meant to be read after [Squiggle: An Overview](https://www.lesswrong.com/posts/i5BWqSzuLbpTSoTc4/squiggle-an-overview). It includes technical information I thought best separated out for readers familiar with coding. As such, it's a bit of a grab-bag. It explains the basic internals of Squiggle, outlines ways it could be used in other programming languages, and details some of the history behind it.
The Squiggle codebase is organized in [this GitHub repo](https://github.com/foretold-app/squiggle). It's open source. The code is quite messy now, but do ping me if you're interested in running it or understanding it.
## Project Subcomponents
I think of Squiggle in three distinct clusters.
1. A high-level ReasonML library for probability distributions.
2. A simple programming language.
3. Custom visualizations and GUIs.
### 1. A high-level ReasonML library for probability distribution functions
Python has some great libraries for working with probabilities and symbolic mathematics. Javascript doesn't. Squiggle is to be run in Javascript (for interactive editing and use), so the first step for this is to have good libraries to do the basic math.
The second step is to have high-level types that could express various types of distributions and functions of distributions. For example, some distributions have symbolic representations, and others are rendered (stored as x-y coordinates). These two types have to be dealt with separately. Squiggle also has limited support for continuous and discrete mixtures, and the math for this adds more complexity.
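As a rough sketch of what that separation implies (the tag and field names below are my own, not Squiggle's actual type definitions), any operation over distributions ends up branching on which representation it was handed:
```
// Hedged sketch: "Symbolic" vs "Rendered" and their fields are assumptions.
const symbolic = { tag: "Symbolic", dist: { name: "normal", mean: 5, stdev: 2 } };
const rendered = {
  tag: "Rendered",
  xs: [0, 1, 2, 3, 4],             // x coordinates
  ys: [0.05, 0.2, 0.5, 0.2, 0.05], // densities at those points
};

// Operations have to handle both cases.
function mean(d) {
  if (d.tag === "Symbolic") return d.dist.mean;  // exact, straight from the parameters
  const total = d.ys.reduce((a, b) => a + b, 0); // numeric approximation from the shape
  return d.xs.reduce((acc, x, i) => acc + x * d.ys[i], 0) / total;
}

console.log(mean(symbolic), mean(rendered)); // 5 2
```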
When it comes to performing functions on expressions, there's a lot of optimization necessary for this to go smoothly.
Say you were to write the function,
```
multimodal(normal(5,2), normal(10,1) + uniform(1,10)) * 100
```
You'd want to apply a combination of symbolic, numeric, and sampling techniques in order to render this equation. In this case, Squiggle would perform sampling to compute the distribution of normal(10,1) + uniform(1,10) and then it would use numeric methods for the rest of the equation. In the future, it would be neat if Squiggle would also first symbolically modify the internal distributions to be multiplied by 100, rather than performing it as a separate numeric step.
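As a minimal sketch of the sampled piece (the sampler helpers below are mine, not Squiggle's internals):
```
// Hedged sketch of the sampling step for normal(10,1) + uniform(1,10).
function sampleNormal(mean, stdev) {
  // Box-Muller transform
  const u = 1 - Math.random(), v = Math.random();
  return mean + stdev * Math.sqrt(-2 * Math.log(u)) * Math.cos(2 * Math.PI * v);
}
function sampleUniform(low, high) {
  return low + Math.random() * (high - low);
}

const n = 10000;
const sums = Array.from({ length: n }, () => sampleNormal(10, 1) + sampleUniform(1, 10));

// These samples would then be rendered into a shape, mixed with normal(5,2)
// numerically, and finally scaled by 100 (ideally symbolically, as noted above).
const scaled = sums.map((x) => x * 100);
```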
These type-dependent function operations can be confusing to users, but hopefully less confusing than having to figure out how to do each of the three and do them separately. I imagine there could be some debugging UI to better explain what operations are performed.
### 2. Simple programming language functionality
It can be useful to think of Squiggle as similar to SQL, Excel, or Probabilistic Programming Languages like [WebPPL](http://webppl.org/). There are simple ways to declare variables and write functions, but don't expect to use classes, inheritance, or monads. There are no for loops, though it will probably have some kind of reduce() method in the future.
So far the parsing is done with MathJS, meaning we can't change the syntax. I'm looking forward to doing so and have been thinking about what it should be like. One idea I'm aiming for is to allow for simple dependent typing for the sake of expressing limited functions. For instance,
```
myFunction(t: [float from 0 to 30]) = normal(t,10)
myFunction
```
This function would return an error if called with a float less than 0 or greater than 30. I imagine that many prediction functions would only be estimated for limited domains.
With some introspection it should be possible to auto-generate calculator-like interfaces.
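As a sketch of what that introspection could feed into (the metadata shape and the `toControls` helper are invented for illustration, not an existing Squiggle API):
```
// Hypothetical: assume introspection exposes the declared domain of each parameter.
const fnMeta = {
  name: "myFunction",
  params: [{ name: "t", type: "float", min: 0, max: 30 }],
};

// Turn each bounded float parameter into a slider description for a GUI.
function toControls(meta) {
  return meta.params.map((p) => ({
    label: p.name,
    widget: "slider",
    min: p.min,
    max: p.max,
    step: (p.max - p.min) / 100,
  }));
}

console.log(toControls(fnMeta));
// [ { label: "t", widget: "slider", min: 0, max: 30, step: 0.3 } ]
```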
### 3. Visualizations and GUIs
The main visualizations need to be made from scratch because there's little out there now in terms of quality open-source visualizations of probability distributions and similar. This is especially true for continuous and discrete mixtures. D3 seems like the main library here, and D3 can be gnarly to write and maintain.
Right now we're using a basic [Vega](https://vega.github.io/) chart for the distribution over a variable, but this will be replaced later.
In the near term, I'm interested in making calculator-like user interfaces of various kinds. I imagine one prediction function could be used for many interfaces of calculators.
## Deployment Story, or, Why Javascript?
Squiggle is written in ReasonML which compiles to Javascript. The obvious alternative is Python. Lesser obvious but interesting options are Mathematica or Rust via WebAssembly.
The plan for Squiggle is to prioritize small programs that could be embedded in other programs and run quickly. Perhaps there will be 30 submissions for a “Covid-19 over time per location” calculator, and we'd want to run them in parallel in order to find the average answer or to rank them. I could imagine many situations where it would be useful to run these functions for many different inputs; for example, for various kinds of sensitivity analyses.
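As a sketch of that workflow, assuming each submission has already been compiled down to a plain JavaScript function from an input to, say, the mean of its predicted distribution (everything here is illustrative):
```
// Sketch only: the submissions below are toy stand-ins for compiled Squiggle functions.
function summarizeSubmissions(submissions, input) {
  const estimates = submissions.map((fn) => fn(input));
  const average = estimates.reduce((a, b) => a + b, 0) / estimates.length;
  const ranked = estimates
    .map((estimate, i) => ({ submission: i, estimate }))
    .sort((a, b) => a.estimate - b.estimate);
  return { average, ranked };
}

// Three toy "Covid-19 over time per location" submissions:
const submissions = [
  ({ day }) => 100 + 2 * day,
  ({ day }) => 80 + 3 * day,
  ({ day }) => 150,
];
console.log(summarizeSubmissions(submissions, { day: 10 }));
// average ≈ 126.7; ranked lists the submissions from lowest to highest estimate
```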
One nice-to-have feature would be functions that call other functions. Perhaps a model of your future income levels depends on some other aggregated function of the S&P 500, which further depends on models of potential tail risks to the economy. If this were the case you would want to have those model dependencies be easily accessible. This could be done via downloading or having a cloud API to quickly call them remotely.
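A toy sketch of that kind of dependency (the numbers and function names are invented; in Squiggle these would return distributions rather than point values):
```
// Invented example of one model calling another.
// A downloaded or remotely fetched aggregate model of the S&P 500 level by year:
const sp500 = (year) => 4000 * Math.pow(1.05, year - 2022);

// A personal income model that depends on it:
const futureIncome = (year) => 50000 + 10 * sp500(year);

console.log(futureIncome(2030)); // calls the S&P 500 model internally
```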
Challenges like these require some programmatic architecture where functions can be fully isolated/sandboxed and downloaded and run on the fly. There are very few web application infrastructures aimed to do things like this, I assume in part because of the apparent difficulty.
Python is open source and has the most open-source tooling for probabilistic work. Ought's [Ergo](https://github.com/oughtinc/ergo) is in Python, and their Elicit uses Ergo (I believe). [Pyro](https://pyro.ai/) and [Edward](http://edwardlib.org/), two of the most recent and advanced probabilistic programming languages, are accessible in Python. Generally, Python is the obvious choice.
Unfortunately, the current tooling to run small embedded Python programs, particularly in the browser, is quite mediocre. There are a few attempts to bring Python directly to the browser, like [Pyodide](https://hacks.mozilla.org/2019/04/pyodide-bringing-the-scientific-python-stack-to-the-browser/), but these are quite early and relatively poorly supported. If you want to run a bunch of Python jobs on demand, you could use Serverless platforms like [AWS Lambda](https://aws.amazon.com/lambda/) or something more specialized like [PythonAnywhere](https://www.pythonanywhere.com/). Even these are relatively young and raise challenges around speed, cost, and complexity.
I've looked a fair bit into various solutions. I think that for at least the next 5 to 15 years, the Python solutions will be challenging to run as conveniently as Javascript solutions would. During this time it's expected that Python will have to run on separate servers, and this raises issues of speed, cost, and complexity.
At [Guesstimate](https://www.getguesstimate.com/), we experimented with solutions that had sampling running on a server and found this to hurt the experience. We tested latency of around 40ms to 200ms. Being able to see the results of calculations as you type is a big deal and server computation prevented this. It's possible that newer services with global/local server infrastructures could help here (as opposed to setups with only 10 servers spread around globally), but it would be tricky. [Fly.io](https://fly.io/) launched in the last year, maybe that would be a decent fit for near-user computation.
Basically, at this point, it seems important that Squiggle programs could be easily imported and embedded in the browser and servers, and for this, Javascript currently seems like the best bet. Javascript currently has poor support for probability, but writing our own probability libraries is more feasible than making Python portable. All of the options seem fairly mediocre, but Javascript a bit less so.
Javascript obviously runs well in the browser, but its versatility is greater than that. [Observable](https://observablehq.com/) and other in-browser Javascript coding platforms load in [NPM](https://www.npmjs.com/) libraries on the fly to run directly in the browser, which demonstrates that such functionality is possible. It's [possible](https://code.google.com/archive/p/pyv8/) (though I imagine a bit rough) to call Javascript programs from Python.
ReasonML compiles to OCaml before it compiles to Javascript. I've found it convenient for writing complicated code and now am hesitant to go back to a dynamic, non-functional language. There's definitely a whole lot to do (the existing Javascript support for math is very limited), but at least there are decent approaches to doing it.
I imagine the landscape will change a lot in the next 3 to 10 years. I'm going to continue to keep an eye on the space. If things change I could very much imagine pursuing a rewrite, but I think it will be a while before any change seems obvious.
## Using Squiggle with other languages
Once the basics of Squiggle are set up, it could be used to describe the results of models that come from other programs. Similar to how many programming languages have ORMs to generate custom SQL statements, similar tools could be made to generate Squiggle functions. The important thing to grok is that Squiggle functions are submitted information, not just internally useful tools. If there were an API to accept “predictions”, people would submit Squiggle code snippets directly to this API.
*I'd note here that I find it somewhat interesting how few public APIs do accept code snippets. I could imagine a version of Facebook where you could submit a Javascript function that would take in information about a post and return a number that would be used for ranking it in your feed. This kind of functionality seems like it could be very powerful. My impression is that it's currently thought to be too hard to do given existing technologies. This of course is not a good sign for the feasibility of my proposal here, but this course seems like a necessary one to take at some point.*
### Example #1:
Say you calculate a few parameters, but know they represent a multimodal combination of a normal distribution and a uniform distribution. You want to submit that as your prediction or estimate via the API of Metaculus or Foretold. You could write that as (in Javascript):
```
var squiggleValue = `mm(normal(${norm.mean}, ${norm.stdev}), uniform(0, ${uni.max}))`
```
The alternative to this is that you send a bunch of X-Y coordinates representing the distribution, but this isn't good. It would require you to load the necessary library, do all the math on your end, and then send what is now a both approximated and much longer form to the server.
With Squiggle, you don't need to calculate the shape of the function in your code, you just need to express it symbolically and send that off.
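For instance, the submission could be a single HTTP call; the endpoint and payload shape below are hypothetical, shown only to make the point that what gets sent is a short string rather than a table of coordinates:
```
// Hypothetical endpoint and payload; not a real Metaculus or Foretold API.
const norm = { mean: 2.3, stdev: 0.4 };
const uni = { max: 9 };
const squiggleValue = `mm(normal(${norm.mean}, ${norm.stdev}), uniform(0, ${uni.max}))`;

fetch("https://example.com/api/predictions", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ questionId: "some-question-id", value: squiggleValue }),
}).catch(console.error);
```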
### Example #2:
Say you want to describe a distribution with a few or a bunch of calculated CDF points. You could do this by wrapping these points into a function that would convert them into a smooth distribution using one of several possible interpolation methods. Maybe in Javascript this would be something like,
```
var points = [[1, 30], [4, 40], [50,70]];
var squiggleValue = `interpolatePoints(${points}, metalog)`
```
I could imagine it is possible that the majority of distributions generated from other code would be sent this way. However, I can't tell what the specifics of that will be, or what interpolation strategies may be favored. Doing it with many options would allow us to wait and learn what seems to be best. If there is one syntax used an overwhelming proportion of the time, perhaps that could be separated into its own simpler format.
### Example #3:
Say you want to estimate Tesla stock at every point in the next 10 years. You decide to estimate this using a simple analytical equation, where you predict that the price of Tesla stock can be modeled as growing by a mean of -3 to 8 percent each year from the current price using a normal distribution (apologies to Nassim Taleb).
You have a script that fetches Tesla's current stock price, then uses that in the following string template:
```
var squiggleValue = `(t) => ${current_price} * (0.97 to 1.08)^t`
```
It may seem a bit silly to not just fetch Tesla's price from within Squiggle, but it does help separate concerns. Data fetching within Squiggle would raise a bunch of issues, especially when trying to score Squiggle functions.
## History: From Guesstimate to Squiggle
The history of “Squiggle” goes back to early Guesstimate. It's been quite a meandering journey. I was never really expecting things to go the particular way they did, but at least am relatively satisfied with how things are right now. I imagine these details won't be interesting to most readers, but wanted to include them for those particularly close to the project, or for those curious about what I personally have been up to.
90% of the work on Squiggle has been on a probability distribution editor (“A high-level ReasonML library for probability distribution functions”). This has been a several-year process, including my time with Guesstimate. The other 10% of the work, with the custom functions, is much more recent.
Things started with [Guesstimate](https://www.getguesstimate.com/) in around 2016. The Guesstimate editor used a simple sampling setup. It was built with [Math.js](https://mathjs.org/) plus a bit of tooling to support sampling and a few custom functions.[1] The editor produced histograms, as opposed to smooth shapes.
When I started working on [Foretold](https://www.foretold.io/), in 2018, I hoped we could directly take the editor from Guesstimate. It soon became clear the histograms it produced wouldn't be adequate.
In Foretold we needed to score distributions. Scoring distributions requires finding the probability density function at different points, and that requires a continuous representation of the distribution. Converting random samples to continuous distributions requires kernel density estimation. I tried simple kernel density estimation, but couldn't get this to work well. Randomness in distribution shape is quite poor for forecasting users. It brings randomness into scoring, it looks strange (confusing), and it's terrible when there are long tails.
Limited distribution editors like those in Metaculus or Elicit don't use sampling; they use numeric techniques. For example, to take the pointwise sum of three uniform distributions, they would take the pdfs at each point and add them vertically. Numeric techniques are well defined for a narrow subset of combinations of distributions. The main problem with these editors is that they are (so far) highly limited in flexibility; you can only make linear combinations of single kinds of distributions (logistic distributions in Metaculus and uniform ones with Elicit).
It took a while, but we eventually created a simple editor that would use numeric techniques to combine a small subset of distributions and functions using a semi-flexible string representation. If users would request functionality not available in this editor (like multiplying two distributions together, which would require sampling), it would fall back to using the old editor. This was useful but suboptimal. It required us to keep two versions of the editor with slightly different syntaxes, which was not fun for users to keep track of.
The numeric solver could figure out syntaxes like,
```
multimodal(normal(5,2), uniform(10,13), [.2,.8])
```
But it would break anytime you wanted to use any other function, like,
```
multimodal(normal(5,2) + lognormal(1,1.5), uniform(10,13), [.2,.8])*100
```
The next step was making a system that would more precisely use numeric methods and Monte Carlo sampling.
At this point we needed to replace most of Math.js. Careful control over the use of Monte Carlo techniques vs. numeric techniques required us to write our own interpreter. [Sebastian Kosch](https://aldusleaf.org/) did the first main stab at this. I then read a fair bit about how to write interpreted languages and fleshed out the functionality. If you're interested, the book [Crafting Interpreters](https://craftinginterpreters.com/) is pretty great on this topic.
At this point we were 80% of the way there to having simple variables and functions, so those made sense to add as well. Once we had functions, it was simple to try out visualizations of single-variable distributions, something I've been wanting to test out for a long time. This proved surprisingly fun, though of course it was limited (and still is).
After messing with these functions, and spending a lot more time thinking about them, I decided to focus more on making this a formalized language in order to better explore a few areas. This is when I took this language out of its previous application (called WideDomain, it's not important now), and renamed it Squiggle.
[1] It was great this worked at the time; writing my own version may have been too challenging, so it's possible this hack was counterfactually responsible for Guesstimate.

@@ -0,0 +1,194 @@
---
slug: overview-1
title: Squiggle Overview
authors: ozzie
---
I've spent a fair bit of time over the last several years iterating on a text-based probability distribution editor (the ``5 to 10`` input editor in Guesstimate and Foretold). Recently I've added some programming language functionality to it, and have decided to refocus it as a domain-specific language.
The language is currently called *Squiggle*. Squiggle is made for expressing distributions and functions that return distributions. I hope that it can be used one day for submitting complex predictions on Foretold and other platforms.
Right now Squiggle is very much a research endeavor. I'm making significant sacrifices in stability and deployment in order to test out exciting possible features. If it were being developed in a tech company, it would be in the “research” or “labs” division.
You can mess with the current version of Squiggle [here](https://squiggle-language.com/dist-builder). Consider it in pre-alpha stage. If you do try it out, please do contact me with questions and concerns. It is still fairly buggy and undocumented.
I expect to spend a lot of time on Squiggle in the next several months or years. I'm curious to get feedback from the community. In the short term I'd like to get high-level feedback; in the longer term I'd appreciate user testing. If you have thoughts or would care to just have a call and chat, please reach out! We ([The Quantified Uncertainty Research Institute](https://quantifieduncertainty.org/)) have some funding now, so I'm also interested in contractors or hires if someone is a really great fit.
Squiggle was previously introduced in a short talk that was transcribed [here](https://www.lesswrong.com/posts/kTzADPE26xh3dyTEu/multivariate-estimation-and-the-squiggly-language), and Nuño Sempere wrote a post about using it [here](https://www.lesswrong.com/posts/kTzADPE26xh3dyTEu/multivariate-estimation-and-the-squiggly-language).
*Note: the code for this has developed since my time on Guesstimate. With Guesstimate, I had one cofounder, Matthew McDermott. During the last two years, I've had a lot of help from a handful of programmers and enthusiasts. Many thanks to Sebastian Kosch and Nuño Sempere, who both contributed. I'll refer to this vague collective as “we” throughout this post.*
---
# Video Demo
<iframe width="675" height="380" src="https://www.youtube.com/embed/kJLybQWujco" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
## A Quick Tour
The syntax is forked from Guesstimate and Foretold.
**A simple normal distribution**
```
normal(5,2)
```
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/61eb60718ef462e8788ae077aff49e80561774e1917fecf8.png/w_512)
You may notice that unlike Guesstimate, the distribution is nearly perfectly smooth. It's this way because it doesn't use sampling for (many) functions where it doesn't need to.
**Lognormal shorthand**
```
5 to 10
```
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/a6138557d6e6cc00a2fa641e83d7778df31a8773840ec8d8.png/w_519)
This results in a lognormal distribution, with 5 and 10 being the 5th and 95th percentiles respectively.
You can also write lognormal distributions as `lognormal(1,2)` or `lognormal({mean: 3, stdev: 8})`.
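Assuming the shorthand treats 5 and 10 as the 5th and 95th percentiles of a lognormal, the underlying parameters can be recovered in a couple of lines (a sketch of the math, not Squiggle's implementation):
```
// If ln(X) ~ Normal(mu, sigma), the 5th and 95th percentiles of X are
// exp(mu - z * sigma) and exp(mu + z * sigma), with z ≈ 1.6449.
const low = 5, high = 10, z = 1.6449;
const mu = (Math.log(low) + Math.log(high)) / 2;          // ≈ 1.956
const sigma = (Math.log(high) - Math.log(low)) / (2 * z); // ≈ 0.211
console.log({ mu, sigma });
```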
**Mix distributions with the multimodal function**
```
multimodal(normal(5,2), uniform(14,19), [.2, .8])
```
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/f87a3805adb027cc7f4c42c75a82f96cf9443ba4517ac93d.png/w_1252)
You can also use the shorthand *mm*(), and add an array at the end to represent the weights of each combined distribution.
*Note: Right now, in the demo, I believe “multimodal” is broken, but you can use “mm”.*
**Mix distributions with discrete data**
*Note: This is particularly buggy.*
```
multimodal(0, 10, normal(4,5), [.4,.1, .5])
```
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/f87a3805adb027cc7f4c42c75a82f96cf9443ba4517ac93d.png/w_1252)
**Variables**
```
expected_case = normal(5,2)
long_tail = 3 to 1000
multimodal(expected_case, long_tail, [.2,.8])
```
**Simple calculations**
When calculations are done on two distributions and there is no trivial symbolic solution, the system will use Monte Carlo sampling for these select combinations. This assumes they are perfectly independent.
```
multimodal(normal(5,2) + uniform(10,3), (5 to 10) + 10) * 100
```
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/1bdf6ccf847193daf9f344f2eeccb751500b467534d631d9.png/w_930)
**Pointwise calculations**
We have an infix for what can be described as pointwise distribution calculations. Calculations are done along the y-axis instead of the x-axis, so to speak. “Pointwise” multiplication is equivalent to an independent Bayesian update. After each calculation, the distributions are renormalized.
```
normal(10,4) .* normal(14,3)
```
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/515ca5df0ba793341939fa965645775fd8d505a1a6350e89.png/w_840)
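As a minimal numeric sketch of what `.*` does under the hood, assuming both operands have already been rendered onto a shared grid (this is my own illustration, not Squiggle's code):
```
// Pointwise multiply two densities on a shared grid, then renormalize
// so the result integrates to 1 (trapezoid rule).
const xs = Array.from({ length: 301 }, (_, i) => i * 0.1); // grid from 0 to 30
const normalPdf = (x, m, s) =>
  Math.exp(-((x - m) ** 2) / (2 * s * s)) / (s * Math.sqrt(2 * Math.PI));

const a = xs.map((x) => normalPdf(x, 10, 4));
const b = xs.map((x) => normalPdf(x, 14, 3));
const product = xs.map((_, i) => a[i] * b[i]);

// Trapezoid integral of the unnormalized product:
let integral = 0;
for (let i = 1; i < xs.length; i++) {
  integral += ((product[i - 1] + product[i]) / 2) * (xs[i] - xs[i - 1]);
}
const posterior = product.map((y) => y / integral); // the renormalized result
```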
**First-Class Functions**
When a function is written, we can display a plot of that function for many values of a single variable. The below plots treat the single variable input on the x-axis, and show various percentiles going from the median outwards.
```
myFunction(t) = normal(t,10)
myFunction
```
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/8797c08cb612f74e5376cb10e33d888f08ff170c1cae2fee.png/w_496)
```
myFunction(t) = normal(t^3,t^3.1)
myFunction
```
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/33004fd2282ad10d42608301c4cf8cd9342351410a1e290d.png/w_1378)
## Reasons to Focus on Functions
Up until recently, Squiggle didn't have function support. Going forward this will be the primary feature.
Functions are useful for two distinct purposes. First, they allow composition of models. Second, they can be submitted directly as predictions. For instance, in theory you could predict, “For any point in time T, and company N, from now until 2050, this function will predict the market cap of the company.”
At this point I'm convinced of a few things:
* It's possible to intuitively write distributions and functions that return distributions, with the right tooling.
* Functions that return distributions are highly preferable to specific distributions, if possible.
* It would also be great if existing forecasting models could be distilled into common formats.
* There's very little activity in this space now.
* There's a high value of information in further exploring the space.
* Writing a small DSL like this will be a fair bit of work, but can be feasible if the functionality is kept limited.
* Also, there are several other useful aspects about having a simple language equivalent for Guesstimate style models.
I think that this is a highly neglected area and I'm surprised it hasn't been explored more. It's possible that doing a good job is too challenging for a small team, but I think it's worth investigating further.
## What Squiggle is Meant For
The first main purpose of Squiggle is to help facilitate the creation of judgementally estimated distributions and functions.
Existing solutions assume the use of either data analysis and models, or judgemental estimation for points, but not judgemental estimation to intuit models. Squiggle is meant to allow people to estimate functions in situations where there is very little data available, and it's assumed all or most variables will be intuitively estimated.
A second possible use case is to embed the results of computational models. Functions in Squiggle are rather portable and composable. Squiggle (or better future tools) could help make the results of these models interoperable.
![](https://39669.cdn.cke-cs.com/rQvD3VnunXZu34m86e5f/images/6f4f45b5180f3dc2ac7861237c9ee01366e7177da8a8fd90.png/w_1814)
One thing that Squiggle is **not** meant for is heavy calculation. It's not a probabilistic programming language, because it doesn't specialize in inference. Squiggle is a high-level language and is not great for performance optimization. The idea is that if you need to do heavy computational modeling, you'd do so using separate tools, then convert the results to lookup tables or other simple functions that you could express in Squiggle.
One analogy is to think about the online estimation “calculators” and “model explorers”. See the [microCOVID Project](https://www.microcovid.org/?distance=normal&duration=120&interaction=oneTime&personCount=20&riskProfile=closedPod20&setting=outdoor&subLocation=US_06001&theirMask=basic&topLocation=US_06&voice=normal&yourMask=basic) calculator and the [COVID-19 Predictions](https://covid19.healthdata.org/united-states-of-america?view=total-deaths&tab=trend) . In both of these, I assume there was some data analysis and processing stage done on the local machines of the analysts. The results were translated into some processed format (like a set of CSV files), and then custom code was written for a front end to analyze and display that data.
If they were to use a hypothetical front end unified format, this would mean converting their results into a Javascript function that could be called using a standardized interface. This standardization would make it easier for these calculators to be called by third-party widgets and UIs, or for them to be downloaded and called from other workflows. The priority here is that the calculators could be run quickly and that the necessary code and data is minimized in size. Heavy calculation and analysis would still happen separately.
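To make that concrete, here is one hypothetical shape such a standardized JavaScript module could take; the field names and data are invented, not an agreed format:
```
// Hypothetical "unified front end" module: the heavy analysis happened elsewhere,
// and what ships is a small precomputed table plus an evaluate() function.
module.exports = {
  name: "deaths-by-day",
  inputs: [{ name: "day", type: "integer", min: 0, max: 120 }],
  table: { 0: 200, 30: 950, 60: 700, 90: 400, 120: 250 }, // e.g. exported from a CSV
  evaluate({ day }) {
    // Linear interpolation between the precomputed points.
    const keys = Object.keys(this.table).map(Number).sort((a, b) => a - b);
    if (day <= keys[0]) return this.table[keys[0]];
    for (let i = 1; i < keys.length; i++) {
      if (day <= keys[i]) {
        const [x0, x1] = [keys[i - 1], keys[i]];
        const [y0, y1] = [this.table[x0], this.table[x1]];
        return y0 + ((day - x0) / (x1 - x0)) * (y1 - y0);
      }
    }
    return this.table[keys[keys.length - 1]];
  },
};
// usage: require("./deaths-by-day").evaluate({ day: 45 }) -> 825
```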
### Future “Comprehensive” Uses
On the more comprehensive end, it would be interesting to figure out how individuals or collectives could make large clusters of these functions, where many functions call other functions, and continuous data is pulled in. The latter would probably require some server/database setup that ingests Squiggle files.
I think the comprehensive end is significantly more exciting than simpler use cases but also significantly more challenging. It's equivalent to going from Docker the core technology, to Docker Hub, then making an attempt at Kubernetes. Here we barely have a prototype of the proverbial Docker, so there's a lot of work to do.
### Why doesn't this exist already?
I will briefly pause here to flag that I believe the comprehensive end seems fairly obvious as a goal and I'm quite surprised it hasn't really been attempted yet, from what I can tell. I imagine such work could be useful to many important actors, conditional on them understanding how to use it.
My best guess is this is due to some mix between:
* It's too technical for many people to be comfortable with.
* There's a fair amount of work to be done, and it's difficult to monetize quickly.
* There's been an odd, long-standing cultural bias against clearly intuitive estimates.
* The work is substantially harder than I realize.
# Related Tools
**Guesstimate**
I previously made Guesstimate and take a lot of inspiration from it. Squiggle will be a language that uses pure text, not a spreadsheet. Perhaps Squiggle could one day be made available within Guesstimate cells.
**Ergo**
[Ought](https://ought.org/) has a Python library called [Ergo](https://github.com/oughtinc/ergo) with a lot of tooling for judgemental forecasting. It's written in Python, so it works well with the Python ecosystem. My impression is that it's made much more to do calculations of specific distributions than to represent functions. Maybe Ergo results could eventually be embedded into Squiggle functions.
**Elicit**
[Elicit](https://elicit.org/) is also made by [Ought](https://ought.org/). It does a few things; I recommend just checking it out. Perhaps Squiggle could one day be an option in Elicit as a forecasting format.
**Causal**
[Causal](https://www.causal.app/) is a startup that makes it simple to represent distributions over time. It seems fairly optimized for clever businesses. I imagine it probably is going to be the most polished and easy-to-use tool in its targeted use cases for quite a while. Causal has an innovative UI with HTML blocks for the different distributions; it's neither a spreadsheet like Guesstimate nor a programming language, but something in between.
**Spreadsheets**
Spreadsheets are really good at organizing large tables of parameters for complex estimations. Regular text files aren't. I could imagine ways Squiggle could have native support for something like Markdown Tables that get converted into small editable spreadsheets when being edited. Another solution would be to allow the use of JSON or TOML in the language, and auto-translate that into easier tools like tables in editors that allow for them.[2]
**Probabilistic Programming Languages**
There are a bunch of powerful Probabilistic Programming Languages out there. These typically specialize in doing inference on specific data sets. Hopefully, they could be complementary to Squiggle in the long term. As said earlier, Probabilistic Programming Languages are great for computationally intense operations, and Squiggle is not.
**Prediction Markets and Prediction Tournaments**
Most of these tools have fairly simple inputs or forecasting types. If Squiggle becomes polished, I plan to encourage its use for these platforms. I would like to see Squiggle as an open-source, standardized language, but it will be a while (if ever) for it to be stable enough.
**Declarative Programming Languages**
Many declarative programming languages seem relevant. There are several logical or ontological languages, but my impression is that most assume certainty, which seems vastly suboptimal. I think that there's a lot of room for exploration in languages that allow users to basically state all of their beliefs probabilistically, including statements about the relationships between these beliefs. The purpose wouldn't be to find one specific variable (as is often true with probabilistic programming languages), but more to express one's beliefs to those interested, or do various kinds of resulting analyses.
**Knowledge Graphs**
Knowledge graphs seem like the best tool for describing semantic relationships in ways that anyone outside a small group could understand. I tried making my own small knowledge graph library called [Ken](https://kenstandard.com/), which we've been using a little in [Foretold](https://www.foretold.io/). If Squiggle winds up achieving the comprehensive vision mentioned, I imagine there will be a knowledge graph somewhere.
For example, someone could write a function that takes in a “standard location schema” and returns a calculation of the number of piano tuners at that location. Later when someone queries Wikipedia for a town, it will recognize that that town has data on [Wikidata](https://www.wikidata.org/wiki/Wikidata:Main_Page), which can be easily converted into the necessary schema.
## Next Steps
Right now I'm the only active developer of Squiggle. My work is split between Squiggle, writing blog posts and content, and other administrative and organizational duties for QURI.
My first plan is to add some documentation, clean up the internals, and begin writing short programs for personal and group use. If things go well and we could find a good developer to hire, I would be excited to see what we could do after a year or two.
Ambitious versions of Squiggle would be a *lot* of work (as in, 50 to 5000+ engineer-years of work), so I want to take things one step at a time. I would hope that if progress is sufficiently exciting, it would be possible to either raise sufficient funding or encourage other startups and companies to attempt their own similar solutions.
## Footnotes
[1] The main challenge comes from having a language that represents symbolic mathematics and programming statements. Both of these independently seem challenging, and I have yet to find a great way to combine them. If you read this and have suggestions for learning about making mathematical languages (like Wolfram), please do let me know.
[2] I have a distaste for JSON in cases that are primarily written and read by users. JSON was really optimized for simplicity for programming, not people. My guess is that it was a mistake to have so many modern configuration systems be in JSON instead of TOML or similar.

5
docs/blog/authors.yml Normal file
@@ -0,0 +1,5 @@
ozzie:
name: Ozzie Gooen
title: QURI President
url: https://forum.effectivealtruism.org/users/oagr
image_url: https://avatars.githubusercontent.com/u/377065?v=4

@@ -0,0 +1,7 @@
---
sidebar_position: 3
---
# Javascript Library

38
docs/docs/Language.md Normal file
@@ -0,0 +1,38 @@
---
sidebar_position: 2
---
# Squiggle Language
## Distributions
```js
normal(a,b)
uniform(a,b)
lognormal(a,b)
lognormalFromMeanAndStdDev(mean, stdev)
beta(a,b)
exponential(a)
triangular(a,b,c)
mm(a,b,c, [1,2,3])
cauchy() //todo
pareto() //todo
```
## Functions
```js
truncate() //todo
leftTruncate() //todo
rightTruncate() //todo
```
## Functions
```js
pdf(distribution, float)
inv(distribution, float)
cdf(distribution, float)
mean(distribution)
sample(distribution)
scaleExp(distribution, float)
scaleMultiply(distribution, float)
scaleLog(distribution, float)
```

101
docs/docusaurus.config.js Normal file
@@ -0,0 +1,101 @@
// @ts-check
// Note: type annotations allow type checking and IDEs autocompletion
const lightCodeTheme = require('prism-react-renderer/themes/github');
const darkCodeTheme = require('prism-react-renderer/themes/dracula');
/** @type {import('@docusaurus/types').Config} */
const config = {
title: 'Squiggle',
tagline: 'A programming language for probabilistic estimation',
url: 'https://your-docusaurus-test-site.com',
baseUrl: '/',
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'warn',
favicon: 'img/favicon.ico',
organizationName: 'facebook', // Usually your GitHub org/user name.
projectName: 'docusaurus', // Usually your repo name.
presets: [
[
'classic',
/** @type {import('@docusaurus/preset-classic').Options} */
({
docs: {
sidebarPath: require.resolve('./sidebars.js'),
// Please change this to your repo.
editUrl: 'https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/',
},
blog: {
showReadingTime: true,
// Please change this to your repo.
editUrl:
'https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/',
},
theme: {
customCss: require.resolve('./src/css/custom.css'),
},
}),
],
],
themeConfig:
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
({
navbar: {
title: 'Squiggle',
logo: {
alt: 'Squiggle Logo',
src: 'img/logo.svg',
},
items: [
{
type: 'doc',
docId: 'Language',
position: 'left',
label: 'Tutorial',
},
{to: '/blog', label: 'Blog', position: 'left'},
{
href: 'https://github.com/foretold-app/squiggle',
label: 'GitHub',
position: 'right',
},
],
},
footer: {
style: 'dark',
links: [
{
title: 'Docs',
items: [
{
label: 'Tutorial',
to: '/docs/intro',
},
],
},
{
title: 'More',
items: [
{
label: 'Blog',
to: '/blog',
},
{
label: 'GitHub',
href: 'https://github.com/foretold-app/squiggle',
},
],
},
],
copyright: `CC0. Built with Docusaurus.`,
},
prism: {
theme: lightCodeTheme,
darkTheme: darkCodeTheme,
},
}),
};
module.exports = config;

37
docs/package.json Normal file
@@ -0,0 +1,37 @@
{
"name": "docs",
"version": "0.0.0",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
"start": "docusaurus start",
"build": "docusaurus build",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
"clear": "docusaurus clear",
"serve": "docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "2.0.0-beta.15",
"@docusaurus/preset-classic": "2.0.0-beta.15",
"@mdx-js/react": "^1.6.21",
"clsx": "^1.1.1",
"prism-react-renderer": "^1.2.1",
"react": "^17.0.1",
"react-dom": "^17.0.1"
},
"browserslist": {
"production": [
">0.5%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
}
}

31
docs/sidebars.js Normal file
@@ -0,0 +1,31 @@
/**
* Creating a sidebar enables you to:
- create an ordered group of docs
- render a sidebar for each doc of that group
- provide next/previous navigation
The sidebars can be generated from the filesystem, or explicitly defined here.
Create as many sidebars as you want.
*/
// @ts-check
/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
const sidebars = {
// By default, Docusaurus generates a sidebar from the docs folder structure
tutorialSidebar: [{type: 'autogenerated', dirName: '.'}],
// But you can create a sidebar manually
/*
tutorialSidebar: [
{
type: 'category',
label: 'Tutorial',
items: ['hello'],
},
],
*/
};
module.exports = sidebars;

View File

@ -0,0 +1,64 @@
import React from 'react';
import clsx from 'clsx';
import styles from './HomepageFeatures.module.css';
const FeatureList = [
{
title: 'Easy to Use',
Svg: require('../../static/img/undraw_docusaurus_mountain.svg').default,
description: (
<>
Docusaurus was designed from the ground up to be easily installed and
used to get your website up and running quickly.
</>
),
},
{
title: 'Focus on What Matters',
Svg: require('../../static/img/undraw_docusaurus_tree.svg').default,
description: (
<>
Docusaurus lets you focus on your docs, and we&apos;ll do the chores. Go
ahead and move your docs into the <code>docs</code> directory.
</>
),
},
{
title: 'Powered by React',
Svg: require('../../static/img/undraw_docusaurus_react.svg').default,
description: (
<>
Extend or customize your website layout by reusing React. Docusaurus can
be extended while reusing the same header and footer.
</>
),
},
];
function Feature({Svg, title, description}) {
return (
<div className={clsx('col col--4')}>
<div className="text--center">
<Svg className={styles.featureSvg} alt={title} />
</div>
<div className="text--center padding-horiz--md">
<h3>{title}</h3>
<p>{description}</p>
</div>
</div>
);
}
export default function HomepageFeatures() {
return (
<section className={styles.features}>
<div className="container">
<div className="row">
{FeatureList.map((props, idx) => (
<Feature key={idx} {...props} />
))}
</div>
</div>
</section>
);
}

View File

@ -0,0 +1,11 @@
.features {
display: flex;
align-items: center;
padding: 2rem 0;
width: 100%;
}
.featureSvg {
height: 200px;
width: 200px;
}

39
docs/src/css/custom.css Normal file
View File

@ -0,0 +1,39 @@
/**
* Any CSS included here will be global. The classic template
* bundles Infima by default. Infima is a CSS framework designed to
* work well for content-centric websites.
*/
/* You can override the default Infima variables here. */
:root {
--ifm-color-primary: #2e8555;
--ifm-color-primary-dark: #29784c;
--ifm-color-primary-darker: #277148;
--ifm-color-primary-darkest: #205d3b;
--ifm-color-primary-light: #33925d;
--ifm-color-primary-lighter: #359962;
--ifm-color-primary-lightest: #3cad6e;
--ifm-code-font-size: 95%;
}
/* For readability concerns, you should choose a lighter palette in dark mode. */
html[data-theme='dark'] {
--ifm-color-primary: #25c2a0;
--ifm-color-primary-dark: #21af90;
--ifm-color-primary-darker: #1fa588;
--ifm-color-primary-darkest: #1a8870;
--ifm-color-primary-light: #29d5b0;
--ifm-color-primary-lighter: #32d8b4;
--ifm-color-primary-lightest: #4fddbf;
}
.docusaurus-highlight-code-line {
background-color: rgba(0, 0, 0, 0.1);
display: block;
margin: 0 calc(-1 * var(--ifm-pre-padding));
padding: 0 var(--ifm-pre-padding);
}
html[data-theme='dark'] .docusaurus-highlight-code-line {
background-color: rgba(0, 0, 0, 0.3);
}

35
docs/src/pages/index.js Normal file
View File

@ -0,0 +1,35 @@
import React from 'react';
import clsx from 'clsx';
import Layout from '@theme/Layout';
import Link from '@docusaurus/Link';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import styles from './index.module.css';
import HomepageFeatures from '../components/HomepageFeatures';
function HomepageHeader() {
const {siteConfig} = useDocusaurusContext();
return (
<header className={clsx('hero hero--primary', styles.heroBanner)}>
<div className="container">
<h1 className="hero__title">{siteConfig.title}</h1>
<p className="hero__subtitle">{siteConfig.tagline}</p>
<div className={styles.buttons}>
</div>
</div>
</header>
);
}
export default function Home() {
const {siteConfig} = useDocusaurusContext();
return (
<Layout
title={`Hello from ${siteConfig.title}`}
description="Description will go into a meta tag in <head />">
<HomepageHeader />
<main>
<HomepageFeatures />
</main>
</Layout>
);
}

View File

@ -0,0 +1,23 @@
/**
* CSS files with the .module.css suffix will be treated as CSS modules
* and scoped locally.
*/
.heroBanner {
padding: 4rem 0;
text-align: center;
position: relative;
overflow: hidden;
}
@media screen and (max-width: 966px) {
.heroBanner {
padding: 2rem;
}
}
.buttons {
display: flex;
align-items: center;
justify-content: center;
}

View File

@ -0,0 +1,7 @@
---
title: Markdown page example
---
# Markdown page example
You don't need React to write simple standalone pages.

0
docs/static/.nojekyll vendored Normal file
View File

BIN
docs/static/img/docusaurus.png vendored Normal file

Binary file not shown.


BIN
docs/static/img/favicon.ico vendored Normal file

Binary file not shown.


1
docs/static/img/logo.svg vendored Normal file

File diff suppressed because one or more lines are too long


Binary file not shown.


Binary file not shown.


View File

@ -0,0 +1,170 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1088" height="687.962" viewBox="0 0 1088 687.962">
<g id="Group_12" data-name="Group 12" transform="translate(-57 -56)">
<g id="Group_11" data-name="Group 11" transform="translate(57 56)">
<path id="Path_83" data-name="Path 83" d="M1017.81,560.461c-5.27,45.15-16.22,81.4-31.25,110.31-20,38.52-54.21,54.04-84.77,70.28a193.275,193.275,0,0,1-27.46,11.94c-55.61,19.3-117.85,14.18-166.74,3.99a657.282,657.282,0,0,0-104.09-13.16q-14.97-.675-29.97-.67c-15.42.02-293.07,5.29-360.67-131.57-16.69-33.76-28.13-75-32.24-125.27-11.63-142.12,52.29-235.46,134.74-296.47,155.97-115.41,369.76-110.57,523.43,7.88C941.15,276.621,1036.99,396.031,1017.81,560.461Z" transform="translate(-56 -106.019)" fill="#3f3d56"/>
<path id="Path_84" data-name="Path 84" d="M986.56,670.771c-20,38.52-47.21,64.04-77.77,80.28a193.272,193.272,0,0,1-27.46,11.94c-55.61,19.3-117.85,14.18-166.74,3.99a657.3,657.3,0,0,0-104.09-13.16q-14.97-.675-29.97-.67-23.13.03-46.25,1.72c-100.17,7.36-253.82-6.43-321.42-143.29L382,283.981,444.95,445.6l20.09,51.59,55.37-75.98L549,381.981l130.2,149.27,36.8-81.27L970.78,657.9l14.21,11.59Z" transform="translate(-56 -106.019)" fill="#f2f2f2"/>
<path id="Path_85" data-name="Path 85" d="M302,282.962l26-57,36,83-31-60Z" opacity="0.1"/>
<path id="Path_86" data-name="Path 86" d="M610.5,753.821q-14.97-.675-29.97-.67L465.04,497.191Z" transform="translate(-56 -106.019)" opacity="0.1"/>
<path id="Path_87" data-name="Path 87" d="M464.411,315.191,493,292.962l130,150-132-128Z" opacity="0.1"/>
<path id="Path_88" data-name="Path 88" d="M908.79,751.051a193.265,193.265,0,0,1-27.46,11.94L679.2,531.251Z" transform="translate(-56 -106.019)" opacity="0.1"/>
<circle id="Ellipse_11" data-name="Ellipse 11" cx="3" cy="3" r="3" transform="translate(479 98.962)" fill="#f2f2f2"/>
<circle id="Ellipse_12" data-name="Ellipse 12" cx="3" cy="3" r="3" transform="translate(396 201.962)" fill="#f2f2f2"/>
<circle id="Ellipse_13" data-name="Ellipse 13" cx="2" cy="2" r="2" transform="translate(600 220.962)" fill="#f2f2f2"/>
<circle id="Ellipse_14" data-name="Ellipse 14" cx="2" cy="2" r="2" transform="translate(180 265.962)" fill="#f2f2f2"/>
<circle id="Ellipse_15" data-name="Ellipse 15" cx="2" cy="2" r="2" transform="translate(612 96.962)" fill="#f2f2f2"/>
<circle id="Ellipse_16" data-name="Ellipse 16" cx="2" cy="2" r="2" transform="translate(736 192.962)" fill="#f2f2f2"/>
<circle id="Ellipse_17" data-name="Ellipse 17" cx="2" cy="2" r="2" transform="translate(858 344.962)" fill="#f2f2f2"/>
<path id="Path_89" data-name="Path 89" d="M306,121.222h-2.76v-2.76h-1.48v2.76H299V122.7h2.76v2.759h1.48V122.7H306Z" fill="#f2f2f2"/>
<path id="Path_90" data-name="Path 90" d="M848,424.222h-2.76v-2.76h-1.48v2.76H841V425.7h2.76v2.759h1.48V425.7H848Z" fill="#f2f2f2"/>
<path id="Path_91" data-name="Path 91" d="M1144,719.981c0,16.569-243.557,74-544,74s-544-57.431-544-74,243.557,14,544,14S1144,703.413,1144,719.981Z" transform="translate(-56 -106.019)" fill="#3f3d56"/>
<path id="Path_92" data-name="Path 92" d="M1144,719.981c0,16.569-243.557,74-544,74s-544-57.431-544-74,243.557,14,544,14S1144,703.413,1144,719.981Z" transform="translate(-56 -106.019)" opacity="0.1"/>
<ellipse id="Ellipse_18" data-name="Ellipse 18" cx="544" cy="30" rx="544" ry="30" transform="translate(0 583.962)" fill="#3f3d56"/>
<path id="Path_93" data-name="Path 93" d="M624,677.981c0,33.137-14.775,24-33,24s-33,9.137-33-24,33-96,33-96S624,644.844,624,677.981Z" transform="translate(-56 -106.019)" fill="#ff6584"/>
<path id="Path_94" data-name="Path 94" d="M606,690.66c0,15.062-6.716,10.909-15,10.909s-15,4.153-15-10.909,15-43.636,15-43.636S606,675.6,606,690.66Z" transform="translate(-56 -106.019)" opacity="0.1"/>
<rect id="Rectangle_97" data-name="Rectangle 97" width="92" height="18" rx="9" transform="translate(489 604.962)" fill="#2f2e41"/>
<rect id="Rectangle_98" data-name="Rectangle 98" width="92" height="18" rx="9" transform="translate(489 586.962)" fill="#2f2e41"/>
<path id="Path_95" data-name="Path 95" d="M193,596.547c0,55.343,34.719,100.126,77.626,100.126" transform="translate(-56 -106.019)" fill="#3f3d56"/>
<path id="Path_96" data-name="Path 96" d="M270.626,696.673c0-55.965,38.745-101.251,86.626-101.251" transform="translate(-56 -106.019)" fill="#6c63ff"/>
<path id="Path_97" data-name="Path 97" d="M221.125,601.564c0,52.57,22.14,95.109,49.5,95.109" transform="translate(-56 -106.019)" fill="#6c63ff"/>
<path id="Path_98" data-name="Path 98" d="M270.626,696.673c0-71.511,44.783-129.377,100.126-129.377" transform="translate(-56 -106.019)" fill="#3f3d56"/>
<path id="Path_99" data-name="Path 99" d="M254.3,697.379s11.009-.339,14.326-2.7,16.934-5.183,17.757-1.395,16.544,18.844,4.115,18.945-28.879-1.936-32.19-3.953S254.3,697.379,254.3,697.379Z" transform="translate(-56 -106.019)" fill="#a8a8a8"/>
<path id="Path_100" data-name="Path 100" d="M290.716,710.909c-12.429.1-28.879-1.936-32.19-3.953-2.522-1.536-3.527-7.048-3.863-9.591l-.368.014s.7,8.879,4.009,10.9,19.761,4.053,32.19,3.953c3.588-.029,4.827-1.305,4.759-3.2C294.755,710.174,293.386,710.887,290.716,710.909Z" transform="translate(-56 -106.019)" opacity="0.2"/>
<path id="Path_101" data-name="Path 101" d="M777.429,633.081c0,38.029,23.857,68.8,53.341,68.8" transform="translate(-56 -106.019)" fill="#3f3d56"/>
<path id="Path_102" data-name="Path 102" d="M830.769,701.882c0-38.456,26.623-69.575,59.525-69.575" transform="translate(-56 -106.019)" fill="#6c63ff"/>
<path id="Path_103" data-name="Path 103" d="M796.755,636.528c0,36.124,15.213,65.354,34.014,65.354" transform="translate(-56 -106.019)" fill="#6c63ff"/>
<path id="Path_104" data-name="Path 104" d="M830.769,701.882c0-49.139,30.773-88.9,68.8-88.9" transform="translate(-56 -106.019)" fill="#3f3d56"/>
<path id="Path_105" data-name="Path 105" d="M819.548,702.367s7.565-.233,9.844-1.856,11.636-3.562,12.2-.958,11.368,12.949,2.828,13.018-19.844-1.33-22.119-2.716S819.548,702.367,819.548,702.367Z" transform="translate(-56 -106.019)" fill="#a8a8a8"/>
<path id="Path_106" data-name="Path 106" d="M844.574,711.664c-8.54.069-19.844-1.33-22.119-2.716-1.733-1.056-2.423-4.843-2.654-6.59l-.253.01s.479,6.1,2.755,7.487,13.579,2.785,22.119,2.716c2.465-.02,3.317-.9,3.27-2.2C847.349,711.159,846.409,711.649,844.574,711.664Z" transform="translate(-56 -106.019)" opacity="0.2"/>
<path id="Path_107" data-name="Path 107" d="M949.813,724.718s11.36-1.729,14.5-4.591,16.89-7.488,18.217-3.667,19.494,17.447,6.633,19.107-30.153,1.609-33.835-.065S949.813,724.718,949.813,724.718Z" transform="translate(-56 -106.019)" fill="#a8a8a8"/>
<path id="Path_108" data-name="Path 108" d="M989.228,734.173c-12.86,1.659-30.153,1.609-33.835-.065-2.8-1.275-4.535-6.858-5.2-9.45l-.379.061s1.833,9.109,5.516,10.783,20.975,1.725,33.835.065c3.712-.479,4.836-1.956,4.529-3.906C993.319,732.907,991.991,733.817,989.228,734.173Z" transform="translate(-56 -106.019)" opacity="0.2"/>
<path id="Path_109" data-name="Path 109" d="M670.26,723.9s9.587-1.459,12.237-3.875,14.255-6.32,15.374-3.095,16.452,14.725,5.6,16.125-25.448,1.358-28.555-.055S670.26,723.9,670.26,723.9Z" transform="translate(-56 -106.019)" fill="#a8a8a8"/>
<path id="Path_110" data-name="Path 110" d="M703.524,731.875c-10.853,1.4-25.448,1.358-28.555-.055-2.367-1.076-3.827-5.788-4.39-7.976l-.32.051s1.547,7.687,4.655,9.1,17.7,1.456,28.555.055c3.133-.4,4.081-1.651,3.822-3.3C706.977,730.807,705.856,731.575,703.524,731.875Z" transform="translate(-56 -106.019)" opacity="0.2"/>
<path id="Path_111" data-name="Path 111" d="M178.389,719.109s7.463-1.136,9.527-3.016,11.1-4.92,11.969-2.409,12.808,11.463,4.358,12.553-19.811,1.057-22.23-.043S178.389,719.109,178.389,719.109Z" transform="translate(-56 -106.019)" fill="#a8a8a8"/>
<path id="Path_112" data-name="Path 112" d="M204.285,725.321c-8.449,1.09-19.811,1.057-22.23-.043-1.842-.838-2.979-4.506-3.417-6.209l-.249.04s1.2,5.984,3.624,7.085,13.781,1.133,22.23.043c2.439-.315,3.177-1.285,2.976-2.566C206.973,724.489,206.1,725.087,204.285,725.321Z" transform="translate(-56 -106.019)" opacity="0.2"/>
<path id="Path_113" data-name="Path 113" d="M439.7,707.337c0,30.22-42.124,20.873-93.7,20.873s-93.074,9.347-93.074-20.873,42.118-36.793,93.694-36.793S439.7,677.117,439.7,707.337Z" transform="translate(-56 -106.019)" opacity="0.1"/>
<path id="Path_114" data-name="Path 114" d="M439.7,699.9c0,30.22-42.124,20.873-93.7,20.873s-93.074,9.347-93.074-20.873S295.04,663.1,346.616,663.1,439.7,669.676,439.7,699.9Z" transform="translate(-56 -106.019)" fill="#3f3d56"/>
</g>
<g id="docusaurus_keytar" transform="translate(312.271 493.733)">
<path id="Path_40" data-name="Path 40" d="M99,52h91.791V89.153H99Z" transform="translate(5.904 -14.001)" fill="#fff" fill-rule="evenodd"/>
<path id="Path_41" data-name="Path 41" d="M24.855,163.927A21.828,21.828,0,0,1,5.947,153a21.829,21.829,0,0,0,18.908,32.782H46.71V163.927Z" transform="translate(-3 -4.634)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_42" data-name="Path 42" d="M121.861,61.1l76.514-4.782V45.39A21.854,21.854,0,0,0,176.52,23.535H78.173L75.441,18.8a3.154,3.154,0,0,0-5.464,0l-2.732,4.732L64.513,18.8a3.154,3.154,0,0,0-5.464,0l-2.732,4.732L53.586,18.8a3.154,3.154,0,0,0-5.464,0L45.39,23.535c-.024,0-.046,0-.071,0l-4.526-4.525a3.153,3.153,0,0,0-5.276,1.414l-1.5,5.577-5.674-1.521a3.154,3.154,0,0,0-3.863,3.864L26,34.023l-5.575,1.494a3.155,3.155,0,0,0-1.416,5.278l4.526,4.526c0,.023,0,.046,0,.07L18.8,48.122a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,59.05a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,69.977a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,80.9a3.154,3.154,0,0,0,0,5.464L23.535,89.1,18.8,91.832a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,102.76a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,113.687a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,124.615a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,135.542a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,146.469a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,157.4a3.154,3.154,0,0,0,0,5.464l4.732,2.732L18.8,168.324a3.154,3.154,0,0,0,0,5.464l4.732,2.732A21.854,21.854,0,0,0,45.39,198.375H176.52a21.854,21.854,0,0,0,21.855-21.855V89.1l-76.514-4.782a11.632,11.632,0,0,1,0-23.219" transform="translate(-1.681 -17.226)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_43" data-name="Path 43" d="M143,186.71h32.782V143H143Z" transform="translate(9.984 -5.561)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_44" data-name="Path 44" d="M196.71,159.855a5.438,5.438,0,0,0-.7.07c-.042-.164-.081-.329-.127-.493a5.457,5.457,0,1,0-5.4-9.372q-.181-.185-.366-.367a5.454,5.454,0,1,0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467,5.467,0,1,0-10.788,0c-.162.042-.325.08-.486.126a5.457,5.457,0,1,0-9.384,5.4,21.843,21.843,0,1,0,36.421,21.02,5.452,5.452,0,1,0,.7-10.858" transform="translate(10.912 -6.025)" fill="#44d860" fill-rule="evenodd"/>
<path id="Path_45" data-name="Path 45" d="M153,124.855h32.782V103H153Z" transform="translate(10.912 -9.271)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_46" data-name="Path 46" d="M194.855,116.765a2.732,2.732,0,1,0,0-5.464,2.811,2.811,0,0,0-.349.035c-.022-.082-.04-.164-.063-.246a2.733,2.733,0,0,0-1.052-5.253,2.7,2.7,0,0,0-1.648.566q-.09-.093-.184-.184a2.7,2.7,0,0,0,.553-1.633,2.732,2.732,0,0,0-5.245-1.07,10.928,10.928,0,1,0,0,21.031,2.732,2.732,0,0,0,5.245-1.07,2.7,2.7,0,0,0-.553-1.633q.093-.09.184-.184a2.7,2.7,0,0,0,1.648.566,2.732,2.732,0,0,0,1.052-5.253c.023-.081.042-.164.063-.246a2.814,2.814,0,0,0,.349.035" transform="translate(12.767 -9.377)" fill="#44d860" fill-rule="evenodd"/>
<path id="Path_47" data-name="Path 47" d="M65.087,56.891a2.732,2.732,0,0,1-2.732-2.732,8.2,8.2,0,0,0-16.391,0,2.732,2.732,0,0,1-5.464,0,13.659,13.659,0,0,1,27.319,0,2.732,2.732,0,0,1-2.732,2.732" transform="translate(0.478 -15.068)" fill-rule="evenodd"/>
<path id="Path_48" data-name="Path 48" d="M103,191.347h65.565a21.854,21.854,0,0,0,21.855-21.855V93H124.855A21.854,21.854,0,0,0,103,114.855Z" transform="translate(6.275 -10.199)" fill="#ffff50" fill-rule="evenodd"/>
<path id="Path_49" data-name="Path 49" d="M173.216,129.787H118.535a1.093,1.093,0,1,1,0-2.185h54.681a1.093,1.093,0,0,1,0,2.185m0,21.855H118.535a1.093,1.093,0,1,1,0-2.186h54.681a1.093,1.093,0,0,1,0,2.186m0,21.855H118.535a1.093,1.093,0,1,1,0-2.185h54.681a1.093,1.093,0,0,1,0,2.185m0-54.434H118.535a1.093,1.093,0,1,1,0-2.185h54.681a1.093,1.093,0,0,1,0,2.185m0,21.652H118.535a1.093,1.093,0,1,1,0-2.186h54.681a1.093,1.093,0,0,1,0,2.186m0,21.855H118.535a1.093,1.093,0,1,1,0-2.186h54.681a1.093,1.093,0,0,1,0,2.186M189.585,61.611c-.013,0-.024-.007-.037-.005-3.377.115-4.974,3.492-6.384,6.472-1.471,3.114-2.608,5.139-4.473,5.078-2.064-.074-3.244-2.406-4.494-4.874-1.436-2.835-3.075-6.049-6.516-5.929-3.329.114-4.932,3.053-6.346,5.646-1.5,2.762-2.529,4.442-4.5,4.364-2.106-.076-3.225-1.972-4.52-4.167-1.444-2.443-3.112-5.191-6.487-5.1-3.272.113-4.879,2.606-6.3,4.808-1.5,2.328-2.552,3.746-4.551,3.662-2.156-.076-3.27-1.65-4.558-3.472-1.447-2.047-3.077-4.363-6.442-4.251-3.2.109-4.807,2.153-6.224,3.954-1.346,1.709-2.4,3.062-4.621,2.977a1.093,1.093,0,0,0-.079,2.186c3.3.11,4.967-1.967,6.417-3.81,1.286-1.635,2.4-3.045,4.582-3.12,2.1-.09,3.091,1.218,4.584,3.327,1.417,2,3.026,4.277,6.263,4.394,3.391.114,5.022-2.42,6.467-4.663,1.292-2,2.406-3.734,4.535-3.807,1.959-.073,3.026,1.475,4.529,4.022,1.417,2.4,3.023,5.121,6.324,5.241,3.415.118,5.064-2.863,6.5-5.5,1.245-2.282,2.419-4.437,4.5-4.509,1.959-.046,2.981,1.743,4.492,4.732,1.412,2.79,3.013,5.95,6.365,6.071l.185,0c3.348,0,4.937-3.36,6.343-6.331,1.245-2.634,2.423-5.114,4.444-5.216Z" transform="translate(7.109 -13.11)" fill-rule="evenodd"/>
<path id="Path_50" data-name="Path 50" d="M83,186.71h43.71V143H83Z" transform="translate(4.42 -5.561)" fill="#3ecc5f" fill-rule="evenodd"/>
<g id="Group_8" data-name="Group 8" transform="matrix(0.966, -0.259, 0.259, 0.966, 109.327, 91.085)">
<rect id="Rectangle_3" data-name="Rectangle 3" width="92.361" height="36.462" rx="2" transform="translate(0 0)" fill="#d8d8d8"/>
<g id="Group_2" data-name="Group 2" transform="translate(1.531 23.03)">
<rect id="Rectangle_4" data-name="Rectangle 4" width="5.336" height="5.336" rx="1" transform="translate(16.797 0)" fill="#4a4a4a"/>
<rect id="Rectangle_5" data-name="Rectangle 5" width="5.336" height="5.336" rx="1" transform="translate(23.12 0)" fill="#4a4a4a"/>
<rect id="Rectangle_6" data-name="Rectangle 6" width="5.336" height="5.336" rx="1" transform="translate(29.444 0)" fill="#4a4a4a"/>
<rect id="Rectangle_7" data-name="Rectangle 7" width="5.336" height="5.336" rx="1" transform="translate(35.768 0)" fill="#4a4a4a"/>
<rect id="Rectangle_8" data-name="Rectangle 8" width="5.336" height="5.336" rx="1" transform="translate(42.091 0)" fill="#4a4a4a"/>
<rect id="Rectangle_9" data-name="Rectangle 9" width="5.336" height="5.336" rx="1" transform="translate(48.415 0)" fill="#4a4a4a"/>
<rect id="Rectangle_10" data-name="Rectangle 10" width="5.336" height="5.336" rx="1" transform="translate(54.739 0)" fill="#4a4a4a"/>
<rect id="Rectangle_11" data-name="Rectangle 11" width="5.336" height="5.336" rx="1" transform="translate(61.063 0)" fill="#4a4a4a"/>
<rect id="Rectangle_12" data-name="Rectangle 12" width="5.336" height="5.336" rx="1" transform="translate(67.386 0)" fill="#4a4a4a"/>
<path id="Path_51" data-name="Path 51" d="M1.093,0H14.518a1.093,1.093,0,0,1,1.093,1.093V4.243a1.093,1.093,0,0,1-1.093,1.093H1.093A1.093,1.093,0,0,1,0,4.243V1.093A1.093,1.093,0,0,1,1.093,0ZM75,0H88.426a1.093,1.093,0,0,1,1.093,1.093V4.243a1.093,1.093,0,0,1-1.093,1.093H75a1.093,1.093,0,0,1-1.093-1.093V1.093A1.093,1.093,0,0,1,75,0Z" transform="translate(0 0)" fill="#4a4a4a" fill-rule="evenodd"/>
</g>
<g id="Group_3" data-name="Group 3" transform="translate(1.531 10.261)">
<path id="Path_52" data-name="Path 52" d="M1.093,0H6.218A1.093,1.093,0,0,1,7.31,1.093V4.242A1.093,1.093,0,0,1,6.218,5.335H1.093A1.093,1.093,0,0,1,0,4.242V1.093A1.093,1.093,0,0,1,1.093,0Z" transform="translate(0 0)" fill="#4a4a4a" fill-rule="evenodd"/>
<rect id="Rectangle_13" data-name="Rectangle 13" width="5.336" height="5.336" rx="1" transform="translate(8.299 0)" fill="#4a4a4a"/>
<rect id="Rectangle_14" data-name="Rectangle 14" width="5.336" height="5.336" rx="1" transform="translate(14.623 0)" fill="#4a4a4a"/>
<rect id="Rectangle_15" data-name="Rectangle 15" width="5.336" height="5.336" rx="1" transform="translate(20.947 0)" fill="#4a4a4a"/>
<rect id="Rectangle_16" data-name="Rectangle 16" width="5.336" height="5.336" rx="1" transform="translate(27.271 0)" fill="#4a4a4a"/>
<rect id="Rectangle_17" data-name="Rectangle 17" width="5.336" height="5.336" rx="1" transform="translate(33.594 0)" fill="#4a4a4a"/>
<rect id="Rectangle_18" data-name="Rectangle 18" width="5.336" height="5.336" rx="1" transform="translate(39.918 0)" fill="#4a4a4a"/>
<rect id="Rectangle_19" data-name="Rectangle 19" width="5.336" height="5.336" rx="1" transform="translate(46.242 0)" fill="#4a4a4a"/>
<rect id="Rectangle_20" data-name="Rectangle 20" width="5.336" height="5.336" rx="1" transform="translate(52.565 0)" fill="#4a4a4a"/>
<rect id="Rectangle_21" data-name="Rectangle 21" width="5.336" height="5.336" rx="1" transform="translate(58.888 0)" fill="#4a4a4a"/>
<rect id="Rectangle_22" data-name="Rectangle 22" width="5.336" height="5.336" rx="1" transform="translate(65.212 0)" fill="#4a4a4a"/>
<rect id="Rectangle_23" data-name="Rectangle 23" width="5.336" height="5.336" rx="1" transform="translate(71.536 0)" fill="#4a4a4a"/>
<rect id="Rectangle_24" data-name="Rectangle 24" width="5.336" height="5.336" rx="1" transform="translate(77.859 0)" fill="#4a4a4a"/>
<rect id="Rectangle_25" data-name="Rectangle 25" width="5.336" height="5.336" rx="1" transform="translate(84.183 0)" fill="#4a4a4a"/>
</g>
<g id="Group_4" data-name="Group 4" transform="translate(91.05 9.546) rotate(180)">
<path id="Path_53" data-name="Path 53" d="M1.093,0H6.219A1.093,1.093,0,0,1,7.312,1.093v3.15A1.093,1.093,0,0,1,6.219,5.336H1.093A1.093,1.093,0,0,1,0,4.243V1.093A1.093,1.093,0,0,1,1.093,0Z" transform="translate(0 0)" fill="#4a4a4a" fill-rule="evenodd"/>
<rect id="Rectangle_26" data-name="Rectangle 26" width="5.336" height="5.336" rx="1" transform="translate(8.299 0)" fill="#4a4a4a"/>
<rect id="Rectangle_27" data-name="Rectangle 27" width="5.336" height="5.336" rx="1" transform="translate(14.623 0)" fill="#4a4a4a"/>
<rect id="Rectangle_28" data-name="Rectangle 28" width="5.336" height="5.336" rx="1" transform="translate(20.947 0)" fill="#4a4a4a"/>
<rect id="Rectangle_29" data-name="Rectangle 29" width="5.336" height="5.336" rx="1" transform="translate(27.271 0)" fill="#4a4a4a"/>
<rect id="Rectangle_30" data-name="Rectangle 30" width="5.336" height="5.336" rx="1" transform="translate(33.594 0)" fill="#4a4a4a"/>
<rect id="Rectangle_31" data-name="Rectangle 31" width="5.336" height="5.336" rx="1" transform="translate(39.918 0)" fill="#4a4a4a"/>
<rect id="Rectangle_32" data-name="Rectangle 32" width="5.336" height="5.336" rx="1" transform="translate(46.242 0)" fill="#4a4a4a"/>
<rect id="Rectangle_33" data-name="Rectangle 33" width="5.336" height="5.336" rx="1" transform="translate(52.565 0)" fill="#4a4a4a"/>
<rect id="Rectangle_34" data-name="Rectangle 34" width="5.336" height="5.336" rx="1" transform="translate(58.889 0)" fill="#4a4a4a"/>
<rect id="Rectangle_35" data-name="Rectangle 35" width="5.336" height="5.336" rx="1" transform="translate(65.213 0)" fill="#4a4a4a"/>
<rect id="Rectangle_36" data-name="Rectangle 36" width="5.336" height="5.336" rx="1" transform="translate(71.537 0)" fill="#4a4a4a"/>
<rect id="Rectangle_37" data-name="Rectangle 37" width="5.336" height="5.336" rx="1" transform="translate(77.86 0)" fill="#4a4a4a"/>
<rect id="Rectangle_38" data-name="Rectangle 38" width="5.336" height="5.336" rx="1" transform="translate(84.183 0)" fill="#4a4a4a"/>
<rect id="Rectangle_39" data-name="Rectangle 39" width="5.336" height="5.336" rx="1" transform="translate(8.299 0)" fill="#4a4a4a"/>
<rect id="Rectangle_40" data-name="Rectangle 40" width="5.336" height="5.336" rx="1" transform="translate(14.623 0)" fill="#4a4a4a"/>
<rect id="Rectangle_41" data-name="Rectangle 41" width="5.336" height="5.336" rx="1" transform="translate(20.947 0)" fill="#4a4a4a"/>
<rect id="Rectangle_42" data-name="Rectangle 42" width="5.336" height="5.336" rx="1" transform="translate(27.271 0)" fill="#4a4a4a"/>
<rect id="Rectangle_43" data-name="Rectangle 43" width="5.336" height="5.336" rx="1" transform="translate(33.594 0)" fill="#4a4a4a"/>
<rect id="Rectangle_44" data-name="Rectangle 44" width="5.336" height="5.336" rx="1" transform="translate(39.918 0)" fill="#4a4a4a"/>
<rect id="Rectangle_45" data-name="Rectangle 45" width="5.336" height="5.336" rx="1" transform="translate(46.242 0)" fill="#4a4a4a"/>
<rect id="Rectangle_46" data-name="Rectangle 46" width="5.336" height="5.336" rx="1" transform="translate(52.565 0)" fill="#4a4a4a"/>
<rect id="Rectangle_47" data-name="Rectangle 47" width="5.336" height="5.336" rx="1" transform="translate(58.889 0)" fill="#4a4a4a"/>
<rect id="Rectangle_48" data-name="Rectangle 48" width="5.336" height="5.336" rx="1" transform="translate(65.213 0)" fill="#4a4a4a"/>
<rect id="Rectangle_49" data-name="Rectangle 49" width="5.336" height="5.336" rx="1" transform="translate(71.537 0)" fill="#4a4a4a"/>
<rect id="Rectangle_50" data-name="Rectangle 50" width="5.336" height="5.336" rx="1" transform="translate(77.86 0)" fill="#4a4a4a"/>
<rect id="Rectangle_51" data-name="Rectangle 51" width="5.336" height="5.336" rx="1" transform="translate(84.183 0)" fill="#4a4a4a"/>
</g>
<g id="Group_6" data-name="Group 6" transform="translate(1.531 16.584)">
<path id="Path_54" data-name="Path 54" d="M1.093,0h7.3A1.093,1.093,0,0,1,9.485,1.093v3.15A1.093,1.093,0,0,1,8.392,5.336h-7.3A1.093,1.093,0,0,1,0,4.243V1.094A1.093,1.093,0,0,1,1.093,0Z" transform="translate(0 0)" fill="#4a4a4a" fill-rule="evenodd"/>
<g id="Group_5" data-name="Group 5" transform="translate(10.671 0)">
<rect id="Rectangle_52" data-name="Rectangle 52" width="5.336" height="5.336" rx="1" fill="#4a4a4a"/>
<rect id="Rectangle_53" data-name="Rectangle 53" width="5.336" height="5.336" rx="1" transform="translate(6.324 0)" fill="#4a4a4a"/>
<rect id="Rectangle_54" data-name="Rectangle 54" width="5.336" height="5.336" rx="1" transform="translate(12.647 0)" fill="#4a4a4a"/>
<rect id="Rectangle_55" data-name="Rectangle 55" width="5.336" height="5.336" rx="1" transform="translate(18.971 0)" fill="#4a4a4a"/>
<rect id="Rectangle_56" data-name="Rectangle 56" width="5.336" height="5.336" rx="1" transform="translate(25.295 0)" fill="#4a4a4a"/>
<rect id="Rectangle_57" data-name="Rectangle 57" width="5.336" height="5.336" rx="1" transform="translate(31.619 0)" fill="#4a4a4a"/>
<rect id="Rectangle_58" data-name="Rectangle 58" width="5.336" height="5.336" rx="1" transform="translate(37.942 0)" fill="#4a4a4a"/>
<rect id="Rectangle_59" data-name="Rectangle 59" width="5.336" height="5.336" rx="1" transform="translate(44.265 0)" fill="#4a4a4a"/>
<rect id="Rectangle_60" data-name="Rectangle 60" width="5.336" height="5.336" rx="1" transform="translate(50.589 0)" fill="#4a4a4a"/>
<rect id="Rectangle_61" data-name="Rectangle 61" width="5.336" height="5.336" rx="1" transform="translate(56.912 0)" fill="#4a4a4a"/>
<rect id="Rectangle_62" data-name="Rectangle 62" width="5.336" height="5.336" rx="1" transform="translate(63.236 0)" fill="#4a4a4a"/>
</g>
<path id="Path_55" data-name="Path 55" d="M1.094,0H8A1.093,1.093,0,0,1,9.091,1.093v3.15A1.093,1.093,0,0,1,8,5.336H1.093A1.093,1.093,0,0,1,0,4.243V1.094A1.093,1.093,0,0,1,1.093,0Z" transform="translate(80.428 0)" fill="#4a4a4a" fill-rule="evenodd"/>
</g>
<g id="Group_7" data-name="Group 7" transform="translate(1.531 29.627)">
<rect id="Rectangle_63" data-name="Rectangle 63" width="5.336" height="5.336" rx="1" transform="translate(0 0)" fill="#4a4a4a"/>
<rect id="Rectangle_64" data-name="Rectangle 64" width="5.336" height="5.336" rx="1" transform="translate(6.324 0)" fill="#4a4a4a"/>
<rect id="Rectangle_65" data-name="Rectangle 65" width="5.336" height="5.336" rx="1" transform="translate(12.647 0)" fill="#4a4a4a"/>
<rect id="Rectangle_66" data-name="Rectangle 66" width="5.336" height="5.336" rx="1" transform="translate(18.971 0)" fill="#4a4a4a"/>
<path id="Path_56" data-name="Path 56" d="M1.093,0H31.515a1.093,1.093,0,0,1,1.093,1.093V4.244a1.093,1.093,0,0,1-1.093,1.093H1.093A1.093,1.093,0,0,1,0,4.244V1.093A1.093,1.093,0,0,1,1.093,0ZM34.687,0h3.942a1.093,1.093,0,0,1,1.093,1.093V4.244a1.093,1.093,0,0,1-1.093,1.093H34.687a1.093,1.093,0,0,1-1.093-1.093V1.093A1.093,1.093,0,0,1,34.687,0Z" transform="translate(25.294 0)" fill="#4a4a4a" fill-rule="evenodd"/>
<rect id="Rectangle_67" data-name="Rectangle 67" width="5.336" height="5.336" rx="1" transform="translate(66.003 0)" fill="#4a4a4a"/>
<rect id="Rectangle_68" data-name="Rectangle 68" width="5.336" height="5.336" rx="1" transform="translate(72.327 0)" fill="#4a4a4a"/>
<rect id="Rectangle_69" data-name="Rectangle 69" width="5.336" height="5.336" rx="1" transform="translate(84.183 0)" fill="#4a4a4a"/>
<path id="Path_57" data-name="Path 57" d="M5.336,0V1.18A1.093,1.093,0,0,1,4.243,2.273H1.093A1.093,1.093,0,0,1,0,1.18V0Z" transform="translate(83.59 2.273) rotate(180)" fill="#4a4a4a"/>
<path id="Path_58" data-name="Path 58" d="M5.336,0V1.18A1.093,1.093,0,0,1,4.243,2.273H1.093A1.093,1.093,0,0,1,0,1.18V0Z" transform="translate(78.255 3.063)" fill="#4a4a4a"/>
</g>
<rect id="Rectangle_70" data-name="Rectangle 70" width="88.927" height="2.371" rx="1.085" transform="translate(1.925 1.17)" fill="#4a4a4a"/>
<rect id="Rectangle_71" data-name="Rectangle 71" width="4.986" height="1.581" rx="0.723" transform="translate(4.1 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_72" data-name="Rectangle 72" width="4.986" height="1.581" rx="0.723" transform="translate(10.923 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_73" data-name="Rectangle 73" width="4.986" height="1.581" rx="0.723" transform="translate(16.173 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_74" data-name="Rectangle 74" width="4.986" height="1.581" rx="0.723" transform="translate(21.421 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_75" data-name="Rectangle 75" width="4.986" height="1.581" rx="0.723" transform="translate(26.671 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_76" data-name="Rectangle 76" width="4.986" height="1.581" rx="0.723" transform="translate(33.232 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_77" data-name="Rectangle 77" width="4.986" height="1.581" rx="0.723" transform="translate(38.48 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_78" data-name="Rectangle 78" width="4.986" height="1.581" rx="0.723" transform="translate(43.73 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_79" data-name="Rectangle 79" width="4.986" height="1.581" rx="0.723" transform="translate(48.978 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_80" data-name="Rectangle 80" width="4.986" height="1.581" rx="0.723" transform="translate(55.54 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_81" data-name="Rectangle 81" width="4.986" height="1.581" rx="0.723" transform="translate(60.788 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_82" data-name="Rectangle 82" width="4.986" height="1.581" rx="0.723" transform="translate(66.038 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_83" data-name="Rectangle 83" width="4.986" height="1.581" rx="0.723" transform="translate(72.599 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_84" data-name="Rectangle 84" width="4.986" height="1.581" rx="0.723" transform="translate(77.847 1.566)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_85" data-name="Rectangle 85" width="4.986" height="1.581" rx="0.723" transform="translate(83.097 1.566)" fill="#d8d8d8" opacity="0.136"/>
</g>
<path id="Path_59" data-name="Path 59" d="M146.71,159.855a5.439,5.439,0,0,0-.7.07c-.042-.164-.081-.329-.127-.493a5.457,5.457,0,1,0-5.4-9.372q-.181-.185-.366-.367a5.454,5.454,0,1,0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467,5.467,0,1,0-10.788,0c-.162.042-.325.08-.486.126a5.457,5.457,0,1,0-9.384,5.4,21.843,21.843,0,1,0,36.421,21.02,5.452,5.452,0,1,0,.7-10.858" transform="translate(6.275 -6.025)" fill="#44d860" fill-rule="evenodd"/>
<path id="Path_60" data-name="Path 60" d="M83,124.855h43.71V103H83Z" transform="translate(4.42 -9.271)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_61" data-name="Path 61" d="M134.855,116.765a2.732,2.732,0,1,0,0-5.464,2.811,2.811,0,0,0-.349.035c-.022-.082-.04-.164-.063-.246a2.733,2.733,0,0,0-1.052-5.253,2.7,2.7,0,0,0-1.648.566q-.09-.093-.184-.184a2.7,2.7,0,0,0,.553-1.633,2.732,2.732,0,0,0-5.245-1.07,10.928,10.928,0,1,0,0,21.031,2.732,2.732,0,0,0,5.245-1.07,2.7,2.7,0,0,0-.553-1.633q.093-.09.184-.184a2.7,2.7,0,0,0,1.648.566,2.732,2.732,0,0,0,1.052-5.253c.023-.081.042-.164.063-.246a2.811,2.811,0,0,0,.349.035" transform="translate(7.202 -9.377)" fill="#44d860" fill-rule="evenodd"/>
<path id="Path_62" data-name="Path 62" d="M143.232,42.33a2.967,2.967,0,0,1-.535-.055,2.754,2.754,0,0,1-.514-.153,2.838,2.838,0,0,1-.471-.251,4.139,4.139,0,0,1-.415-.339,3.2,3.2,0,0,1-.338-.415A2.7,2.7,0,0,1,140.5,39.6a2.968,2.968,0,0,1,.055-.535,3.152,3.152,0,0,1,.152-.514,2.874,2.874,0,0,1,.252-.47,2.633,2.633,0,0,1,.753-.754,2.837,2.837,0,0,1,.471-.251,2.753,2.753,0,0,1,.514-.153,2.527,2.527,0,0,1,1.071,0,2.654,2.654,0,0,1,.983.4,4.139,4.139,0,0,1,.415.339,4.019,4.019,0,0,1,.339.415,2.786,2.786,0,0,1,.251.47,2.864,2.864,0,0,1,.208,1.049,2.77,2.77,0,0,1-.8,1.934,4.139,4.139,0,0,1-.415.339,2.722,2.722,0,0,1-1.519.459m21.855-1.366a2.789,2.789,0,0,1-1.935-.8,4.162,4.162,0,0,1-.338-.415,2.7,2.7,0,0,1-.459-1.519,2.789,2.789,0,0,1,.8-1.934,4.139,4.139,0,0,1,.415-.339,2.838,2.838,0,0,1,.471-.251,2.752,2.752,0,0,1,.514-.153,2.527,2.527,0,0,1,1.071,0,2.654,2.654,0,0,1,.983.4,4.139,4.139,0,0,1,.415.339,2.79,2.79,0,0,1,.8,1.934,3.069,3.069,0,0,1-.055.535,2.779,2.779,0,0,1-.153.514,3.885,3.885,0,0,1-.251.47,4.02,4.02,0,0,1-.339.415,4.138,4.138,0,0,1-.415.339,2.722,2.722,0,0,1-1.519.459" transform="translate(9.753 -15.532)" fill-rule="evenodd"/>
</g>
</g>
</svg>


View File

@ -0,0 +1,169 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1041.277" height="554.141" viewBox="0 0 1041.277 554.141">
<g id="Group_24" data-name="Group 24" transform="translate(-440 -263)">
<g id="Group_23" data-name="Group 23" transform="translate(439.989 262.965)">
<path id="Path_299" data-name="Path 299" d="M1040.82,611.12q-1.74,3.75-3.47,7.4-2.7,5.67-5.33,11.12c-.78,1.61-1.56,3.19-2.32,4.77-8.6,17.57-16.63,33.11-23.45,45.89A73.21,73.21,0,0,1,942.44,719l-151.65,1.65h-1.6l-13,.14-11.12.12-34.1.37h-1.38l-17.36.19h-.53l-107,1.16-95.51,1-11.11.12-69,.75H429l-44.75.48h-.48l-141.5,1.53-42.33.46a87.991,87.991,0,0,1-10.79-.54h0c-1.22-.14-2.44-.3-3.65-.49a87.38,87.38,0,0,1-51.29-27.54C116,678.37,102.75,655,93.85,629.64q-1.93-5.49-3.6-11.12C59.44,514.37,97,380,164.6,290.08q4.25-5.64,8.64-11l.07-.08c20.79-25.52,44.1-46.84,68.93-62,44-26.91,92.75-34.49,140.7-11.9,40.57,19.12,78.45,28.11,115.17,30.55,3.71.24,7.42.42,11.11.53,84.23,2.65,163.17-27.7,255.87-47.29,3.69-.78,7.39-1.55,11.12-2.28,66.13-13.16,139.49-20.1,226.73-5.51a189.089,189.089,0,0,1,26.76,6.4q5.77,1.86,11.12,4c41.64,16.94,64.35,48.24,74,87.46q1.37,5.46,2.37,11.11C1134.3,384.41,1084.19,518.23,1040.82,611.12Z" transform="translate(-79.34 -172.91)" fill="#f2f2f2"/>
<path id="Path_300" data-name="Path 300" d="M576.36,618.52a95.21,95.21,0,0,1-1.87,11.12h93.7V618.52Zm-78.25,62.81,11.11-.09V653.77c-3.81-.17-7.52-.34-11.11-.52ZM265.19,618.52v11.12h198.5V618.52ZM1114.87,279h-74V191.51q-5.35-2.17-11.12-4V279H776.21V186.58c-3.73.73-7.43,1.5-11.12,2.28V279H509.22V236.15c-3.69-.11-7.4-.29-11.11-.53V279H242.24V217c-24.83,15.16-48.14,36.48-68.93,62h-.07v.08q-4.4,5.4-8.64,11h8.64V618.52h-83q1.66,5.63,3.6,11.12h79.39v93.62a87,87,0,0,0,12.2,2.79c1.21.19,2.43.35,3.65.49h0a87.991,87.991,0,0,0,10.79.54l42.33-.46v-97H498.11v94.21l11.11-.12V629.64H765.09V721l11.12-.12V629.64H1029.7v4.77c.76-1.58,1.54-3.16,2.32-4.77q2.63-5.45,5.33-11.12,1.73-3.64,3.47-7.4v-321h76.42Q1116.23,284.43,1114.87,279ZM242.24,618.52V290.08H498.11V618.52Zm267,0V290.08H765.09V618.52Zm520.48,0H776.21V290.08H1029.7Z" transform="translate(-79.34 -172.91)" opacity="0.1"/>
<path id="Path_301" data-name="Path 301" d="M863.09,533.65v13l-151.92,1.4-1.62.03-57.74.53-1.38.02-17.55.15h-.52l-106.98.99L349.77,551.4h-.15l-44.65.42-.48.01-198.4,1.82v-15l46.65-28,93.6-.78,2-.01.66-.01,2-.03,44.94-.37,2.01-.01.64-.01,2-.01L315,509.3l.38-.01,35.55-.3h.29l277.4-2.34,6.79-.05h.68l5.18-.05,37.65-.31,2-.03,1.85-.02h.96l11.71-.09,2.32-.03,3.11-.02,9.75-.09,15.47-.13,2-.02,3.48-.02h.65l74.71-.64Z" fill="#65617d"/>
<path id="Path_302" data-name="Path 302" d="M863.09,533.65v13l-151.92,1.4-1.62.03-57.74.53-1.38.02-17.55.15h-.52l-106.98.99L349.77,551.4h-.15l-44.65.42-.48.01-198.4,1.82v-15l46.65-28,93.6-.78,2-.01.66-.01,2-.03,44.94-.37,2.01-.01.64-.01,2-.01L315,509.3l.38-.01,35.55-.3h.29l277.4-2.34,6.79-.05h.68l5.18-.05,37.65-.31,2-.03,1.85-.02h.96l11.71-.09,2.32-.03,3.11-.02,9.75-.09,15.47-.13,2-.02,3.48-.02h.65l74.71-.64Z" opacity="0.2"/>
<path id="Path_303" data-name="Path 303" d="M375.44,656.57v24.49a6.13,6.13,0,0,1-3.5,5.54,6,6,0,0,1-2.5.6l-34.9.74a6,6,0,0,1-2.7-.57,6.12,6.12,0,0,1-3.57-5.57V656.57Z" transform="translate(-79.34 -172.91)" fill="#3f3d56"/>
<path id="Path_304" data-name="Path 304" d="M375.44,656.57v24.49a6.13,6.13,0,0,1-3.5,5.54,6,6,0,0,1-2.5.6l-34.9.74a6,6,0,0,1-2.7-.57,6.12,6.12,0,0,1-3.57-5.57V656.57Z" transform="translate(-79.34 -172.91)" opacity="0.1"/>
<path id="Path_305" data-name="Path 305" d="M377.44,656.57v24.49a6.13,6.13,0,0,1-3.5,5.54,6,6,0,0,1-2.5.6l-34.9.74a6,6,0,0,1-2.7-.57,6.12,6.12,0,0,1-3.57-5.57V656.57Z" transform="translate(-79.34 -172.91)" fill="#3f3d56"/>
<rect id="Rectangle_137" data-name="Rectangle 137" width="47.17" height="31.5" transform="translate(680.92 483.65)" fill="#3f3d56"/>
<rect id="Rectangle_138" data-name="Rectangle 138" width="47.17" height="31.5" transform="translate(680.92 483.65)" opacity="0.1"/>
<rect id="Rectangle_139" data-name="Rectangle 139" width="47.17" height="31.5" transform="translate(678.92 483.65)" fill="#3f3d56"/>
<path id="Path_306" data-name="Path 306" d="M298.09,483.65v4.97l-47.17,1.26v-6.23Z" opacity="0.1"/>
<path id="Path_307" data-name="Path 307" d="M460.69,485.27v168.2a4,4,0,0,1-3.85,3.95l-191.65,5.1h-.05a4,4,0,0,1-3.95-3.95V485.27a4,4,0,0,1,3.95-3.95h191.6a4,4,0,0,1,3.95,3.95Z" transform="translate(-79.34 -172.91)" fill="#65617d"/>
<path id="Path_308" data-name="Path 308" d="M265.19,481.32v181.2h-.05a4,4,0,0,1-3.95-3.95V485.27a4,4,0,0,1,3.95-3.95Z" transform="translate(-79.34 -172.91)" opacity="0.1"/>
<path id="Path_309" data-name="Path 309" d="M194.59,319.15h177.5V467.4l-177.5,4Z" fill="#39374d"/>
<path id="Path_310" data-name="Path 310" d="M726.09,483.65v6.41l-47.17-1.26v-5.15Z" opacity="0.1"/>
<path id="Path_311" data-name="Path 311" d="M867.69,485.27v173.3a4,4,0,0,1-4,3.95h0L672,657.42a4,4,0,0,1-3.85-3.95V485.27a4,4,0,0,1,3.95-3.95H863.7a4,4,0,0,1,3.99,3.95Z" transform="translate(-79.34 -172.91)" fill="#65617d"/>
<path id="Path_312" data-name="Path 312" d="M867.69,485.27v173.3a4,4,0,0,1-4,3.95h0V481.32h0a4,4,0,0,1,4,3.95Z" transform="translate(-79.34 -172.91)" opacity="0.1"/>
<path id="Path_313" data-name="Path 313" d="M775.59,319.15H598.09V467.4l177.5,4Z" fill="#39374d"/>
<path id="Path_314" data-name="Path 314" d="M663.19,485.27v168.2a4,4,0,0,1-3.85,3.95l-191.65,5.1h0a4,4,0,0,1-4-3.95V485.27a4,4,0,0,1,3.95-3.95h191.6A4,4,0,0,1,663.19,485.27Z" transform="translate(-79.34 -172.91)" fill="#65617d"/>
<path id="Path_315" data-name="Path 315" d="M397.09,319.15h177.5V467.4l-177.5,4Z" fill="#4267b2"/>
<path id="Path_316" data-name="Path 316" d="M863.09,533.65v13l-151.92,1.4-1.62.03-57.74.53-1.38.02-17.55.15h-.52l-106.98.99L349.77,551.4h-.15l-44.65.42-.48.01-198.4,1.82v-15l202.51-1.33h.48l40.99-.28h.19l283.08-1.87h.29l.17-.01h.47l4.79-.03h1.46l74.49-.5,4.4-.02.98-.01Z" opacity="0.1"/>
<circle id="Ellipse_111" data-name="Ellipse 111" cx="51.33" cy="51.33" r="51.33" transform="translate(435.93 246.82)" fill="#fbbebe"/>
<path id="Path_317" data-name="Path 317" d="M617.94,550.07s-99.5,12-90,0c3.44-4.34,4.39-17.2,4.2-31.85-.06-4.45-.22-9.06-.45-13.65-1.1-22-3.75-43.5-3.75-43.5s87-41,77-8.5c-4,13.13-2.69,31.57.35,48.88.89,5.05,1.92,10,3,14.7a344.66,344.66,0,0,0,9.65,33.92Z" transform="translate(-79.34 -172.91)" fill="#fbbebe"/>
<path id="Path_318" data-name="Path 318" d="M585.47,546c11.51-2.13,23.7-6,34.53-1.54,2.85,1.17,5.47,2.88,8.39,3.86s6.12,1.22,9.16,1.91c10.68,2.42,19.34,10.55,24.9,20s8.44,20.14,11.26,30.72l6.9,25.83c6,22.45,12,45.09,13.39,68.3a2437.506,2437.506,0,0,1-250.84,1.43c5.44-10.34,11-21.31,10.54-33s-7.19-23.22-4.76-34.74c1.55-7.34,6.57-13.39,9.64-20.22,8.75-19.52,1.94-45.79,17.32-60.65,6.92-6.68,17-9.21,26.63-8.89,12.28.41,24.85,4.24,37,6.11C555.09,547.48,569.79,548.88,585.47,546Z" transform="translate(-79.34 -172.91)" fill="#ff6584"/>
<path id="Path_319" data-name="Path 319" d="M716.37,657.17l-.1,1.43v.1l-.17,2.3-1.33,18.51-1.61,22.3-.46,6.28-1,13.44v.17l-107,1-175.59,1.9v.84h-.14v-1.12l.45-14.36.86-28.06.74-23.79.07-2.37a10.53,10.53,0,0,1,11.42-10.17c4.72.4,10.85.89,18.18,1.41l3,.22c42.33,2.94,120.56,6.74,199.5,2,1.66-.09,3.33-.19,5-.31,12.24-.77,24.47-1.76,36.58-3a10.53,10.53,0,0,1,11.6,11.23Z" transform="translate(-79.34 -172.91)" opacity="0.1"/>
<path id="Path_320" data-name="Path 320" d="M429.08,725.44v-.84l175.62-1.91,107-1h.3v-.17l1-13.44.43-6,1.64-22.61,1.29-17.9v-.44a10.617,10.617,0,0,0-.11-2.47.3.3,0,0,0,0-.1,10.391,10.391,0,0,0-2-4.64,10.54,10.54,0,0,0-9.42-4c-12.11,1.24-24.34,2.23-36.58,3-1.67.12-3.34.22-5,.31-78.94,4.69-157.17.89-199.5-2l-3-.22c-7.33-.52-13.46-1-18.18-1.41a10.54,10.54,0,0,0-11.24,8.53,11,11,0,0,0-.18,1.64l-.68,22.16L429.54,710l-.44,14.36v1.12Z" transform="translate(-79.34 -172.91)" fill="#3f3d56"/>
<path id="Path_321" data-name="Path 321" d="M716.67,664.18l-1.23,15.33-1.83,22.85-.46,5.72-1,12.81-.06.64v.17h0l-.15,1.48.11-1.48h-.29l-107,1-175.65,1.9v-.28l.49-14.36,1-28.06.64-18.65A6.36,6.36,0,0,1,434.3,658a6.25,6.25,0,0,1,3.78-.9c2.1.17,4.68.37,7.69.59,4.89.36,10.92.78,17.94,1.22,13,.82,29.31,1.7,48,2.42,52,2,122.2,2.67,188.88-3.17,3-.26,6.1-.55,9.13-.84a6.26,6.26,0,0,1,3.48.66,5.159,5.159,0,0,1,.86.54,6.14,6.14,0,0,1,2,2.46,3.564,3.564,0,0,1,.25.61A6.279,6.279,0,0,1,716.67,664.18Z" transform="translate(-79.34 -172.91)" opacity="0.1"/>
<path id="Path_322" data-name="Path 322" d="M377.44,677.87v3.19a6.13,6.13,0,0,1-3.5,5.54l-40.1.77a6.12,6.12,0,0,1-3.57-5.57v-3Z" transform="translate(-79.34 -172.91)" opacity="0.1"/>
<path id="Path_323" data-name="Path 323" d="M298.59,515.57l-52.25,1V507.9l52.25-1Z" fill="#3f3d56"/>
<path id="Path_324" data-name="Path 324" d="M298.59,515.57l-52.25,1V507.9l52.25-1Z" opacity="0.1"/>
<path id="Path_325" data-name="Path 325" d="M300.59,515.57l-52.25,1V507.9l52.25-1Z" fill="#3f3d56"/>
<path id="Path_326" data-name="Path 326" d="M758.56,679.87v3.19a6.13,6.13,0,0,0,3.5,5.54l40.1.77a6.12,6.12,0,0,0,3.57-5.57v-3Z" transform="translate(-79.34 -172.91)" opacity="0.1"/>
<path id="Path_327" data-name="Path 327" d="M678.72,517.57l52.25,1V509.9l-52.25-1Z" opacity="0.1"/>
<path id="Path_328" data-name="Path 328" d="M676.72,517.57l52.25,1V509.9l-52.25-1Z" fill="#3f3d56"/>
<path id="Path_329" data-name="Path 329" d="M534.13,486.79c.08,7-3.16,13.6-5.91,20.07a163.491,163.491,0,0,0-12.66,74.71c.73,11,2.58,22,.73,32.9s-8.43,21.77-19,24.9c17.53,10.45,41.26,9.35,57.76-2.66,8.79-6.4,15.34-15.33,21.75-24.11a97.86,97.86,0,0,1-13.31,44.75A103.43,103.43,0,0,0,637,616.53c4.31-5.81,8.06-12.19,9.72-19.23,3.09-13-1.22-26.51-4.51-39.5a266.055,266.055,0,0,1-6.17-33c-.43-3.56-.78-7.22.1-10.7,1-4.07,3.67-7.51,5.64-11.22,5.6-10.54,5.73-23.3,2.86-34.88s-8.49-22.26-14.06-32.81c-4.46-8.46-9.3-17.31-17.46-22.28-5.1-3.1-11-4.39-16.88-5.64l-25.37-5.43c-5.55-1.19-11.26-2.38-16.87-1.51-9.47,1.48-16.14,8.32-22,15.34-4.59,5.46-15.81,15.71-16.6,22.86-.72,6.59,5.1,17.63,6.09,24.58,1.3,9,2.22,6,7.3,11.52C532,478.05,534.07,482,534.13,486.79Z" transform="translate(-79.34 -172.91)" fill="#3f3d56"/>
</g>
<g id="docusaurus_keytar" transform="translate(670.271 615.768)">
<path id="Path_40" data-name="Path 40" d="M99,52h43.635V69.662H99Z" transform="translate(-49.132 -33.936)" fill="#fff" fill-rule="evenodd"/>
<path id="Path_41" data-name="Path 41" d="M13.389,158.195A10.377,10.377,0,0,1,4.4,153a10.377,10.377,0,0,0,8.988,15.584H23.779V158.195Z" transform="translate(-3 -82.47)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_42" data-name="Path 42" d="M66.967,38.083l36.373-2.273V30.615A10.389,10.389,0,0,0,92.95,20.226H46.2l-1.3-2.249a1.5,1.5,0,0,0-2.6,0L41,20.226l-1.3-2.249a1.5,1.5,0,0,0-2.6,0l-1.3,2.249-1.3-2.249a1.5,1.5,0,0,0-2.6,0l-1.3,2.249-.034,0-2.152-2.151a1.5,1.5,0,0,0-2.508.672L25.21,21.4l-2.7-.723a1.5,1.5,0,0,0-1.836,1.837l.722,2.7-2.65.71a1.5,1.5,0,0,0-.673,2.509l2.152,2.152c0,.011,0,.022,0,.033l-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6L20.226,41l-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3-2.249,1.3a1.5,1.5,0,0,0,0,2.6l2.249,1.3A10.389,10.389,0,0,0,30.615,103.34H92.95A10.389,10.389,0,0,0,103.34,92.95V51.393L66.967,49.12a5.53,5.53,0,0,1,0-11.038" transform="translate(-9.836 -17.226)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_43" data-name="Path 43" d="M143,163.779h15.584V143H143Z" transform="translate(-70.275 -77.665)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_44" data-name="Path 44" d="M173.779,148.389a2.582,2.582,0,0,0-.332.033c-.02-.078-.038-.156-.06-.234a2.594,2.594,0,1,0-2.567-4.455q-.086-.088-.174-.175a2.593,2.593,0,1,0-4.461-2.569c-.077-.022-.154-.04-.231-.06a2.6,2.6,0,1,0-5.128,0c-.077.02-.154.038-.231.06a2.594,2.594,0,1,0-4.461,2.569,10.384,10.384,0,1,0,17.314,9.992,2.592,2.592,0,1,0,.332-5.161" transform="translate(-75.08 -75.262)" fill="#44d860" fill-rule="evenodd"/>
<path id="Path_45" data-name="Path 45" d="M153,113.389h15.584V103H153Z" transform="translate(-75.08 -58.444)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_46" data-name="Path 46" d="M183.389,108.944a1.3,1.3,0,1,0,0-2.6,1.336,1.336,0,0,0-.166.017c-.01-.039-.019-.078-.03-.117a1.3,1.3,0,0,0-.5-2.5,1.285,1.285,0,0,0-.783.269q-.043-.044-.087-.087a1.285,1.285,0,0,0,.263-.776,1.3,1.3,0,0,0-2.493-.509,5.195,5.195,0,1,0,0,10,1.3,1.3,0,0,0,2.493-.509,1.285,1.285,0,0,0-.263-.776q.044-.043.087-.087a1.285,1.285,0,0,0,.783.269,1.3,1.3,0,0,0,.5-2.5c.011-.038.02-.078.03-.117a1.337,1.337,0,0,0,.166.017" transform="translate(-84.691 -57.894)" fill="#44d860" fill-rule="evenodd"/>
<path id="Path_47" data-name="Path 47" d="M52.188,48.292a1.3,1.3,0,0,1-1.3-1.3,3.9,3.9,0,0,0-7.792,0,1.3,1.3,0,1,1-2.6,0,6.493,6.493,0,0,1,12.987,0,1.3,1.3,0,0,1-1.3,1.3" transform="translate(-21.02 -28.41)" fill-rule="evenodd"/>
<path id="Path_48" data-name="Path 48" d="M103,139.752h31.168a10.389,10.389,0,0,0,10.389-10.389V93H113.389A10.389,10.389,0,0,0,103,103.389Z" transform="translate(-51.054 -53.638)" fill="#ffff50" fill-rule="evenodd"/>
<path id="Path_49" data-name="Path 49" d="M141.1,94.017H115.106a.519.519,0,1,1,0-1.039H141.1a.519.519,0,0,1,0,1.039m0,10.389H115.106a.519.519,0,1,1,0-1.039H141.1a.519.519,0,0,1,0,1.039m0,10.389H115.106a.519.519,0,1,1,0-1.039H141.1a.519.519,0,0,1,0,1.039m0-25.877H115.106a.519.519,0,1,1,0-1.039H141.1a.519.519,0,0,1,0,1.039m0,10.293H115.106a.519.519,0,1,1,0-1.039H141.1a.519.519,0,0,1,0,1.039m0,10.389H115.106a.519.519,0,1,1,0-1.039H141.1a.519.519,0,0,1,0,1.039m7.782-47.993c-.006,0-.011,0-.018,0-1.605.055-2.365,1.66-3.035,3.077-.7,1.48-1.24,2.443-2.126,2.414-.981-.035-1.542-1.144-2.137-2.317-.683-1.347-1.462-2.876-3.1-2.819-1.582.054-2.344,1.451-3.017,2.684-.715,1.313-1.2,2.112-2.141,2.075-1-.036-1.533-.938-2.149-1.981-.686-1.162-1.479-2.467-3.084-2.423-1.555.053-2.319,1.239-2.994,2.286-.713,1.106-1.213,1.781-2.164,1.741-1.025-.036-1.554-.784-2.167-1.65-.688-.973-1.463-2.074-3.062-2.021a3.815,3.815,0,0,0-2.959,1.879c-.64.812-1.14,1.456-2.2,1.415a.52.52,0,0,0-.037,1.039,3.588,3.588,0,0,0,3.05-1.811c.611-.777,1.139-1.448,2.178-1.483,1-.043,1.47.579,2.179,1.582.674.953,1.438,2.033,2.977,2.089,1.612.054,2.387-1.151,3.074-2.217.614-.953,1.144-1.775,2.156-1.81.931-.035,1.438.7,2.153,1.912.674,1.141,1.437,2.434,3.006,2.491,1.623.056,2.407-1.361,3.09-2.616.592-1.085,1.15-2.109,2.14-2.143.931-.022,1.417.829,2.135,2.249.671,1.326,1.432,2.828,3.026,2.886l.088,0c1.592,0,2.347-1.6,3.015-3.01.592-1.252,1.152-2.431,2.113-2.479Z" transform="translate(-55.378 -38.552)" fill-rule="evenodd"/>
<path id="Path_50" data-name="Path 50" d="M83,163.779h20.779V143H83Z" transform="translate(-41.443 -77.665)" fill="#3ecc5f" fill-rule="evenodd"/>
<g id="Group_8" data-name="Group 8" transform="matrix(0.966, -0.259, 0.259, 0.966, 51.971, 43.3)">
<rect id="Rectangle_3" data-name="Rectangle 3" width="43.906" height="17.333" rx="2" transform="translate(0 0)" fill="#d8d8d8"/>
<g id="Group_2" data-name="Group 2" transform="translate(0.728 10.948)">
<rect id="Rectangle_4" data-name="Rectangle 4" width="2.537" height="2.537" rx="1" transform="translate(7.985 0)" fill="#4a4a4a"/>
<rect id="Rectangle_5" data-name="Rectangle 5" width="2.537" height="2.537" rx="1" transform="translate(10.991 0)" fill="#4a4a4a"/>
<rect id="Rectangle_6" data-name="Rectangle 6" width="2.537" height="2.537" rx="1" transform="translate(13.997 0)" fill="#4a4a4a"/>
<rect id="Rectangle_7" data-name="Rectangle 7" width="2.537" height="2.537" rx="1" transform="translate(17.003 0)" fill="#4a4a4a"/>
<rect id="Rectangle_8" data-name="Rectangle 8" width="2.537" height="2.537" rx="1" transform="translate(20.009 0)" fill="#4a4a4a"/>
<rect id="Rectangle_9" data-name="Rectangle 9" width="2.537" height="2.537" rx="1" transform="translate(23.015 0)" fill="#4a4a4a"/>
<rect id="Rectangle_10" data-name="Rectangle 10" width="2.537" height="2.537" rx="1" transform="translate(26.021 0)" fill="#4a4a4a"/>
<rect id="Rectangle_11" data-name="Rectangle 11" width="2.537" height="2.537" rx="1" transform="translate(29.028 0)" fill="#4a4a4a"/>
<rect id="Rectangle_12" data-name="Rectangle 12" width="2.537" height="2.537" rx="1" transform="translate(32.034 0)" fill="#4a4a4a"/>
<path id="Path_51" data-name="Path 51" d="M.519,0H6.9A.519.519,0,0,1,7.421.52v1.5a.519.519,0,0,1-.519.519H.519A.519.519,0,0,1,0,2.017V.519A.519.519,0,0,1,.519,0ZM35.653,0h6.383a.519.519,0,0,1,.519.519v1.5a.519.519,0,0,1-.519.519H35.652a.519.519,0,0,1-.519-.519V.519A.519.519,0,0,1,35.652,0Z" transform="translate(0 0)" fill="#4a4a4a" fill-rule="evenodd"/>
</g>
<g id="Group_3" data-name="Group 3" transform="translate(0.728 4.878)">
<path id="Path_52" data-name="Path 52" d="M.519,0H2.956a.519.519,0,0,1,.519.519v1.5a.519.519,0,0,1-.519.519H.519A.519.519,0,0,1,0,2.017V.519A.519.519,0,0,1,.519,0Z" transform="translate(0 0)" fill="#4a4a4a" fill-rule="evenodd"/>
<rect id="Rectangle_13" data-name="Rectangle 13" width="2.537" height="2.537" rx="1" transform="translate(3.945 0)" fill="#4a4a4a"/>
<rect id="Rectangle_14" data-name="Rectangle 14" width="2.537" height="2.537" rx="1" transform="translate(6.951 0)" fill="#4a4a4a"/>
<rect id="Rectangle_15" data-name="Rectangle 15" width="2.537" height="2.537" rx="1" transform="translate(9.958 0)" fill="#4a4a4a"/>
<rect id="Rectangle_16" data-name="Rectangle 16" width="2.537" height="2.537" rx="1" transform="translate(12.964 0)" fill="#4a4a4a"/>
<rect id="Rectangle_17" data-name="Rectangle 17" width="2.537" height="2.537" rx="1" transform="translate(15.97 0)" fill="#4a4a4a"/>
<rect id="Rectangle_18" data-name="Rectangle 18" width="2.537" height="2.537" rx="1" transform="translate(18.976 0)" fill="#4a4a4a"/>
<rect id="Rectangle_19" data-name="Rectangle 19" width="2.537" height="2.537" rx="1" transform="translate(21.982 0)" fill="#4a4a4a"/>
<rect id="Rectangle_20" data-name="Rectangle 20" width="2.537" height="2.537" rx="1" transform="translate(24.988 0)" fill="#4a4a4a"/>
<rect id="Rectangle_21" data-name="Rectangle 21" width="2.537" height="2.537" rx="1" transform="translate(27.994 0)" fill="#4a4a4a"/>
<rect id="Rectangle_22" data-name="Rectangle 22" width="2.537" height="2.537" rx="1" transform="translate(31 0)" fill="#4a4a4a"/>
<rect id="Rectangle_23" data-name="Rectangle 23" width="2.537" height="2.537" rx="1" transform="translate(34.006 0)" fill="#4a4a4a"/>
<rect id="Rectangle_24" data-name="Rectangle 24" width="2.537" height="2.537" rx="1" transform="translate(37.012 0)" fill="#4a4a4a"/>
<rect id="Rectangle_25" data-name="Rectangle 25" width="2.537" height="2.537" rx="1" transform="translate(40.018 0)" fill="#4a4a4a"/>
</g>
<g id="Group_4" data-name="Group 4" transform="translate(43.283 4.538) rotate(180)">
<path id="Path_53" data-name="Path 53" d="M.519,0H2.956a.519.519,0,0,1,.519.519v1.5a.519.519,0,0,1-.519.519H.519A.519.519,0,0,1,0,2.017V.519A.519.519,0,0,1,.519,0Z" transform="translate(0 0)" fill="#4a4a4a" fill-rule="evenodd"/>
<rect id="Rectangle_26" data-name="Rectangle 26" width="2.537" height="2.537" rx="1" transform="translate(3.945 0)" fill="#4a4a4a"/>
<rect id="Rectangle_27" data-name="Rectangle 27" width="2.537" height="2.537" rx="1" transform="translate(6.951 0)" fill="#4a4a4a"/>
<rect id="Rectangle_28" data-name="Rectangle 28" width="2.537" height="2.537" rx="1" transform="translate(9.958 0)" fill="#4a4a4a"/>
<rect id="Rectangle_29" data-name="Rectangle 29" width="2.537" height="2.537" rx="1" transform="translate(12.964 0)" fill="#4a4a4a"/>
<rect id="Rectangle_30" data-name="Rectangle 30" width="2.537" height="2.537" rx="1" transform="translate(15.97 0)" fill="#4a4a4a"/>
<rect id="Rectangle_31" data-name="Rectangle 31" width="2.537" height="2.537" rx="1" transform="translate(18.976 0)" fill="#4a4a4a"/>
<rect id="Rectangle_32" data-name="Rectangle 32" width="2.537" height="2.537" rx="1" transform="translate(21.982 0)" fill="#4a4a4a"/>
<rect id="Rectangle_33" data-name="Rectangle 33" width="2.537" height="2.537" rx="1" transform="translate(24.988 0)" fill="#4a4a4a"/>
<rect id="Rectangle_34" data-name="Rectangle 34" width="2.537" height="2.537" rx="1" transform="translate(27.994 0)" fill="#4a4a4a"/>
<rect id="Rectangle_35" data-name="Rectangle 35" width="2.537" height="2.537" rx="1" transform="translate(31.001 0)" fill="#4a4a4a"/>
<rect id="Rectangle_36" data-name="Rectangle 36" width="2.537" height="2.537" rx="1" transform="translate(34.007 0)" fill="#4a4a4a"/>
<rect id="Rectangle_37" data-name="Rectangle 37" width="2.537" height="2.537" rx="1" transform="translate(37.013 0)" fill="#4a4a4a"/>
<rect id="Rectangle_38" data-name="Rectangle 38" width="2.537" height="2.537" rx="1" transform="translate(40.018 0)" fill="#4a4a4a"/>
<rect id="Rectangle_39" data-name="Rectangle 39" width="2.537" height="2.537" rx="1" transform="translate(3.945 0)" fill="#4a4a4a"/>
<rect id="Rectangle_40" data-name="Rectangle 40" width="2.537" height="2.537" rx="1" transform="translate(6.951 0)" fill="#4a4a4a"/>
<rect id="Rectangle_41" data-name="Rectangle 41" width="2.537" height="2.537" rx="1" transform="translate(9.958 0)" fill="#4a4a4a"/>
<rect id="Rectangle_42" data-name="Rectangle 42" width="2.537" height="2.537" rx="1" transform="translate(12.964 0)" fill="#4a4a4a"/>
<rect id="Rectangle_43" data-name="Rectangle 43" width="2.537" height="2.537" rx="1" transform="translate(15.97 0)" fill="#4a4a4a"/>
<rect id="Rectangle_44" data-name="Rectangle 44" width="2.537" height="2.537" rx="1" transform="translate(18.976 0)" fill="#4a4a4a"/>
<rect id="Rectangle_45" data-name="Rectangle 45" width="2.537" height="2.537" rx="1" transform="translate(21.982 0)" fill="#4a4a4a"/>
<rect id="Rectangle_46" data-name="Rectangle 46" width="2.537" height="2.537" rx="1" transform="translate(24.988 0)" fill="#4a4a4a"/>
<rect id="Rectangle_47" data-name="Rectangle 47" width="2.537" height="2.537" rx="1" transform="translate(27.994 0)" fill="#4a4a4a"/>
<rect id="Rectangle_48" data-name="Rectangle 48" width="2.537" height="2.537" rx="1" transform="translate(31.001 0)" fill="#4a4a4a"/>
<rect id="Rectangle_49" data-name="Rectangle 49" width="2.537" height="2.537" rx="1" transform="translate(34.007 0)" fill="#4a4a4a"/>
<rect id="Rectangle_50" data-name="Rectangle 50" width="2.537" height="2.537" rx="1" transform="translate(37.013 0)" fill="#4a4a4a"/>
<rect id="Rectangle_51" data-name="Rectangle 51" width="2.537" height="2.537" rx="1" transform="translate(40.018 0)" fill="#4a4a4a"/>
</g>
<g id="Group_6" data-name="Group 6" transform="translate(0.728 7.883)">
<path id="Path_54" data-name="Path 54" d="M.519,0h3.47a.519.519,0,0,1,.519.519v1.5a.519.519,0,0,1-.519.519H.519A.519.519,0,0,1,0,2.017V.52A.519.519,0,0,1,.519,0Z" transform="translate(0 0)" fill="#4a4a4a" fill-rule="evenodd"/>
<g id="Group_5" data-name="Group 5" transform="translate(5.073 0)">
<rect id="Rectangle_52" data-name="Rectangle 52" width="2.537" height="2.537" rx="1" transform="translate(0 0)" fill="#4a4a4a"/>
<rect id="Rectangle_53" data-name="Rectangle 53" width="2.537" height="2.537" rx="1" transform="translate(3.006 0)" fill="#4a4a4a"/>
<rect id="Rectangle_54" data-name="Rectangle 54" width="2.537" height="2.537" rx="1" transform="translate(6.012 0)" fill="#4a4a4a"/>
<rect id="Rectangle_55" data-name="Rectangle 55" width="2.537" height="2.537" rx="1" transform="translate(9.018 0)" fill="#4a4a4a"/>
<rect id="Rectangle_56" data-name="Rectangle 56" width="2.537" height="2.537" rx="1" transform="translate(12.025 0)" fill="#4a4a4a"/>
<rect id="Rectangle_57" data-name="Rectangle 57" width="2.537" height="2.537" rx="1" transform="translate(15.031 0)" fill="#4a4a4a"/>
<rect id="Rectangle_58" data-name="Rectangle 58" width="2.537" height="2.537" rx="1" transform="translate(18.037 0)" fill="#4a4a4a"/>
<rect id="Rectangle_59" data-name="Rectangle 59" width="2.537" height="2.537" rx="1" transform="translate(21.042 0)" fill="#4a4a4a"/>
<rect id="Rectangle_60" data-name="Rectangle 60" width="2.537" height="2.537" rx="1" transform="translate(24.049 0)" fill="#4a4a4a"/>
<rect id="Rectangle_61" data-name="Rectangle 61" width="2.537" height="2.537" rx="1" transform="translate(27.055 0)" fill="#4a4a4a"/>
<rect id="Rectangle_62" data-name="Rectangle 62" width="2.537" height="2.537" rx="1" transform="translate(30.061 0)" fill="#4a4a4a"/>
</g>
<path id="Path_55" data-name="Path 55" d="M.52,0H3.8a.519.519,0,0,1,.519.519v1.5a.519.519,0,0,1-.519.519H.519A.519.519,0,0,1,0,2.017V.52A.519.519,0,0,1,.519,0Z" transform="translate(38.234 0)" fill="#4a4a4a" fill-rule="evenodd"/>
</g>
<g id="Group_7" data-name="Group 7" transform="translate(0.728 14.084)">
<rect id="Rectangle_63" data-name="Rectangle 63" width="2.537" height="2.537" rx="1" transform="translate(0 0)" fill="#4a4a4a"/>
<rect id="Rectangle_64" data-name="Rectangle 64" width="2.537" height="2.537" rx="1" transform="translate(3.006 0)" fill="#4a4a4a"/>
<rect id="Rectangle_65" data-name="Rectangle 65" width="2.537" height="2.537" rx="1" transform="translate(6.012 0)" fill="#4a4a4a"/>
<rect id="Rectangle_66" data-name="Rectangle 66" width="2.537" height="2.537" rx="1" transform="translate(9.018 0)" fill="#4a4a4a"/>
<path id="Path_56" data-name="Path 56" d="M.519,0H14.981A.519.519,0,0,1,15.5.519v1.5a.519.519,0,0,1-.519.519H.519A.519.519,0,0,1,0,2.018V.519A.519.519,0,0,1,.519,0Zm15.97,0h1.874a.519.519,0,0,1,.519.519v1.5a.519.519,0,0,1-.519.519H16.489a.519.519,0,0,1-.519-.519V.519A.519.519,0,0,1,16.489,0Z" transform="translate(12.024 0)" fill="#4a4a4a" fill-rule="evenodd"/>
<rect id="Rectangle_67" data-name="Rectangle 67" width="2.537" height="2.537" rx="1" transform="translate(31.376 0)" fill="#4a4a4a"/>
<rect id="Rectangle_68" data-name="Rectangle 68" width="2.537" height="2.537" rx="1" transform="translate(34.382 0)" fill="#4a4a4a"/>
<rect id="Rectangle_69" data-name="Rectangle 69" width="2.537" height="2.537" rx="1" transform="translate(40.018 0)" fill="#4a4a4a"/>
<path id="Path_57" data-name="Path 57" d="M2.537,0V.561a.519.519,0,0,1-.519.519H.519A.519.519,0,0,1,0,.561V0Z" transform="translate(39.736 1.08) rotate(180)" fill="#4a4a4a"/>
<path id="Path_58" data-name="Path 58" d="M2.537,0V.561a.519.519,0,0,1-.519.519H.519A.519.519,0,0,1,0,.561V0Z" transform="translate(37.2 1.456)" fill="#4a4a4a"/>
</g>
<rect id="Rectangle_70" data-name="Rectangle 70" width="42.273" height="1.127" rx="0.564" transform="translate(0.915 0.556)" fill="#4a4a4a"/>
<rect id="Rectangle_71" data-name="Rectangle 71" width="2.37" height="0.752" rx="0.376" transform="translate(1.949 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_72" data-name="Rectangle 72" width="2.37" height="0.752" rx="0.376" transform="translate(5.193 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_73" data-name="Rectangle 73" width="2.37" height="0.752" rx="0.376" transform="translate(7.688 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_74" data-name="Rectangle 74" width="2.37" height="0.752" rx="0.376" transform="translate(10.183 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_75" data-name="Rectangle 75" width="2.37" height="0.752" rx="0.376" transform="translate(12.679 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_76" data-name="Rectangle 76" width="2.37" height="0.752" rx="0.376" transform="translate(15.797 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_77" data-name="Rectangle 77" width="2.37" height="0.752" rx="0.376" transform="translate(18.292 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_78" data-name="Rectangle 78" width="2.37" height="0.752" rx="0.376" transform="translate(20.788 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_79" data-name="Rectangle 79" width="2.37" height="0.752" rx="0.376" transform="translate(23.283 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_80" data-name="Rectangle 80" width="2.37" height="0.752" rx="0.376" transform="translate(26.402 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_81" data-name="Rectangle 81" width="2.37" height="0.752" rx="0.376" transform="translate(28.897 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_82" data-name="Rectangle 82" width="2.37" height="0.752" rx="0.376" transform="translate(31.393 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_83" data-name="Rectangle 83" width="2.37" height="0.752" rx="0.376" transform="translate(34.512 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_84" data-name="Rectangle 84" width="2.37" height="0.752" rx="0.376" transform="translate(37.007 0.744)" fill="#d8d8d8" opacity="0.136"/>
<rect id="Rectangle_85" data-name="Rectangle 85" width="2.37" height="0.752" rx="0.376" transform="translate(39.502 0.744)" fill="#d8d8d8" opacity="0.136"/>
</g>
<path id="Path_59" data-name="Path 59" d="M123.779,148.389a2.583,2.583,0,0,0-.332.033c-.02-.078-.038-.156-.06-.234a2.594,2.594,0,1,0-2.567-4.455q-.086-.088-.174-.175a2.593,2.593,0,1,0-4.461-2.569c-.077-.022-.154-.04-.231-.06a2.6,2.6,0,1,0-5.128,0c-.077.02-.154.038-.231.06a2.594,2.594,0,1,0-4.461,2.569,10.384,10.384,0,1,0,17.314,9.992,2.592,2.592,0,1,0,.332-5.161" transform="translate(-51.054 -75.262)" fill="#44d860" fill-rule="evenodd"/>
<path id="Path_60" data-name="Path 60" d="M83,113.389h20.779V103H83Z" transform="translate(-41.443 -58.444)" fill="#3ecc5f" fill-rule="evenodd"/>
<path id="Path_61" data-name="Path 61" d="M123.389,108.944a1.3,1.3,0,1,0,0-2.6,1.338,1.338,0,0,0-.166.017c-.01-.039-.019-.078-.03-.117a1.3,1.3,0,0,0-.5-2.5,1.285,1.285,0,0,0-.783.269q-.043-.044-.087-.087a1.285,1.285,0,0,0,.263-.776,1.3,1.3,0,0,0-2.493-.509,5.195,5.195,0,1,0,0,10,1.3,1.3,0,0,0,2.493-.509,1.285,1.285,0,0,0-.263-.776q.044-.043.087-.087a1.285,1.285,0,0,0,.783.269,1.3,1.3,0,0,0,.5-2.5c.011-.038.02-.078.03-.117a1.335,1.335,0,0,0,.166.017" transform="translate(-55.859 -57.894)" fill="#44d860" fill-rule="evenodd"/>
<path id="Path_62" data-name="Path 62" d="M141.8,38.745a1.41,1.41,0,0,1-.255-.026,1.309,1.309,0,0,1-.244-.073,1.349,1.349,0,0,1-.224-.119,1.967,1.967,0,0,1-.2-.161,1.52,1.52,0,0,1-.161-.2,1.282,1.282,0,0,1-.218-.722,1.41,1.41,0,0,1,.026-.255,1.5,1.5,0,0,1,.072-.244,1.364,1.364,0,0,1,.12-.223,1.252,1.252,0,0,1,.358-.358,1.349,1.349,0,0,1,.224-.119,1.309,1.309,0,0,1,.244-.073,1.2,1.2,0,0,1,.509,0,1.262,1.262,0,0,1,.468.192,1.968,1.968,0,0,1,.2.161,1.908,1.908,0,0,1,.161.2,1.322,1.322,0,0,1,.12.223,1.361,1.361,0,0,1,.1.5,1.317,1.317,0,0,1-.379.919,1.968,1.968,0,0,1-.2.161,1.346,1.346,0,0,1-.223.119,1.332,1.332,0,0,1-.5.1m10.389-.649a1.326,1.326,0,0,1-.92-.379,1.979,1.979,0,0,1-.161-.2,1.282,1.282,0,0,1-.218-.722,1.326,1.326,0,0,1,.379-.919,1.967,1.967,0,0,1,.2-.161,1.351,1.351,0,0,1,.224-.119,1.308,1.308,0,0,1,.244-.073,1.2,1.2,0,0,1,.509,0,1.262,1.262,0,0,1,.468.192,1.967,1.967,0,0,1,.2.161,1.326,1.326,0,0,1,.379.919,1.461,1.461,0,0,1-.026.255,1.323,1.323,0,0,1-.073.244,1.847,1.847,0,0,1-.119.223,1.911,1.911,0,0,1-.161.2,1.967,1.967,0,0,1-.2.161,1.294,1.294,0,0,1-.722.218" transform="translate(-69.074 -26.006)" fill-rule="evenodd"/>
</g>
<g id="React-icon" transform="translate(906.3 541.56)">
<path id="Path_330" data-name="Path 330" d="M263.668,117.179c0-5.827-7.3-11.35-18.487-14.775,2.582-11.4,1.434-20.477-3.622-23.382a7.861,7.861,0,0,0-4.016-1v4a4.152,4.152,0,0,1,2.044.466c2.439,1.4,3.5,6.724,2.672,13.574-.2,1.685-.52,3.461-.914,5.272a86.9,86.9,0,0,0-11.386-1.954,87.469,87.469,0,0,0-7.459-8.965c5.845-5.433,11.332-8.41,15.062-8.41V78h0c-4.931,0-11.386,3.514-17.913,9.611-6.527-6.061-12.982-9.539-17.913-9.539v4c3.712,0,9.216,2.959,15.062,8.356a84.687,84.687,0,0,0-7.405,8.947,83.732,83.732,0,0,0-11.4,1.972c-.412-1.793-.717-3.532-.932-5.2-.843-6.85.2-12.175,2.618-13.592a3.991,3.991,0,0,1,2.062-.466v-4h0a8,8,0,0,0-4.052,1c-5.039,2.9-6.168,11.96-3.568,23.328-11.153,3.443-18.415,8.947-18.415,14.757,0,5.828,7.3,11.35,18.487,14.775-2.582,11.4-1.434,20.477,3.622,23.382a7.882,7.882,0,0,0,4.034,1c4.931,0,11.386-3.514,17.913-9.611,6.527,6.061,12.982,9.539,17.913,9.539a8,8,0,0,0,4.052-1c5.039-2.9,6.168-11.96,3.568-23.328C256.406,128.511,263.668,122.988,263.668,117.179Zm-23.346-11.96c-.663,2.313-1.488,4.7-2.421,7.083-.735-1.434-1.506-2.869-2.349-4.3-.825-1.434-1.7-2.833-2.582-4.2C235.517,104.179,237.974,104.645,240.323,105.219Zm-8.212,19.1c-1.4,2.421-2.833,4.716-4.321,6.85-2.672.233-5.379.359-8.1.359-2.708,0-5.415-.126-8.069-.341q-2.232-3.2-4.339-6.814-2.044-3.523-3.73-7.136c1.112-2.4,2.367-4.805,3.712-7.154,1.4-2.421,2.833-4.716,4.321-6.85,2.672-.233,5.379-.359,8.1-.359,2.708,0,5.415.126,8.069.341q2.232,3.2,4.339,6.814,2.044,3.523,3.73,7.136C234.692,119.564,233.455,121.966,232.11,124.315Zm5.792-2.331c.968,2.4,1.793,4.805,2.474,7.136-2.349.574-4.823,1.058-7.387,1.434.879-1.381,1.757-2.8,2.582-4.25C236.4,124.871,237.167,123.419,237.9,121.984ZM219.72,141.116a73.921,73.921,0,0,1-4.985-5.738c1.614.072,3.263.126,4.931.126,1.685,0,3.353-.036,4.985-.126A69.993,69.993,0,0,1,219.72,141.116ZM206.38,130.555c-2.546-.377-5-.843-7.352-1.417.663-2.313,1.488-4.7,2.421-7.083.735,1.434,1.506,2.869,2.349,4.3S205.5,129.192,206.38,130.555ZM219.63,93.241a73.924,73.924,0,0,1,4.985,5.738c-1.614-.072-3.263-.126-4.931-.126-1.686,0-3.353.036-4.985.126A69.993,69.993,0,0,1,219.63,93.241ZM206.362,103.8c-.879,1.381-1.757,2.8-2.582,4.25-.825,1.434-1.6,2.869-2.331,4.3-.968-2.4-1.793-4.805-2.474-7.136C201.323,104.663,203.8,104.179,206.362,103.8Zm-16.227,22.449c-6.348-2.708-10.454-6.258-10.454-9.073s4.106-6.383,10.454-9.073c1.542-.663,3.228-1.255,4.967-1.811a86.122,86.122,0,0,0,4.034,10.92,84.9,84.9,0,0,0-3.981,10.866C193.38,127.525,191.694,126.915,190.134,126.252Zm9.647,25.623c-2.439-1.4-3.5-6.724-2.672-13.574.2-1.686.52-3.461.914-5.272a86.9,86.9,0,0,0,11.386,1.954,87.465,87.465,0,0,0,7.459,8.965c-5.845,5.433-11.332,8.41-15.062,8.41A4.279,4.279,0,0,1,199.781,151.875Zm42.532-13.663c.843,6.85-.2,12.175-2.618,13.592a3.99,3.99,0,0,1-2.062.466c-3.712,0-9.216-2.959-15.062-8.356a84.689,84.689,0,0,0,7.405-8.947,83.731,83.731,0,0,0,11.4-1.972A50.194,50.194,0,0,1,242.313,138.212Zm6.9-11.96c-1.542.663-3.228,1.255-4.967,1.811a86.12,86.12,0,0,0-4.034-10.92,84.9,84.9,0,0,0,3.981-10.866c1.775.556,3.461,1.165,5.039,1.829,6.348,2.708,10.454,6.258,10.454,9.073C259.67,119.994,255.564,123.562,249.216,126.252Z" fill="#61dafb"/>
<path id="Path_331" data-name="Path 331" d="M320.8,78.4Z" transform="translate(-119.082 -0.328)" fill="#61dafb"/>
<circle id="Ellipse_112" data-name="Ellipse 112" cx="8.194" cy="8.194" r="8.194" transform="translate(211.472 108.984)" fill="#61dafb"/>
<path id="Path_332" data-name="Path 332" d="M520.5,78.1Z" transform="translate(-282.975 -0.082)" fill="#61dafb"/>
</g>
</g>
</svg>

After

Size: 35 KiB

File diff suppressed because one or more lines are too long

After

Size: 12 KiB

7610
docs/yarn.lock Normal file

File diff suppressed because it is too large

View File

@ -1,4 +0,0 @@
[[redirects]]
from = "/*"
to = "/index.html"
status = 200

View File

@ -1,77 +1,8 @@
{
-  "name": "estiband",
-  "version": "0.1.0",
-  "homepage": "https://foretold-app.github.io/estiband/",
-  "scripts": {
-    "build": "bsb -make-world",
-    "build:style": "tailwind build src/styles/index.css -o src/styles/tailwind.css",
-    "start": "bsb -make-world -w -ws _ ",
-    "clean": "bsb -clean-world",
-    "parcel": "parcel ./src/index.html --public-url / --no-autoinstall -- watch",
-    "parcel-build": "parcel build ./src/index.html --no-source-maps --no-autoinstall",
-    "showcase": "PORT=12345 parcel showcase/index.html",
-    "server": "moduleserve ./ --port 8000",
-    "predeploy": "parcel build ./src/index.html --no-source-maps --no-autoinstall",
-    "deploy": "gh-pages -d dist",
-    "test": "jest",
-    "test:ci": "yarn jest",
-    "watch:test": "jest --watchAll",
-    "watch:s": "yarn jest -- Converter_test --watch"
-  },
-  "keywords": [
-    "BuckleScript",
-    "ReasonReact",
-    "reason-react"
-  ],
-  "author": "",
-  "license": "MIT",
-  "dependencies": {
-    "@foretold/components": "0.0.6",
-    "@glennsl/bs-json": "^5.0.2",
-    "ace-builds": "^1.4.12",
-    "antd": "3.17.0",
-    "autoprefixer": "9.7.4",
-    "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
-    "binary-search-tree": "0.2.6",
-    "bs-ant-design-alt": "2.0.0-alpha.33",
-    "bs-css": "11.0.0",
-    "bs-moment": "0.4.5",
-    "bs-reform": "9.7.1",
-    "bsb-js": "1.1.7",
-    "d3": "5.15.0",
-    "gh-pages": "2.2.0",
-    "jest": "^25.5.1",
-    "jstat": "1.9.2",
-    "lenses-ppx": "5.1.0",
-    "less": "3.10.3",
-    "lodash": "4.17.15",
-    "mathjs": "5.10.3",
-    "moduleserve": "0.9.1",
-    "moment": "2.24.0",
-    "pdfast": "^0.2.0",
-    "postcss-cli": "7.1.0",
-    "rationale": "0.2.0",
-    "react": "^16.10.0",
-    "react-ace": "^9.2.0",
-    "react-dom": "^0.13.0 || ^0.14.0 || ^15.0.1 || ^16.0.0",
-    "react-use": "^13.27.0",
-    "react-vega": "^7.4.1",
-    "reason-react": ">=0.7.0",
-    "reschema": "1.3.0",
-    "tailwindcss": "1.2.0",
-    "vega": "*",
-    "vega-embed": "6.6.0",
-    "vega-lite": "*"
-  },
+  "name": "root",
+  "private": true,
  "devDependencies": {
-    "@glennsl/bs-jest": "^0.5.1",
-    "bs-platform": "7.3.2",
-    "parcel-bundler": "1.12.4",
-    "parcel-plugin-bundle-visualiser": "^1.2.0",
-    "parcel-plugin-less-js-enabled": "1.0.2"
  },
-  "alias": {
-    "react": "./node_modules/react",
-    "react-dom": "./node_modules/react-dom"
-  }
+  "scripts": {
+  }
}

View File

@ -5,7 +5,6 @@ npm-debug.log
/node_modules/
.cache
.cache/*
-dist
lib/*
*.cache
build

View File

@ -0,0 +1,13 @@
open Jest
open Expect
describe("Bandwidth", () => {
test("nrd0()", () => {
let data = [1., 4., 3., 2.]
expect(Bandwidth.nrd0(data)) |> toEqual(0.7625801874014622)
})
test("nrd()", () => {
let data = [1., 4., 3., 2.]
expect(Bandwidth.nrd(data)) |> toEqual(0.8981499984950554)
})
})
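For orientation (an editor's note, not part of the commit): nrd0 and nrd appear to be the usual Silverman-style bandwidth rule of thumb and its Scott-flavoured variant, named after R's bw.nrd0 and bw.nrd, and the two expected values above differ by exactly the 1.06 / 0.9 factor ratio those rules use. Up to the exact standard-deviation and quantile conventions of the implementation, the formulas being exercised are roughly

$$\operatorname{nrd0}(x) = 0.9\,\min\!\Big(\widehat{\sigma},\ \tfrac{\operatorname{IQR}(x)}{1.34}\Big)\, n^{-1/5}, \qquad \operatorname{nrd}(x) = 1.06\,\min\!\Big(\widehat{\sigma},\ \tfrac{\operatorname{IQR}(x)}{1.34}\Big)\, n^{-1/5}.$$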

View File

@ -1,71 +1,56 @@
-open Jest;
-open Expect;
+open Jest
+open Expect
let makeTest = (~only=false, str, item1, item2) =>
  only
-    ? Only.test(str, () =>
-        expect(item1) |> toEqual(item2)
-      )
-    : test(str, () =>
-        expect(item1) |> toEqual(item2)
-      );
+    ? Only.test(str, () => expect(item1) |> toEqual(item2))
+    : test(str, () => expect(item1) |> toEqual(item2))
-describe("DistTypes", () => {
+describe("DistTypes", () =>
  describe("Domain", () => {
    let makeComplete = (yPoint, expectation) =>
      makeTest(
        "With input: " ++ Js.Float.toString(yPoint),
        DistTypes.Domain.yPointToSubYPoint(Complete, yPoint),
        expectation,
-      );
-    let makeSingle =
-        (
-          direction: [ | `left | `right],
-          excludingProbabilityMass,
-          yPoint,
-          expectation,
-        ) =>
+      )
+    let makeSingle = (direction: [#left | #right], excludingProbabilityMass, yPoint, expectation) =>
      makeTest(
-        "Excluding: "
-        ++ Js.Float.toString(excludingProbabilityMass)
-        ++ " and yPoint: "
-        ++ Js.Float.toString(yPoint),
+        "Excluding: " ++
+        (Js.Float.toString(excludingProbabilityMass) ++
+        (" and yPoint: " ++ Js.Float.toString(yPoint))),
        DistTypes.Domain.yPointToSubYPoint(
-          direction == `left
-            ? LeftLimited({xPoint: 3.0, excludingProbabilityMass})
-            : RightLimited({xPoint: 3.0, excludingProbabilityMass}),
+          direction == #left
+            ? LeftLimited({xPoint: 3.0, excludingProbabilityMass: excludingProbabilityMass})
+            : RightLimited({xPoint: 3.0, excludingProbabilityMass: excludingProbabilityMass}),
          yPoint,
        ),
        expectation,
-      );
+      )
    let makeDouble = (domain, yPoint, expectation) =>
-      makeTest(
-        "Excluding: limits",
-        DistTypes.Domain.yPointToSubYPoint(domain, yPoint),
-        expectation,
-      );
+      makeTest("Excluding: limits", DistTypes.Domain.yPointToSubYPoint(domain, yPoint), expectation)
    describe("With Complete Domain", () => {
-      makeComplete(0.0, Some(0.0));
-      makeComplete(0.6, Some(0.6));
-      makeComplete(1.0, Some(1.0));
-    });
+      makeComplete(0.0, Some(0.0))
+      makeComplete(0.6, Some(0.6))
+      makeComplete(1.0, Some(1.0))
+    })
    describe("With Left Limit", () => {
-      makeSingle(`left, 0.5, 1.0, Some(1.0));
-      makeSingle(`left, 0.5, 0.75, Some(0.5));
-      makeSingle(`left, 0.8, 0.9, Some(0.5));
-      makeSingle(`left, 0.5, 0.4, None);
-      makeSingle(`left, 0.5, 0.5, Some(0.0));
-    });
+      makeSingle(#left, 0.5, 1.0, Some(1.0))
+      makeSingle(#left, 0.5, 0.75, Some(0.5))
+      makeSingle(#left, 0.8, 0.9, Some(0.5))
+      makeSingle(#left, 0.5, 0.4, None)
+      makeSingle(#left, 0.5, 0.5, Some(0.0))
+    })
    describe("With Right Limit", () => {
-      makeSingle(`right, 0.5, 1.0, None);
-      makeSingle(`right, 0.5, 0.25, Some(0.5));
-      makeSingle(`right, 0.8, 0.5, None);
-      makeSingle(`right, 0.2, 0.2, Some(0.25));
-      makeSingle(`right, 0.5, 0.5, Some(1.0));
-      makeSingle(`right, 0.5, 0.0, Some(0.0));
-      makeSingle(`right, 0.5, 0.5, Some(1.0));
-    });
+      makeSingle(#right, 0.5, 1.0, None)
+      makeSingle(#right, 0.5, 0.25, Some(0.5))
+      makeSingle(#right, 0.8, 0.5, None)
+      makeSingle(#right, 0.2, 0.2, Some(0.25))
+      makeSingle(#right, 0.5, 0.5, Some(1.0))
+      makeSingle(#right, 0.5, 0.0, Some(0.0))
+      makeSingle(#right, 0.5, 0.5, Some(1.0))
+    })
    describe("With Left and Right Limit", () => {
      makeDouble(
        LeftAndRightLimited(
@ -74,7 +59,7 @@ describe("DistTypes", () => {
        ),
        0.5,
        Some(0.5),
-      );
+      )
      makeDouble(
        LeftAndRightLimited(
          {excludingProbabilityMass: 0.1, xPoint: 3.0},
@ -82,7 +67,7 @@
        ),
        0.2,
        Some(0.125),
-      );
+      )
      makeDouble(
        LeftAndRightLimited(
          {excludingProbabilityMass: 0.1, xPoint: 3.0},
@ -90,7 +75,7 @@
        ),
        0.1,
        Some(0.0),
-      );
+      )
      makeDouble(
        LeftAndRightLimited(
          {excludingProbabilityMass: 0.1, xPoint: 3.0},
@ -98,7 +83,7 @@
        ),
        0.05,
        None,
-      );
-    });
-  })
-});
+      )
+    })
+  })
+)

View File

@ -1,7 +1,7 @@
-open Jest;
-open Expect;
-let shape: DistTypes.xyShape = {xs: [|1., 4., 8.|], ys: [|8., 9., 2.|]};
+open Jest
+open Expect
+let shape: DistTypes.xyShape = {xs: [1., 4., 8.], ys: [8., 9., 2.]}
// let makeTest = (~only=false, str, item1, item2) =>
// only

View File

@ -1,6 +1,6 @@
open Jest;
open Expect;
+/*
let makeTest = (~only=false, str, item1, item2) =>
  only
    ? Only.test(str, () =>
@ -54,4 +54,4 @@ describe("XYShapes", () => {
Error("Sad"), Error("Sad"),
) )
}) })
}); }); */

View File

@ -0,0 +1,12 @@
var js = require("../src/js/index.js");
describe("A simple result", () => {
test("mean(normal(5,2))", () => {
expect(js.runMePlease("mean(normal(5,2))")).toEqual({ tag: 'Ok', value: { hd: { NAME: 'Float', VAL: 5 }, tl: 0 } });
});
test("10+10", () => {
let foo = js.runMePlease("normal(5,2)");
console.log(foo.value.hd.VAL)
expect(1).toEqual(1);
});
});

View File

@ -0,0 +1,47 @@
open Jest
open Expect
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () => expect(item1) |> toEqual(item2))
: test(str, () => expect(item1) |> toEqual(item2))
describe("Lodash", () =>
describe("Lodash", () => {
makeTest(
"split",
SamplesToShape.Internals.T.splitContinuousAndDiscrete([1.432, 1.33455, 2.0]),
([1.432, 1.33455, 2.0], E.FloatFloatMap.empty()),
)
makeTest(
"split",
SamplesToShape.Internals.T.splitContinuousAndDiscrete([
1.432,
1.33455,
2.0,
2.0,
2.0,
2.0,
]) |> (((c, disc)) => (c, disc |> E.FloatFloatMap.toArray)),
([1.432, 1.33455], [(2.0, 4.0)]),
)
let makeDuplicatedArray = count => {
let arr = Belt.Array.range(1, count) |> E.A.fmap(float_of_int)
let sorted = arr |> Belt.SortArray.stableSortBy(_, compare)
E.A.concatMany([sorted, sorted, sorted, sorted]) |> Belt.SortArray.stableSortBy(_, compare)
}
let (_, discrete) = SamplesToShape.Internals.T.splitContinuousAndDiscrete(
makeDuplicatedArray(10),
)
let toArr = discrete |> E.FloatFloatMap.toArray
makeTest("splitMedium", toArr |> Belt.Array.length, 10)
let (c, discrete) = SamplesToShape.Internals.T.splitContinuousAndDiscrete(
makeDuplicatedArray(500),
)
let toArr = discrete |> E.FloatFloatMap.toArray
makeTest("splitMedium", toArr |> Belt.Array.length, 500)
})
)

View File

@ -0,0 +1,51 @@
open Jest
open Expect
let makeTest = (~only=false, str, item1, item2) =>
only
? Only.test(str, () => expect(item1) |> toEqual(item2))
: test(str, () => expect(item1) |> toEqual(item2))
let shape1: DistTypes.xyShape = {xs: [1., 4., 8.], ys: [0.2, 0.4, 0.8]}
let shape2: DistTypes.xyShape = {
xs: [1., 5., 10.],
ys: [0.2, 0.5, 0.8],
}
let shape3: DistTypes.xyShape = {
xs: [1., 20., 50.],
ys: [0.2, 0.5, 0.8],
}
describe("XYShapes", () => {
describe("logScorePoint", () => {
makeTest("When identical", XYShape.logScorePoint(30, shape1, shape1), Some(0.0))
makeTest("When similar", XYShape.logScorePoint(30, shape1, shape2), Some(1.658971191043856))
makeTest(
"When very different",
XYShape.logScorePoint(30, shape1, shape3),
Some(210.3721280423322),
)
})
// describe("transverse", () => {
// makeTest(
// "When very different",
// XYShape.Transversal._transverse(
// (aCurrent, aLast) => aCurrent +. aLast,
// [|1.0, 2.0, 3.0, 4.0|],
// ),
// [|1.0, 3.0, 6.0, 10.0|],
// )
// });
describe("integrateWithTriangles", () =>
makeTest(
"integrates correctly",
XYShape.Range.integrateWithTriangles(shape1),
Some({
xs: [1., 4., 8.],
ys: [0.0, 0.9000000000000001, 3.3000000000000007],
}),
)
)
})
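A quick arithmetic check of the integrateWithTriangles expectation above (editor's note, not from the commit): the expected cumulative values coincide with trapezoid areas under shape1,

$$\int_1^4 \approx \frac{0.2 + 0.4}{2}\cdot 3 = 0.9, \qquad \int_1^8 \approx 0.9 + \frac{0.4 + 0.8}{2}\cdot 4 = 3.3,$$

which matches ys = [0.0, 0.9, 3.3] up to floating-point noise.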

View File

@ -1,18 +1,11 @@
{
  "name": "probExample",
-  "reason": {
-    "react-jsx": 3
-  },
+  "reason": {},
  "sources": [
    {
      "dir": "src",
      "subdirs": true
    },
-    {
-      "dir": "showcase",
-      "type": "dev",
-      "subdirs": true
-    },
    {
      "dir": "__tests__",
      "type": "dev",
@ -35,17 +28,20 @@
"bs-dependencies": [ "bs-dependencies": [
"@glennsl/bs-jest", "@glennsl/bs-jest",
"@glennsl/bs-json", "@glennsl/bs-json",
"@foretold/components", "rationale"
"bs-ant-design-alt",
"reason-react",
"bs-reform",
"bs-css",
"rationale",
"bs-moment",
"reschema"
], ],
"gentypeconfig": {
"language": "untyped",
"module": "commonjs",
"shims": {},
"debug": {
"all": false,
"basic": false
}
},
"refmt": 3, "refmt": 3,
"ppx-flags": [ "warnings": {
"lenses-ppx/ppx" "number": "+A-42-48-9-30-4-102"
] },
"ppx-flags": []
} }

1368
packages/squiggle-lang/dist/index.js vendored Normal file

File diff suppressed because one or more lines are too long

393
packages/squiggle-lang/dist/report.html vendored Normal file

File diff suppressed because one or more lines are too long

17027
packages/squiggle-lang/package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,44 @@
{
"name": "squiggle-experimental",
"version": "0.1.5",
"homepage": "https://foretold-app.github.io/estiband/",
"private": false,
"scripts": {
"build": "rescript build",
"parcel": "parcel build ./src/js/index.js --no-source-maps --no-autoinstall",
"start": "rescript build -w",
"clean": "rescript clean",
"test": "jest",
"test:ci": "yarn jest",
"watch:test": "jest --watchAll",
"watch:s": "yarn jest -- Converter_test --watch"
},
"keywords": [
"Rescript"
],
"author": "Quantified Uncertainty Research Institute",
"license": "MIT",
"dependencies": {
"@glennsl/bs-json": "^5.0.2",
"@rescriptbr/reform": "^11.0.1",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"lodash": "4.17.15",
"mathjs": "5.10.3",
"pdfast": "^0.2.0",
"rationale": "0.2.0"
},
"devDependencies": {
"@glennsl/bs-jest": "^0.5.1",
"bs-platform": "9.0.2",
"docsify": "^4.12.2",
"gentype": "^4.3.0",
"jest": "^25.5.1",
"jstat": "1.9.2",
"moduleserve": "0.9.1",
"parcel": "^2.2.1",
"parcel-bundler": "1.12.4",
"parcel-plugin-bundle-visualiser": "^1.2.0",
"parcel-plugin-less-js-enabled": "1.0.2",
"rescript": "^9.1.4"
}
}

View File

@ -0,0 +1,5 @@
{ pkgs ? import <nixpkgs> {} }:
pkgs.mkShell {
name = "squiggle";
buildInputs = with pkgs; [ yarn yarn2nix nodePackages.npm ];
}

View File

@ -0,0 +1,12 @@
/* Untyped file generated from ProgramEvaluator.res by genType. */
/* eslint-disable */
const ProgramEvaluatorBS = require('./ProgramEvaluator.bs');
const runAll = function (Arg1) {
const result = ProgramEvaluatorBS.runAll(Arg1);
return result.TAG===0
? {tag:"Ok", value:result._0}
: {tag:"Error", value:result._0}
};;
exports.runAll = runAll

View File

@ -0,0 +1,190 @@
// TODO: This setup is more confusing than it should be, there's more work to do in cleanup here.
module Inputs = {
module SamplingInputs = {
type t = {
sampleCount: option<int>,
outputXYPoints: option<int>,
kernelWidth: option<float>,
shapeLength: option<int>,
}
}
let defaultRecommendedLength = 100
let defaultShouldDownsample = true
type inputs = {
squiggleString: string,
samplingInputs: SamplingInputs.t,
environment: ExpressionTypes.ExpressionTree.environment,
}
let empty: SamplingInputs.t = {
sampleCount: None,
outputXYPoints: None,
kernelWidth: None,
shapeLength: None,
}
let make = (
~samplingInputs=empty,
~squiggleString,
~environment=ExpressionTypes.ExpressionTree.Environment.empty,
(),
): inputs => {
samplingInputs: samplingInputs,
squiggleString: squiggleString,
environment: environment,
}
}
type \"export" = [
| #DistPlus(ProbExample.DistPlus.t)
| #Float(float)
| #Function(
(array<string>, ProbExample.ExpressionTypes.ExpressionTree.node),
ProbExample.ExpressionTypes.ExpressionTree.environment,
)
]
module Internals = {
let addVariable = (
{samplingInputs, squiggleString, environment}: Inputs.inputs,
str,
node,
): Inputs.inputs => {
samplingInputs: samplingInputs,
squiggleString: squiggleString,
environment: ExpressionTypes.ExpressionTree.Environment.update(environment, str, _ => Some(
node,
)),
}
type outputs = {
graph: ExpressionTypes.ExpressionTree.node,
shape: DistTypes.shape,
}
let makeOutputs = (graph, shape): outputs => {graph: graph, shape: shape}
let makeInputs = (inputs: Inputs.inputs): ExpressionTypes.ExpressionTree.samplingInputs => {
sampleCount: inputs.samplingInputs.sampleCount |> E.O.default(10000),
outputXYPoints: inputs.samplingInputs.outputXYPoints |> E.O.default(10000),
kernelWidth: inputs.samplingInputs.kernelWidth,
shapeLength: inputs.samplingInputs.shapeLength |> E.O.default(10000),
}
let runNode = (inputs, node) =>
ExpressionTree.toLeaf(makeInputs(inputs), inputs.environment, node)
let runProgram = (inputs: Inputs.inputs, p: ExpressionTypes.Program.program) => {
let ins = ref(inputs)
p
|> E.A.fmap(x =>
switch x {
| #Assignment(name, node) =>
ins := addVariable(ins.contents, name, node)
None
| #Expression(node) =>
Some(runNode(ins.contents, node) |> E.R.fmap(r => (ins.contents.environment, r)))
}
)
|> E.A.O.concatSomes
|> E.A.R.firstErrorOrOpen
}
let inputsToLeaf = (inputs: Inputs.inputs) =>
MathJsParser.fromString(inputs.squiggleString) |> E.R.bind(_, g => runProgram(inputs, g))
let outputToDistPlus = (inputs: Inputs.inputs, shape: DistTypes.shape) =>
DistPlus.make(~shape, ~squiggleString=Some(inputs.squiggleString), ())
}
let renderIfNeeded = (inputs: Inputs.inputs, node: ExpressionTypes.ExpressionTree.node): result<
ExpressionTypes.ExpressionTree.node,
string,
> =>
node |> (
x =>
switch x {
| #Normalize(_) as n
| #SymbolicDist(_) as n =>
#Render(n)
|> Internals.runNode(inputs)
|> (
x =>
switch x {
| Ok(#RenderedDist(_)) as r => r
| Error(r) => Error(r)
| _ => Error("Didn't render, but intended to")
}
)
| n => Ok(n)
}
)
// TODO: Consider using ExpressionTypes.ExpressionTree.getFloat or similar in this function
let coersionToExportedTypes = (
inputs,
env: ProbExample.ExpressionTypes.ExpressionTree.environment,
node: ExpressionTypes.ExpressionTree.node,
): result<\"export", string> =>
node
|> renderIfNeeded(inputs)
|> E.R.bind(_, x =>
switch x {
| #RenderedDist(Discrete({xyShape: {xs: [x], ys: [1.0]}})) => Ok(#Float(x))
| #SymbolicDist(#Float(x)) => Ok(#Float(x))
| #RenderedDist(n) => Ok(#DistPlus(Internals.outputToDistPlus(inputs, n)))
| #Function(n) => Ok(#Function(n, env))
| n => Error("Didn't output a rendered distribution. Format:" ++ ExpressionTree.toString(n))
}
)
let rec mapM = (f, xs) =>
switch xs {
| list{} => Ok(list{})
| list{x, ...rest} =>
switch f(x) {
| Error(err) => Error(err)
| Ok(val) =>
switch mapM(f, rest) {
| Error(err) => Error(err)
| Ok(restList) => Ok(list{val, ...restList})
}
}
}
let evaluateProgram = (inputs: Inputs.inputs) =>
inputs
|> Internals.inputsToLeaf
|> E.R.bind(_, xs => mapM(((a, b)) => coersionToExportedTypes(inputs, a, b), Array.to_list(xs)))
let evaluateFunction = (
inputs: Inputs.inputs,
fn: (array<string>, ExpressionTypes.ExpressionTree.node),
fnInputs,
) => {
let output = ExpressionTree.runFunction(
Internals.makeInputs(inputs),
inputs.environment,
fnInputs,
fn,
)
output |> E.R.bind(_, coersionToExportedTypes(inputs, inputs.environment))
}
@genType
let runAll = (squiggleString: string) => {
let inputs = Inputs.make(
~samplingInputs={
sampleCount: Some(10000),
outputXYPoints: Some(10000),
kernelWidth: None,
shapeLength: Some(1000),
},
~squiggleString,
~environment=[]->Belt.Map.String.fromArray,
(),
)
evaluateProgram(inputs)
}
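A minimal calling sketch (editor's illustration, not part of the commit; it assumes this file compiles to the ProgramEvaluator module, as the generated ProgramEvaluator.gen.js earlier in this diff suggests):

// Hypothetical caller of the @genType entry point above.
let logFirstExport = (squiggleCode: string) =>
  switch ProgramEvaluator.runAll(squiggleCode) {
  // A single #Float export, as "mean(normal(5,2))" produces.
  | Ok(list{#Float(x)}) => Js.log2("float export:", x)
  | Ok(_) => Js.log("evaluated, but the exports were not a single float")
  | Error(message) => Js.log2("squiggle error:", message)
  }

logFirstExport("mean(normal(5,2))") // expected to log 5., matching the JS test earlier in this diff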

View File

@ -0,0 +1,266 @@
type pointMassesWithMoments = {
n: int,
masses: array<float>,
means: array<float>,
variances: array<float>,
}
/* This function takes a continuous distribution and efficiently approximates it as
point masses that have variances associated with them.
We estimate the means and variances from overlapping triangular distributions which we imagine are making up the
XYShape.
We can then use the algebra of random variables to "convolve" the point masses and their variances,
and finally reconstruct a new distribution from them, e.g. using a Fast Gauss Transform or Raykar et al. (2007). */
let toDiscretePointMassesFromTriangulars = (
~inverse=false,
s: XYShape.T.t,
): pointMassesWithMoments => {
// TODO: what if there is only one point in the distribution?
let n = s |> XYShape.T.length
// first, double up the leftmost and rightmost points:
let {xs, ys}: XYShape.T.t = s
Js.Array.unshift(xs[0], xs) |> ignore
Js.Array.unshift(ys[0], ys) |> ignore
Js.Array.push(xs[n - 1], xs) |> ignore
Js.Array.push(ys[n - 1], ys) |> ignore
let n = E.A.length(xs)
// squares and neighbourly products of the xs
let xsSq: array<float> = Belt.Array.makeUninitializedUnsafe(n)
let xsProdN1: array<float> = Belt.Array.makeUninitializedUnsafe(n - 1)
let xsProdN2: array<float> = Belt.Array.makeUninitializedUnsafe(n - 2)
for i in 0 to n - 1 {
Belt.Array.set(xsSq, i, xs[i] *. xs[i]) |> ignore
()
}
for i in 0 to n - 2 {
Belt.Array.set(xsProdN1, i, xs[i] *. xs[i + 1]) |> ignore
()
}
for i in 0 to n - 3 {
Belt.Array.set(xsProdN2, i, xs[i] *. xs[i + 2]) |> ignore
()
}
// means and variances
let masses: array<float> = Belt.Array.makeUninitializedUnsafe(n - 2) // doesn't include the fake first and last points
let means: array<float> = Belt.Array.makeUninitializedUnsafe(n - 2)
let variances: array<float> = Belt.Array.makeUninitializedUnsafe(n - 2)
if inverse {
for i in 1 to n - 2 {
Belt.Array.set(masses, i - 1, (xs[i + 1] -. xs[i - 1]) *. ys[i] /. 2.) |> ignore
// this only works when the whole triangle is either on the left or on the right of zero
let a = xs[i - 1]
let c = xs[i]
let b = xs[i + 1]
// These are the moments of the reciprocal of a triangular distribution, as symbolically integrated by Mathematica.
// They're probably pretty close to invMean ~ 1/mean = 3/(a+b+c) and invVar. But I haven't worked out
// the worst case error, so for now let's use these monster equations
let inverseMean =
2. *. (a *. log(a /. c) /. (a -. c) +. b *. log(c /. b) /. (b -. c)) /. (a -. b)
let inverseVar =
2. *. (log(c /. a) /. (a -. c) +. b *. log(b /. c) /. (b -. c)) /. (a -. b) -.
inverseMean ** 2.
Belt.Array.set(means, i - 1, inverseMean) |> ignore
Belt.Array.set(variances, i - 1, inverseVar) |> ignore
()
}
{n: n - 2, masses: masses, means: means, variances: variances}
} else {
for i in 1 to n - 2 {
// area of triangle = width * height / 2
Belt.Array.set(masses, i - 1, (xs[i + 1] -. xs[i - 1]) *. ys[i] /. 2.) |> ignore
// mean of triangle = (a + b + c) / 3
Belt.Array.set(means, i - 1, (xs[i - 1] +. xs[i] +. xs[i + 1]) /. 3.) |> ignore
// variance of triangle = (a^2 + b^2 + c^2 - ab - ac - bc) / 18
Belt.Array.set(
variances,
i - 1,
(xsSq[i - 1] +.
xsSq[i] +.
xsSq[i + 1] -.
xsProdN1[i - 1] -.
xsProdN1[i] -.
xsProdN2[i - 1]) /. 18.,
) |> ignore
()
}
{n: n - 2, masses: masses, means: means, variances: variances}
}
}
let combineShapesContinuousContinuous = (
op: ExpressionTypes.algebraicOperation,
s1: DistTypes.xyShape,
s2: DistTypes.xyShape,
): DistTypes.xyShape => {
let t1n = s1 |> XYShape.T.length
let t2n = s2 |> XYShape.T.length
// if we add the two distributions, we should probably use normal filters.
// if we multiply the two distributions, we should probably use lognormal filters.
let t1m = toDiscretePointMassesFromTriangulars(s1)
let t2m = switch op {
| #Divide => toDiscretePointMassesFromTriangulars(~inverse=true, s2)
| _ => toDiscretePointMassesFromTriangulars(~inverse=false, s2)
}
let combineMeansFn = switch op {
| #Add => (m1, m2) => m1 +. m2
| #Subtract => (m1, m2) => m1 -. m2
| #Multiply => (m1, m2) => m1 *. m2
| #Divide => (m1, mInv2) => m1 *. mInv2
| #Exponentiate => (m1, mInv2) => m1 ** mInv2
} // note: here, mInv2 = mean(1 / t2) ~= 1 / mean(t2)
// TODO: I don't know what the variances are for exponentiation
// converts the variances and means of the two inputs into the variance of the output
let combineVariancesFn = switch op {
| #Add => (v1, v2, _, _) => v1 +. v2
| #Subtract => (v1, v2, _, _) => v1 +. v2
| #Multiply => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
| #Exponentiate => (v1, v2, m1, m2) => v1 *. v2 +. v1 *. m2 ** 2. +. v2 *. m1 ** 2.
| #Divide => (v1, vInv2, m1, mInv2) => v1 *. vInv2 +. v1 *. mInv2 ** 2. +. vInv2 *. m1 ** 2.
}
// TODO: If operating on two positive-domain distributions, we should take that into account
let outputMinX: ref<float> = ref(infinity)
let outputMaxX: ref<float> = ref(neg_infinity)
let masses: array<float> = Belt.Array.makeUninitializedUnsafe(t1m.n * t2m.n)
let means: array<float> = Belt.Array.makeUninitializedUnsafe(t1m.n * t2m.n)
let variances: array<float> = Belt.Array.makeUninitializedUnsafe(t1m.n * t2m.n)
// then convolve the two sets of pointMassesWithMoments
for i in 0 to t1m.n - 1 {
for j in 0 to t2m.n - 1 {
let k = i * t2m.n + j
Belt.Array.set(masses, k, t1m.masses[i] *. t2m.masses[j]) |> ignore
let mean = combineMeansFn(t1m.means[i], t2m.means[j])
let variance = combineVariancesFn(
t1m.variances[i],
t2m.variances[j],
t1m.means[i],
t2m.means[j],
)
Belt.Array.set(means, k, mean) |> ignore
Belt.Array.set(variances, k, variance) |> ignore
// update bounds
let minX = mean -. 2. *. sqrt(variance) *. 1.644854
let maxX = mean +. 2. *. sqrt(variance) *. 1.644854
if minX < outputMinX.contents {
outputMinX := minX
}
if maxX > outputMaxX.contents {
outputMaxX := maxX
}
}
}
// we now want to create a set of target points. For now, let's just evenly distribute nOut = 300 points
// between outputMinX and outputMaxX
let nOut = 300
let outputXs: array<float> = E.A.Floats.range(outputMinX.contents, outputMaxX.contents, nOut)
let outputYs: array<float> = Belt.Array.make(nOut, 0.0)
// now, for each of the outputYs, accumulate from a Gaussian kernel over each input point.
for j in 0 to E.A.length(masses) - 1 {
if (
// go through all of the result points
variances[j] > 0. && masses[j] > 0.
) {
for i in 0 to E.A.length(outputXs) - 1 {
// go through all of the target points
let dx = outputXs[i] -. means[j]
let contribution =
masses[j] *.
exp(-.(dx ** 2.) /. (2. *. variances[j])) /.
sqrt(2. *. 3.14159265358979 *. variances[j])
Belt.Array.set(outputYs, i, outputYs[i] +. contribution) |> ignore
}
}
}
{xs: outputXs, ys: outputYs}
}
let toDiscretePointMassesFromDiscrete = (s: DistTypes.xyShape): pointMassesWithMoments => {
let {xs, ys}: XYShape.T.t = s
let n = E.A.length(xs)
let masses: array<float> = Belt.Array.makeBy(n, i => ys[i])
let means: array<float> = Belt.Array.makeBy(n, i => xs[i])
let variances: array<float> = Belt.Array.makeBy(n, i => 0.0)
{n: n, masses: masses, means: means, variances: variances}
}
let combineShapesContinuousDiscrete = (
op: ExpressionTypes.algebraicOperation,
continuousShape: DistTypes.xyShape,
discreteShape: DistTypes.xyShape,
): DistTypes.xyShape => {
let t1n = continuousShape |> XYShape.T.length
let t2n = discreteShape |> XYShape.T.length
// each x pair is added/subtracted
let fn = Operation.Algebraic.toFn(op)
let outXYShapes: array<array<(float, float)>> = Belt.Array.makeUninitializedUnsafe(t2n)
switch op {
| #Add
| #Subtract =>
for j in 0 to t2n - 1 {
// creates a new continuous shape for each one of the discrete points, and collects them in outXYShapes.
let dxyShape: array<(float, float)> = Belt.Array.makeUninitializedUnsafe(t1n)
for i in 0 to t1n - 1 {
Belt.Array.set(
dxyShape,
i,
(
fn(continuousShape.xs[i], discreteShape.xs[j]),
continuousShape.ys[i] *. discreteShape.ys[j],
),
) |> ignore
()
}
Belt.Array.set(outXYShapes, j, dxyShape) |> ignore
()
}
| #Multiply
| #Exponentiate
| #Divide =>
for j in 0 to t2n - 1 {
// creates a new continuous shape for each one of the discrete points, and collects them in outXYShapes.
let dxyShape: array<(float, float)> = Belt.Array.makeUninitializedUnsafe(t1n)
for i in 0 to t1n - 1 {
Belt.Array.set(
dxyShape,
i,
(
fn(continuousShape.xs[i], discreteShape.xs[j]),
continuousShape.ys[i] *. discreteShape.ys[j] /. discreteShape.xs[j],
),
) |> ignore
()
}
Belt.Array.set(outXYShapes, j, dxyShape) |> ignore
()
}
}
outXYShapes
|> E.A.fmap(XYShape.T.fromZippedArray)
|> E.A.fold_left(
XYShape.PointwiseCombination.combine(
\"+.",
XYShape.XtoY.continuousInterpolator(#Linear, #UseZero),
),
XYShape.T.empty,
)
}
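To make the numerics above easier to audit, here are the standard identities the code leans on, restated by the editor (not taken from the commit). A triangular density with endpoints a, b and mode c has

$$\mathbb{E}[T] = \frac{a + b + c}{3}, \qquad \operatorname{Var}(T) = \frac{a^2 + b^2 + c^2 - ab - ac - bc}{18},$$

and for independent random variables the per-point-mass combination uses

$$\operatorname{Var}(X \pm Y) = \operatorname{Var}(X) + \operatorname{Var}(Y), \qquad \operatorname{Var}(XY) = \operatorname{Var}(X)\operatorname{Var}(Y) + \operatorname{Var}(X)\,\mu_Y^2 + \operatorname{Var}(Y)\,\mu_X^2,$$

with division handled as multiplication by the reciprocal's approximate moments, as the inverse branch of toDiscretePointMassesFromTriangulars computes.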

View File

@ -0,0 +1,264 @@
open Distributions
type t = DistTypes.continuousShape
let getShape = (t: t) => t.xyShape
let interpolation = (t: t) => t.interpolation
let make = (~interpolation=#Linear, ~integralSumCache=None, ~integralCache=None, xyShape): t => {
xyShape: xyShape,
interpolation: interpolation,
integralSumCache: integralSumCache,
integralCache: integralCache,
}
let shapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): t => {
xyShape: fn(xyShape),
interpolation: interpolation,
integralSumCache: integralSumCache,
integralCache: integralCache,
}
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
let oShapeMap = (fn, {xyShape, interpolation, integralSumCache, integralCache}: t): option<
DistTypes.continuousShape,
> => fn(xyShape) |> E.O.fmap(make(~interpolation, ~integralSumCache, ~integralCache))
let emptyIntegral: DistTypes.continuousShape = {
xyShape: {
xs: [neg_infinity],
ys: [0.0],
},
interpolation: #Linear,
integralSumCache: Some(0.0),
integralCache: None,
}
let empty: DistTypes.continuousShape = {
xyShape: XYShape.T.empty,
interpolation: #Linear,
integralSumCache: Some(0.0),
integralCache: Some(emptyIntegral),
}
let stepwiseToLinear = (t: t): t =>
make(
~integralSumCache=t.integralSumCache,
~integralCache=t.integralCache,
XYShape.Range.stepwiseToLinear(t.xyShape),
)
// Note: This results in a distribution with as many points as the sum of those in t1 and t2.
let combinePointwise = (
~integralSumCachesFn=(_, _) => None,
~integralCachesFn: (t, t) => option<t>=(_, _) => None,
~distributionType: DistTypes.distributionType=#PDF,
fn: (float, float) => float,
t1: DistTypes.continuousShape,
t2: DistTypes.continuousShape,
): DistTypes.continuousShape => {
// If we're adding the distributions, and we know the total of each, then we
// can just sum them up. Otherwise, all bets are off.
let combinedIntegralSum = Common.combineIntegralSums(
integralSumCachesFn,
t1.integralSumCache,
t2.integralSumCache,
)
// TODO: does it ever make sense to pointwise combine the integrals here?
// It could be done for pointwise additions, but is that ever needed?
// If combining stepwise and linear, we must convert the stepwise to linear first,
// i.e. add a point at the bottom of each step
let (t1, t2) = switch (t1.interpolation, t2.interpolation) {
| (#Linear, #Linear) => (t1, t2)
| (#Stepwise, #Stepwise) => (t1, t2)
| (#Linear, #Stepwise) => (t1, stepwiseToLinear(t2))
| (#Stepwise, #Linear) => (stepwiseToLinear(t1), t2)
}
let extrapolation = switch distributionType {
| #PDF => #UseZero
| #CDF => #UseOutermostPoints
}
let interpolator = XYShape.XtoY.continuousInterpolator(t1.interpolation, extrapolation)
make(
~integralSumCache=combinedIntegralSum,
XYShape.PointwiseCombination.combine(fn, interpolator, t1.xyShape, t2.xyShape),
)
}
let toLinear = (t: t): option<t> =>
switch t {
| {interpolation: #Stepwise, xyShape, integralSumCache, integralCache} =>
xyShape |> XYShape.Range.stepsToContinuous |> E.O.fmap(make(~integralSumCache, ~integralCache))
| {interpolation: #Linear} => Some(t)
}
let shapeFn = (fn, t: t) => t |> getShape |> fn
let updateIntegralSumCache = (integralSumCache, t: t): t => {
...t,
integralSumCache: integralSumCache,
}
let updateIntegralCache = (integralCache, t: t): t => {...t, integralCache: integralCache}
let reduce = (
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
~integralCachesFn: (t, t) => option<t>=(_, _) => None,
fn,
continuousShapes,
) =>
continuousShapes |> E.A.fold_left(
combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn),
empty,
)
let mapY = (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn, t: t) =>
make(
~interpolation=t.interpolation,
~integralSumCache=t.integralSumCache |> E.O.bind(_, integralSumCacheFn),
~integralCache=t.integralCache |> E.O.bind(_, integralCacheFn),
t |> getShape |> XYShape.T.mapY(fn),
)
let rec scaleBy = (~scale=1.0, t: t): t => {
let scaledIntegralSumCache = E.O.bind(t.integralSumCache, v => Some(scale *. v))
let scaledIntegralCache = E.O.bind(t.integralCache, v => Some(scaleBy(~scale, v)))
t
|> mapY(~fn=(r: float) => r *. scale)
|> updateIntegralSumCache(scaledIntegralSumCache)
|> updateIntegralCache(scaledIntegralCache)
}
module T = Dist({
type t = DistTypes.continuousShape
type integral = DistTypes.continuousShape
let minX = shapeFn(XYShape.T.minX)
let maxX = shapeFn(XYShape.T.maxX)
let mapY = mapY
let updateIntegralCache = updateIntegralCache
let toDiscreteProbabilityMassFraction = _ => 0.0
let toShape = (t: t): DistTypes.shape => Continuous(t)
let xToY = (f, {interpolation, xyShape}: t) =>
switch interpolation {
| #Stepwise => xyShape |> XYShape.XtoY.stepwiseIncremental(f) |> E.O.default(0.0)
| #Linear => xyShape |> XYShape.XtoY.linear(f)
} |> DistTypes.MixedPoint.makeContinuous
let truncate = (leftCutoff: option<float>, rightCutoff: option<float>, t: t) => {
let lc = E.O.default(neg_infinity, leftCutoff)
let rc = E.O.default(infinity, rightCutoff)
let truncatedZippedPairs =
t |> getShape |> XYShape.T.zip |> XYShape.Zipped.filterByX(x => x >= lc && x <= rc)
let leftNewPoint = leftCutoff |> E.O.dimap(lc => [(lc -. epsilon_float, 0.)], _ => [])
let rightNewPoint = rightCutoff |> E.O.dimap(rc => [(rc +. epsilon_float, 0.)], _ => [])
let truncatedZippedPairsWithNewPoints = E.A.concatMany([
leftNewPoint,
truncatedZippedPairs,
rightNewPoint,
])
let truncatedShape = XYShape.T.fromZippedArray(truncatedZippedPairsWithNewPoints)
make(truncatedShape)
}
// TODO: This should work with stepwise plots.
let integral = t =>
switch (getShape(t) |> XYShape.T.isEmpty, t.integralCache) {
| (true, _) => emptyIntegral
| (false, Some(cache)) => cache
| (false, None) =>
t
|> getShape
|> XYShape.Range.integrateWithTriangles
|> E.O.toExt("This should not have happened")
|> make
}
let downsample = (length, t): t =>
t |> shapeMap(XYShape.XsConversion.proportionByProbabilityMass(length, integral(t).xyShape))
let integralEndY = (t: t) => t.integralSumCache |> E.O.default(t |> integral |> lastY)
let integralXtoY = (f, t: t) => t |> integral |> shapeFn(XYShape.XtoY.linear(f))
let integralYtoX = (f, t: t) => t |> integral |> shapeFn(XYShape.YtoX.linear(f))
let toContinuous = t => Some(t)
let toDiscrete = _ => None
let normalize = (t: t): t =>
t
|> updateIntegralCache(Some(integral(t)))
|> scaleBy(~scale=1. /. integralEndY(t))
|> updateIntegralSumCache(Some(1.0))
let mean = (t: t) => {
let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 2.0 /. 2.0
let indefiniteIntegralLinear = (p, a, b) => a *. p ** 2.0 /. 2.0 +. b *. p ** 3.0 /. 3.0
XYShape.Analysis.integrateContinuousShape(
~indefiniteIntegralStepwise,
~indefiniteIntegralLinear,
t,
)
}
let variance = (t: t): float =>
XYShape.Analysis.getVarianceDangerously(
t,
mean,
XYShape.Analysis.getMeanOfSquaresContinuousShape,
)
})
/* This simply creates multiple copies of the continuous distribution, scaled and shifted according to
each discrete data point, and then adds them all together. */
let combineAlgebraicallyWithDiscrete = (
op: ExpressionTypes.algebraicOperation,
t1: t,
t2: DistTypes.discreteShape,
) => {
let t1s = t1 |> getShape
let t2s = t2.xyShape // TODO would like to use Discrete.getShape here, but current file structure doesn't allow for that
if XYShape.T.isEmpty(t1s) || XYShape.T.isEmpty(t2s) {
empty
} else {
let continuousAsLinear = switch t1.interpolation {
| #Linear => t1
| #Stepwise => stepwiseToLinear(t1)
}
let combinedShape = AlgebraicShapeCombination.combineShapesContinuousDiscrete(
op,
continuousAsLinear |> getShape,
t2s,
)
let combinedIntegralSum = switch op {
| #Multiply
| #Divide =>
Common.combineIntegralSums((a, b) => Some(a *. b), t1.integralSumCache, t2.integralSumCache)
| _ => None
}
// TODO: It could make sense to automatically transform the integrals here (shift or scale)
make(~interpolation=t1.interpolation, ~integralSumCache=combinedIntegralSum, combinedShape)
}
}
let combineAlgebraically = (op: ExpressionTypes.algebraicOperation, t1: t, t2: t) => {
let s1 = t1 |> getShape
let s2 = t2 |> getShape
let t1n = s1 |> XYShape.T.length
let t2n = s2 |> XYShape.T.length
if t1n == 0 || t2n == 0 {
empty
} else {
let combinedShape = AlgebraicShapeCombination.combineShapesContinuousContinuous(op, s1, s2)
let combinedIntegralSum = Common.combineIntegralSums(
(a, b) => Some(a *. b),
t1.integralSumCache,
t2.integralSumCache,
)
// return a new Continuous distribution
make(~integralSumCache=combinedIntegralSum, combinedShape)
}
}
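A small usage sketch (editor's illustration, not part of the commit; it assumes this module is exposed as Continuous, matching the Continuous.make and Continuous.scaleBy references in the discrete-shape file below):

// Two linearly interpolated shapes, built the same way the test files build xyShapes.
let s1: DistTypes.xyShape = {xs: [0., 1., 2.], ys: [0., 1., 0.]}
let s2: DistTypes.xyShape = {xs: [0., 2., 4.], ys: [0., 0.5, 0.]}
let a = Continuous.make(s1)
let b = Continuous.make(s2)

// Pointwise sum of the two density curves (y-values added at interpolated x-points).
let pointwiseSum = Continuous.combinePointwise((y1, y2) => y1 +. y2, a, b)

// Approximate distribution of X + Y, via the point-mass convolution defined above.
let sumOfVariables = Continuous.combineAlgebraically(#Add, a, b)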

View File

@ -0,0 +1,216 @@
open Distributions
type t = DistTypes.discreteShape
let make = (~integralSumCache=None, ~integralCache=None, xyShape): t => {
xyShape: xyShape,
integralSumCache: integralSumCache,
integralCache: integralCache,
}
let shapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): t => {
xyShape: fn(xyShape),
integralSumCache: integralSumCache,
integralCache: integralCache,
}
let getShape = (t: t) => t.xyShape
let oShapeMap = (fn, {xyShape, integralSumCache, integralCache}: t): option<t> =>
fn(xyShape) |> E.O.fmap(make(~integralSumCache, ~integralCache))
let emptyIntegral: DistTypes.continuousShape = {
xyShape: {xs: [neg_infinity], ys: [0.0]},
interpolation: #Stepwise,
integralSumCache: Some(0.0),
integralCache: None,
}
let empty: DistTypes.discreteShape = {
xyShape: XYShape.T.empty,
integralSumCache: Some(0.0),
integralCache: Some(emptyIntegral),
}
let shapeFn = (fn, t: t) => t |> getShape |> fn
let lastY = (t: t) => t |> getShape |> XYShape.T.lastY
let combinePointwise = (
~integralSumCachesFn=(_, _) => None,
~integralCachesFn: (
DistTypes.continuousShape,
DistTypes.continuousShape,
) => option<DistTypes.continuousShape>=(_, _) => None,
fn,
t1: DistTypes.discreteShape,
t2: DistTypes.discreteShape,
): DistTypes.discreteShape => {
let combinedIntegralSum = Common.combineIntegralSums(
integralSumCachesFn,
t1.integralSumCache,
t2.integralSumCache,
)
// TODO: does it ever make sense to pointwise combine the integrals here?
// It could be done for pointwise additions, but is that ever needed?
make(
~integralSumCache=combinedIntegralSum,
XYShape.PointwiseCombination.combine(
\"+.",
XYShape.XtoY.discreteInterpolator,
t1.xyShape,
t2.xyShape,
),
)
}
let reduce = (
~integralSumCachesFn=(_, _) => None,
~integralCachesFn=(_, _) => None,
fn,
discreteShapes,
): DistTypes.discreteShape =>
discreteShapes |> E.A.fold_left(
combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn),
empty,
)
let updateIntegralSumCache = (integralSumCache, t: t): t => {
...t,
integralSumCache: integralSumCache,
}
let updateIntegralCache = (integralCache, t: t): t => {
...t,
integralCache: integralCache,
}
/* This combines each pair of data points (one from each distribution) with the given operation and creates a new discrete distribution from the results.
Data points at the same xs get added together. It may be a good idea to downsample t1 and t2 before and/or the result after. */
let combineAlgebraically = (op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t => {
let t1s = t1 |> getShape
let t2s = t2 |> getShape
let t1n = t1s |> XYShape.T.length
let t2n = t2s |> XYShape.T.length
let combinedIntegralSum = Common.combineIntegralSums(
(s1, s2) => Some(s1 *. s2),
t1.integralSumCache,
t2.integralSumCache,
)
let fn = Operation.Algebraic.toFn(op)
let xToYMap = E.FloatFloatMap.empty()
for i in 0 to t1n - 1 {
for j in 0 to t2n - 1 {
let x = fn(t1s.xs[i], t2s.xs[j])
let cv = xToYMap |> E.FloatFloatMap.get(x) |> E.O.default(0.)
let my = t1s.ys[i] *. t2s.ys[j]
let _ = Belt.MutableMap.set(xToYMap, x, cv +. my)
}
}
let rxys = xToYMap |> E.FloatFloatMap.toArray |> XYShape.Zipped.sortByX
let combinedShape = XYShape.T.fromZippedArray(rxys)
make(~integralSumCache=combinedIntegralSum, combinedShape)
}
let mapY = (~integralSumCacheFn=_ => None, ~integralCacheFn=_ => None, ~fn, t: t) =>
make(
~integralSumCache=t.integralSumCache |> E.O.bind(_, integralSumCacheFn),
~integralCache=t.integralCache |> E.O.bind(_, integralCacheFn),
t |> getShape |> XYShape.T.mapY(fn),
)
let scaleBy = (~scale=1.0, t: t): t => {
let scaledIntegralSumCache = t.integralSumCache |> E.O.fmap(\"*."(scale))
let scaledIntegralCache = t.integralCache |> E.O.fmap(Continuous.scaleBy(~scale))
t
|> mapY(~fn=(r: float) => r *. scale)
|> updateIntegralSumCache(scaledIntegralSumCache)
|> updateIntegralCache(scaledIntegralCache)
}
module T = Dist({
type t = DistTypes.discreteShape
type integral = DistTypes.continuousShape
let integral = t =>
switch (getShape(t) |> XYShape.T.isEmpty, t.integralCache) {
| (true, _) => emptyIntegral
| (false, Some(c)) => c
| (false, None) =>
let ts = getShape(t)
// The first xy point of this integral should always have y = 0, to ensure nice plotting
let firstX = ts |> XYShape.T.minX
let prependedZeroPoint: XYShape.T.t = {xs: [firstX -. epsilon_float], ys: [0.]}
let integralShape =
ts |> XYShape.T.concat(prependedZeroPoint) |> XYShape.T.accumulateYs(\"+.")
Continuous.make(~interpolation=#Stepwise, integralShape)
}
let integralEndY = (t: t) => t.integralSumCache |> E.O.default(t |> integral |> Continuous.lastY)
let minX = shapeFn(XYShape.T.minX)
let maxX = shapeFn(XYShape.T.maxX)
let toDiscreteProbabilityMassFraction = _ => 1.0
let mapY = mapY
let updateIntegralCache = updateIntegralCache
let toShape = (t: t): DistTypes.shape => Discrete(t)
let toContinuous = _ => None
let toDiscrete = t => Some(t)
let normalize = (t: t): t =>
t |> scaleBy(~scale=1. /. integralEndY(t)) |> updateIntegralSumCache(Some(1.0))
let downsample = (i, t: t): t => {
// It's not clear how to downsample a set of discrete points in a meaningful way.
// The best we can do is to clip off the smallest values.
let currentLength = t |> getShape |> XYShape.T.length
if i < currentLength && (i >= 1 && currentLength > 1) {
t
|> getShape
|> XYShape.T.zip
|> XYShape.Zipped.sortByY
|> Belt.Array.reverse
|> Belt.Array.slice(_, ~offset=0, ~len=i)
|> XYShape.Zipped.sortByX
|> XYShape.T.fromZippedArray
|> make
} else {
t
}
}
let truncate = (leftCutoff: option<float>, rightCutoff: option<float>, t: t): t =>
t
|> getShape
|> XYShape.T.zip
|> XYShape.Zipped.filterByX(x =>
x >= E.O.default(neg_infinity, leftCutoff) && x <= E.O.default(infinity, rightCutoff)
)
|> XYShape.T.fromZippedArray
|> make
let xToY = (f, t) =>
t
|> getShape
|> XYShape.XtoY.stepwiseIfAtX(f)
|> E.O.default(0.0)
|> DistTypes.MixedPoint.makeDiscrete
let integralXtoY = (f, t) => t |> integral |> Continuous.getShape |> XYShape.XtoY.linear(f)
let integralYtoX = (f, t) => t |> integral |> Continuous.getShape |> XYShape.YtoX.linear(f)
let mean = (t: t): float => {
let s = getShape(t)
E.A.reducei(s.xs, 0.0, (acc, x, i) => acc +. x *. s.ys[i])
}
let variance = (t: t): float => {
let getMeanOfSquares = t => t |> shapeMap(XYShape.Analysis.squareXYShape) |> mean
XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
}
})
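A worked sketch of combineAlgebraically for the discrete case (editor's illustration, not part of the commit; it assumes this module is exposed as Discrete). Adding a two-point distribution to itself puts mass on each pairwise sum, and equal sums are accumulated in the mutable map:

// Mass 0.5 on 1.0 and 0.5 on 2.0.
let twoPoint: DistTypes.xyShape = {xs: [1., 2.], ys: [0.5, 0.5]}
let d = Discrete.make(twoPoint)

// Pairwise sums 2, 3, 3, 4 with mass 0.25 each collapse to
// xs = [2., 3., 4.], ys = [0.25, 0.5, 0.25].
let selfSum = Discrete.combineAlgebraically(#Add, d, d)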

View File

@ -84,7 +84,7 @@ module T =
    let integral = (t: t) =>
      updateShape(Continuous(t.integralCache), t);
-    let updateIntegralCache = (integralCache: option(DistTypes.continuousShape), t) =>
+    let updateIntegralCache = (integralCache: option<DistTypes.continuousShape>, t) =>
      update(~integralCache=E.O.default(t.integralCache, integralCache), t);
    let downsample = (i, t): t =>

View File

@ -0,0 +1,152 @@
type domainLimit = {
xPoint: float,
excludingProbabilityMass: float,
}
type domain =
| Complete
| LeftLimited(domainLimit)
| RightLimited(domainLimit)
| LeftAndRightLimited(domainLimit, domainLimit)
type distributionType = [
| #PDF
| #CDF
]
type xyShape = {
xs: array<float>,
ys: array<float>,
}
type interpolationStrategy = [
| #Stepwise
| #Linear
]
type extrapolationStrategy = [
| #UseZero
| #UseOutermostPoints
]
type interpolator = (xyShape, int, float) => float
type rec continuousShape = {
xyShape: xyShape,
interpolation: interpolationStrategy,
integralSumCache: option<float>,
integralCache: option<continuousShape>,
}
type discreteShape = {
xyShape: xyShape,
integralSumCache: option<float>,
integralCache: option<continuousShape>,
}
type mixedShape = {
continuous: continuousShape,
discrete: discreteShape,
integralSumCache: option<float>,
integralCache: option<continuousShape>,
}
type shapeMonad<'a, 'b, 'c> =
| Mixed('a)
| Discrete('b)
| Continuous('c)
type shape = shapeMonad<mixedShape, discreteShape, continuousShape>
module ShapeMonad = {
let fmap = (t: shapeMonad<'a, 'b, 'c>, (fn1, fn2, fn3)): shapeMonad<'d, 'e, 'f> =>
switch t {
| Mixed(m) => Mixed(fn1(m))
| Discrete(m) => Discrete(fn2(m))
| Continuous(m) => Continuous(fn3(m))
}
}
type generationSource =
| SquiggleString(string)
| Shape(shape)
type distributionUnit =
| UnspecifiedDistribution
type distPlus = {
shape: shape,
domain: domain,
integralCache: continuousShape,
unit: distributionUnit,
squiggleString: option<string>,
}
module DistributionUnit = {
let toJson = (distributionUnit: distributionUnit) =>
switch distributionUnit {
| _ => Js.Null.fromOption(None)
}
}
module Domain = {
let excludedProbabilityMass = (t: domain) =>
switch t {
| Complete => 0.0
| LeftLimited({excludingProbabilityMass}) => excludingProbabilityMass
| RightLimited({excludingProbabilityMass}) => excludingProbabilityMass
| LeftAndRightLimited({excludingProbabilityMass: l}, {excludingProbabilityMass: r}) => l +. r
}
let includedProbabilityMass = (t: domain) => 1.0 -. excludedProbabilityMass(t)
let initialProbabilityMass = (t: domain) =>
switch t {
| Complete
| RightLimited(_) => 0.0
| LeftLimited({excludingProbabilityMass}) => excludingProbabilityMass
| LeftAndRightLimited({excludingProbabilityMass}, _) => excludingProbabilityMass
}
let normalizeProbabilityMass = (t: domain) => 1. /. excludedProbabilityMass(t)
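// yPointToSubYPoint maps a y value on the full [0, 1] cdf onto the truncated domain's scale.
// Rough example (illustrative values): for LeftLimited with excludingProbabilityMass = 0.2,
// a yPoint of 0.6 maps to (0.6 -. 0.2) /. 0.8 = 0.5.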
let yPointToSubYPoint = (t: domain, yPoint) =>
switch t {
| Complete => Some(yPoint)
| LeftLimited({excludingProbabilityMass}) if yPoint < excludingProbabilityMass => None
| LeftLimited({excludingProbabilityMass}) if yPoint >= excludingProbabilityMass =>
Some((yPoint -. excludingProbabilityMass) /. includedProbabilityMass(t))
| RightLimited({excludingProbabilityMass}) if yPoint > 1. -. excludingProbabilityMass => None
| RightLimited({excludingProbabilityMass}) if yPoint <= 1. -. excludingProbabilityMass =>
Some(yPoint /. includedProbabilityMass(t))
| LeftAndRightLimited({excludingProbabilityMass: l}, _) if yPoint < l => None
| LeftAndRightLimited(_, {excludingProbabilityMass: r}) if yPoint > 1.0 -. r => None
| LeftAndRightLimited({excludingProbabilityMass: l}, _) =>
Some((yPoint -. l) /. includedProbabilityMass(t))
| _ => None
}
}
type mixedPoint = {
continuous: float,
discrete: float,
}
module MixedPoint = {
type t = mixedPoint
let toContinuousValue = (t: t) => t.continuous
let toDiscreteValue = (t: t) => t.discrete
let makeContinuous = (continuous: float): t => {continuous: continuous, discrete: 0.0}
let makeDiscrete = (discrete: float): t => {continuous: 0.0, discrete: discrete}
let fmap = (fn: float => float, t: t) => {
continuous: fn(t.continuous),
discrete: fn(t.discrete),
}
let combine2 = (fn, c: t, d: t): t => {
continuous: fn(c.continuous, d.continuous),
discrete: fn(c.discrete, d.discrete),
}
let add = combine2((a, b) => a +. b)
}

View File

@ -0,0 +1,89 @@
module type dist = {
type t
type integral
let minX: t => float
let maxX: t => float
let mapY: (
~integralSumCacheFn: float => option<float>=?,
~integralCacheFn: DistTypes.continuousShape => option<DistTypes.continuousShape>=?,
~fn: float => float,
t,
) => t
let xToY: (float, t) => DistTypes.mixedPoint
let toShape: t => DistTypes.shape
let toContinuous: t => option<DistTypes.continuousShape>
let toDiscrete: t => option<DistTypes.discreteShape>
let normalize: t => t
let toDiscreteProbabilityMassFraction: t => float
let downsample: (int, t) => t
let truncate: (option<float>, option<float>, t) => t
let updateIntegralCache: (option<DistTypes.continuousShape>, t) => t
let integral: t => integral
let integralEndY: t => float
let integralXtoY: (float, t) => float
let integralYtoX: (float, t) => float
let mean: t => float
let variance: t => float
}
module Dist = (T: dist) => {
type t = T.t
type integral = T.integral
let minX = T.minX
let maxX = T.maxX
let integral = T.integral
let xTotalRange = (t: t) => maxX(t) -. minX(t)
let mapY = T.mapY
let xToY = T.xToY
let downsample = T.downsample
let toShape = T.toShape
let toDiscreteProbabilityMassFraction = T.toDiscreteProbabilityMassFraction
let toContinuous = T.toContinuous
let toDiscrete = T.toDiscrete
let normalize = T.normalize
let truncate = T.truncate
let mean = T.mean
let variance = T.variance
let updateIntegralCache = T.updateIntegralCache
module Integral = {
type t = T.integral
let get = T.integral
let xToY = T.integralXtoY
let yToX = T.integralYtoX
let sum = T.integralEndY
}
}
module Common = {
let combineIntegralSums = (
combineFn: (float, float) => option<float>,
t1IntegralSumCache: option<float>,
t2IntegralSumCache: option<float>,
) =>
switch (t1IntegralSumCache, t2IntegralSumCache) {
| (None, _)
| (_, None) =>
None
| (Some(s1), Some(s2)) => combineFn(s1, s2)
}
let combineIntegrals = (
combineFn: (
DistTypes.continuousShape,
DistTypes.continuousShape,
) => option<DistTypes.continuousShape>,
t1IntegralCache: option<DistTypes.continuousShape>,
t2IntegralCache: option<DistTypes.continuousShape>,
) =>
switch (t1IntegralCache, t2IntegralCache) {
| (None, _)
| (_, None) =>
None
| (Some(s1), Some(s2)) => combineFn(s1, s2)
}
}

View File

@ -0,0 +1,307 @@
open Distributions
type t = DistTypes.mixedShape
let make = (~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete): t => {
continuous: continuous,
discrete: discrete,
integralSumCache: integralSumCache,
integralCache: integralCache,
}
let totalLength = (t: t): int => {
let continuousLength = t.continuous |> Continuous.getShape |> XYShape.T.length
let discreteLength = t.discrete |> Discrete.getShape |> XYShape.T.length
continuousLength + discreteLength
}
let scaleBy = (~scale=1.0, t: t): t => {
let scaledDiscrete = Discrete.scaleBy(~scale, t.discrete)
let scaledContinuous = Continuous.scaleBy(~scale, t.continuous)
let scaledIntegralCache = E.O.bind(t.integralCache, v => Some(Continuous.scaleBy(~scale, v)))
let scaledIntegralSumCache = E.O.bind(t.integralSumCache, s => Some(s *. scale))
make(
~discrete=scaledDiscrete,
~continuous=scaledContinuous,
~integralSumCache=scaledIntegralSumCache,
~integralCache=scaledIntegralCache,
)
}
let toContinuous = ({continuous}: t) => Some(continuous)
let toDiscrete = ({discrete}: t) => Some(discrete)
let updateIntegralCache = (integralCache, t: t): t => {
...t,
integralCache: integralCache,
}
module T = Dist({
type t = DistTypes.mixedShape
type integral = DistTypes.continuousShape
let minX = ({continuous, discrete}: t) =>
min(Continuous.T.minX(continuous), Discrete.T.minX(discrete))
let maxX = ({continuous, discrete}: t) =>
max(Continuous.T.maxX(continuous), Discrete.T.maxX(discrete))
let toShape = (t: t): DistTypes.shape => Mixed(t)
let updateIntegralCache = updateIntegralCache
let toContinuous = toContinuous
let toDiscrete = toDiscrete
let truncate = (
leftCutoff: option<float>,
rightCutoff: option<float>,
{discrete, continuous}: t,
) => {
let truncatedContinuous = Continuous.T.truncate(leftCutoff, rightCutoff, continuous)
let truncatedDiscrete = Discrete.T.truncate(leftCutoff, rightCutoff, discrete)
make(
~integralSumCache=None,
~integralCache=None,
~discrete=truncatedDiscrete,
~continuous=truncatedContinuous,
)
}
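// normalize below rescales both components so their combined mass is 1 while preserving their relative weights.
// Sketch with illustrative values: a continuous sum of 3.0 and a discrete sum of 1.0 give a total of 4.0, so the
// continuous part is scaled by 0.75 /. 3.0 and the discrete part by 0.25 /. 1.0, for final masses of 0.75 and 0.25.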
let normalize = (t: t): t => {
let continuousIntegral = Continuous.T.Integral.get(t.continuous)
let discreteIntegral = Discrete.T.Integral.get(t.discrete)
let continuous = t.continuous |> Continuous.updateIntegralCache(Some(continuousIntegral))
let discrete = t.discrete |> Discrete.updateIntegralCache(Some(discreteIntegral))
let continuousIntegralSum = Continuous.T.Integral.sum(continuous)
let discreteIntegralSum = Discrete.T.Integral.sum(discrete)
let totalIntegralSum = continuousIntegralSum +. discreteIntegralSum
let newContinuousSum = continuousIntegralSum /. totalIntegralSum
let newDiscreteSum = discreteIntegralSum /. totalIntegralSum
let normalizedContinuous =
continuous
|> Continuous.scaleBy(~scale=newContinuousSum /. continuousIntegralSum)
|> Continuous.updateIntegralSumCache(Some(newContinuousSum))
let normalizedDiscrete =
discrete
|> Discrete.scaleBy(~scale=newDiscreteSum /. discreteIntegralSum)
|> Discrete.updateIntegralSumCache(Some(newDiscreteSum))
make(
~integralSumCache=Some(1.0),
~integralCache=None,
~continuous=normalizedContinuous,
~discrete=normalizedDiscrete,
)
}
let xToY = (x, t: t) => {
// This evaluates the mixedShape at x, interpolating if necessary.
// Note that we normalize entire mixedShape first.
let {continuous, discrete}: t = normalize(t)
let c = Continuous.T.xToY(x, continuous)
let d = Discrete.T.xToY(x, discrete)
DistTypes.MixedPoint.add(c, d) // "add" here just combines the two values into a single MixedPoint.
}
let toDiscreteProbabilityMassFraction = ({discrete, continuous}: t) => {
let discreteIntegralSum = Discrete.T.Integral.sum(discrete)
let continuousIntegralSum = Continuous.T.Integral.sum(continuous)
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum
discreteIntegralSum /. totalIntegralSum
}
let downsample = (count, t: t): t => {
// We will need to distribute the new xs fairly between the discrete and continuous shapes.
// The easiest way to do this is to simply go by the previous probability masses.
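// For instance (illustrative values): with count = 100, a discrete mass of 0.25 and a continuous mass of 0.75,
// the discrete shape gets int_of_float(100. *. 0.25) = 25 points and the continuous shape gets 75.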
let discreteIntegralSum = Discrete.T.Integral.sum(t.discrete)
let continuousIntegralSum = Continuous.T.Integral.sum(t.continuous)
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum
// TODO: figure out what to do when the totalIntegralSum is zero.
let downsampledDiscrete = Discrete.T.downsample(
int_of_float(float_of_int(count) *. (discreteIntegralSum /. totalIntegralSum)),
t.discrete,
)
let downsampledContinuous = Continuous.T.downsample(
int_of_float(float_of_int(count) *. (continuousIntegralSum /. totalIntegralSum)),
t.continuous,
)
{...t, discrete: downsampledDiscrete, continuous: downsampledContinuous}
}
let integral = (t: t) =>
switch t.integralCache {
| Some(cache) => cache
| None =>
// note: if the underlying shapes aren't normalized, then these integrals won't be either -- but that's the way it should be.
let continuousIntegral = Continuous.T.Integral.get(t.continuous)
let discreteIntegral = Continuous.stepwiseToLinear(Discrete.T.Integral.get(t.discrete))
Continuous.make(
XYShape.PointwiseCombination.combine(
\"+.",
XYShape.XtoY.continuousInterpolator(#Linear, #UseOutermostPoints),
Continuous.getShape(continuousIntegral),
Continuous.getShape(discreteIntegral),
),
)
}
let integralEndY = (t: t) => t |> integral |> Continuous.lastY
let integralXtoY = (f, t) => t |> integral |> Continuous.getShape |> XYShape.XtoY.linear(f)
let integralYtoX = (f, t) => t |> integral |> Continuous.getShape |> XYShape.YtoX.linear(f)
// This pipes all ys (continuous and discrete) through fn.
// If mapY is a linear operation, we might be able to update the integralSumCaches as well;
// if not, they'll be set to None.
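// For example, for a linear fn like y => y *. 2.0 one could pass
// ~integralSumCacheFn=sum => Some(sum *. 2.0) to keep the cached total consistent;
// for a non-linear fn, leave the defaults so the caches are dropped.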
let mapY = (
~integralSumCacheFn=previousIntegralSum => None,
~integralCacheFn=previousIntegral => None,
~fn,
t: t,
): t => {
let yMappedDiscrete: DistTypes.discreteShape =
t.discrete
|> Discrete.T.mapY(~fn)
|> Discrete.updateIntegralSumCache(E.O.bind(t.discrete.integralSumCache, integralSumCacheFn))
|> Discrete.updateIntegralCache(E.O.bind(t.discrete.integralCache, integralCacheFn))
let yMappedContinuous: DistTypes.continuousShape =
t.continuous
|> Continuous.T.mapY(~fn)
|> Continuous.updateIntegralSumCache(
E.O.bind(t.continuous.integralSumCache, integralSumCacheFn),
)
|> Continuous.updateIntegralCache(E.O.bind(t.continuous.integralCache, integralCacheFn))
{
discrete: yMappedDiscrete,
continuous: yMappedContinuous,
integralSumCache: E.O.bind(t.integralSumCache, integralSumCacheFn),
integralCache: E.O.bind(t.integralCache, integralCacheFn),
}
}
let mean = ({discrete, continuous}: t): float => {
let discreteMean = Discrete.T.mean(discrete)
let continuousMean = Continuous.T.mean(continuous)
// the combined mean is the weighted sum of the two:
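// e.g. (illustrative values): a discrete mean of 0.0 with mass 0.25 and a continuous mean of 4.0
// with mass 0.75 combine to (0.0 *. 0.25 +. 4.0 *. 0.75) /. 1.0 = 3.0.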
let discreteIntegralSum = Discrete.T.Integral.sum(discrete)
let continuousIntegralSum = Continuous.T.Integral.sum(continuous)
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum
(discreteMean *. discreteIntegralSum +. continuousMean *. continuousIntegralSum) /.
totalIntegralSum
}
let variance = ({discrete, continuous} as t: t): float => {
// the combined mean is the weighted sum of the two:
let discreteIntegralSum = Discrete.T.Integral.sum(discrete)
let continuousIntegralSum = Continuous.T.Integral.sum(continuous)
let totalIntegralSum = discreteIntegralSum +. continuousIntegralSum
let getMeanOfSquares = ({discrete, continuous}: t) => {
let discreteMean =
discrete |> Discrete.shapeMap(XYShape.Analysis.squareXYShape) |> Discrete.T.mean
let continuousMean = continuous |> XYShape.Analysis.getMeanOfSquaresContinuousShape
(discreteMean *. discreteIntegralSum +. continuousMean *. continuousIntegralSum) /.
totalIntegralSum
}
switch discreteIntegralSum /. totalIntegralSum {
| 1.0 => Discrete.T.variance(discrete)
| 0.0 => Continuous.T.variance(continuous)
| _ => XYShape.Analysis.getVarianceDangerously(t, mean, getMeanOfSquares)
}
}
})
let combineAlgebraically = (op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t => {
// Discrete convolution can cause a huge increase in the number of samples,
// so we'll first downsample.
// An alternative (to be explored in the future) may be to first perform the full convolution and then to downsample the result;
// to use non-uniform fast Fourier transforms (for addition only), add web workers or gpu.js, etc. ...
// we still have to figure out where to downsample, and how to do so effectively.
//let downsampleIfTooLarge = (t: t) => {
// let sqtl = sqrt(float_of_int(totalLength(t)));
// sqtl > 10 ? T.downsample(int_of_float(sqtl), t) : t;
//};
let t1d = t1
let t2d = t2
// continuous (*) continuous => continuous, but also
// discrete (*) continuous => continuous (and vice versa). We have to take care of all combos and then combine them:
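// For op = #Add, say, the result's continuous part is the sum of the c*c, d*c and c*d convolutions,
// and its discrete part is the d*d convolution alone; e.g. two point masses at 1 and 2 convolve
// into a single point mass at 3 under addition.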
let ccConvResult = Continuous.combineAlgebraically(op, t1.continuous, t2.continuous)
let dcConvResult = Continuous.combineAlgebraicallyWithDiscrete(op, t2.continuous, t1.discrete)
let cdConvResult = Continuous.combineAlgebraicallyWithDiscrete(op, t1.continuous, t2.discrete)
let continuousConvResult = Continuous.reduce(\"+.", [ccConvResult, dcConvResult, cdConvResult])
// ... finally, discrete (*) discrete => discrete, obviously:
let discreteConvResult = Discrete.combineAlgebraically(op, t1.discrete, t2.discrete)
let combinedIntegralSum = Common.combineIntegralSums(
(a, b) => Some(a *. b),
t1.integralSumCache,
t2.integralSumCache,
)
{
discrete: discreteConvResult,
continuous: continuousConvResult,
integralSumCache: combinedIntegralSum,
integralCache: None,
}
}
let combinePointwise = (
~integralSumCachesFn=(_, _) => None,
~integralCachesFn=(_, _) => None,
fn,
t1: t,
t2: t,
): t => {
let reducedDiscrete =
[t1, t2]
|> E.A.fmap(toDiscrete)
|> E.A.O.concatSomes
|> Discrete.reduce(~integralSumCachesFn, ~integralCachesFn, fn)
let reducedContinuous =
[t1, t2]
|> E.A.fmap(toContinuous)
|> E.A.O.concatSomes
|> Continuous.reduce(~integralSumCachesFn, ~integralCachesFn, fn)
let combinedIntegralSum = Common.combineIntegralSums(
integralSumCachesFn,
t1.integralSumCache,
t2.integralSumCache,
)
let combinedIntegral = Common.combineIntegrals(
integralCachesFn,
t1.integralCache,
t2.integralCache,
)
make(
~integralSumCache=combinedIntegralSum,
~integralCache=combinedIntegral,
~discrete=reducedDiscrete,
~continuous=reducedContinuous,
)
}

View File

@ -0,0 +1,29 @@
type assumption =
| ADDS_TO_1
| ADDS_TO_CORRECT_PROBABILITY
type assumptions = {
continuous: assumption,
discrete: assumption,
discreteProbabilityMass: option<float>,
}
let buildSimple = (
~continuous: option<DistTypes.continuousShape>,
~discrete: option<DistTypes.discreteShape>,
): option<DistTypes.shape> => {
let continuous =
continuous |> E.O.default(Continuous.make(~integralSumCache=Some(0.0), {xs: [], ys: []}))
let discrete =
discrete |> E.O.default(Discrete.make(~integralSumCache=Some(0.0), {xs: [], ys: []}))
let cLength = continuous |> Continuous.getShape |> XYShape.T.xs |> E.A.length
let dLength = discrete |> Discrete.getShape |> XYShape.T.xs |> E.A.length
switch (cLength, dLength) {
| (0 | 1, 0) => None
| (0 | 1, _) => Some(Discrete(discrete))
| (_, 0) => Some(Continuous(continuous))
| (_, _) =>
let mixedDist = Mixed.make(~integralSumCache=None, ~integralCache=None, ~continuous, ~discrete)
Some(Mixed(mixedDist))
}
}

View File

@ -0,0 +1,207 @@
open Distributions
type t = DistTypes.shape
let mapToAll = ((fn1, fn2, fn3), t: t) =>
switch t {
| Mixed(m) => fn1(m)
| Discrete(m) => fn2(m)
| Continuous(m) => fn3(m)
}
let fmap = ((fn1, fn2, fn3), t: t): t =>
switch t {
| Mixed(m) => Mixed(fn1(m))
| Discrete(m) => Discrete(fn2(m))
| Continuous(m) => Continuous(fn3(m))
}
let toMixed = mapToAll((
m => m,
d =>
Mixed.make(
~integralSumCache=d.integralSumCache,
~integralCache=d.integralCache,
~discrete=d,
~continuous=Continuous.empty,
),
c =>
Mixed.make(
~integralSumCache=c.integralSumCache,
~integralCache=c.integralCache,
~discrete=Discrete.empty,
~continuous=c,
),
))
let combineAlgebraically = (op: ExpressionTypes.algebraicOperation, t1: t, t2: t): t =>
switch (t1, t2) {
| (Continuous(m1), Continuous(m2)) =>
Continuous.combineAlgebraically(op, m1, m2) |> Continuous.T.toShape
| (Continuous(m1), Discrete(m2))
| (Discrete(m2), Continuous(m1)) =>
Continuous.combineAlgebraicallyWithDiscrete(op, m1, m2) |> Continuous.T.toShape
| (Discrete(m1), Discrete(m2)) => Discrete.combineAlgebraically(op, m1, m2) |> Discrete.T.toShape
| (m1, m2) => Mixed.combineAlgebraically(op, toMixed(m1), toMixed(m2)) |> Mixed.T.toShape
}
let combinePointwise = (
~integralSumCachesFn: (float, float) => option<float>=(_, _) => None,
~integralCachesFn: (
DistTypes.continuousShape,
DistTypes.continuousShape,
) => option<DistTypes.continuousShape>=(_, _) => None,
fn,
t1: t,
t2: t,
) =>
switch (t1, t2) {
| (Continuous(m1), Continuous(m2)) =>
DistTypes.Continuous(
Continuous.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
)
| (Discrete(m1), Discrete(m2)) =>
DistTypes.Discrete(
Discrete.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, m1, m2),
)
| (m1, m2) =>
DistTypes.Mixed(
Mixed.combinePointwise(~integralSumCachesFn, ~integralCachesFn, fn, toMixed(m1), toMixed(m2)),
)
}
module T = Dist({
type t = DistTypes.shape
type integral = DistTypes.continuousShape
let xToY = (f: float) => mapToAll((Mixed.T.xToY(f), Discrete.T.xToY(f), Continuous.T.xToY(f)))
let toShape = (t: t) => t
let toContinuous = t => None
let toDiscrete = t => None
let downsample = (i, t) =>
fmap((Mixed.T.downsample(i), Discrete.T.downsample(i), Continuous.T.downsample(i)), t)
let truncate = (leftCutoff, rightCutoff, t): t =>
fmap(
(
Mixed.T.truncate(leftCutoff, rightCutoff),
Discrete.T.truncate(leftCutoff, rightCutoff),
Continuous.T.truncate(leftCutoff, rightCutoff),
),
t,
)
let toDiscreteProbabilityMassFraction = t => 0.0
let normalize = fmap((Mixed.T.normalize, Discrete.T.normalize, Continuous.T.normalize))
let updateIntegralCache = (integralCache, t: t): t =>
fmap(
(
Mixed.T.updateIntegralCache(integralCache),
Discrete.T.updateIntegralCache(integralCache),
Continuous.T.updateIntegralCache(integralCache),
),
t,
)
let toContinuous = mapToAll((
Mixed.T.toContinuous,
Discrete.T.toContinuous,
Continuous.T.toContinuous,
))
let toDiscrete = mapToAll((Mixed.T.toDiscrete, Discrete.T.toDiscrete, Continuous.T.toDiscrete))
let toDiscreteProbabilityMassFraction = mapToAll((
Mixed.T.toDiscreteProbabilityMassFraction,
Discrete.T.toDiscreteProbabilityMassFraction,
Continuous.T.toDiscreteProbabilityMassFraction,
))
let minX = mapToAll((Mixed.T.minX, Discrete.T.minX, Continuous.T.minX))
let integral = mapToAll((
Mixed.T.Integral.get,
Discrete.T.Integral.get,
Continuous.T.Integral.get,
))
let integralEndY = mapToAll((
Mixed.T.Integral.sum,
Discrete.T.Integral.sum,
Continuous.T.Integral.sum,
))
let integralXtoY = f =>
mapToAll((Mixed.T.Integral.xToY(f), Discrete.T.Integral.xToY(f), Continuous.T.Integral.xToY(f)))
let integralYtoX = f =>
mapToAll((Mixed.T.Integral.yToX(f), Discrete.T.Integral.yToX(f), Continuous.T.Integral.yToX(f)))
let maxX = mapToAll((Mixed.T.maxX, Discrete.T.maxX, Continuous.T.maxX))
let mapY = (
~integralSumCacheFn=previousIntegralSum => None,
~integralCacheFn=previousIntegral => None,
~fn,
) =>
fmap((
Mixed.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
Discrete.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
Continuous.T.mapY(~integralSumCacheFn, ~integralCacheFn, ~fn),
))
let mean = (t: t): float =>
switch t {
| Mixed(m) => Mixed.T.mean(m)
| Discrete(m) => Discrete.T.mean(m)
| Continuous(m) => Continuous.T.mean(m)
}
let variance = (t: t): float =>
switch t {
| Mixed(m) => Mixed.T.variance(m)
| Discrete(m) => Discrete.T.variance(m)
| Continuous(m) => Continuous.T.variance(m)
}
})
let pdf = (f: float, t: t) => {
let mixedPoint: DistTypes.mixedPoint = T.xToY(f, t)
mixedPoint.continuous +. mixedPoint.discrete
}
let inv = T.Integral.yToX
let cdf = T.Integral.xToY
let doN = (n, fn) => {
let items = Belt.Array.make(n, 0.0)
for x in 0 to n - 1 {
let _ = Belt.Array.set(items, x, fn())
}
items
}
let sample = (t: t): float => {
let randomItem = Random.float(1.)
let bar = t |> T.Integral.yToX(randomItem)
bar
}
let isFloat = (t: t) =>
switch t {
| Discrete({xyShape: {xs: [_], ys: [1.0]}}) => true
| _ => false
}
let sampleNRendered = (n, dist) => {
let integralCache = T.Integral.get(dist)
let distWithUpdatedIntegralCache = T.updateIntegralCache(Some(integralCache), dist)
doN(n, () => sample(distWithUpdatedIntegralCache))
}
let operate = (distToFloatOp: ExpressionTypes.distToFloatOperation, s): float =>
switch distToFloatOp {
| #Pdf(f) => pdf(f, s)
| #Cdf(f) => cdf(f, s)
| #Inv(f) => inv(f, s)
| #Sample => sample(s)
| #Mean => T.mean(s)
}

View File

@ -0,0 +1,440 @@
open DistTypes
let interpolate = (xMin: float, xMax: float, yMin: float, yMax: float, xIntended: float): float => {
let minProportion = (xMax -. xIntended) /. (xMax -. xMin)
let maxProportion = (xIntended -. xMin) /. (xMax -. xMin)
yMin *. minProportion +. yMax *. maxProportion
}
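// A quick worked example of interpolate above: interpolate(0., 10., 0., 100., 2.5) gives
// 0. *. 0.75 +. 100. *. 0.25 = 25., i.e. a plain linear blend of yMin and yMax.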
// TODO: Make sure that shapes cannot be empty.
let extImp = E.O.toExt("Tried to perform an operation on an empty XYShape.")
module T = {
type t = xyShape
let toXyShape = (t: t): xyShape => t
type ts = array<xyShape>
let xs = (t: t) => t.xs
let ys = (t: t) => t.ys
let length = (t: t) => E.A.length(t.xs)
let empty = {xs: [], ys: []}
let isEmpty = (t: t) => length(t) == 0
let minX = (t: t) => t |> xs |> E.A.Sorted.min |> extImp
let maxX = (t: t) => t |> xs |> E.A.Sorted.max |> extImp
let firstY = (t: t) => t |> ys |> E.A.first |> extImp
let lastY = (t: t) => t |> ys |> E.A.last |> extImp
let xTotalRange = (t: t) => maxX(t) -. minX(t)
let mapX = (fn, t: t): t => {xs: E.A.fmap(fn, t.xs), ys: t.ys}
let mapY = (fn, t: t): t => {xs: t.xs, ys: E.A.fmap(fn, t.ys)}
let zip = ({xs, ys}: t) => Belt.Array.zip(xs, ys)
let fromArray = ((xs, ys)): t => {xs: xs, ys: ys}
let fromArrays = (xs, ys): t => {xs: xs, ys: ys}
let accumulateYs = (fn, p: t) => fromArray((p.xs, E.A.accumulate(fn, p.ys)))
let concat = (t1: t, t2: t) => {
let cxs = Array.concat(list{t1.xs, t2.xs})
let cys = Array.concat(list{t1.ys, t2.ys})
{xs: cxs, ys: cys}
}
let fromZippedArray = (pairs: array<(float, float)>): t => pairs |> Belt.Array.unzip |> fromArray
let equallyDividedXs = (t: t, newLength) => E.A.Floats.range(minX(t), maxX(t), newLength)
let toJs = (t: t) => {"xs": t.xs, "ys": t.ys}
}
module Ts = {
type t = T.ts
let minX = (t: t) => t |> E.A.fmap(T.minX) |> E.A.min |> extImp
let maxX = (t: t) => t |> E.A.fmap(T.maxX) |> E.A.max |> extImp
let equallyDividedXs = (t: t, newLength) => E.A.Floats.range(minX(t), maxX(t), newLength)
let allXs = (t: t) => t |> E.A.fmap(T.xs) |> E.A.Sorted.concatMany
}
module Pairs = {
let x = fst
let y = snd
let first = (t: T.t) => (T.minX(t), T.firstY(t))
let last = (t: T.t) => (T.maxX(t), T.lastY(t))
let getBy = (t: T.t, fn) => t |> T.zip |> E.A.getBy(_, fn)
let firstAtOrBeforeXValue = (xValue, t: T.t) => {
let zipped = T.zip(t)
let firstIndex = zipped |> Belt.Array.getIndexBy(_, ((x, _)) => x > xValue)
let previousIndex = switch firstIndex {
| None => Some(Array.length(zipped) - 1)
| Some(0) => None
| Some(n) => Some(n - 1)
}
previousIndex |> Belt.Option.flatMap(_, Belt.Array.get(zipped))
}
}
module YtoX = {
let linear = (y: float, t: T.t): float => {
let firstHigherIndex = E.A.Sorted.binarySearchFirstElementGreaterIndex(T.ys(t), y)
let foundX = switch firstHigherIndex {
| #overMax => T.maxX(t)
| #underMin => T.minX(t)
| #firstHigher(firstHigherIndex) =>
let lowerOrEqualIndex = firstHigherIndex - 1 < 0 ? 0 : firstHigherIndex - 1
let (_xs, _ys) = (T.xs(t), T.ys(t))
let needsInterpolation = _ys[lowerOrEqualIndex] != y
if needsInterpolation {
interpolate(
_ys[lowerOrEqualIndex],
_ys[firstHigherIndex],
_xs[lowerOrEqualIndex],
_xs[firstHigherIndex],
y,
)
} else {
_xs[lowerOrEqualIndex]
}
}
foundX
}
}
module XtoY = {
let stepwiseIncremental = (f, t: T.t) => Pairs.firstAtOrBeforeXValue(f, t) |> E.O.fmap(Pairs.y)
let stepwiseIfAtX = (f: float, t: T.t) =>
Pairs.getBy(t, ((x: float, _)) => x == f) |> E.O.fmap(Pairs.y)
let linear = (x: float, t: T.t): float => {
let firstHigherIndex = E.A.Sorted.binarySearchFirstElementGreaterIndex(T.xs(t), x)
let n = switch firstHigherIndex {
| #overMax => T.lastY(t)
| #underMin => T.firstY(t)
| #firstHigher(firstHigherIndex) =>
let lowerOrEqualIndex = firstHigherIndex - 1 < 0 ? 0 : firstHigherIndex - 1
let (_xs, _ys) = (T.xs(t), T.ys(t))
let needsInterpolation = _xs[lowerOrEqualIndex] != x
if needsInterpolation {
interpolate(
_xs[lowerOrEqualIndex],
_xs[firstHigherIndex],
_ys[lowerOrEqualIndex],
_ys[firstHigherIndex],
x,
)
} else {
_ys[lowerOrEqualIndex]
}
}
n
}
/* Returns a between-points-interpolating function that can be used with PointwiseCombination.combine.
Interpolation can either be stepwise (using the value on the left) or linear. Extrapolation can be #UseZero or #UseOutermostPoints. */
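// Rough example (illustrative values): for t = {xs: [0., 10.], ys: [5., 100.]}, the
// (#Linear, #UseZero) interpolator returns 0. for any x outside [0., 10.] and blends linearly inside,
// while (#Linear, #UseOutermostPoints) returns 5. to the left of the range and 100. to the right.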
let continuousInterpolator = (
interpolation: DistTypes.interpolationStrategy,
extrapolation: DistTypes.extrapolationStrategy,
): interpolator =>
switch (interpolation, extrapolation) {
| (#Linear, #UseZero) =>
(t: T.t, leftIndex: int, x: float) =>
if leftIndex < 0 {
0.0
} else if leftIndex >= T.length(t) - 1 {
0.0
} else {
let x1 = t.xs[leftIndex]
let x2 = t.xs[leftIndex + 1]
let y1 = t.ys[leftIndex]
let y2 = t.ys[leftIndex + 1]
let fraction = (x -. x1) /. (x2 -. x1)
y1 *. (1. -. fraction) +. y2 *. fraction
}
| (#Linear, #UseOutermostPoints) =>
(t: T.t, leftIndex: int, x: float) =>
if leftIndex < 0 {
t.ys[0]
} else if leftIndex >= T.length(t) - 1 {
t.ys[T.length(t) - 1]
} else {
let x1 = t.xs[leftIndex]
let x2 = t.xs[leftIndex + 1]
let y1 = t.ys[leftIndex]
let y2 = t.ys[leftIndex + 1]
let fraction = (x -. x1) /. (x2 -. x1)
y1 *. (1. -. fraction) +. y2 *. fraction
}
| (#Stepwise, #UseZero) =>
(t: T.t, leftIndex: int, x: float) =>
if leftIndex < 0 {
0.0
} else if leftIndex >= T.length(t) - 1 {
0.0
} else {
t.ys[leftIndex]
}
| (#Stepwise, #UseOutermostPoints) =>
(t: T.t, leftIndex: int, x: float) =>
if leftIndex < 0 {
t.ys[0]
} else if leftIndex >= T.length(t) - 1 {
t.ys[T.length(t) - 1]
} else {
t.ys[leftIndex]
}
}
/* Returns a between-points-interpolating function that can be used with PointwiseCombination.combine.
For discrete distributions, the probability density between points is zero, so we just return zero here. */
let discreteInterpolator: interpolator = (t: T.t, leftIndex: int, x: float) => 0.0
}
module XsConversion = {
let _replaceWithXs = (newXs: array<float>, t: T.t): T.t => {
let newYs = Belt.Array.map(newXs, XtoY.linear(_, t))
{xs: newXs, ys: newYs}
}
let equallyDivideXByMass = (newLength: int, integral: T.t) =>
E.A.Floats.range(0.0, 1.0, newLength) |> E.A.fmap(YtoX.linear(_, integral))
let proportionEquallyOverX = (newLength: int, t: T.t): T.t =>
T.equallyDividedXs(t, newLength) |> _replaceWithXs(_, t)
let proportionByProbabilityMass = (newLength: int, integral: T.t, t: T.t): T.t =>
integral |> equallyDivideXByMass(newLength) |> _replaceWithXs(_, t) // first, create a new set of xs at evenly spaced percentiles; then linearly interpolate new ys for those xs
}
module Zipped = {
type zipped = array<(float, float)>
let compareYs = ((_, y1), (_, y2)) => y1 > y2 ? 1 : 0
let compareXs = ((x1, _), (x2, _)) => x1 > x2 ? 1 : 0
let sortByY = (t: zipped) => t |> E.A.stableSortBy(_, compareYs)
let sortByX = (t: zipped) => t |> E.A.stableSortBy(_, compareXs)
let filterByX = (testFn: float => bool, t: zipped) => t |> E.A.filter(((x, _)) => testFn(x))
}
module PointwiseCombination = {
// interpolator is a function from XYShape.XtoY, e.g. XtoY.continuousInterpolator(#Linear, #UseOutermostPoints).
let combine = %raw(` // : (float => float => float, T.t, T.t, bool) => T.t
// This function combines two xyShapes by looping through both of them simultaneously.
// It always moves on to the next smallest x, whether that's in the first or second input's xs,
// and interpolates the value on the other side, thus accumulating xs and ys.
// This is written in raw JS because this can still be a bottleneck, and using refs for the i and j indices is quite painful.
function(fn, interpolator, t1, t2) {
let t1n = t1.xs.length;
let t2n = t2.xs.length;
let outX = [];
let outY = [];
let i = -1;
let j = -1;
while (i <= t1n - 1 && j <= t2n - 1) {
let x, ya, yb;
if (j == t2n - 1 && i < t1n - 1 ||
t1.xs[i+1] < t2.xs[j+1]) { // if a has to catch up to b, or if b is already done
i++;
x = t1.xs[i];
ya = t1.ys[i];
yb = interpolator(t2, j, x);
} else if (i == t1n - 1 && j < t2n - 1 ||
t1.xs[i+1] > t2.xs[j+1]) { // if b has to catch up to a, or if a is already done
j++;
x = t2.xs[j];
yb = t2.ys[j];
ya = interpolator(t1, i, x);
} else if (i < t1n - 1 && j < t2n && t1.xs[i+1] === t2.xs[j+1]) { // if they happen to be equal, move both ahead
i++;
j++;
x = t1.xs[i];
ya = t1.ys[i];
yb = t2.ys[j];
} else if (i === t1n - 1 && j === t2n - 1) {
// finished!
i = t1n;
j = t2n;
continue;
} else {
console.log("Error!", i, j);
}
outX.push(x);
outY.push(fn(ya, yb));
}
return {xs: outX, ys: outY};
}
`)
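// A typical call of the combine above (sketch): combine(\"+.", XtoY.continuousInterpolator(#Linear, #UseZero), t1, t2)
// adds two shapes pointwise over the union of their xs, interpolating whichever shape is missing a point.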
let combineEvenXs = (~fn, ~xToYSelection, sampleCount, t1: T.t, t2: T.t) =>
switch (E.A.length(t1.xs), E.A.length(t2.xs)) {
| (0, 0) => T.empty
| (0, _) => t2
| (_, 0) => t1
| (_, _) =>
let allXs = Ts.equallyDividedXs([t1, t2], sampleCount)
let allYs = allXs |> E.A.fmap(x => fn(xToYSelection(x, t1), xToYSelection(x, t2)))
T.fromArrays(allXs, allYs)
}
// TODO: I'd bet this is pretty slow. Maybe it would be faster to intersperse Xs and Ys separately.
let intersperse = (t1: T.t, t2: T.t) => E.A.intersperse(T.zip(t1), T.zip(t2)) |> T.fromZippedArray
}
// I'm really not sure this part is actually what we want at this point.
module Range = {
// ((lastX, lastY), (nextX, nextY))
type zippedRange = ((float, float), (float, float))
let toT = T.fromZippedArray
let nextX = ((_, (nextX, _)): zippedRange) => nextX
let rangePointAssumingSteps = (((_, lastY), (nextX, _)): zippedRange) => (nextX, lastY)
let rangeAreaAssumingTriangles = (((lastX, lastY), (nextX, nextY)): zippedRange) =>
(nextX -. lastX) *. (lastY +. nextY) /. 2.
// TODO: figure out how to do this without making a new array.
let rangeAreaAssumingTrapezoids = (((lastX, lastY), (nextX, nextY)): zippedRange) =>
(nextX -. lastX) *. (Js.Math.min_float(lastY, nextY) +. (lastY +. nextY) /. 2.)
let delta_y_over_delta_x = (((lastX, lastY), (nextX, nextY)): zippedRange) =>
(nextY -. lastY) /. (nextX -. lastX)
let mapYsBasedOnRanges = (fn, t) =>
Belt.Array.zip(t.xs, t.ys)
|> E.A.toRanges
|> E.R.toOption
|> E.O.fmap(r => r |> Belt.Array.map(_, r => (nextX(r), fn(r))))
// This code is messy, in part because I'm trying to make things easy on garbage collection here.
// It's using triangles instead of trapezoids right now.
let integrateWithTriangles = ({xs, ys}) => {
let length = E.A.length(xs)
let cumulativeY = Belt.Array.make(length, 0.0)
for x in 0 to E.A.length(xs) - 2 {
let _ = Belt.Array.set(
cumulativeY,
x + 1,
(xs[x + 1] -. xs[x]) *. ((ys[x] +. ys[x + 1]) /. 2.) +. cumulativeY[x], // dx *. average y, added to the running total
)
}
Some({xs: xs, ys: cumulativeY})
}
let derivative = mapYsBasedOnRanges(delta_y_over_delta_x)
let stepwiseToLinear = ({xs, ys}: T.t): T.t => {
// adds points at the bottom of each step.
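// For example (illustrative values): {xs: [1., 2.], ys: [5., 6.]} becomes
// {xs: [1. -. eps, 1., 2. -. eps, 2.], ys: [0., 5., 5., 6.]}, where eps is epsilon_float.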
let length = E.A.length(xs)
let newXs: array<float> = Belt.Array.makeUninitializedUnsafe(2 * length)
let newYs: array<float> = Belt.Array.makeUninitializedUnsafe(2 * length)
Belt.Array.set(newXs, 0, xs[0] -. epsilon_float) |> ignore
Belt.Array.set(newYs, 0, 0.) |> ignore
Belt.Array.set(newXs, 1, xs[0]) |> ignore
Belt.Array.set(newYs, 1, ys[0]) |> ignore
for i in 1 to E.A.length(xs) - 1 {
Belt.Array.set(newXs, i * 2, xs[i] -. epsilon_float) |> ignore
Belt.Array.set(newYs, i * 2, ys[i - 1]) |> ignore
Belt.Array.set(newXs, i * 2 + 1, xs[i]) |> ignore
Belt.Array.set(newYs, i * 2 + 1, ys[i]) |> ignore
()
}
{xs: newXs, ys: newYs}
}
// TODO: I think this isn't needed by any functions anymore.
let stepsToContinuous = t => {
// TODO: It would be nicer if the diff didn't change the first element, and also maybe if there were a more elegant way of doing this.
let diff = T.xTotalRange(t) |> (r => r *. 0.00001)
let items = switch E.A.toRanges(Belt.Array.zip(t.xs, t.ys)) {
| Ok(items) =>
Some(
items
|> Belt.Array.map(_, rangePointAssumingSteps)
|> T.fromZippedArray
|> PointwiseCombination.intersperse(t |> T.mapX(e => e +. diff)),
)
| _ => Some(t)
}
let first = items |> E.O.fmap(T.zip) |> E.O.bind(_, E.A.get(_, 0))
switch (items, first) {
| (Some(items), Some((0.0, _))) => Some(items)
| (Some(items), Some((firstX, _))) =>
let all = E.A.append([(firstX, 0.0)], items |> T.zip)
all |> T.fromZippedArray |> E.O.some
| _ => None
}
}
}
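// pointLogScore below scores a single point as answer *. log2(prediction /. answer), and logScorePoint
// evaluates both shapes on a shared evenly spaced grid, integrates the pointwise scores with triangles,
// and returns the final accumulated y -- roughly a KL-divergence-style score.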
let pointLogScore = (prediction, answer) =>
switch answer {
| 0. => 0.0
| answer => answer *. Js.Math.log2(Js.Math.abs_float(prediction /. answer))
}
let logScorePoint = (sampleCount, t1, t2) =>
PointwiseCombination.combineEvenXs(
~fn=pointLogScore,
~xToYSelection=XtoY.linear,
sampleCount,
t1,
t2,
)
|> Range.integrateWithTriangles
|> E.O.fmap(T.accumulateYs(\"+."))
|> E.O.fmap(Pairs.last)
|> E.O.fmap(Pairs.y)
module Analysis = {
let integrateContinuousShape = (
~indefiniteIntegralStepwise=(p, h1) => h1 *. p,
~indefiniteIntegralLinear=(p, a, b) => a *. p +. b *. p ** 2.0 /. 2.0,
t: DistTypes.continuousShape,
): float => {
let xs = t.xyShape.xs
let ys = t.xyShape.ys
E.A.reducei(xs, 0.0, (acc, _x, i) => {
let areaUnderIntegral = // TODO Take this switch statement out of the loop body
switch (t.interpolation, i) {
| (_, 0) => 0.0
| (#Stepwise, _) =>
indefiniteIntegralStepwise(xs[i], ys[i - 1]) -.
indefiniteIntegralStepwise(xs[i - 1], ys[i - 1])
| (#Linear, _) =>
let x1 = xs[i - 1]
let x2 = xs[i]
if x1 == x2 {
0.0
} else {
let h1 = ys[i - 1]
let h2 = ys[i]
let b = (h1 -. h2) /. (x1 -. x2)
let a = h1 -. b *. x1
indefiniteIntegralLinear(x2, a, b) -. indefiniteIntegralLinear(x1, a, b)
}
}
acc +. areaUnderIntegral
})
}
let getMeanOfSquaresContinuousShape = (t: DistTypes.continuousShape) => {
let indefiniteIntegralLinear = (p, a, b) => a *. p ** 3.0 /. 3.0 +. b *. p ** 4.0 /. 4.0
let indefiniteIntegralStepwise = (p, h1) => h1 *. p ** 3.0 /. 3.0
integrateContinuousShape(~indefiniteIntegralStepwise, ~indefiniteIntegralLinear, t)
}
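// getVarianceDangerously uses the identity Var(X) = E[X^2] -. E[X]^2; presumably "dangerously"
// because the subtraction can lose precision (or dip slightly negative) when the two terms are close.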
let getVarianceDangerously = (t: 't, mean: 't => float, getMeanOfSquares: 't => float): float => {
let meanSquared = mean(t) ** 2.0
let meanOfSquares = getMeanOfSquares(t)
meanOfSquares -. meanSquared
}
let squareXYShape = T.mapX(x => x ** 2.0)
}

View File

@ -206,7 +206,6 @@ module Truncate = {
module Normalize = {
  let rec operationToLeaf = (evaluationParams, t: node): result(node, string) => {
-   Js.log2("normalize", t);
    switch (t) {
    | `RenderedDist(s) => Ok(`RenderedDist(Shape.T.normalize(s)))
    | `SymbolicDist(_) => Ok(t)

View File

@ -344,7 +344,6 @@ let fromString2 = str => {
  });
  let value = E.R.bind(mathJsParse, MathAdtToDistDst.run);
- Js.log2(mathJsParse, value);
  value;
};

View File

@ -0,0 +1,443 @@
open Rationale.Function.Infix
module FloatFloatMap = {
module Id = Belt.Id.MakeComparable({
type t = float
let cmp: (float, float) => int = Pervasives.compare
})
type t = Belt.MutableMap.t<Id.t, float, Id.identity>
let fromArray = (ar: array<(float, float)>) => Belt.MutableMap.fromArray(ar, ~id=module(Id))
let toArray = (t: t) => Belt.MutableMap.toArray(t)
let empty = () => Belt.MutableMap.make(~id=module(Id))
let increment = (el, t: t) =>
Belt.MutableMap.update(t, el, x =>
switch x {
| Some(n) => Some(n +. 1.0)
| None => Some(1.0)
}
)
let get = (el, t: t) => Belt.MutableMap.get(t, el)
let fmap = (fn, t: t) => Belt.MutableMap.map(t, fn)
}
module Int = {
let max = (i1: int, i2: int) => i1 > i2 ? i1 : i2
}
/* Utils */
module U = {
let isEqual = (a, b) => a == b
let toA = a => [a]
let id = e => e
}
module O = {
let dimap = (sFn, rFn, e) =>
switch e {
| Some(r) => sFn(r)
| None => rFn()
}
()
let fmap = Rationale.Option.fmap
let bind = Rationale.Option.bind
let default = Rationale.Option.default
let isSome = Rationale.Option.isSome
let isNone = Rationale.Option.isNone
let toExn = Rationale.Option.toExn
let some = Rationale.Option.some
let firstSome = Rationale.Option.firstSome
let toExt = Rationale.Option.toExn
let flatApply = (fn, b) => Rationale.Option.apply(fn, Some(b)) |> Rationale.Option.flatten
let toBool = opt =>
switch opt {
| Some(_) => true
| _ => false
}
let ffmap = (fn, r) =>
switch r {
| Some(sm) => fn(sm)
| _ => None
}
let toString = opt =>
switch opt {
| Some(s) => s
| _ => ""
}
let toResult = (error, e) =>
switch e {
| Some(r) => Belt.Result.Ok(r)
| None => Error(error)
}
let compare = (compare, f1: option<float>, f2: option<float>) =>
switch (f1, f2) {
| (Some(f1), Some(f2)) => Some(compare(f1, f2) ? f1 : f2)
| (Some(f1), None) => Some(f1)
| (None, Some(f2)) => Some(f2)
| (None, None) => None
}
let min = compare(\"<")
let max = compare(\">")
}
/* Functions */
module F = {
let apply = (a, e) => a |> e
let flatten2Callbacks = (fn1, fn2, fnlast) =>
fn1(response1 => fn2(response2 => fnlast(response1, response2)))
let flatten3Callbacks = (fn1, fn2, fn3, fnlast) =>
fn1(response1 => fn2(response2 => fn3(response3 => fnlast(response1, response2, response3))))
let flatten4Callbacks = (fn1, fn2, fn3, fn4, fnlast) =>
fn1(response1 =>
fn2(response2 =>
fn3(response3 => fn4(response4 => fnlast(response1, response2, response3, response4)))
)
)
}
module Bool = {
type t = bool
let toString = (t: t) => t ? "TRUE" : "FALSE"
let fromString = str => str == "TRUE" ? true : false
module O = {
let toBool = opt =>
switch opt {
| Some(true) => true
| _ => false
}
}
}
module Float = {
let with2DigitsPrecision = Js.Float.toPrecisionWithPrecision(_, ~digits=2)
let with3DigitsPrecision = Js.Float.toPrecisionWithPrecision(_, ~digits=3)
let toFixed = Js.Float.toFixed
let toString = Js.Float.toString
}
module I = {
let increment = n => n + 1
let decrement = n => n - 1
let toString = Js.Int.toString
}
/* R for Result */
module R = {
let result = Rationale.Result.result
let id = e => e |> result(U.id, U.id)
let fmap = Rationale.Result.fmap
let bind = Rationale.Result.bind
let toExn = Belt.Result.getExn
let default = (default, res: Belt.Result.t<'a, 'b>) =>
switch res {
| Ok(r) => r
| Error(_) => default
}
let merge = (a, b) =>
switch (a, b) {
| (Error(e), _) => Error(e)
| (_, Error(e)) => Error(e)
| (Ok(a), Ok(b)) => Ok((a, b))
}
let toOption = (e: Belt.Result.t<'a, 'b>) =>
switch e {
| Ok(r) => Some(r)
| Error(_) => None
}
let errorIfCondition = (errorCondition, errorMessage, r) =>
errorCondition(r) ? Error(errorMessage) : Ok(r)
}
let safe_fn_of_string = (fn, s: string): option<'a> =>
try Some(fn(s)) catch {
| _ => None
}
module S = {
let safe_float = float_of_string->safe_fn_of_string
let safe_int = int_of_string->safe_fn_of_string
let default = (defaultStr, str) => str == "" ? defaultStr : str
}
module J = {
let toString = \"||>"(Js.Json.decodeString, O.default(""))
let fromString = Js.Json.string
let fromNumber = Js.Json.number
module O = {
let fromString = (str: string) =>
switch str {
| "" => None
| _ => Some(Js.Json.string(str))
}
let toString = (str: option<'a>) =>
switch str {
| Some(str) => Some(str |> \"||>"(Js.Json.decodeString, O.default("")))
| _ => None
}
}
}
module JsDate = {
let fromString = Js.Date.fromString
let now = Js.Date.now
let make = Js.Date.make
let valueOf = Js.Date.valueOf
}
/* List */
module L = {
let fmap = List.map
let get = Belt.List.get
let toArray = Array.of_list
let fmapi = List.mapi
let concat = List.concat
let drop = Rationale.RList.drop
let remove = Rationale.RList.remove
let find = List.find
let filter = List.filter
let for_all = List.for_all
let exists = List.exists
let sort = List.sort
let length = List.length
let filter_opt = Rationale.RList.filter_opt
let uniqBy = Rationale.RList.uniqBy
let join = Rationale.RList.join
let head = Rationale.RList.head
let uniq = Rationale.RList.uniq
let flatten = List.flatten
let last = Rationale.RList.last
let append = List.append
let getBy = Belt.List.getBy
let dropLast = Rationale.RList.dropLast
let contains = Rationale.RList.contains
let without = Rationale.RList.without
let update = Rationale.RList.update
let iter = List.iter
let findIndex = Rationale.RList.findIndex
}
/* A for Array */
module A = {
let fmap = Array.map
let fmapi = Array.mapi
let to_list = Array.to_list
let of_list = Array.of_list
let length = Array.length
let append = Array.append
// let empty = [||];
let unsafe_get = Array.unsafe_get
let get = Belt.Array.get
let getBy = Belt.Array.getBy
let last = a => get(a, length(a) - 1)
let first = get(_, 0)
let hasBy = (r, fn) => Belt.Array.getBy(r, fn) |> O.isSome
let fold_left = Array.fold_left
let fold_right = Array.fold_right
let concatMany = Belt.Array.concatMany
let keepMap = Belt.Array.keepMap
let init = Array.init
let reduce = Belt.Array.reduce
let reducei = Belt.Array.reduceWithIndex
let isEmpty = r => length(r) < 1
let min = a => get(a, 0) |> O.fmap(first => Belt.Array.reduce(a, first, (i, j) => i < j ? i : j))
let max = a => get(a, 0) |> O.fmap(first => Belt.Array.reduce(a, first, (i, j) => i > j ? i : j))
let stableSortBy = Belt.SortArray.stableSortBy
let toRanges = (a: array<'a>) =>
switch a |> Belt.Array.length {
| 0
| 1 =>
Belt.Result.Error("Must be at least 2 elements")
| n =>
Belt.Array.makeBy(n - 1, r => r)
|> Belt.Array.map(_, index => (
Belt.Array.getUnsafe(a, index),
Belt.Array.getUnsafe(a, index + 1),
))
|> Rationale.Result.return
}
// This zips to the length of the longer array; missing elements come through as None.
let zipMaxLength = (array1, array2) => {
let maxLength = Int.max(length(array1), length(array2))
let result = maxLength |> Belt.Array.makeUninitializedUnsafe
for i in 0 to maxLength - 1 {
Belt.Array.set(result, i, (get(array1, i), get(array2, i))) |> ignore
}
result
}
let asList = (f: list<'a> => list<'a>, r: array<'a>) => r |> to_list |> f |> of_list
/* TODO: Is there a better way of doing this? */
let uniq = r => asList(L.uniq, r)
//intersperse([1,2,3], [10,11,12]) => [1,10,2,11,3,12]
let intersperse = (a: array<'a>, b: array<'a>) => {
let items: ref<array<'a>> = ref([])
Belt.Array.forEachWithIndex(a, (i, item) =>
switch Belt.Array.get(b, i) {
| Some(r) => items := append(items.contents, [item, r])
| None => items := append(items.contents, [item])
}
)
items.contents
}
// This is like map, but each element is combined with the accumulated result so far:
//accumulate((a,b) => a + b, [1,2,3]) => [1, 3, 5]
let accumulate = (fn: ('a, 'a) => 'a, items: array<'a>) => {
let length = items |> length
let empty = Belt.Array.make(length, items |> unsafe_get(_, 0))
Belt.Array.forEachWithIndex(items, (index, element) => {
let item = switch index {
| 0 => element
| index => fn(element, unsafe_get(empty, index - 1))
}
let _ = Belt.Array.set(empty, index, item)
})
empty
}
// @todo: Is -1 still the indicator of "not found" (as it is with JS findIndex)? Wasn't sure.
let findIndex = (e, i) =>
Js.Array.findIndex(e, i) |> (
r =>
switch r {
| -1 => None
| r => Some(r)
}
)
let filter = (o, e) => Js.Array.filter(o, e)
module O = {
let concatSomes = (optionals: array<option<'a>>): array<'a> =>
optionals
|> Js.Array.filter(Rationale.Option.isSome)
|> Js.Array.map(Rationale.Option.toExn("Warning: This should not have happened"))
let defaultEmpty = (o: option<array<'a>>): array<'a> =>
switch o {
| Some(o) => o
| None => []
}
}
module R = {
let firstErrorOrOpen = (results: array<Belt.Result.t<'a, 'b>>): Belt.Result.t<
array<'a>,
'b,
> => {
let bringErrorUp = switch results |> Belt.Array.getBy(_, Belt.Result.isError) {
| Some(Belt.Result.Error(err)) => Belt.Result.Error(err)
| Some(Belt.Result.Ok(_)) => Belt.Result.Ok(results)
| None => Belt.Result.Ok(results)
}
let forceOpen = (r: array<Belt.Result.t<'a, 'b>>): array<'a> =>
r |> Belt.Array.map(_, r => Belt.Result.getExn(r))
bringErrorUp |> Belt.Result.map(_, forceOpen)
}
}
module Sorted = {
let min = first
let max = last
let range = (~min=min, ~max=max, a) =>
switch (min(a), max(a)) {
| (Some(min), Some(max)) => Some(max -. min)
| _ => None
}
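// Examples on the sorted array [1., 3., 5.] (illustrative values):
// searching 0. gives #underMin, 6. gives #overMax, 4. gives #firstHigher(2),
// and an exact match like 3. gives #firstHigher(1) (its own index).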
let binarySearchFirstElementGreaterIndex = (ar: array<'a>, el: 'a) => {
let el = Belt.SortArray.binarySearchBy(ar, el, compare)
let el = el < 0 ? el * -1 - 1 : el
switch el {
| e if e >= length(ar) => #overMax
| e if e == 0 => #underMin
| e => #firstHigher(e)
}
}
let concat = (t1: array<'a>, t2: array<'a>) => {
let ts = Belt.Array.concat(t1, t2)
ts |> Array.fast_sort(compare)
ts
}
let concatMany = (t1: array<array<'a>>) => {
let ts = Belt.Array.concatMany(t1)
ts |> Array.fast_sort(compare)
ts
}
module Floats = {
let makeIncrementalUp = (a, b) =>
Array.make(b - a + 1, a) |> Array.mapi((i, c) => c + i) |> Belt.Array.map(_, float_of_int)
let makeIncrementalDown = (a, b) =>
Array.make(a - b + 1, a) |> Array.mapi((i, c) => c - i) |> Belt.Array.map(_, float_of_int)
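// split separates a sorted array into values that appear once (continuous) and repeated values
// with their counts (discrete). For example: [1., 2., 2., 3.] gives ([1., 3.], a map with 2. -> 2.).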
let split = (sortedArray: array<float>) => {
let continuous = []
let discrete = FloatFloatMap.empty()
Belt.Array.forEachWithIndex(sortedArray, (index, element) => {
let maxIndex = (sortedArray |> Array.length) - 1
let possiblySimilarElements = switch index {
| 0 => [index + 1]
| n if n == maxIndex => [index - 1]
| _ => [index - 1, index + 1]
} |> Belt.Array.map(_, r => sortedArray[r])
let hasSimilarElement = Belt.Array.some(possiblySimilarElements, r => r == element)
hasSimilarElement
? FloatFloatMap.increment(element, discrete)
: {
let _ = Js.Array.push(element, continuous)
}
()
})
(continuous, discrete)
}
}
}
module Floats = {
let sum = Belt.Array.reduce(_, 0., (i, j) => i +. j)
let mean = a => sum(a) /. (Array.length(a) |> float_of_int)
let random = Js.Math.random_int
exception RangeError(string)
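// range produces n evenly spaced floats from min to max inclusive,
// e.g. range(0., 1., 5) == [0., 0.25, 0.5, 0.75, 1.].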
let range = (min: float, max: float, n: int): array<float> =>
switch n {
| 0 => []
| 1 => [min]
| 2 => [min, max]
| _ if min == max => Belt.Array.make(n, min)
| _ if n < 0 => raise(RangeError("n must be greater than 0"))
| _ if min > max => raise(RangeError("Min value is greater than max value"))
| _ =>
let diff = (max -. min) /. Belt.Float.fromInt(n - 1)
Belt.Array.makeBy(n, i => min +. Belt.Float.fromInt(i) *. diff)
}
}
}
module JsArray = {
let concatSomes = (optionals: Js.Array.t<option<'a>>): Js.Array.t<'a> =>
optionals
|> Js.Array.filter(Rationale.Option.isSome)
|> Js.Array.map(Rationale.Option.toExn("Warning: This should not have happened"))
let filter = Js.Array.filter
}

View File

@ -0,0 +1,3 @@
const ProgramEvaluator = require('../distPlus/ProgramEvaluator.gen.js');
exports.runMePlease = ProgramEvaluator.runAll

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@ -1,8 +0,0 @@
//postcss.config.js
const tailwindcss = require('tailwindcss');
module.exports = {
plugins: [
tailwindcss('./tailwind.js'),
require('autoprefixer'),
],
};

View File

@ -1 +0,0 @@
let entries = [];

View File

@ -1,30 +0,0 @@
type compEntry = {
mutable id: string,
title: string,
render: unit => React.element,
container: containerType,
}
and folderEntry = {
mutable id: string,
title: string,
children: list(navEntry),
}
and navEntry =
| CompEntry(compEntry)
| FolderEntry(folderEntry)
and containerType =
| FullWidth
| Sidebar;
let entry = (~title, ~render): navEntry => {
CompEntry({id: "", title, render, container: FullWidth});
};
// Maybe different api, this avoids breaking changes
let sidebar = (~title, ~render): navEntry => {
CompEntry({id: "", title, render, container: Sidebar});
};
let folder = (~title, ~children): navEntry => {
FolderEntry({id: "", title, children});
};

View File

@ -1,198 +0,0 @@
open EntryTypes;
module HS = Belt.HashMap.String;
let entriesByPath: HS.t(navEntry) = HS.make(~hintSize=100);
/* Creates unique id's per scope based on title */
let buildIds = entries => {
let genId = (title, path) => {
let noSpaces = Js.String.replaceByRe([%bs.re "/\\s+/g"], "-", title);
if (!HS.has(entriesByPath, path ++ "/" ++ noSpaces)) {
noSpaces;
} else {
let rec loop = num => {
let testId = noSpaces ++ "-" ++ string_of_int(num);
if (!HS.has(entriesByPath, path ++ "/" ++ testId)) {
testId;
} else {
loop(num + 1);
};
};
loop(2);
};
};
let rec processFolder = (f: folderEntry, curPath) => {
f.id = curPath ++ "/" ++ genId(f.title, curPath);
HS.set(entriesByPath, f.id, FolderEntry(f));
f.children
|> E.L.iter(
fun
| CompEntry(c) => processEntry(c, f.id)
| FolderEntry(f) => processFolder(f, f.id),
);
}
and processEntry = (c: compEntry, curPath) => {
c.id = curPath ++ "/" ++ genId(c.title, curPath);
HS.set(entriesByPath, c.id, CompEntry(c));
};
entries
|> E.L.iter(
fun
| CompEntry(c) => processEntry(c, "")
| FolderEntry(f) => processFolder(f, ""),
);
};
let entries = Entries.entries;
buildIds(entries);
module Styles = {
open Css;
let pageContainer = style([display(`flex), height(`vh(100.))]);
let leftNav =
style([
padding(`em(2.)),
flexBasis(`px(200)),
flexShrink(0.),
backgroundColor(`hex("eaeff3")),
boxShadows([
Shadow.box(
~x=px(-1),
~blur=px(1),
~inset=true,
rgba(0, 0, 0, 0.1),
),
]),
]);
let folderNav =
style([
selector(
">h4",
[
cursor(`pointer),
margin2(~v=`em(0.3), ~h=`zero),
hover([color(`hex("7089ad"))]),
],
),
]);
let folderChildren = style([paddingLeft(`px(7))]);
let compNav =
style([
cursor(`pointer),
paddingBottom(`px(3)),
hover([color(`hex("7089ad"))]),
]);
let compContainer = style([padding(`em(2.)), flexGrow(1.)]);
// Approximate sidebar container for entry
let sidebarContainer = style([maxWidth(`px(430))]);
let folderChildContainer = style([marginBottom(`em(2.))]);
};
let baseUrl = "/showcase/index.html";
module Index = {
type state = {route: ReasonReactRouter.url};
type action =
| ItemClick(string)
| ChangeRoute(ReasonReactRouter.url);
let changeId = (id: string) => {
ReasonReactRouter.push(baseUrl ++ "#" ++ id);
();
};
let buildNav = setRoute => {
let rec buildFolder = (f: folderEntry) => {
<div key={f.id} className=Styles.folderNav>
<h4 onClick={_e => changeId(f.id)}> f.title->React.string </h4>
<div className=Styles.folderChildren>
{(
f.children
|> E.L.fmap(e =>
switch (e) {
| FolderEntry(folder) => buildFolder(folder)
| CompEntry(entry) => buildEntry(entry)
}
)
|> E.L.toArray
)
->React.array}
</div>
</div>;
}
and buildEntry = (e: compEntry) => {
<div key={e.id} className=Styles.compNav onClick={_e => changeId(e.id)}>
e.title->React.string
</div>;
};
(
entries
|> E.L.fmap(e =>
switch (e) {
| FolderEntry(folder) => buildFolder(folder)
| CompEntry(entry) => buildEntry(entry)
}
)
|> E.L.toArray
)
->React.array;
};
let renderEntry = e => {
switch (e.container) {
| FullWidth => e.render()
| Sidebar => <div className=Styles.sidebarContainer> {e.render()} </div>
};
};
[@react.component]
let make = () => {
let (route, setRoute) =
React.useState(() => {
let url: ReasonReactRouter.url = {path: [], hash: "", search: ""};
url;
});
React.useState(() => {
ReasonReactRouter.watchUrl(url => setRoute(_ => url));
();
})
|> ignore;
<div className=Styles.pageContainer>
<div className=Styles.leftNav> {buildNav(setRoute)} </div>
<div className=Styles.compContainer>
{if (route.hash == "") {
React.null;
} else {
switch (HS.get(entriesByPath, route.hash)) {
| Some(navEntry) =>
switch (navEntry) {
| CompEntry(c) => renderEntry(c)
| FolderEntry(f) =>
/* Rendering immediate children */
(
f.children
|> E.L.fmap(child =>
switch (child) {
| CompEntry(c) =>
<div className=Styles.folderChildContainer key={c.id}>
{renderEntry(c)}
</div>
| _ => React.null
}
)
|> E.L.toArray
)
->React.array
}
| None => <div> "Component not found"->React.string </div>
};
}}
</div>
</div>;
};
};

View File

@ -1,2 +0,0 @@
ReactDOMRe.renderToElementWithId(<div> <Lib.Index /> </div>, "main");
ReasonReactRouter.push("");

View File

@ -1,24 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link href="https://fonts.googleapis.com/css?family=Lato:300,400,700,900" rel="stylesheet">
<link href="https://fonts.googleapis.com/css?family=Lato:300,400,700,900" rel="stylesheet">
<link href="../src/styles/index.css" rel="stylesheet">
<style>
body {
margin: 0;
}
</style>
<title>Showcase</title>
</head>
<body>
<div id="main"></div>
<script src=" ./ShowcaseIndex.bs.js "></script>
</body>
</html>

View File

@ -1,87 +0,0 @@
type route =
| DistBuilder
| Home
| NotFound;
let routeToPath = route =>
switch (route) {
| DistBuilder => "/dist-builder"
| Home => "/"
| _ => "/"
};
module Menu = {
module Styles = {
open Css;
let menu =
style([
position(`relative),
marginTop(em(0.25)),
marginBottom(em(0.25)),
selector(
"a",
[
borderRadius(em(0.25)),
display(`inlineBlock),
backgroundColor(`hex("eee")),
padding(em(1.)),
cursor(`pointer),
],
),
selector("a:hover", [backgroundColor(`hex("bfcad4"))]),
selector("a:hover", [backgroundColor(`hex("bfcad4"))]),
selector(
"a:not(:first-child):not(:last-child)",
[marginRight(em(0.25)), marginLeft(em(0.25))],
),
]);
};
module Item = {
[@react.component]
let make = (~href, ~children) => {
<a
href
onClick={e => {
e->ReactEvent.Synthetic.preventDefault;
ReasonReactRouter.push(href);
}}>
children
</a>;
};
};
[@react.component]
let make = () => {
<div className=Styles.menu>
<Item href={routeToPath(Home)} key="home"> {"Home" |> R.ste} </Item>
<Item href={routeToPath(DistBuilder)} key="dist-builder">
{"Dist Builder" |> R.ste}
</Item>
</div>;
};
};
let fixedLength = r =>
<div className="w-full max-w-screen-xl mx-auto px-6"> r </div>;
[@react.component]
let make = () => {
let url = ReasonReactRouter.useUrl();
let routing =
switch (url.path) {
| ["dist-builder"] => DistBuilder
| [] => Home
| _ => NotFound
};
<>
<Menu />
{switch (routing) {
| DistBuilder => <DistBuilder />
| Home => <Home />
| _ => fixedLength({"Page is not found" |> R.ste})
}}
</>;
};

View File

@ -1,10 +0,0 @@
'use strict';
var reasonReactBlue = "#48a9dc";
var style = "\n body {\n background-color: rgb(224, 226, 229);\n display: flex;\n flex-direction: column;\n align-items: center;\n }\n button {\n background-color: white;\n color: " + (String(reasonReactBlue) + (";\n box-shadow: 0 0 0 1px " + (String(reasonReactBlue) + (";\n border: none;\n padding: 8px;\n font-size: 16px;\n }\n button:active {\n background-color: " + (String(reasonReactBlue) + ";\n color: white;\n }\n .container {\n margin: 12px 0px;\n box-shadow: 0px 4px 16px rgb(200, 200, 200);\n width: 720px;\n border-radius: 12px;\n font-family: sans-serif;\n }\n .containerTitle {\n background-color: rgb(242, 243, 245);\n border-radius: 12px 12px 0px 0px;\n padding: 12px;\n font-weight: bold;\n }\n .containerContent {\n background-color: white;\n padding: 16px;\n border-radius: 0px 0px 12px 12px;\n }\n")))));
exports.reasonReactBlue = reasonReactBlue;
exports.style = style;
/* style Not a pure module */

View File

@ -1,44 +0,0 @@
let reasonReactBlue = "#48a9dc";
// The {j|...|j} feature is just string interpolation, from
// bucklescript.github.io/docs/en/interop-cheatsheet#string-unicode-interpolation
// This allows us to conveniently write CSS, together with variables, by
// constructing a string
let style = {j|
body {
background-color: rgb(224, 226, 229);
display: flex;
flex-direction: column;
align-items: center;
}
button {
background-color: white;
color: $reasonReactBlue;
box-shadow: 0 0 0 1px $reasonReactBlue;
border: none;
padding: 8px;
font-size: 16px;
}
button:active {
background-color: $reasonReactBlue;
color: white;
}
.container {
margin: 12px 0px;
box-shadow: 0px 4px 16px rgb(200, 200, 200);
width: 720px;
border-radius: 12px;
font-family: sans-serif;
}
.containerTitle {
background-color: rgb(242, 243, 245);
border-radius: 12px 12px 0px 0px;
padding: 12px;
font-weight: bold;
}
.containerContent {
background-color: white;
padding: 16px;
border-radius: 0px 0px 12px 12px;
}
|j};

View File

@ -1,21 +0,0 @@
let normal = (mean: float, std: float) =>
Js.Float.(
{
let nMean = toPrecisionWithPrecision(mean, ~digits=4);
let nStd = toPrecisionWithPrecision(std, ~digits=2);
{j|normal($(nMean), $(nStd))|j};
}
);
let logNormal = (mean: float, std: float) => {
Js.Float.(
{
let nMean = toPrecisionWithPrecision(mean, ~digits=4);
let nStd = toPrecisionWithPrecision(std, ~digits=2);
{j|lognormal({mean: $(nMean), stdev: $(nStd)})|j};
}
);
};
let divide = (str1: string, str2: string) => {j|$(str1)/$(str2)|j};
let min = (str1: string, str2: string) => {j|min($(str1),$(str2))|j};


@ -1,2 +0,0 @@
[%bs.raw {|import('./styles/index.css')|}];
ReactDOMRe.renderToElementWithId(<App />, "app");


@ -1,9 +0,0 @@
let ste = React.string;
let showIf = (cond, comp) => cond ? comp : ReasonReact.null;
module O = {
let defaultNull = E.O.default(ReasonReact.null);
let fmapOrNull = (fn, el) => el |> E.O.fmap(fn) |> E.O.default(ReasonReact.null);
let flatten = E.O.default(ReasonReact.null);
};


@ -1,41 +0,0 @@
import React from "react";
import AceEditor from "react-ace";
import "ace-builds/src-noconflict/mode-golang";
import "ace-builds/src-noconflict/theme-github";
import "ace-builds/src-noconflict/ext-language_tools";
import "ace-builds/src-noconflict/keybinding-vim";
function onChange(newValue) {
console.log("change", newValue);
}
export class CodeEditor extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<AceEditor
value={this.props.value}
mode="golang"
height="400px"
width="100%"
theme="github"
showGutter={false}
highlightActiveLine={false}
showPrintMargin={false}
onChange={this.props.onChange}
name="UNIQUE_ID_OF_DIV"
editorProps={{
$blockScrolling: true,
}}
setOptions={{
enableBasicAutocompletion: false,
enableLiveAutocompletion: true,
enableSnippets: true,
}}
/>
);
}
}


@ -1,10 +0,0 @@
[@bs.module "./CodeEditor.js"]
external codeEditor: ReasonReact.reactClass = "CodeEditor";
[@react.component]
let make = (~value="", ~onChange=(_:string) => (), ~children=ReasonReact.null) =>
ReasonReact.wrapJsForReason(~reactClass=codeEditor, ~props={
"value": value,
"onChange": onChange
}, children)
|> ReasonReact.element;


@ -1,347 +0,0 @@
open BsReform;
open Antd.Grid;
module FormConfig = [%lenses
type state = {
squiggleString: string,
sampleCount: string,
outputXYPoints: string,
downsampleTo: string,
kernelWidth: string,
diagramStart: string,
diagramStop: string,
diagramCount: string,
}
];
type options = {
sampleCount: int,
outputXYPoints: int,
downsampleTo: option(int),
kernelWidth: option(float),
diagramStart: float,
diagramStop: float,
diagramCount: int,
};
module Form = ReForm.Make(FormConfig);
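// The validation schema is empty, so no per-field validation is applied.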
let schema = Form.Validation.Schema([||]);
module FieldText = {
[@react.component]
let make = (~field, ~label) => {
<>
<Form.Field
field
render={({handleChange, error, value, validate}) =>
<CodeEditor value onChange={r => handleChange(r)} />
}
/>
</>;
};
};
module FieldString = {
[@react.component]
let make = (~field, ~label) => {
<Form.Field
field
render={({handleChange, error, value, validate}) =>
<Antd.Form.Item label={label |> R.ste}>
<Antd.Input
value
onChange={BsReform.Helpers.handleChange(handleChange)}
onBlur={_ => validate()}
/>
</Antd.Form.Item>
}
/>;
};
};
module FieldFloat = {
[@react.component]
let make = (~field, ~label, ~className=Css.style([])) => {
<Form.Field
field
render={({handleChange, error, value, validate}) =>
<Antd.Form.Item label={label |> R.ste}>
<Antd.Input
value
onChange={BsReform.Helpers.handleChange(handleChange)}
onBlur={_ => validate()}
className
/>
</Antd.Form.Item>
}
/>;
};
};
module Styles = {
open Css;
let rows =
style([
selector(
">.ant-col:first-child",
[paddingLeft(em(0.25)), paddingRight(em(0.125))],
),
selector(
">.ant-col:last-child",
[paddingLeft(em(0.125)), paddingRight(em(0.25))],
),
selector(
">.ant-col:not(:first-child):not(:last-child)",
[paddingLeft(em(0.125)), paddingRight(em(0.125))],
),
]);
let parent =
style([
selector(".ant-input-number", [width(`percent(100.))]),
selector(".anticon", [verticalAlign(`zero)]),
]);
let form = style([backgroundColor(hex("eee")), padding(em(1.))]);
let dist = style([padding(em(1.))]);
let spacer = style([marginTop(em(1.))]);
let groupA =
style([
selector(
".ant-input-number-input",
[backgroundColor(hex("fff7db"))],
),
]);
let groupB =
style([
selector(
".ant-input-number-input",
[backgroundColor(hex("eaf4ff"))],
),
]);
};
module DemoDist = {
[@react.component]
let make = (~squiggleString:string, ~options) => {
<Antd.Card title={"Distribution" |> R.ste}>
<div>
{switch (options) {
| Some(options) =>
let inputs1 =
ProgramEvaluator.Inputs.make(
~samplingInputs={
sampleCount: Some(options.sampleCount),
outputXYPoints: Some(options.outputXYPoints),
kernelWidth: options.kernelWidth,
shapeLength:
Some(options.downsampleTo |> E.O.default(1000)),
},
~squiggleString,
~environment=
[|
("K", `SymbolicDist(`Float(1000.0))),
("M", `SymbolicDist(`Float(1000000.0))),
("B", `SymbolicDist(`Float(1000000000.0))),
("T", `SymbolicDist(`Float(1000000000000.0))),
|]
->Belt.Map.String.fromArray,
(),
);
let response1 = ProgramEvaluator.evaluateProgram(inputs1);
switch (response1) {
| Ok(`DistPlus(distPlus1)) =>
<DistPlusPlot distPlus={DistPlus.T.normalize(distPlus1)} />
| Ok(`Float(f)) =>
<ForetoldComponents.NumberShower number=f precision=3 />
| Ok(`Function((f, a), env)) =>
            // Problem: when the evaluator returns a function, it does not retain state from previous commands
let foo: ProgramEvaluator.Inputs.inputs = {
squiggleString,
samplingInputs: inputs1.samplingInputs,
environment: env,
};
let results =
E.A.Floats.range(options.diagramStart, options.diagramStop, options.diagramCount)
|> E.A.fmap(r =>
ProgramEvaluator.evaluateFunction(
foo,
(f, a),
[|`SymbolicDist(`Float(r))|],
)
|> E.R.bind(_, a =>
switch (a) {
| `DistPlus(d) => Ok((r, DistPlus.T.normalize(d)))
| n =>
Js.log2("Error here", n);
Error("wrong type");
}
)
)
|> E.A.R.firstErrorOrOpen;
switch (results) {
| Ok(dists) => <PercentilesChart dists />
| Error(r) => r |> R.ste
};
| Error(r) => r |> R.ste
};
| _ =>
"Nothing to show. Try to change the distribution description."
|> R.ste
}}
</div>
</Antd.Card>;
};
};
[@react.component]
let make = () => {
let (reloader, setReloader) = React.useState(() => 1);
let reform =
Form.use(
~validationStrategy=OnDemand,
~schema,
~onSubmit=({state}) => {None},
~initialState={
//squiggleString: "mm(normal(-10, 2), uniform(18, 25), lognormal({mean: 10, stdev: 8}), triangular(31,40,50))",
squiggleString: "mm(normal(5,2), normal(10,2))",
sampleCount: "1000",
outputXYPoints: "1000",
downsampleTo: "",
kernelWidth: "",
diagramStart: "0",
diagramStop: "10",
diagramCount: "20",
},
(),
);
let onSubmit = e => {
e->ReactEvent.Synthetic.preventDefault;
reform.submit();
};
let squiggleString = reform.state.values.squiggleString;
let sampleCount = reform.state.values.sampleCount |> Js.Float.fromString;
let outputXYPoints =
reform.state.values.outputXYPoints |> Js.Float.fromString;
let downsampleTo = reform.state.values.downsampleTo |> Js.Float.fromString;
let kernelWidth = reform.state.values.kernelWidth |> Js.Float.fromString;
let diagramStart = reform.state.values.diagramStart |> Js.Float.fromString;
let diagramStop = reform.state.values.diagramStop |> Js.Float.fromString;
let diagramCount = reform.state.values.diagramCount |> Js.Float.fromString;
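  // Build options only when the sample, output, and downsample fields are numeric
  // and both counts exceed 10; otherwise pass None so DemoDist shows its fallback message.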
let options =
switch (sampleCount, outputXYPoints, downsampleTo) {
| (_, _, _)
when
!Js.Float.isNaN(sampleCount)
&& !Js.Float.isNaN(outputXYPoints)
&& !Js.Float.isNaN(downsampleTo)
&& sampleCount > 10.
&& outputXYPoints > 10. =>
Some({
sampleCount: sampleCount |> int_of_float,
outputXYPoints: outputXYPoints |> int_of_float,
downsampleTo:
int_of_float(downsampleTo) > 0
? Some(int_of_float(downsampleTo)) : None,
kernelWidth: kernelWidth == 0.0 ? None : Some(kernelWidth),
diagramStart: diagramStart,
diagramStop: diagramStop,
diagramCount: diagramCount |> int_of_float,
})
| _ => None
};
let demoDist =
React.useMemo1(
() => <DemoDist squiggleString options />,
[|
reform.state.values.squiggleString,
reform.state.values.sampleCount,
reform.state.values.outputXYPoints,
reform.state.values.downsampleTo,
reform.state.values.kernelWidth,
reform.state.values.diagramStart,
reform.state.values.diagramStop,
reform.state.values.diagramCount,
reloader |> string_of_int,
|],
);
let onReload = _ => {
setReloader(_ => reloader + 1);
};
<div className="grid grid-cols-2 gap-4">
<div>
<Antd.Card
title={"Distribution Form" |> R.ste}
extra={
<Antd.Button
icon=Antd.IconName.reload
shape=`circle
onClick=onReload
/>
}>
<Form.Provider value=reform>
<Antd.Form onSubmit>
<Row _type=`flex className=Styles.rows>
<Col span=24>
<FieldText
field=FormConfig.SquiggleString
label="Program"
/>
</Col>
</Row>
<Row _type=`flex className=Styles.rows>
<Col span=12>
<FieldFloat
field=FormConfig.SampleCount
label="Sample Count"
/>
</Col>
<Col span=12>
<FieldFloat
field=FormConfig.OutputXYPoints
label="Output XY-points"
/>
</Col>
<Col span=12>
<FieldFloat
field=FormConfig.DownsampleTo
label="Downsample To"
/>
</Col>
<Col span=12>
<FieldFloat
field=FormConfig.KernelWidth
label="Kernel Width"
/>
</Col>
<Col span=12>
<FieldFloat
field=FormConfig.DiagramStart
label="Diagram Start"
/>
</Col>
<Col span=12>
<FieldFloat
field=FormConfig.DiagramStop
label="Diagram Stop"
/>
</Col>
<Col span=12>
<FieldFloat
field=FormConfig.DiagramCount
label="Diagram Count"
/>
</Col>
</Row>
</Antd.Form>
</Form.Provider>
</Antd.Card>
</div>
<div> demoDist </div>
</div>;
};


@ -1,390 +0,0 @@
open DistPlusPlotReducer;
let plotBlue = `hex("1860ad");
let showAsForm = (distPlus: DistTypes.distPlus) => {
<div>
<Antd.Input value={distPlus.squiggleString |> E.O.default("")} />
</div>;
};
let showFloat = (~precision=3, number) =>
<ForetoldComponents.NumberShower number precision />;
let table = (distPlus, x) => {
<div>
<table className="table-auto text-sm">
<thead>
<tr>
<td className="px-4 py-2 "> {"X Point" |> ReasonReact.string} </td>
<td className="px-4 py-2">
{"Discrete Value" |> ReasonReact.string}
</td>
<td className="px-4 py-2">
{"Continuous Value" |> ReasonReact.string}
</td>
<td className="px-4 py-2">
{"Y Integral to Point" |> ReasonReact.string}
</td>
<td className="px-4 py-2">
{"Y Integral Total" |> ReasonReact.string}
</td>
</tr>
</thead>
<tbody>
<tr>
<td className="px-4 py-2 border">
{x |> E.Float.toString |> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> DistPlus.T.xToY(x)
|> DistTypes.MixedPoint.toDiscreteValue
|> Js.Float.toPrecisionWithPrecision(_, ~digits=7)
|> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> DistPlus.T.xToY(x)
|> DistTypes.MixedPoint.toContinuousValue
|> Js.Float.toPrecisionWithPrecision(_, ~digits=7)
|> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> DistPlus.T.Integral.xToY(x)
|> E.Float.with2DigitsPrecision
|> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> DistPlus.T.Integral.sum
|> E.Float.with2DigitsPrecision
|> ReasonReact.string}
</td>
</tr>
</tbody>
</table>
<table className="table-auto text-sm">
<thead>
<tr>
<td className="px-4 py-2">
{"Continuous Total" |> ReasonReact.string}
</td>
<td className="px-4 py-2">
{"Discrete Total" |> ReasonReact.string}
</td>
</tr>
</thead>
<tbody>
<tr>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.toContinuous
|> E.O.fmap(
Continuous.T.Integral.sum
)
|> E.O.fmap(E.Float.with2DigitsPrecision)
|> E.O.default("")
|> ReasonReact.string}
</td>
<td className="px-4 py-2 border ">
{distPlus
|> DistPlus.T.toDiscrete
|> E.O.fmap(Discrete.T.Integral.sum)
|> E.O.fmap(E.Float.with2DigitsPrecision)
|> E.O.default("")
|> ReasonReact.string}
</td>
</tr>
</tbody>
</table>
</div>;
};
let percentiles = distPlus => {
<div>
<table className="table-auto text-sm">
<thead>
<tr>
<td className="px-4 py-2"> {"1" |> ReasonReact.string} </td>
<td className="px-4 py-2"> {"5" |> ReasonReact.string} </td>
<td className="px-4 py-2"> {"25" |> ReasonReact.string} </td>
<td className="px-4 py-2"> {"50" |> ReasonReact.string} </td>
<td className="px-4 py-2"> {"75" |> ReasonReact.string} </td>
<td className="px-4 py-2"> {"95" |> ReasonReact.string} </td>
<td className="px-4 py-2"> {"99" |> ReasonReact.string} </td>
<td className="px-4 py-2"> {"99.999" |> ReasonReact.string} </td>
</tr>
</thead>
<tbody>
<tr>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.Integral.yToX(0.01)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.Integral.yToX(0.05)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.Integral.yToX(0.25)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.Integral.yToX(0.5)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.Integral.yToX(0.75)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.Integral.yToX(0.95)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.Integral.yToX(0.99)
|> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus
|> DistPlus.T.Integral.yToX(0.99999)
|> showFloat}
</td>
</tr>
</tbody>
</table>
<table className="table-auto text-sm">
<thead>
<tr>
<td className="px-4 py-2"> {"mean" |> ReasonReact.string} </td>
<td className="px-4 py-2">
{"standard deviation" |> ReasonReact.string}
</td>
<td className="px-4 py-2"> {"variance" |> ReasonReact.string} </td>
</tr>
</thead>
<tbody>
<tr>
<td className="px-4 py-2 border">
{distPlus |> DistPlus.T.mean |> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus |> DistPlus.T.variance |> (r => r ** 0.5) |> showFloat}
</td>
<td className="px-4 py-2 border">
{distPlus |> DistPlus.T.variance |> showFloat}
</td>
</tr>
</tbody>
</table>
</div>;
};
let adjustBoth = discreteProbabilityMassFraction => {
let yMaxDiscreteDomainFactor = discreteProbabilityMassFraction;
let yMaxContinuousDomainFactor = 1.0 -. discreteProbabilityMassFraction;
  // Use the larger proportion so that whichever domain has the bigger share of probability mass reaches a yMax of 1.
let yMax = (yMaxDiscreteDomainFactor > 0.5 ? yMaxDiscreteDomainFactor : yMaxContinuousDomainFactor);
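  // Example: a discrete fraction of 0.25 gives yMax = 0.75 and factors (3.0, 1.0),
  // so the continuous domain keeps its scale while the discrete domain is stretched.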
(
yMax /. yMaxDiscreteDomainFactor,
yMax /. yMaxContinuousDomainFactor,
);
};
module DistPlusChart = {
[@react.component]
let make = (~distPlus: DistTypes.distPlus, ~config: chartConfig, ~onHover) => {
open DistPlus;
let discrete = distPlus |> T.toDiscrete |> E.O.fmap(Discrete.getShape);
let continuous =
distPlus
|> T.toContinuous
|> E.O.fmap(Continuous.getShape);
let range = T.xTotalRange(distPlus);
// // We subtract a bit from the range to make sure that it fits. Maybe this should be done in d3 instead.
// let minX =
// switch (
// distPlus
// |> DistPlus.T.Integral.yToX(0.0001),
// range,
// ) {
// | (min, Some(range)) => Some(min -. range *. 0.001)
// | _ => None
// };
let minX = {
distPlus |> DistPlus.T.Integral.yToX(0.00001);
};
let maxX = {
distPlus |> DistPlus.T.Integral.yToX(0.99999);
};
let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
let discreteProbabilityMassFraction =
distPlus |> DistPlus.T.toDiscreteProbabilityMassFraction;
let (yMaxDiscreteDomainFactor, yMaxContinuousDomainFactor) =
adjustBoth(discreteProbabilityMassFraction);
<DistributionPlot
xScale={config.xLog ? "log" : "linear"}
yScale={config.yLog ? "log" : "linear"}
height={DistPlusPlotReducer.heightToPix(config.height)}
minX
maxX
yMaxDiscreteDomainFactor
yMaxContinuousDomainFactor
?discrete
?continuous
color=plotBlue
onHover
timeScale
/>;
};
};
module IntegralChart = {
[@react.component]
let make = (~distPlus: DistTypes.distPlus, ~config: chartConfig, ~onHover) => {
let integral = distPlus.integralCache;
let continuous =
integral
|> Continuous.toLinear
|> E.O.fmap(Continuous.getShape);
let minX = {
distPlus |> DistPlus.T.Integral.yToX(0.00001);
};
let maxX = {
distPlus |> DistPlus.T.Integral.yToX(0.99999);
};
let timeScale = distPlus.unit |> DistTypes.DistributionUnit.toJson;
<DistributionPlot
xScale={config.xLog ? "log" : "linear"}
yScale={config.yLog ? "log" : "linear"}
height={DistPlusPlotReducer.heightToPix(config.height)}
minX
maxX
?continuous
color=plotBlue
timeScale
onHover
/>;
};
};
module Chart = {
[@react.component]
let make = (~distPlus: DistTypes.distPlus, ~config: chartConfig, ~onHover) => {
let chart =
React.useMemo2(
() => {
config.isCumulative
? <IntegralChart distPlus config onHover />
: <DistPlusChart distPlus config onHover />
},
(distPlus, config),
);
<div
className=Css.(
style([
minHeight(`px(DistPlusPlotReducer.heightToPix(config.height))),
])
)>
chart
</div>;
};
};
let button = "bg-gray-300 hover:bg-gray-500 text-grey-darkest text-xs px-4 py-1";
[@react.component]
let make = (~distPlus: DistTypes.distPlus) => {
let (x, setX) = React.useState(() => 0.);
let (state, dispatch) =
React.useReducer(DistPlusPlotReducer.reducer, DistPlusPlotReducer.init);
<div>
{state.distributions
|> E.L.fmapi((index, config) =>
<div className="flex" key={string_of_int(index)}>
<div className="w-4/5">
<Chart distPlus config onHover={r => {setX(_ => r)}} />
</div>
<div className="w-1/5">
<div className="opacity-50 hover:opacity-100">
<button
className=button
onClick={_ => dispatch(CHANGE_X_LOG(index))}>
{(config.xLog ? "x-log" : "x-linear") |> ReasonReact.string}
</button>
<button
className=button
onClick={_ => dispatch(CHANGE_Y_LOG(index))}>
{(config.yLog ? "y-log" : "y-linear") |> ReasonReact.string}
</button>
<button
className=button
onClick={_ =>
dispatch(
CHANGE_IS_CUMULATIVE(index, !config.isCumulative),
)
}>
{(config.isCumulative ? "cdf" : "pdf") |> ReasonReact.string}
</button>
<button
className=button
onClick={_ => dispatch(HEIGHT_INCREMENT(index))}>
{"expand" |> ReasonReact.string}
</button>
<button
className=button
onClick={_ => dispatch(HEIGHT_DECREMENT(index))}>
{"shrink" |> ReasonReact.string}
</button>
{index != 0
? <button
className=button
onClick={_ => dispatch(REMOVE_DIST(index))}>
{"remove" |> ReasonReact.string}
</button>
: ReasonReact.null}
</div>
</div>
</div>
)
|> E.L.toArray
|> ReasonReact.array}
<div className="inline-flex opacity-50 hover:opacity-100">
<button
className=button onClick={_ => dispatch(CHANGE_SHOW_PERCENTILES)}>
{"Percentiles" |> ReasonReact.string}
</button>
<button className=button onClick={_ => dispatch(CHANGE_SHOW_STATS)}>
{"Debug Stats" |> ReasonReact.string}
</button>
<button className=button onClick={_ => dispatch(CHANGE_SHOW_PARAMS)}>
{"Params" |> ReasonReact.string}
</button>
<button className=button onClick={_ => dispatch(ADD_DIST)}>
{"Add" |> ReasonReact.string}
</button>
</div>
{state.showParams ? showAsForm(distPlus) : ReasonReact.null}
{state.showStats ? table(distPlus, x) : ReasonReact.null}
{state.showPercentiles ? percentiles(distPlus) : ReasonReact.null}
</div>;
};

Some files were not shown because too many files have changed in this diff.