refactor: monorepo

Vyacheslav Matyukhin, 2022-03-17 00:02:34 +03:00
parent 2df73b4933
commit 244f02b3af
GPG Key ID: 3D2A774C5489F96C (no known key found for this signature in database)
174 changed files with 22767 additions and 56647 deletions

.gitignore (vendored, 33 lines changed)

@@ -1,14 +1,39 @@
-## Node modules
+# Node modules
 node_modules/
 node_modules/*
-## Security
+# Security
 **/betfaircertificates/
 **/secrets.json
-## Personal notes
+# Personal notes
 notes/*
-## Build artifacts
+# Build artifacts
 done.txt
 *.swp
+# next.js
+/.next/
+/out/
+# misc
+.DS_Store
+*.pem
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+# local env files
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+# vercel
+.vercel
+# yarn vs npm conflict
+package-lock.json ## use yarn.lock instead

.nvmrc (new file, 1 line)

@@ -0,0 +1 @@
16.4.2

(unnamed file)

@@ -1 +1 @@
-// worker: node src/utils/doEverythingForScheduler.js
+// worker: node src/backend/utils/doEverythingForScheduler.js

File diff suppressed because one or more lines are too long

netlify.toml (new file, 5 lines)

@@ -0,0 +1,5 @@
[[plugins]]
package = "netlify-plugin-cache-nextjs"
[build]
publish = ".next"

next-env.d.ts (vendored, new file, 5 lines)

@@ -0,0 +1,5 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
// NOTE: This file should not be edited
// see https://nextjs.org/docs/basic-features/typescript for more information.

package-lock.json (generated, 16796 lines changed)

File diff suppressed because it is too large

package.json

@@ -2,13 +2,6 @@
"name": "metaforecasts", "name": "metaforecasts",
"version": "1.0.0", "version": "1.0.0",
"description": "Get forecasts from various platforms", "description": "Get forecasts from various platforms",
"main": "src/index.js",
"scripts": {
"start": "node src/index.js",
"test": "echo \"Error: no test specified\" && exit 1",
"reload": "heroku run:detached node src/utils/doEverythingForScheduler.js",
"setCookies": "./src/utils/setCookies.sh"
},
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://github.com/QURIresearch/metaforecasts.git" "url": "git+https://github.com/QURIresearch/metaforecasts.git"
@ -17,27 +10,75 @@
"forecasts", "forecasts",
"predictions" "predictions"
], ],
"type": "module",
"author": "Nuño Sempere", "author": "Nuño Sempere",
"license": "MIT", "license": "MIT",
"bugs": { "bugs": {
"url": "https://github.com/QURIresearch/metaforecasts/" "url": "https://github.com/QURIresearch/metaforecasts/"
}, },
"homepage": "https://github.com/QURIresearch/metaforecasts#readme", "homepage": "https://github.com/QURIresearch/metaforecasts#readme",
"scripts": {
"cli": "ts-node src/backend/index.js",
"reload": "heroku run:detached node src/backend/utils/doEverythingForScheduler.js",
"setCookies": "./src/backend/utils/setCookies.sh",
"next-dev": "next dev",
"next-build": "next build",
"next-start": "next start",
"next-export": "next export"
},
"dependencies": { "dependencies": {
"@tailwindcss/forms": "^0.4.0",
"@tailwindcss/typography": "^0.5.1",
"@types/react": "^17.0.39",
"airtable": "^0.11.1",
"algoliasearch": "^4.10.3", "algoliasearch": "^4.10.3",
"axios": "^0.21.1", "autoprefixer": "^10.1.0",
"axios": "^0.25.0",
"chroma-js": "^2.4.2",
"critters": "^0.0.16",
"dom-to-image": "^2.6.0",
"dotenv": "^16.0.0",
"fetch": "^1.1.0",
"fs": "^0.0.1-security", "fs": "^0.0.1-security",
"fuse.js": "^6.4.6",
"google-spreadsheet": "^3.1.15", "google-spreadsheet": "^3.1.15",
"graphql": "^16.3.0",
"graphql-request": "^4.0.0",
"html-to-image": "^1.7.0",
"https": "^1.0.0", "https": "^1.0.0",
"isomorphic-fetch": "^3.0.0", "isomorphic-fetch": "^3.0.0",
"json2csv": "^5.0.5", "json2csv": "^5.0.5",
"mongodb": "^3.6.6", "mongodb": "^3.6.6",
"multiselect-react-dropdown": "^2.0.17",
"next": "12",
"open": "^7.3.1", "open": "^7.3.1",
"papaparse": "^5.3.0", "papaparse": "^5.3.0",
"pg": "^8.7.3", "pg": "^8.7.3",
"postcss": "^8.2.1",
"postcss-flexbugs-fixes": "^5.0.2",
"postcss-preset-env": "^7.3.2",
"query-string": "^7.1.1",
"react": "^17.0.2",
"react-component-export-image": "^1.0.6",
"react-compound-slider": "^3.3.1",
"react-copy-to-clipboard": "^5.0.3",
"react-dom": "^17.0.2",
"react-dropdown": "^1.9.2",
"react-hook-form": "^7.27.0",
"react-icons": "^4.2.0",
"react-markdown": "^8.0.0",
"react-safe": "^1.3.0",
"react-select": "^5.2.2",
"remark-gfm": "^3.0.1",
"squiggle-experimental": "^0.1.9",
"tabletojson": "^2.0.4", "tabletojson": "^2.0.4",
"tailwindcss": "^3.0.22",
"textversionjs": "^1.1.3", "textversionjs": "^1.1.3",
"tunnel": "^0.0.6" "tunnel": "^0.0.6"
},
"devDependencies": {
"@netlify/plugin-nextjs": "^4.2.4",
"@svgr/cli": "^6.2.1",
"netlify-plugin-cache-nextjs": "^1.6.1",
"ts-node": "^10.7.0"
} }
} }

postcss.config.js (new file, 7 lines)

@@ -0,0 +1,7 @@
// postcss.config.js
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
};

public/favicon0.ico (binary, new file, 15 KiB)

public/icons/favicon.svg (new file, 4 lines, 720 B)

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg xmlns="http://www.w3.org/2000/svg" class="icon" viewBox="0 0 1024 1024">
<path d="M512 64C264.6 64 64 264.6 64 512s200.6 448 448 448 448-200.6 448-448S759.4 64 512 64zm0 820c-205.4 0-372-166.6-372-372s166.6-372 372-372 372 166.6 372 372-166.6 372-372 372zm198.4-588.1a32 32 0 0 0-24.5.5L414.9 415 296.4 686c-3.6 8.2-3.6 17.5 0 25.7 3.4 7.8 9.7 13.9 17.7 17 3.8 1.5 7.7 2.2 11.7 2.2 4.4 0 8.7-.9 12.8-2.7l271-118.6 118.5-271a32.06 32.06 0 0 0-17.7-42.7zM576.8 534.4l26.2 26.2-42.4 42.4-26.2-26.2L380 644.4 447.5 490 422 464.4l42.4-42.4 25.5 25.5L644.4 380l-67.6 154.4zM464.4 422L422 464.4l25.5 25.6 86.9 86.8 26.2 26.2 42.4-42.4-26.2-26.2-86.8-86.9z"/>
</svg>


public/icons/logo.svg (new file, 64 lines, 4.8 KiB)

@@ -0,0 +1,64 @@
<svg width="1333.3333" height="1333.3333" xmlns="http://www.w3.org/2000/svg" xmlns:undefined="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xml:space="preserve" version="1.1">
<metadata id="metadata8">image/svg+xml</metadata>
<defs id="defs6">
<clipPath id="clipPath38" clipPathUnits="userSpaceOnUse">
<path id="path36" d="m-88.32353,1308.85141l1000,0l0,-1000l-1000,0l0,1000z"/>
</clipPath>
<clipPath id="clipPath90" clipPathUnits="userSpaceOnUse">
<path id="path88" d="m27.50203,1000l1000,0l0,-1000l-1000,0l0,1000z"/>
</clipPath>
<linearGradient id="linearGradient28" spreadMethod="pad" y2="-1.61518" x2="0.58126" y1="6.22699" x1="0.29036">
<stop stop-color="#f96703" id="stop24" offset="0"/>
<stop stop-color="#d52417" id="stop26" offset="1"/>
</linearGradient>
<linearGradient id="linearGradient80" spreadMethod="pad" y2="0.3928" x2="0.50131" y1="0.9813" x1="0.40732">
<stop stop-color="#f96703" id="stop76" offset="0"/>
<stop stop-color="#d52417" id="stop78" offset="1"/>
</linearGradient>
</defs>
<g>
<title>Layer 1</title>
<g id="svg_2">
<g transform="matrix(3.90084, 0, 0, -3.90084, -879.13, 3184.77)" id="g12">
<g id="g14">
<g id="g20">
<g id="g22">
<path fill="url(#linearGradient28)" id="path30" d="m306.45138,665.04445l10.144,-5.539c2.183,-0.936 23.559,-9.572 34.04,6.064l0,0c0.043,0.064 0.086,0.134 0.129,0.199l0,0c-8.093,-1.526 -16.714,-0.07 -24.183,4.204l0,0l-3.785,2.165l-16.345,-7.093z"/>
</g>
</g>
</g>
</g>
<g transform="matrix(3.90084, 0, 0, -3.90084, -879.13, 3184.77)" id="g32">
<g clip-path="url(#clipPath38)" id="g34">
<g id="g40">
<path fill-rule="nonzero" fill="#275372" id="path42" d="m387.43706,569.56636c-2.483,3.266 -4.704,6.277 -6.753,9.058c-17.83,24.184 -21.54,29.216 -76.768,32.8c-23.29,1.511 -49.701,14.663 -65.768,23.982l-4.889,-2.121c-5.648,-2.451 -6.038,-9.875 -0.674,-12.839l179.986,-99.446c4.693,-2.593 10.49,-2.658 15.248,-0.173l11.924,6.23c-23.473,15.71 -43.634,31.11 -52.306,42.509"/>
</g>
<g id="g44">
<path fill-rule="nonzero" fill="#0b92b5" id="path46" d="m322.09237,643.49013c2.422,1.242 4.783,2.543 7.113,3.854c-6.631,0.395 -12.739,2.175 -16.904,4.049l-0.339,0.168l-16.199,8.846l-18.052,-7.834c14.789,-8.356 32.676,-15.082 44.381,-9.083"/>
</g>
<g id="g48">
<path fill-rule="nonzero" fill="#275372" id="path50" d="m351.51037,675.29738c2.402,0.755 4.609,1.87 6.632,3.309c0.961,1.836 1.915,3.689 2.859,5.527c1.012,1.972 2.029,3.949 3.056,5.907l-30.603,-13.279c5.782,-2.733 12.249,-3.29 18.056,-1.464"/>
</g>
<g id="g52">
<path fill-rule="nonzero" fill="#056687" id="path54" d="m407.30239,613.09853c-6.513,9.994 -12.663,19.433 -18.565,26.956c-16.671,21.242 -29.086,14.118 -49.652,2.321c-3.963,-2.273 -8.061,-4.624 -12.397,-6.846c-17.967,-9.205 -42.821,2.247 -59.403,12.52l-18.501,-8.028c15.341,-8.4 37.071,-18.342 55.793,-19.557c57.639,-3.739 64.45,-10.143 84.005,-36.671c2.026,-2.746 4.218,-5.722 6.673,-8.947c8.499,-11.172 29.525,-26.888 53.831,-42.909l17.959,9.383c-26.044,20.09 -44.431,48.279 -59.743,71.778"/>
</g>
<g id="g56">
<path fill-rule="nonzero" fill="#275372" id="path58" d="m492.57577,630.75923c8.688,-16.651 18.526,-31.548 29.892,-41.847c4.407,-3.995 8.928,-7.856 13.502,-11.584l25.811,13.484c2.689,1.405 2.629,5.066 -0.103,6.393l-69.102,33.554z"/>
</g>
<g id="g60">
<path fill-rule="nonzero" fill="#0b92b5" id="path62" d="m434.22805,649.51466c-2.202,4.231 -4.451,8.55 -6.711,12.835c-2.478,4.699 -4.972,9.352 -7.435,13.805c-9.608,17.376 -18.756,31.655 -24.776,33.193c-4.062,1.037 -7.909,-1.364 -11.61,-5.67c-1.77,-2.059 -3.507,-4.555 -5.218,-7.321c-1.536,-2.485 -3.052,-5.184 -4.553,-7.984c-1.434,-2.675 -2.855,-5.439 -4.267,-8.187c-1.254,-2.442 -2.524,-4.913 -3.817,-7.347c-2.082,-3.921 -4.225,-7.727 -6.455,-11.155c11.999,2.782 24.318,0.023 37.083,-16.241c6.151,-7.837 12.405,-17.438 19.03,-27.603c15.499,-23.786 34.263,-52.513 60.679,-71.747l19.23,10.046c-22.38,18.849 -42.936,58.323 -61.18,93.376"/>
</g>
</g>
</g>
<g transform="matrix(3.90084, 0, 0, -3.90084, -879.13, 3184.77)" id="g64">
<g id="g66">
<g id="g72">
<g id="g74">
<path fill="url(#linearGradient80)" id="path82" d="m389.88638,718.40945c2.472,0.468 5.105,0.407 7.938,-0.315l0,0c10.785,-2.755 21.255,-19.585 36.885,-49.042l0,0c2.278,-4.294 4.667,-8.856 7.182,-13.682l0,0c0.324,-0.622 0.644,-1.235 0.972,-1.866l0,0c18.612,-35.76 39.69,-76.237 61.532,-92.672l0,0l22.638,11.828c-3.798,3.153 -7.556,6.387 -11.235,9.721l0,0c-14.746,13.364 -26.87,33.471 -37.227,55.178l0,0c-2.059,4.316 -4.049,8.693 -5.976,13.097l0,0c-6.279,14.352 -11.886,28.948 -17.048,42.391l0,0c-11.444,29.808 -24.926,78.95 -34.89,79l0,0c-0.011,0 -0.022,0 -0.033,0l0,0c-12.204,0.002 -22.241,-34.256 -30.738,-53.638"/>
</g>
</g>
</g>
</g>
</g>
</g>
</svg>


public/icons/logo2.svg (new file, 23 lines, 5.8 KiB)

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="1333pt" height="1333pt" viewBox="0 0 1333 1333" version="1.1">
<defs>
<linearGradient id="linear0" gradientUnits="userSpaceOnUse" x1="0.29036" y1="6.22699" x2="0.58126" y2="-1.61518" gradientTransform="matrix(172.855469,0,0,-61.6875,316.289062,624.558594)">
<stop offset="0" style="stop-color:rgb(97.647059%,40.392157%,1.176471%);stop-opacity:1;"/>
<stop offset="1" style="stop-color:rgb(83.529412%,14.117647%,9.019608%);stop-opacity:1;"/>
</linearGradient>
<linearGradient id="linear1" gradientUnits="userSpaceOnUse" x1="0.40732" y1="0.9813" x2="0.50131" y2="0.3928" gradientTransform="matrix(534.988281,0,0,-823.914063,641.753906,997.050781)">
<stop offset="0" style="stop-color:rgb(97.647059%,40.392157%,1.176471%);stop-opacity:1;"/>
<stop offset="1" style="stop-color:rgb(83.529412%,14.117647%,9.019608%);stop-opacity:1;"/>
</linearGradient>
</defs>
<g id="surface1">
<path style=" stroke:none;fill-rule:nonzero;fill:url(#linear0);" d="M 316.289062 590.539062 L 355.859375 612.144531 C 364.375 615.796875 447.757812 649.484375 488.640625 588.488281 C 488.808594 588.242188 488.976562 587.96875 489.144531 587.714844 C 457.578125 593.667969 423.945312 587.988281 394.8125 571.316406 L 380.046875 562.871094 Z M 316.289062 590.539062 "/>
<path style=" stroke:none;fill-rule:nonzero;fill:rgb(15.294118%,32.54902%,44.705882%);fill-opacity:1;" d="M 632.199219 962.984375 C 622.515625 950.242188 613.851562 938.496094 605.859375 927.648438 C 536.304688 833.3125 521.832031 813.683594 306.398438 799.703125 C 215.546875 793.808594 112.523438 742.503906 49.847656 706.152344 L 30.777344 714.425781 C 8.746094 723.984375 7.222656 752.945312 28.148438 764.507812 L 730.242188 1152.429688 C 748.550781 1162.546875 771.164062 1162.800781 789.722656 1153.105469 L 836.238281 1128.804688 C 744.671875 1067.519531 666.027344 1007.449219 632.199219 962.984375 "/>
<path style=" stroke:none;fill-rule:nonzero;fill:rgb(4.313725%,57.254902%,70.980392%);fill-opacity:1;" d="M 377.300781 674.617188 C 386.75 669.773438 395.957031 664.699219 405.046875 659.585938 C 379.179688 658.042969 355.355469 651.101562 339.109375 643.789062 L 337.785156 643.132812 L 274.59375 608.628906 L 204.175781 639.1875 C 261.867188 671.78125 331.640625 698.019531 377.300781 674.617188 "/>
<path style=" stroke:none;fill-rule:nonzero;fill:rgb(15.294118%,32.54902%,44.705882%);fill-opacity:1;" d="M 492.054688 550.542969 C 501.425781 547.597656 510.035156 543.25 517.925781 537.636719 C 521.675781 530.472656 525.394531 523.246094 529.078125 516.074219 C 533.027344 508.382812 536.992188 500.671875 541 493.03125 L 421.621094 544.832031 C 444.175781 555.492188 469.402344 557.664062 492.054688 550.542969 "/>
<path style=" stroke:none;fill-rule:nonzero;fill:rgb(1.960784%,40%,52.941176%);fill-opacity:1;" d="M 709.691406 793.171875 C 684.285156 754.1875 660.296875 717.367188 637.273438 688.019531 C 572.242188 605.15625 523.8125 632.949219 443.585938 678.964844 C 428.128906 687.832031 412.144531 697.003906 395.230469 705.671875 C 325.144531 741.578125 228.191406 696.90625 163.507812 656.832031 L 91.339844 688.148438 C 151.179688 720.914062 235.945312 759.699219 308.976562 764.4375 C 533.816406 779.023438 560.386719 804.003906 636.667969 907.484375 C 644.570312 918.195312 653.121094 929.804688 662.699219 942.386719 C 695.851562 985.964844 777.871094 1047.269531 872.683594 1109.765625 L 942.738281 1073.164062 C 841.144531 994.796875 769.421875 884.835938 709.691406 793.171875 "/>
<path style=" stroke:none;fill-rule:nonzero;fill:rgb(15.294118%,32.54902%,44.705882%);fill-opacity:1;" d="M 1042.328125 724.277344 C 1076.21875 789.230469 1114.597656 847.34375 1158.933594 887.519531 C 1176.125 903.101562 1193.761719 918.164062 1211.601562 932.703125 L 1312.285156 880.105469 C 1322.777344 874.625 1322.542969 860.34375 1311.886719 855.167969 Z M 1042.328125 724.277344 "/>
<path style=" stroke:none;fill-rule:nonzero;fill:rgb(4.313725%,57.254902%,70.980392%);fill-opacity:1;" d="M 814.722656 651.117188 C 806.132812 634.613281 797.363281 617.765625 788.546875 601.050781 C 778.878906 582.71875 769.152344 564.570312 759.542969 547.199219 C 722.0625 479.417969 686.378906 423.71875 662.894531 417.71875 C 647.050781 413.671875 632.042969 423.039062 617.605469 439.835938 C 610.703125 447.867188 603.925781 457.605469 597.253906 468.394531 C 591.261719 478.085938 585.347656 488.617188 579.492188 499.539062 C 573.898438 509.972656 568.355469 520.753906 562.847656 531.472656 C 557.957031 541 553 550.640625 547.957031 560.132812 C 539.835938 575.429688 531.476562 590.277344 522.777344 603.648438 C 569.582031 592.796875 617.636719 603.558594 667.433594 667 C 691.425781 697.574219 715.820312 735.023438 741.664062 774.675781 C 802.125 867.460938 875.320312 979.519531 978.363281 1054.550781 L 1053.378906 1015.363281 C 966.078125 941.835938 885.890625 787.851562 814.722656 651.117188 "/>
<path style=" stroke:none;fill-rule:nonzero;fill:url(#linear1);" d="M 641.753906 382.371094 C 651.398438 380.542969 661.667969 380.78125 672.71875 383.597656 C 714.789062 394.34375 755.632812 459.996094 816.601562 574.902344 C 825.488281 591.652344 834.808594 609.449219 844.617188 628.273438 C 845.882812 630.699219 847.128906 633.09375 848.410156 635.554688 C 921.011719 775.046875 1003.234375 932.941406 1088.4375 997.050781 L 1176.742188 950.914062 C 1161.925781 938.613281 1147.269531 926 1132.917969 912.992188 C 1075.394531 860.863281 1028.101562 782.429688 987.699219 697.753906 C 979.667969 680.917969 971.90625 663.84375 964.390625 646.664062 C 939.894531 590.679688 918.023438 533.742188 897.886719 481.304688 C 853.246094 365.027344 800.65625 173.332031 761.789062 173.136719 C 761.746094 173.136719 761.703125 173.136719 761.660156 173.136719 C 714.050781 173.128906 674.898438 306.761719 641.753906 382.371094 "/>
</g>
</svg>


public/screenshot.png (binary, new file, 364 KiB)

database-wrapper.js

@@ -1,10 +1,9 @@
 import {
-mongoUpsert,
 mongoRead,
 mongoReadWithReadCredentials,
-mongoGetAllElements,
+mongoUpsert,
 } from "./mongo-wrapper.js";
-import { pgUpsert, pgRead, pgReadWithReadCredentials } from "./pg-wrapper.js";
+import { pgRead, pgReadWithReadCredentials, pgUpsert } from "./pg-wrapper.js";
 export async function databaseUpsert({ contents, group }) {
 // No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear.
@@ -23,9 +22,7 @@ export async function databaseUpsert({ contents, group }) {
 break;
 case "history":
 let currentDate = new Date();
-let dateUpToYear = currentDate.
-toISOString()
-.slice(0,4)
+let dateUpToYear = currentDate.toISOString().slice(0, 4);
 let dateUpToMonth = currentDate
 .toISOString()
 .slice(0, 7)
@@ -37,8 +34,16 @@ export async function databaseUpsert({ contents, group }) {
 "metaforecastHistory",
 "metaforecastDatabase"
 );
-await pgUpsert({ contents, schema: "history", tableName: `h${dateUpToYear}` });
-await pgUpsert({ contents, schema: "history", tableName: `h${dateUpToMonth}` });
+await pgUpsert({
+contents,
+schema: "history",
+tableName: `h${dateUpToYear}`,
+});
+await pgUpsert({
+contents,
+schema: "history",
+tableName: `h${dateUpToMonth}`,
+});
 break;
 default:
 mongoDocName = `${group}-questions`;

mongo-wrapper.js

@@ -1,9 +1,14 @@
-import pkg from 'mongodb';
+import pkg from "mongodb";
+import { getSecret } from "../utils/getSecrets.js";
+import { roughSizeOfObject } from "../utils/roughSize.js";
 const { MongoClient } = pkg;
-import { getSecret } from "../utils/getSecrets.js"
-import { roughSizeOfObject } from "../utils/roughSize.js"
-export async function mongoUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") {
+export async function mongoUpsert(
+contents,
+documentName,
+collectionName = "metaforecastCollection",
+databaseName = "metaforecastDatabase"
+) {
 const url = process.env.MONGODB_URL || getSecret("mongodb");
 const client = new MongoClient(url);
 try {
@@ -15,35 +20,46 @@ export async function mongoUpsert(contents, documentName, collectionName = "meta
 const collection = db.collection(collectionName);
 // Construct a document
-let document = ({
-"name": documentName,
-"timestamp": new Date().toISOString(),
-"contentsArray": contents
-})
+let document = {
+name: documentName,
+timestamp: new Date().toISOString(),
+contentsArray: contents,
+};
 // Create a filter
-const filter = { "name": documentName };
+const filter = { name: documentName };
 // Insert a single document, wait for promise so we can read it back
 // const p = await collection.insertOne(metaforecastDocument);
 await collection.replaceOne(filter, document, { upsert: true });
-console.log(`Pushed document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(document)} MB`)
+console.log(
+`Pushed document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(
+document
+)} MB`
+);
 // Find one document
 const myDocument = await collection.findOne(filter);
 // Print to the console
-console.log(`Received document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(contents)} MB`)
-console.log("Sample: ")
+console.log(
+`Received document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(
+contents
+)} MB`
+);
+console.log("Sample: ");
 console.log(JSON.stringify(myDocument.contentsArray.slice(0, 1), null, 4));
 } catch (err) {
 console.log(err.stack);
-}
-finally {
+} finally {
 await client.close();
 }
 }
-export async function mongoRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") {
+export async function mongoRead(
+documentName,
+collectionName = "metaforecastCollection",
+databaseName = "metaforecastDatabase"
+) {
 const url = process.env.MONGODB_URL || getSecret("mongodb");
 const client = new MongoClient(url, {
@@ -51,7 +67,7 @@ export async function mongoRead(documentName, collectionName = "metaforecastColl
 useUnifiedTopology: true,
 });
-let documentContents
+let documentContents = [];
 try {
 await client.connect();
 console.log(`Connected correctly to server to read ${documentName}`);
@@ -61,7 +77,7 @@ export async function mongoRead(documentName, collectionName = "metaforecastColl
 const collection = db.collection(collectionName);
 // Search options
-const query = { "name": documentName };
+const query = { name: documentName };
 const options = {
 // sort matched documents in descending order by rating
 sort: { rating: -1 },
@@ -70,26 +86,30 @@ export async function mongoRead(documentName, collectionName = "metaforecastColl
 // Insert a single document, wait for promise so we can read it back
 // const p = await collection.insertOne(metaforecastDocument);
 const document = await collection.findOne(query, options);
-documentContents = document.contentsArray
+documentContents = document.contentsArray;
 } catch (err) {
 console.log(err.stack);
-}
-finally {
+} finally {
 await client.close();
 }
 console.log(documentContents.slice(0, 1));
-return documentContents
+return documentContents;
 }
-export async function mongoReadWithReadCredentials(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") {
-const url = "mongodb+srv://metaforecast-frontend:hJr5c9kDhbutBtF1@metaforecastdatabaseclu.wgk8a.mongodb.net/?retryWrites=true&w=majority&useNewUrlParser=true&useUnifiedTopology=true"; // This user only has read permissions, so I'm not excessively worried, and would even be pleased, if someone read this and decided to do something cool with the database.
+export async function mongoReadWithReadCredentials(
+documentName,
+collectionName = "metaforecastCollection",
+databaseName = "metaforecastDatabase"
+) {
+const url =
+"mongodb+srv://metaforecast-frontend:hJr5c9kDhbutBtF1@metaforecastdatabaseclu.wgk8a.mongodb.net/?retryWrites=true&w=majority&useNewUrlParser=true&useUnifiedTopology=true"; // This user only has read permissions, so I'm not excessively worried, and would even be pleased, if someone read this and decided to do something cool with the database.
 const client = new MongoClient(url, {
 useNewUrlParser: true,
 useUnifiedTopology: true,
 });
-let documentContents
+let documentContents;
 try {
 await client.connect();
 // console.log(`Connected correctly to server to read ${documentName}`);
@@ -99,7 +119,7 @@ export async function mongoReadWithReadCredentials(documentName, collectionName
 const collection = db.collection(collectionName);
 // Search options
-const query = { "name": documentName };
+const query = { name: documentName };
 const options = {
 // sort matched documents in descending order by rating
 sort: { rating: -1 },
@@ -108,18 +128,20 @@ export async function mongoReadWithReadCredentials(documentName, collectionName
 // Insert a single document, wait for promise so we can read it back
 // const p = await collection.insertOne(metaforecastDocument);
 const document = await collection.findOne(query, options);
-documentContents = document.contentsArray
+documentContents = document.contentsArray;
 } catch (err) {
 console.log(err.stack);
-}
-finally {
+} finally {
 await client.close();
 }
 // console.log(documentContents.slice(0,1));
-return documentContents
+return documentContents;
 }
-export async function mongoGetAllElements(databaseName = "metaforecastDatabase", collectionName = "metaforecastCollection") {
+export async function mongoGetAllElements(
+databaseName = "metaforecastDatabase",
+collectionName = "metaforecastCollection"
+) {
 const url = process.env.MONGODB_URL || getSecret("mongodb");
 const client = new MongoClient(url, {
 useNewUrlParser: true,
@@ -135,21 +157,22 @@ export async function mongoGetAllElements(databaseName = "metaforecastDatabase",
 const collection = db.collection(collectionName);
 // Search options
-const query = ({});
-const options = ({});
+const query = {};
+const options = {};
 // Insert a single document, wait for promise so we can read it back
 // const p = await collection.insertOne(metaforecastDocument);
-const documents = await collection.find().toArray()
-let documentNames = documents.map(document => ({ name: document.name, roughSizeMBs: roughSizeOfObject(document) }));
-console.log(documentNames)
+const documents = await collection.find().toArray();
+let documentNames = documents.map((document) => ({
+name: document.name,
+roughSizeMBs: roughSizeOfObject(document),
+}));
+console.log(documentNames);
 } catch (error) {
-console.log(error)
+console.log(error);
-}
-finally {
+} finally {
 await client.close();
 }
 }
 //mongoGetAllElements()
 //mongoGetAllElements("metaforecastDatabase", "metaforecastHistory")

pg-wrapper.js

@@ -1,30 +1,27 @@
 import pkg from "pg";
-const { Pool } = pkg;
 import { platformNames } from "../platforms/all/platformNames.js";
 import { getSecret } from "../utils/getSecrets.js";
-import { roughSizeOfObject } from "../utils/roughSize.js";
 import { hash } from "../utils/hash.js";
+import { roughSizeOfObject } from "../utils/roughSize.js";
+const { Pool } = pkg;
 // Definitions
 const schemas = ["latest", "history"];
 const year = Number(new Date().toISOString().slice(0, 4));
-const allowed_years = [year, year + 1].map(year => `h${year}`); // tables can't begin with number
+const allowed_years = [year, year + 1].map((year) => `h${year}`); // tables can't begin with number
 const allowed_months = [...Array(12).keys()]
 .map((x) => x + 1)
-.map(x => String(x).length == 1 ? `0${x}` : x);
+.map((x) => (String(x).length == 1 ? `0${x}` : x));
 const allowed_year_month_histories = [].concat(
 ...allowed_years.map((year) =>
 allowed_months.map((month) => `${year}_${month}`)
 )
 ); // h2022_01
-const tableNamesWhitelistLatest = [
-"combined",
-...platformNames,
-];
+const tableNamesWhitelistLatest = ["combined", ...platformNames];
 const tableNamesWhiteListHistory = [
 ...allowed_years,
 ...allowed_year_month_histories,
-]
+];
 const tableNamesWhitelist = [
 ...tableNamesWhitelistLatest,
 ...tableNamesWhiteListHistory,
@@ -43,19 +40,24 @@ const databaseURL =
 // process.env.DATABASE_URL || getSecret("heroku-postgres")
 const readWritePool = new Pool({
 connectionString: databaseURL,
-ssl: {
-rejectUnauthorized: false,
-},
+ssl: process.env.POSTGRES_NO_SSL
+? false
+: {
+rejectUnauthorized: false,
+},
 });
 const readOnlyDatabaseURL =
 "postgresql://public_read_only_user:gOcihnLhqRIQUQYt@postgres-red-do-user-10290909-0.b.db.ondigitalocean.com:25060/metaforecastpg?sslmode=require" ||
 getSecret("digitalocean-postgres-public");
-const readOnlyPool = new Pool({ // never used
+const readOnlyPool = new Pool({
+// never used
 connectionString: readOnlyDatabaseURL,
-ssl: {
-rejectUnauthorized: false,
-},
+ssl: process.env.POSTGRES_NO_SSL
+? false
+: {
+rejectUnauthorized: false,
+},
 });
 // Helpers
@@ -83,7 +85,7 @@ let createIndex = (schema, table) =>
 let createUniqueIndex = (schema, table) =>
 `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`;
-async function pgInitializeScaffolding(){
+async function pgInitializeScaffolding() {
 async function setPermissionsForPublicUser() {
 let initCommands = [
 "REVOKE ALL ON DATABASE metaforecastpg FROM public_read_only_user;",
@@ -111,7 +113,7 @@ async function pgInitializeScaffolding(){
 });
 }
 }
-let YOLO = false;
+let YOLO = true;
 if (YOLO) {
 console.log("Create schemas");
 for (let schema of schemas) {
@@ -132,7 +134,7 @@ async function pgInitializeScaffolding(){
 console.log("Set public user permissions");
 await setPermissionsForPublicUser();
 console.log("");
-}else {
+} else {
 console.log(
 "pgInitializeScaffolding: This command is dangerous, set YOLO to true in the code to invoke it"
 );
@@ -156,10 +158,10 @@ let buildMetaforecastTable = (
 );`;
 async function pgInitializeLatest() {
-let YOLO = false;
+let YOLO = true;
 if (YOLO) {
 console.log("Create tables & their indexes");
-let schema = "latest"
+let schema = "latest";
 for (let table of tableNamesWhitelistLatest) {
 await runPgCommand({
 command: dropTable(schema, table),
@@ -177,10 +179,10 @@ async function pgInitializeLatest() {
 });
 } else {
 */
 await runPgCommand({
 command: createUniqueIndex(schema, table),
 pool: readWritePool,
 });
 //}
 }
 console.log("");
@@ -193,7 +195,7 @@ async function pgInitializeLatest() {
 async function pgInitializeDashboards() {
 let buildDashboard = () =>
 `CREATE TABLE latest.dashboards (
 id text,
 title text,
 description text,
@@ -202,7 +204,7 @@ async function pgInitializeDashboards() {
 creator text,
 extra json
 );`;
-let YOLO = false;
+let YOLO = true;
 if (YOLO) {
 await runPgCommand({
 command: `CREATE SCHEMA IF NOT EXISTS history;`,
@@ -241,10 +243,7 @@ async function pgInitializeDashboards() {
 }
 }
-let buildHistoryTable = (
-schema,
-table
-) => `CREATE TABLE ${schema}.${table} (
+let buildHistoryTable = (schema, table) => `CREATE TABLE ${schema}.${table} (
 id text,
 title text,
 url text,
@@ -257,7 +256,7 @@ let buildHistoryTable = (
 extra json
 );`;
 export async function pgInitializeHistories() {
-let YOLO = false;
+let YOLO = true;
 if (YOLO) {
 console.log("Drop all previous history tables (Danger!)");
 await runPgCommand({
@@ -283,7 +282,7 @@ export async function pgInitializeHistories() {
 console.log("");
 console.log("Create tables & their indexes");
-let schema = "history"
+let schema = "history";
 for (let table of tableNamesWhiteListHistory) {
 await runPgCommand({
 command: dropTable(schema, table),
@@ -307,6 +306,7 @@
 }
 export async function pgInitialize() {
+await pgInitializeScaffolding();
 await pgInitializeLatest();
 await pgInitializeHistories();
 await pgInitializeDashboards();
@@ -478,7 +478,7 @@ pgInsertIntoDashboard({
 */
 export async function pgUpsert({ contents, schema, tableName }) {
 if (tableWhiteList.includes(`${schema}.${tableName}`)) {
-let init = Date.now()
+let init = Date.now();
 if (schema == "latest") {
 await runPgCommand({
 command: dropTable(schema, tableName),
@@ -493,8 +493,16 @@ export async function pgUpsert({ contents, schema, tableName }) {
 pool: readWritePool,
 });
 }
-console.log(`Upserting ${contents.length} rows into postgres table ${schema}.${tableName}.`);
-console.log(`Expected to take ${Number(contents.length * 831.183 / 4422).toFixed(2)} seconds or ${Number(contents.length * 13.85305 / 4422).toFixed(2)} minutes`)
+console.log(
+`Upserting ${contents.length} rows into postgres table ${schema}.${tableName}.`
+);
+console.log(
+`Expected to take ${Number((contents.length * 831.183) / 4422).toFixed(
+2
+)} seconds or ${Number((contents.length * 13.85305) / 4422).toFixed(
+2
+)} minutes`
+);
 let i = 0;
 for (let datum of contents) {
 await pgInsert({ datum, schema, tableName });
@@ -507,29 +515,36 @@ export async function pgUpsert({ contents, schema, tableName }) {
 }
 }
 console.log(
-`Inserted ${contents.length} rows with approximate cummulative size ${roughSizeOfObject(
+`Inserted ${
+contents.length
+} rows with approximate cummulative size ${roughSizeOfObject(
 contents
 )} MB into ${schema}.${tableName}.`
 );
 let check = await pgRead({ schema, tableName });
 console.log(
-`Received ${check.length} rows with approximate cummulative size ${roughSizeOfObject(
+`Received ${
+check.length
+} rows with approximate cummulative size ${roughSizeOfObject(
 check
 )} MB from ${schema}.${tableName}.`
 );
 console.log("Sample: ");
 console.log(JSON.stringify(check.slice(0, 1), null, 4));
-let end = Date.now()
-let difference = end - init
-console.log(`Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`)
+let end = Date.now();
+let difference = end - init;
+console.log(
+`Took ${difference / 1000} seconds, or ${
+difference / (1000 * 60)
+} minutes.`
+);
 //console.log(JSON.stringify(check.slice(0, 1), null, 4));
 } else {
-console.log("tableWhiteList:")
-console.log(tableWhiteList)
+console.log("tableWhiteList:");
+console.log(tableWhiteList);
 throw Error(
 `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
 );
 }

doEverything.js (new file, 58 lines)

@@ -0,0 +1,58 @@
import { mergeEverything } from "./mergeEverything.js";
import { updateHistory } from "./history/updateHistory.js";
import { rebuildAlgoliaDatabase } from "../utils/algolia.js";
import { rebuildNetlifySiteWithNewData } from "./rebuildNetliftySiteWithNewData.js";
import { platformFetchers } from "../platforms/all-platforms.js";
/* Do everything */
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
export async function tryCatchTryAgain(fun) {
try {
console.log("Initial try");
await fun();
} catch (error) {
sleep(10000);
console.log("Second try");
console.log(error);
try {
await fun();
} catch (error) {
console.log(error);
}
}
}
export async function doEverything() {
let functions = [
...platformFetchers,
mergeEverything,
rebuildAlgoliaDatabase,
updateHistory,
rebuildNetlifySiteWithNewData,
];
// Removed Good Judgment from the fetcher, doing it using cron instead because cloudflare blocks the utility on heroku.
console.log("");
console.log("");
console.log("");
console.log("");
console.log("================================");
console.log("STARTING UP");
console.log("================================");
console.log("");
console.log("");
console.log("");
console.log("");
for (let fun of functions) {
console.log("");
console.log("");
console.log("****************************");
console.log(fun.name);
console.log("****************************");
await tryCatchTryAgain(fun);
console.log("****************************");
}
}

(unnamed new file, 3 lines)

@@ -0,0 +1,3 @@
import { doEverything } from "./doEverything.js";
doEverything();

addToHistory.js (new file, 111 lines)

@@ -0,0 +1,111 @@
import { writeFileSync } from "fs";
import {
databaseReadWithReadCredentials,
databaseUpsert,
} from "../../database/database-wrapper.js";
let isEmptyArray = (arr) => arr.length == 0;
export async function addToHistory() {
let currentDate = new Date();
let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_");
let currentJSONwithMetaculus = await databaseReadWithReadCredentials({
group: "combined",
});
let currentJSON = currentJSONwithMetaculus.filter(
(element) =>
element.platform != "Metaculus" && element.platform != "Estimize"
); // without Metaculus
// console.log(currentJSON.slice(0,20))
// console.log(currentJSON)
let historyJSON = await databaseReadWithReadCredentials({ group: "history" });
// console.log(historyJSON)
let currentForecastsWithAHistory = currentJSON.filter(
(element) =>
!isEmptyArray(
historyJSON.filter(
(historyElement) =>
historyElement.title == element.title &&
historyElement.url == element.url
)
)
);
// console.log(currentForecastsWithAHistory)
let currentForecastsWithoutAHistory = currentJSON.filter((element) =>
isEmptyArray(
historyJSON.filter(
(historyElement) =>
historyElement.title == element.title &&
historyElement.url == element.url
)
)
);
// console.log(currentForecastsWithoutAHistory)
// Add both types of forecast
let newHistoryJSON = [];
for (let historyElement of historyJSON) {
let correspondingNewElementArray = currentForecastsWithAHistory.filter(
(element) =>
historyElement.title == element.title &&
historyElement.url == element.url
);
// console.log(correspondingNewElement)
if (!isEmptyArray(correspondingNewElementArray)) {
let correspondingNewElement = correspondingNewElementArray[0];
let timeStampOfNewElement = correspondingNewElement.timestamp;
let doesHistoryAlreadyContainElement = historyElement.history
.map((element) => element.timestamp)
.includes(timeStampOfNewElement);
if (!doesHistoryAlreadyContainElement) {
let historyWithNewElement = historyElement["history"].concat({
timestamp: correspondingNewElement.timestamp,
options: correspondingNewElement.options,
qualityindicators: correspondingNewElement.qualityindicators,
});
let newHistoryElement = {
...correspondingNewElement,
history: historyWithNewElement,
};
// If some element (like the description) changes, we keep the new one.
newHistoryJSON.push(newHistoryElement);
} else {
newHistoryJSON.push(historyElement);
}
} else {
// console.log(historyElement)
newHistoryJSON.push(historyElement);
}
}
for (let currentForecast of currentForecastsWithoutAHistory) {
let newHistoryElement = {
...currentForecast,
history: [
{
timestamp: currentForecast.timestamp,
options: currentForecast.options,
qualityindicators: currentForecast.qualityindicators,
},
],
};
delete newHistoryElement.timestamp;
delete newHistoryElement.options;
delete newHistoryElement.qualityindicators;
newHistoryJSON.push(newHistoryElement);
}
await databaseUpsert({ contents: newHistoryJSON, group: "history" });
// console.log(newHistoryJSON.slice(0,5))
// writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))
// writefile(JSON.stringify(newHistoryJSON, null, 2), "metaforecasts_history", "", ".json")
//console.log(newHistoryJSON)
/*
let forecastsAlreadyInHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url )))
*/
//console.log(new Date().toISOString())
}
// updateHistory()

createHistoryForMonth.js (new file, 39 lines)

@@ -0,0 +1,39 @@
import {
databaseRead,
databaseUpsert,
} from "../../database/database-wrapper.js";
export async function createHistoryForMonth() {
let currentDate = new Date();
let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_");
let metaforecasts = await databaseRead({ group: "combined" });
let metaforecastsHistorySeed = metaforecasts
.map((element) => {
// let moreoriginsdata = element.author ? ({author: element.author}) : ({})
return {
title: element.title,
url: element.url,
platform: element.platform,
moreoriginsdata: element.moreoriginsdata || {},
description: element.description,
history: [
{
timestamp: element.timestamp,
options: element.options,
qualityindicators: element.qualityindicators,
},
],
extra: element.extra || {},
};
})
.filter(
(element) =>
element.platform != "Metaculus" && element.platform != "Estimize"
);
//console.log(metaforecastsHistorySeed)
await databaseUpsert({
contents: metaforecastsHistorySeed,
group: "history",
});
}
////createInitialHistory()

createInitialHistory.js (new file, 29 lines)

@@ -0,0 +1,29 @@
import { databaseRead, databaseUpsert } from "../database-wrapper.js";
let createInitialHistory = async () => {
let metaforecasts = await databaseRead({ group: "combined" });
let metaforecastsHistorySeed = metaforecasts.map((element) => {
// let moreoriginsdata = element.author ? ({author: element.author}) : ({})
return {
title: element.title,
url: element.url,
platform: element.platform,
moreoriginsdata: element.moreoriginsdata || {},
description: element.description,
history: [
{
timestamp: element.timestamp,
options: element.options,
qualityindicators: element.qualityindicators,
},
],
extra: element.extra || {},
};
});
console.log(metaforecastsHistorySeed);
await databaseUpsert({
contents: metaforecastsHistorySeed,
group: "history",
});
};
createInitialHistory();

(unnamed new file, 21 lines)

@@ -0,0 +1,21 @@
import { addToHistory } from "./addToHistory.js";
import { createHistoryForMonth } from "./createHistoryForMonth.js";
export async function updateHistoryOld() {
let currentDate = new Date();
let dayOfMonth = currentDate.getDate();
if (dayOfMonth == 1) {
console.log(
`Creating history for the month ${currentDate.toISOString().slice(0, 7)}`
);
await createHistoryForMonth();
} else {
console.log(`Updating history for ${currentDate.toISOString()}`);
await addToHistory();
}
}
export async function updateHistory() {
let currentDate = new Date();
let year = currentDate.toISOString().slice(0, 4);
}

updateHistory.js (new file, 12 lines)

@@ -0,0 +1,12 @@
import {
databaseReadWithReadCredentials,
databaseUpsert,
} from "../../database/database-wrapper.js";
export async function updateHistory() {
let latest = await databaseReadWithReadCredentials({ group: "combined" });
await databaseUpsert({
contents: latest,
group: "history",
});
}

mergeEverything.js

@@ -1,5 +1,5 @@
 import { databaseRead, databaseUpsert } from "../database/database-wrapper.js";
-import { platformNames } from "../platforms/all-platforms.js"
+import { platformNames } from "../platforms/all-platforms.js";
 /* Merge everything */
 export async function mergeEverythingInner() {

rebuildNetliftySiteWithNewData.js (new file, 14 lines)

@@ -0,0 +1,14 @@
import axios from "axios";
import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
export async function rebuildNetlifySiteWithNewData_inner(cookie) {
let payload = {};
let response = await axios.post(cookie, payload);
let data = response.data;
console.log(data);
}
export async function rebuildNetlifySiteWithNewData() {
let cookie = process.env.REBUIDNETLIFYHOOKURL || getSecret("netlify");
await applyIfSecretExists(cookie, rebuildNetlifySiteWithNewData_inner);
}

src/backend/frontpage.ts (new file, 101 lines)

@@ -0,0 +1,101 @@
import fs from 'fs';
import { pgRead } from './database/pg-wrapper';
// TODO - move to global `constants.ts` config
const location = "/Users/berekuk/coding/quri/metaforecast-backend/data";
export async function getFrontpageRaw() {
let frontpageSlicedLocation = `${location}/frontpage_sliced.json`;
return JSON.parse(
fs.readFileSync(frontpageSlicedLocation, { encoding: "utf-8" })
); // TODO - async, no reason to lock
}
export async function getFrontpageFullRaw() {
let frontpageSlicedLocation = `${location}/frontpage_full.json`;
return JSON.parse(
fs.readFileSync(frontpageSlicedLocation, { encoding: "utf-8" })
); // TODO - async, no reason to lock
}
export async function getFrontpage() {
let frontPageForecastsCompatibleWithFuse = [];
try {
let data = await getFrontpageRaw();
frontPageForecastsCompatibleWithFuse = data.map((result) => ({
item: result,
score: 0,
}));
return frontPageForecastsCompatibleWithFuse;
} catch (error) {
console.log(error);
} finally {
return frontPageForecastsCompatibleWithFuse;
}
}
// Helpers
let shuffle = (array) => {
// https://stackoverflow.com/questions/2450954/how-to-randomize-shuffle-a-javascript-array
let currentIndex = array.length,
randomIndex;
// While there remain elements to shuffle...
while (currentIndex != 0) {
// Pick a remaining element...
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex--;
// And swap it with the current element.
[array[currentIndex], array[randomIndex]] = [
array[randomIndex],
array[currentIndex],
];
}
return array;
};
// Main
export async function downloadFrontpage() {
let init = Date.now();
let response = await pgRead({ schema: "latest", tableName: "combined" });
fs.writeFileSync(
`${location}/frontpage_full.json`,
JSON.stringify(response, null, 4)
);
console.log(`frontpage_full.json written to ${location}`);
let responseFiltered = response.filter(
(forecast) =>
forecast.qualityindicators &&
forecast.qualityindicators.stars >= 3 &&
forecast.options &&
forecast.options.length > 0 &&
forecast.description != ""
);
let responseFilteredAndRandomized = shuffle(responseFiltered).slice(0, 50);
fs.writeFileSync(
`${location}/frontpage_sliced.json`,
JSON.stringify(responseFilteredAndRandomized, null, 4)
);
console.log(`frontpage_sliced.json written to ${location}`);
let end = Date.now();
let difference = end - init;
console.log(
`Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
);
/*
# (run code)
sleep 10
cp /home/azrael/server/data/frontpage_freshly_sliced.json /home/azrael/server/data/frontpage_sliced.json
date > /home/azrael/server/data/frontpage_slicetime.txt
cat /home/azrael/server/data/frontpage_freshly_sliced.json >> /home/azrael/server/data/frontpage_slicetime.txt
*/
}
// TODO: call /api/cron/update-frontpage from github actions every 6 hours
// TODO: store frontpage_sliced copy somewhere

index.js

@@ -1,16 +1,13 @@
 /* Imports */
-import fs from "fs";
+import "dotenv/config";
 import readline from "readline";
+import { pgInitialize } from "./database/pg-wrapper.js";
-import { platformFetchers } from "./platforms/all-platforms.js";
-import { mergeEverything } from "./flow/mergeEverything.js";
-import { updateHistory } from "./flow/history/updateHistory.js";
-import { rebuildAlgoliaDatabase } from "./utils/algolia.js";
-import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData.js";
-import {
-pgInitialize,
-} from "./database/pg-wrapper.js";
 import { doEverything, tryCatchTryAgain } from "./flow/doEverything.js";
+import { updateHistory } from "./flow/history/updateHistory.js";
+import { mergeEverything } from "./flow/mergeEverything.js";
+import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData.js";
+import { platformFetchers } from "./platforms/all-platforms.js";
+import { rebuildAlgoliaDatabase } from "./utils/algolia.js";
 /* Support functions */
 let functions = [

(unnamed new file, 12 lines)

@@ -0,0 +1,12 @@
import "dotenv/config";
import fs from "fs";
import { databaseReadWithReadCredentials } from "../database/database-wrapper.js";
let main = async () => {
let json = await databaseReadWithReadCredentials({ group: "combined" });
let string = JSON.stringify(json, null, 2);
let filename = "metaforecasts.json";
fs.writeFileSync(filename, string);
console.log(`File downloaded to ./${filename}`);
};
main();

(unnamed new file, 3 lines)

@@ -0,0 +1,3 @@
import { pgInitialize } from "../database/pg-wrapper.js";
pgInitialize();

(unnamed file)

@@ -9,10 +9,10 @@ let suffixMongo = "-questions";
 let main = async () => {
 for (let file of pushManualFiles) {
-let fileRaw = fs.readFileSync(`./src/input/${file + suffixFiles}`);
+let fileRaw = fs.readFileSync(`./input/${file + suffixFiles}`);
 let fileContents = JSON.parse(fileRaw);
 console.log(fileContents);
-await databaseUpsert({contents: fileContents, group: file });
+await databaseUpsert({ contents: fileContents, group: file });
 }
 };
 main();

(unnamed file)

@@ -1,5 +1,5 @@
 /* Imports */
-import { goodjudgment } from "../platforms/goodjudgment-fetch.js"
+import { goodjudgment } from "../platforms/goodjudgment-fetch.js";
 /* Definitions */
@@ -8,5 +8,4 @@ import { goodjudgment } from "../platforms/goodjudgment-fetch.js"
 /* Support functions */
 /* Body */
-goodjudgment()
+goodjudgment();

astralcodexten-fetch.js (new file, 103 lines)

@@ -0,0 +1,103 @@
/* Imports */
import fs from "fs";
import axios from "axios";
import { calculateStars } from "../utils/stars.js";
import { databaseUpsert } from "../utils/database-wrapper.js";
/* Definitions */
let graphQLendpoint = "https://api.foretold.io/graphql";
let ScottAlexanderPredictions = ["6eebf79b-4b6f-487b-a6a5-748d82524637"];
/* Support functions */
async function fetchAllCommunityQuestions(communityId) {
let response = await axios({
url: graphQLendpoint,
method: "POST",
headers: { "Content-Type": "application/json" },
data: JSON.stringify({
query: `
query {
measurables(
channelId: "${communityId}",
states: OPEN,
first: 500
){
total
edges{
node{
id
name
valueType
measurementCount
previousAggregate{
value{
percentage
}
}
}
}
}
}
`,
}),
})
.then((res) => res.data)
.then((res) => res.data.measurables.edges);
//console.log(response)
return response;
}
/* Body */
export async function astralcodexten() {
let results = [];
for (let community of ScottAlexanderPredictions) {
let questions = await fetchAllCommunityQuestions(community);
questions = questions.map((question) => question.node);
questions = questions.filter((question) => question.previousAggregate); // Questions without any predictions
questions.forEach((question) => {
let options = [];
if (question.valueType == "PERCENTAGE") {
let probability = question.previousAggregate.value.percentage;
options = [
{
name: "Yes",
probability: probability / 100,
type: "PROBABILITY",
},
{
name: "No",
probability: 1 - probability / 100,
type: "PROBABILITY",
},
];
}
let result = {
title: question.name.split(". ")[1],
url: `https://www.foretold.io/c/${community}/m/${question.id}`,
platform: "AstralCodexTen",
description: "...by the end of 2021",
options: options,
timestamp: new Date().toISOString(),
qualityindicators: {
numforecasts: Number((question.measurementCount + 1) / 2),
stars: calculateStars("AstralCodexTen", {}),
},
/*liquidity: liquidity.toFixed(2),
tradevolume: tradevolume.toFixed(2),
address: obj.address*/
};
// console.log(result)
results.push(result);
});
}
/*
let string = JSON.stringify(results, null, 2)
console.log(JSON.stringify(results, null, 2))
fs.writeFileSync('./data/astralcodexten-questions.json', string);
*/
await databaseUpsert(results, "astralcodexten-questions");
// console.log(results)
console.log("Done");
}
// astralcodexten()

coupcast-fetch.js (new file, 190 lines)

@@ -0,0 +1,190 @@
/* Imports */
import fs from "fs";
import axios from "axios";
import Papa from "papaparse";
import open from "open";
import readline from "readline";
import { calculateStars } from "../utils/stars.js";
import { databaseUpsert } from "../utils/database-wrapper.js";
/* Definitions */
let coupCastEndpoint =
"https://www.oneearthfuture.org/sites/all/themes/stability/stability_sub/data/dashboard_2021_code_06.csv";
var datenow = new Date();
var currentmonth = datenow.getMonth() + 1;
dd;
/* Support functions */
let unique = (arr) => [...new Set(arr)];
let sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
let sanitizeCountryName = (country_name) => {
let sanitized_name;
switch (country_name) {
case "Cen African Rep":
sanitized_name = "Central African Republic";
break;
case "Congo-Brz":
sanitized_name = "Republic of the Congo";
break;
case "Congo/Zaire":
sanitized_name = "Democratic Republic of the Congo";
break;
case "Czech Rep":
sanitized_name = "Czech Republic";
break;
case "Dominican Rep":
sanitized_name = "Dominican Republic";
break;
case "Korea North":
sanitized_name = "North Korea";
break;
case "Korea South":
sanitized_name = "South Korea";
break;
case "UKG":
sanitized_name = "UK";
break;
default:
sanitized_name = country_name;
}
return sanitized_name;
};
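// Build question objects from the CoupCast CSV rows: for each country, keep only
// the row for the current month; only the yearly coup question is pushed (the
// monthly one is constructed but currently not used).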
async function processArray(countryArray) {
let results = [];
for (let country of countryArray) {
let url = `https://www.oneearthfuture.org/activities/coup-cast`;
// We don't really want the prediction for all months; one is enough
// console.log(country.month)
if (Number(country.month) == currentmonth) {
// Monthly
country.country_name = sanitizeCountryName(country.country_name);
let processedPrediction1 = {
title: `Will there be a coup in ${country.country_name} in the next month (as of ${country.month}/${country.year})?`,
url: url,
platform: "CoupCast",
description: `The current leader of ${country.country_name} is ${
country.leader_name
}, who has been in power for ${Number(country.leader_years).toFixed(
1
)} years. ${
country.country_name
} has a ${country.regime_type.toLowerCase()} regime type which has lasted for ${
country.regime_years
} years.`,
options: [
{
name: "Yes",
probability: country.month_risk,
type: "PROBABILITY",
},
{
name: "No",
probability: 1 - country.month_risk,
type: "PROBABILITY",
},
],
timestamp: new Date().toISOString(),
qualityindicators: {
stars: calculateStars("Coupcast", {}),
},
extra: {
country_name: country.country_name,
regime_type: country.regime_type,
month: country.month,
year: country.year,
leader_name: country.leader_name,
month_risk: country.month_risk,
annual_risk: country.annual_risk,
risk_change_percent: country.risk_change_percent,
regime_years: country.regime_years,
leader_years: country.leader_years,
country_code: country.country_code,
country_abb: country.country_abb,
},
};
// Yearly
let processedPrediction2 = {
title: `Will there be a coup in ${country.country_name} in the next year (as of ${country.month}/${country.year})?`,
url: url,
platform: "CoupCast",
description: `The current leader of ${country.country_name} is ${
country.leader_name
}, who has been in power for ${Number(country.leader_years).toFixed(
1
)} years. ${
country.country_name
} has a ${country.regime_type.toLowerCase()} regime type which has lasted for ${
country.regime_years
} years`,
options: [
{
name: "Yes",
probability: country.annual_risk,
type: "PROBABILITY",
},
{
name: "No",
probability: 1 - country.annual_risk,
type: "PROBABILITY",
},
],
timestamp: new Date().toISOString(),
qualityindicators: {
stars: calculateStars("CoupCast", {}),
},
extra: {
country_name: country.country_name,
regime_type: country.regime_type,
month: country.month,
year: country.year,
leader_name: country.leader_name,
month_risk: country.month_risk,
annual_risk: country.annual_risk,
risk_change_percent: country.risk_change_percent,
regime_years: country.regime_years,
leader_years: country.leader_years,
country_code: country.country_code,
country_abb: country.country_abb,
},
};
// results.push(processedPrediction1)
// Not pushing monthly
results.push(processedPrediction2);
}
}
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/elicit-questions.json', string);
await databaseUpsert(results, "coupcast-questions");
// console.log(results)
console.log("Done");
}
/* Body */
let filePath = "./data/coupcast-raw-download.csv"; // not used right now.
export async function coupcast() {
let csvContent = await axios
.get(coupCastEndpoint)
.then((query) => query.data);
await Papa.parse(csvContent, {
header: true,
complete: async (results) => {
console.log("Downloaded", results.data.length, "records.");
/* console.log(
JSON.stringify(
unique(results.data.map(country => country.country_name)),
null,
4
)
)*/
// console.log(results.data)
await processArray(results.data);
},
});
await sleep(1000); // needed to wait for Papaparse's callback to be executed.
}
// coupcast()

View File

@ -0,0 +1,283 @@
/* Imports */
import axios from "axios";
import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
import { Tabletojson } from "tabletojson";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { databaseUpsert } from "../utils/database-wrapper.js";
/* Definitions */
let htmlEndPoint = "https://www.cset-foretell.com/questions?page=";
String.prototype.replaceAll = function replaceAll(search, replace) {
return this.split(search).join(replace);
};
const DEBUG_MODE = "on"; // "off"
const SLEEP_TIME_RANDOM = 100; //5000 // milliseconds
const SLEEP_TIME_EXTRA = 0; //1000
/* Support functions */
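// Fetch one page of the CSET Foretell question list as HTML, authenticated via a session cookie.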
async function fetchPage(page, cookie) {
console.log(`Page #${page}`);
if (page == 1) {
cookie = cookie.split(";")[0]; // Interesting that it otherwise doesn't work :(
}
let urlEndpoint = htmlEndPoint + page;
console.log(urlEndpoint);
let response = await axios({
url: urlEndpoint,
method: "GET",
headers: {
"Content-Type": "text/html",
Cookie: cookie,
},
}).then((res) => res.data);
// console.log(response)
return response;
}
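// Scrape a single question's /stats page: extract the crowd forecast (binary or
// multiple-choice), the description, and the number of forecasts and forecasters
// from the raw HTML.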
async function fetchStats(questionUrl, cookie) {
let response = await axios({
url: questionUrl + "/stats",
method: "GET",
headers: {
"Content-Type": "text/html",
Cookie: cookie,
Referer: questionUrl,
},
}).then((res) => res.data);
if (response.includes("Sign up or sign in to forecast")) {
throw Error("Not logged in");
}
// Is binary?
let isbinary = response.includes("binary?&quot;:true");
// console.log(`is binary? ${isbinary}`)
let options = [];
if (isbinary) {
// Crowd percentage
let htmlElements = response.split("\n");
// DEBUG_MODE == "on" ? htmlLines.forEach(line => console.log(line)) : id()
let h3Element = htmlElements.filter((str) => str.includes("<h3>"))[0];
// DEBUG_MODE == "on" ? console.log(h5elements) : id()
let crowdpercentage = h3Element.split(">")[1].split("<")[0];
let probability = Number(crowdpercentage.replace("%", "")) / 100;
options.push(
{
name: "Yes",
probability: probability,
type: "PROBABILITY",
},
{
name: "No",
probability: +(1 - probability).toFixed(2), // avoids floating point shenanigans
type: "PROBABILITY",
}
);
} else {
try {
let optionsBody = response.split("tbody")[1]; // Previously [1], then previously [3] but they added a new table.
// console.log(optionsBody)
let optionsHtmlElement = "<table" + optionsBody + "table>";
let tablesAsJson = Tabletojson.convert(optionsHtmlElement);
let firstTable = tablesAsJson[0];
options = firstTable.map((element) => ({
name: element["0"],
probability: Number(element["1"].replace("%", "")) / 100,
type: "PROBABILITY",
}));
} catch (error) {
let optionsBody = response.split("tbody")[3]; // Catch if the error is related to table position
let optionsHtmlElement = "<table" + optionsBody + "table>";
let tablesAsJson = Tabletojson.convert(optionsHtmlElement);
let firstTable = tablesAsJson[0];
if (firstTable) {
options = firstTable.map((element) => ({
name: element["0"],
probability: Number(element["1"].replace("%", "")) / 100,
type: "PROBABILITY",
}));
} else {
// New type of question, tricky to parse the options
// Just leave options = [] for now.
// https://www.cset-foretell.com/blog/rolling-question-formats
}
}
}
// Description
let descriptionraw = response.split(`<meta name="description" content="`)[1];
let descriptionprocessed1 = descriptionraw.split(`">`)[0];
let descriptionprocessed2 = descriptionprocessed1.replace(">", "");
let descriptionprocessed3 = descriptionprocessed2.replace(
"To suggest a change or clarification to this question, please select Request Clarification from the green gear-shaped dropdown button to the right of the question.",
``
);
// console.log(descriptionprocessed3)
let descriptionprocessed4 = descriptionprocessed3.replaceAll(
"\r\n\r\n",
"\n"
);
let descriptionprocessed5 = descriptionprocessed4.replaceAll("\n\n", "\n");
let descriptionprocessed6 = descriptionprocessed5.replaceAll("&quot;", `"`);
let descriptionprocessed7 = descriptionprocessed6.replaceAll("&#39;", "'");
let descriptionprocessed8 = toMarkdown(descriptionprocessed7);
let description = descriptionprocessed8;
// Number of forecasts
//console.log(response)
//console.log(response.split("prediction_sets_count&quot;:")[1])
let numforecasts = response
.split("prediction_sets_count&quot;:")[1]
.split(",")[0];
// console.log(numforecasts)
// Number of predictors
let numforecasters = response
.split("predictors_count&quot;:")[1]
.split(",")[0];
// console.log(numpredictors)
let result = {
description: description,
options: options,
timestamp: new Date().toISOString(),
qualityindicators: {
numforecasts: Number(numforecasts),
numforecasters: Number(numforecasters),
stars: calculateStars("CSET-foretell", { numforecasts }),
},
};
return result;
}
function isSignedIn(html) {
let isSignedInBool = !(
html.includes("You need to sign in or sign up before continuing") ||
html.includes("Sign up")
);
if (!isSignedInBool) {
console.log("Error: Not signed in.");
}
console.log(`Signed in? ${isSignedInBool}`);
return isSignedInBool;
}
function isEnd(html) {
let isEndBool = html.includes("No questions match your filter");
if (isEndBool) {
//console.log(html)
}
console.log(`IsEnd? ${isEndBool}`);
return isEndBool;
}
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
/* Body */
async function csetforetell_inner(cookie) {
let i = 1;
let response = await fetchPage(i, cookie);
let results = [];
let init = Date.now();
// console.log("Downloading... This might take a couple of minutes. Results will be shown.")
while (!isEnd(response) && isSignedIn(response)) {
let htmlLines = response.split("\n");
// let h4elements = htmlLines.filter(str => str.includes("<h5> <a href=") || str.includes("<h4> <a href="))
let questionHrefs = htmlLines.filter((str) =>
str.includes("https://www.cset-foretell.com/questions/")
);
// console.log(questionHrefs)
if (process.env.DEBUG_MODE == "on" || DEBUG_MODE == "on") {
//console.log(response)
console.log("questionHrefs: ");
console.log(questionHrefs);
}
//console.log("")
//console.log("")
//console.log(h4elements)
for (let questionHref of questionHrefs) {
//console.log(h4element)
let elementSplit = questionHref.split('"><span>');
let url = elementSplit[0].split('<a href="')[1];
let title = elementSplit[1]
.replace("</h4>", "")
.replace("</h5>", "")
.replace("</span></a>", "");
await sleep(Math.random() * SLEEP_TIME_RANDOM + SLEEP_TIME_EXTRA); // don't be as noticeable
try {
let moreinfo = await fetchStats(url, cookie);
let question = {
title: title,
url: url,
platform: "CSET-foretell",
...moreinfo,
};
if (
i % 30 == 0 &&
!(process.env.DEBUG_MODE == "on" || DEBUG_MODE == "on")
) {
        console.log(`Page #${i}`);
console.log(question);
}
results.push(question);
if (process.env.DEBUG_MODE == "on" || DEBUG_MODE == "on") {
console.log(url);
console.log(question);
}
} catch (error) {
console.log(error);
console.log(
          `We encountered an error when fetching the URL: ${url}, so it won't appear in the final JSON`
);
}
}
i++;
//i=Number(i)+1
console.log(
"Sleeping for ~5secs so as to not be as noticeable to the cset-foretell servers"
);
await sleep(Math.random() * SLEEP_TIME_RANDOM + SLEEP_TIME_EXTRA); // don't be as noticeable
try {
response = await fetchPage(i, cookie);
} catch (error) {
console.log(error);
console.log(
        `The program encountered an error when fetching page #${i}, so it won't appear in the final JSON. It is possible that this page wasn't actually a prediction question page`
);
}
}
// let string = JSON.stringify(results,null, 2)
// fs.writeFileSync('./data/csetforetell-questions.json', string);
// console.log(results)
if (results.length > 0) {
await databaseUpsert(results, "csetforetell-questions");
} else {
console.log("Not updating results, as process was not signed in");
}
let end = Date.now();
let difference = end - init;
console.log(
`Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
);
}
export async function csetforetell() {
let cookie = process.env.CSETFORETELL_COOKIE || getSecret("csetforetell");
await applyIfSecretExists(cookie, csetforetell_inner);
}

View File

@ -0,0 +1,126 @@
/* Imports */
import fs from "fs";
import axios from "axios";
import Papa from "papaparse";
import open from "open";
import readline from "readline";
import { calculateStars } from "../utils/stars.js";
import { databaseUpsert } from "../utils/database-wrapper.js";
/* Definitions */
let elicitEndpoint =
"https://elicit.org/api/v1/binary-questions/csv?binaryQuestions.resolved=false&binaryQuestions.search=&binaryQuestions.sortBy=popularity&predictors=community";
/* Support functions */
let avg = (array) =>
array.reduce((a, b) => Number(a) + Number(b)) / array.length;
let unique = (arr) => [...new Set(arr)];
let sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
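// Group the Elicit CSV rows by question title, aggregate the community predictions,
// and keep only questions with at least 10 distinct forecasters; the community
// probability is the plain average of the individual predictions.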
async function processArray(arrayQuestions) {
let questions = arrayQuestions.map((question) => question.questionTitle);
let uniqueQuestions = unique(questions);
let questionsObj = {};
uniqueQuestions.forEach((questionTitle) => {
questionsObj[questionTitle] = {
title: questionTitle,
forecasters: [],
forecasts: [],
};
});
arrayQuestions.forEach((question) => {
// console.log(question.questionTitle)
let questionTitle = question.questionTitle;
let correspondingQuestion = questionsObj[questionTitle];
let forecasters = correspondingQuestion.forecasters.concat(
question.predictionCreator
);
let forecasts = correspondingQuestion.forecasts.concat(question.prediction);
questionsObj[questionTitle] = {
forecasters,
forecasts,
};
});
let results = [];
for (let question in questionsObj) {
let title = question;
let forecasters = questionsObj[question].forecasters;
let numforecasters = unique(forecasters).length;
if (numforecasters >= 10) {
let url = `https://elicit.org/binary?binaryQuestions.search=${title.replace(
/ /g,
"%20"
)}&binaryQuestions.sortBy=popularity&limit=20&offset=0`;
let forecasts = questionsObj[question].forecasts;
//console.log(forecasts)
//console.log(avg(forecasts))
let probability = avg(forecasts) / 100;
let numforecasts = forecasts.length;
let standardObj = {
title: title,
url: url,
platform: "Elicit",
options: [
{
name: "Yes",
probability: probability,
type: "PROBABILITY",
},
{
name: "No",
probability: 1 - probability,
type: "PROBABILITY",
},
],
timestamp: new Date().toISOString(),
qualityindicators: {
numforecasts: Number(numforecasts),
numforecasters: Number(numforecasters),
stars: calculateStars("Elicit", {}),
},
};
results.push(standardObj);
}
}
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/elicit-questions.json', string);
await databaseUpsert(results, "elicit-questions");
console.log("Done");
}
async function awaitdownloadconfirmation(message, callback) {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
rl.question(message, (answer) => {
//console.log("Received");
rl.close();
callback();
});
}
/* Body */
let filePath = "./data/elicit-binary_export.csv";
export async function elicit() {
let csvContent = await axios.get(elicitEndpoint).then((query) => query.data);
await Papa.parse(csvContent, {
header: true,
complete: async (results) => {
console.log("Downloaded", results.data.length, "records.");
//resolve(results.data);
//console.log(results.data)
await processArray(results.data);
},
});
await sleep(5000); // needed to wait for Papaparse's callback to be executed.
}
//elicit()

View File

@ -0,0 +1,38 @@
import fs from "fs";
import { databaseUpsert } from "../utils/database-wrapper.js";
import { calculateStars } from "../utils/stars.js";
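// Generate one Estimize link entry per S&P 500 company, read from a local CSV
// of ticker symbols and company names.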
export async function estimize() {
let data = fs.readFileSync(
"./input/s-and-p-500-companies/companies.csv",
"utf8"
);
let splitData = data.split("\n");
let results = [];
for (let datum of splitData) {
if (datum != "") {
//console.log(datum)
let datumSplit = datum.split(",");
let companyStickerSymbol = datumSplit[0];
let companyName = datumSplit[1];
let standardObj = {
title: `Estimates for ${companyName} (${companyStickerSymbol})`,
url: `https://www.estimize.com/${companyStickerSymbol.toLowerCase()}`,
platform: "Estimize",
        description: `A link to Estimize's forecasts for *${companyName}* (ticker symbol ${companyStickerSymbol}). Viewing them requires making a prediction; Wall Street estimates are provided for free`,
options: [],
timestamp: new Date().toISOString(),
qualityindicators: {
stars: calculateStars("Estimize", {}),
},
};
results.push(standardObj);
}
}
// console.log(results)
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/estimize-questions.json', string);
await databaseUpsert(results, "estimize-questions");
}
//estimize()

View File

@ -0,0 +1,205 @@
/* Imports */
import fs from "fs";
import axios from "axios";
import https from "https";
import fetch from "isomorphic-fetch";
import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { databaseUpsert } from "../utils/database-wrapper.js";
/* Definitions */
let hypermindEnpoint1 = "https://predict.hypermind.com/dash/jsx.json";
let hypermindEnpoint2 = "https://prod.hypermind.com/ngdp-jsx/jsx.json";
const insecureHttpsAgent = new https.Agent({
  rejectUnauthorized: false, // NOTE: this disables TLS certificate verification
});
/* Support Functions */
String.prototype.replaceAll = function replaceAll(search, replace) {
return this.split(search).join(replace);
};
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
/* Fetchers */
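// Fetch the question list for a prediction-market dashboard slug by POSTing a
// url-encoded jsx payload to the Hypermind dash endpoint.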
async function fetchHypermindData1(slug) {
let jsx = `jsx=%5B%5B%22dataMgr%22%2C%22getGQList%22%2C%7B%22listName%22%3A%20%22${slug}%22%2C%22format%22%3A%20%7B%22props%22%3A%20true%2C%22posts%22%3A%20true%2C%22cond%22%3A%20%7B%22props%22%3A%20true%2C%22otcm%22%3A%20%7B%22tradingHistory%22%3A%20true%2C%22props%22%3A%20true%7D%7D%2C%22otcm%22%3A%20%7B%22tradingHistory%22%3A%20true%2C%22props%22%3A%20true%7D%7D%7D%5D%5D`;
// console.log(jsx)
  let response = await axios(hypermindEnpoint1, {
credentials: "omit",
headers: {
"User-Agent":
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:90.0) Gecko/20100101 Firefox/90.0",
Accept:
"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Upgrade-Insecure-Requests": "1",
"Sec-Fetch-Dest": "document",
"Sec-Fetch-Mode": "navigate",
"Sec-Fetch-Site": "none",
"Sec-Fetch-User": "?1",
"Cache-Control": "max-age=0",
},
referrer: `https://predict.hypermind.com/dash/dash/dash.html?list=${slug}`,
data: jsx,
method: "POST",
mode: "cors",
httpsAgent: insecureHttpsAgent,
}).then((response) => response.data[0].questions);
//console.log(response)
return response;
}
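// Fetch the questions (IFP items) of a Hypermind showcase dashboard, including
// forecaster counts and crowd forecasts.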
async function fetchHypermindDataShowcases(slug, cookie) {
let response = await axios(hypermindEnpoint2, {
credentials: "include",
headers: {
"User-Agent": "",
Accept: "*/*",
"Accept-Language": "en-US,en;q=0.5",
"Content-Type": "application/json; charset=UTF-8",
//"Cookie": cookie
},
referrer:
"https://prod.hypermind.com/ngdp/en/showcase/showcase.html?inFrame=true",
data: `[["showcase","getShowcase",{"showcase":"${slug}","fmt":{"fcsterCnt":true,"crowdFcst":true,"crowdFcstHist":false}}]]`,
method: "POST",
mode: "cors",
httpsAgent: insecureHttpsAgent,
})
.then((resp) => resp.data[0].items)
.then((items) => items.filter((item) => item.type == "IFP"))
.then((items) => items.map((item) => item.IFP));
// console.log(response)
// response.forEach(item => delete item.crowdFcstHist)
return response;
}
/* Body */
async function hypermind_inner(cookie) {
// Hypermind panelists and competitors; dashboard type two: "showcase"
// https://prod.hypermind.com/ngdp/fr/showcase2/showcase.html?sc=SLUG
// E.g., https://prod.hypermind.com/ngdp/fr/showcase2/showcase.html?sc=AI2023
let slugs2 = ["AI2030", "Covid19", "DOSES", "H5N8", "NGDP", "JSAI", "AI2023"]; // []
let results2 = [];
for (let slug of slugs2) {
console.log(slug);
await sleep(1000 + Math.random() * 1000);
let response = await fetchHypermindDataShowcases(slug);
let objs = response.map((result) => {
let descriptionraw = result.props.details.split("<hr size=1>")[0];
let descriptionprocessed1 = toMarkdown(descriptionraw);
let descriptionprocessed2 = descriptionprocessed1.replaceAll(
"![image] ()",
""
);
let descriptionprocessed3 = descriptionprocessed2.replaceAll(
" Forecasting Schedule ",
""
);
let descriptionprocessed4 = descriptionprocessed3
.replaceAll("\n", " ")
.replaceAll(" ", " ");
let descriptionprocessed5 = descriptionprocessed4.replaceAll(
"Context:",
""
);
let description =
descriptionprocessed5 || toMarkdown(result.props.details);
return {
title: result.props.title,
url:
"https://prod.hypermind.com/ngdp/fr/showcase2/showcase.html?sc=" +
slug,
platform: "Hypermind",
description: description,
options: [],
timestamp: new Date().toISOString(),
qualityindicators: {
stars: calculateStars("Hypermind", {}),
numforecasters: Number(result.fcsterCnt),
},
};
});
// console.log(objs)
results2.push(...objs);
}
// Prediction markets; dashboard type one.
// https://predict.hypermind.com/dash/dash/dash.html?list=SLUG
// e.g., https://predict.hypermind.com/dash/dash/dash.html?list=POL
let slugs1 = ["USA", "FRA", "AFR", "INT", "COV", "POL", "ECO"]; // []
let results1 = [];
for (let slug of slugs1) {
console.log(slug);
await sleep(2000 + Math.random() * 2000);
let result = await fetchHypermindData1(slug);
let objs = result.map((res) => {
let descriptionraw = res.props.details;
let descriptionprocessed1 = descriptionraw.split("%%fr")[0];
let descriptionprocessed2 = descriptionprocessed1.replaceAll(
"<BR>",
"\n"
);
let descriptionprocessed3 = descriptionprocessed2.replace("%%en:", "");
let descriptionprocessed4 = descriptionprocessed3.replace(
`Shares of the correct outcome will be worth 100<sup></sup>, while the others will be worthless (0<sup></sup>).<p>`,
""
);
let descriptionprocessed5 = toMarkdown(descriptionprocessed4);
let description = descriptionprocessed5
.replaceAll("\n", " ")
.replaceAll(" ", " ");
//console.log(res.otcms)
//let percentage = (res.otcms.length==2) ? Number(res.otcms[0].price).toFixed(0) +"%" : "none"
let options = res.otcms.map((option) => ({
name: option.props.title.split("%%fr")[0].replaceAll("%%en:", ""),
probability: Number(option.price) / 100,
type: "PROBABILITY",
}));
return {
title: res.props.title.split("%%fr")[0].replace("%%en:", ""),
url: "https://predict.hypermind.com/dash/dash/dash.html?list=" + slug,
platform: "Hypermind",
description: description,
options: options,
timestamp: new Date().toISOString(),
qualityindicators: {
stars: calculateStars("Hypermind", {}),
// "numforecasters": res.fcsterCnt
},
};
});
// console.log(objs)
results1.push(...objs);
}
let resultsTotal = [...results1, ...results2];
let distinctTitles = [];
let resultsTotalUnique = [];
for (let result of resultsTotal) {
if (!distinctTitles.includes(result["title"])) {
resultsTotalUnique.push(result);
distinctTitles.push(result["title"]);
}
}
// console.log(resultsTotal)
// console.log(resultsTotalUnique)
console.log(resultsTotalUnique.length, "results");
// let string = JSON.stringify(resultsTotalUnique, null, 2)
// fs.writeFileSync('./data/hypermind-questions.json', string);
await databaseUpsert(resultsTotalUnique, "hypermind-questions");
}
//hypermind()
export async function hypermind() {
let cookie = process.env.HYPERMINDCOOKIE || getSecret("hypermind");
await applyIfSecretExists(cookie, hypermind_inner);
}

View File

@ -1,23 +1,23 @@
/* Imports */ /* Imports */
import fs from 'fs' import fs from "fs";
import axios from "axios" import axios from "axios";
import { calculateStars } from "../../utils/stars.js" import { calculateStars } from "../../utils/stars.js";
import {databaseUpsert} from "../../utils/database-wrapper.js" import { databaseUpsert } from "../../utils/database-wrapper.js";
/* Definitions */ /* Definitions */
let graphQLendpoint = "https://api.thegraph.com/subgraphs/name/protofire/omen" let graphQLendpoint = "https://api.thegraph.com/subgraphs/name/protofire/omen";
// "https://gateway.thegraph.com/api/[api-key]/subgraphs/id/0x0503024fcc5e1bd834530e69d592dbb6e8c03968-0" // "https://gateway.thegraph.com/api/[api-key]/subgraphs/id/0x0503024fcc5e1bd834530e69d592dbb6e8c03968-0"
// 'https://api.thegraph.com/subgraphs/name/protofire/omen' // 'https://api.thegraph.com/subgraphs/name/protofire/omen'
// https://github.com/protofire/omen-subgraph // https://github.com/protofire/omen-subgraph
// https://thegraph.com/explorer/subgraph/protofire/omen // https://thegraph.com/explorer/subgraph/protofire/omen
async function fetchAllContractData() { async function fetchAllContractData() {
let daysSinceEra = Math.round(Date.now() / (1000 * 24 * 60 * 60)) - 50 // last 30 days let daysSinceEra = Math.round(Date.now() / (1000 * 24 * 60 * 60)) - 50; // last 30 days
let response = await axios({ let response = await axios({
url: graphQLendpoint, url: graphQLendpoint,
method: 'POST', method: "POST",
headers: ({ 'Content-Type': 'application/json' }), headers: { "Content-Type": "application/json" },
data: JSON.stringify(({ data: JSON.stringify({
query: ` query: `
{ {
fixedProductMarketMakers(first: 1000, fixedProductMarketMakers(first: 1000,
@ -37,63 +37,65 @@ async function fetchAllContractData() {
resolutionTimestamp resolutionTimestamp
} }
} }
` `,
})), }),
}) })
.then(res => res.data) .then((res) => res.data)
.then(res => res.data.fixedProductMarketMakers) .then((res) => res.data.fixedProductMarketMakers);
console.log(response) console.log(response);
return response return response;
} }
async function fetch_all() { async function fetch_all() {
let allData = await fetchAllContractData() let allData = await fetchAllContractData();
let results = [] let results = [];
for (let data of allData) { for (let data of allData) {
if (
if (data.question != null & (data.question != null) &
data.usdLiquidityMeasure != '0' & (data.usdLiquidityMeasure != "0") &
data.resolutionTimestamp == null & (data.resolutionTimestamp == null) &
data.question.title != "ssdds") { (data.question.title != "ssdds")
) {
// console.log(data) // console.log(data)
// console.log(data.usdLiquidityMeasure) // console.log(data.usdLiquidityMeasure)
let options = data.outcomeTokenMarginalPrices.map((price, slotnum) => { let options = data.outcomeTokenMarginalPrices.map((price, slotnum) => {
let name = `Option ${slotnum}` let name = `Option ${slotnum}`;
if (data.outcomeTokenMarginalPrices.length == 2 && slotnum == 0) name = "Yes" if (data.outcomeTokenMarginalPrices.length == 2 && slotnum == 0)
if (data.outcomeTokenMarginalPrices.length == 2 && slotnum == 1) name = "No" name = "Yes";
return ({ if (data.outcomeTokenMarginalPrices.length == 2 && slotnum == 1)
"name": name, name = "No";
"probability": Number(price), return {
"type": "PROBABILITY" name: name,
}) probability: Number(price),
}) type: "PROBABILITY",
};
});
let obj = { let obj = {
"title": data.question.title, title: data.question.title,
"url": "https://omen.eth.link/#/" + data.id, url: "https://omen.eth.link/#/" + data.id,
"platform": "Omen", platform: "Omen",
"description": "", description: "",
"options": options, options: options,
"timestamp": new Date().toISOString(), timestamp: new Date().toISOString(),
"qualityindicators": { qualityindicators: {
"stars": calculateStars("Omen", ({})) stars: calculateStars("Omen", {}),
} },
} };
// console.log(obj) // console.log(obj)
results.push(obj) results.push(obj);
} }
} }
return results return results;
} }
/* Body */ /* Body */
export async function omen() { export async function omen() {
let results = await fetch_all() let results = await fetch_all();
// console.log(result) // console.log(result)
// let string = JSON.stringify(results, null, 2) // let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/omen-questions.json', string); // fs.writeFileSync('./data/omen-questions.json', string);
await databaseUpsert(results, "omen-questions") await databaseUpsert(results, "omen-questions");
console.log("Done") console.log("Done");
} }
//omen() //omen()

View File

@ -0,0 +1,151 @@
/* Imports */
import axios from "axios";
import fs from "fs";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { databaseUpsert } from "../utils/database-wrapper.js";
/* Definitions */
let endpoint = "https://sports.williamhill.com/betting/en-gb/politics";
// <header class="header-dropdown header-dropdown--large -expanded" data-id="
/* Support functions */
async function fetchUrl(url) {
let response = await axios(url, {
credentials: "include",
headers: {
"User-Agent":
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:86.0) Gecko/20100101 Firefox/86.0",
Accept:
"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"Upgrade-Insecure-Requests": "1",
"Cache-Control": "max-age=0",
},
method: "GET",
mode: "cors",
}).then((response) => response.data);
return response;
}
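// Parse the politics page HTML by splitting on known markup markers: the first
// chunk is a special market handled separately, the rest are regular markets
// whose options are normalized so their probabilities sum to 1.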
let processResults = (html) => {
let results = [];
let chunks = html.split(
'<header class="header-dropdown header-dropdown--large -expanded" data-id="'
);
chunks.shift();
// Kamala Special
let kamalaspecial = chunks[0];
let kamalamarkets = kamalaspecial.split(
'<div class="btmarket__selection"><p class="btmarket__name"><span>'
);
kamalamarkets.shift();
for (let kamalamarket of kamalamarkets) {
let title = kamalamarket.split("</span>")[0];
let numerator = Number(kamalamarket.split('data-num="')[1].split('"')[0]);
let denominator = Number(
kamalamarket.split('data-denom="')[1].split('"')[0]
);
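    // Convert fractional odds (data-num / data-denom) into an implied probability.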
let probability = denominator / (numerator + denominator);
let obj = {
title: title,
url: "https://sports.williamhill.com/betting/en-gb/politics",
platform: "WilliamHill",
description: "",
timestamp: new Date().toISOString(),
options: [
{
name: "Yes",
probability: probability,
type: "PROBABILITY",
},
{
name: "No",
probability: 1 - probability,
type: "PROBABILITY",
},
],
qualityindicators: {
stars: calculateStars("WilliamHill", {}),
},
};
results.push(obj);
}
chunks.shift();
// Deal with the other markets
for (let chunk of chunks) {
let title = chunk.split('"')[0];
let title2 = chunk.split('<a title="')[1].split('"')[0];
title = title.length > title2.length ? title : title2;
let options = [];
let alternatives = chunk.split(
'<div class="btmarket__selection"><p class="btmarket__name"><span>'
);
alternatives.shift();
for (let alternative of alternatives) {
let optionName = alternative.split("</span>")[0];
let numerator = Number(alternative.split('data-num="')[1].split('"')[0]);
let denominator = Number(
alternative.split('data-denom="')[1].split('"')[0]
);
let option = {
name: optionName,
probability: denominator / (numerator + denominator),
type: "PROBABILITY",
};
options.push(option);
}
// normalize probabilities
let totalValue = options
.map((element) => Number(element.probability))
.reduce((a, b) => a + b, 0);
options = options.map((element) => ({
...element,
probability: Number(element.probability) / totalValue,
}));
// Filter very unlikely probabilities: Not here, but on the front end
// options = options.filter(element => element.probability > 0.02)
let obj = {
title: title,
url: "https://sports.williamhill.com/betting/en-gb/politics",
platform: "WilliamHill",
options: options,
qualityindicators: {
stars: calculateStars("WilliamHill", {}),
},
};
results = results.filter(
(result) => result.title.length > 4 && result.title != "2024 or later"
);
    // Removes some predictions that are hard to parse.
results.push(obj);
}
//console.log(results)
return results;
};
let processhtml2 = (html) => {
html.split();
};
/* Body */
export async function williamhill() {
let response = await fetchUrl(
"https://sports.williamhill.com/betting/en-gb/politics"
);
let results = processResults(response);
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/williamhill-questions.json', string);
await databaseUpsert(results, "williamhill-questions");
  console.log(results.sort((a, b) => (a.title > b.title ? 1 : -1)));
console.log("Done");
}
//williamhill()

View File

@ -1,9 +1,8 @@
/* Imports */ /* Imports */
import fs from "fs";
import axios from "axios"; import axios from "axios";
import toMarkdown from "../utils/toMarkdown.js"; import fs from "fs";
import { calculateStars } from "../utils/stars.js";
import { databaseUpsert } from "../database/database-wrapper.js"; import { databaseUpsert } from "../database/database-wrapper.js";
import { calculateStars } from "../utils/stars.js";
/* Definitions */ /* Definitions */
let locationData = "./data/"; let locationData = "./data/";
@ -24,7 +23,7 @@ async function fetchPage(url) {
/* Body */ /* Body */
async function main1() { async function main1() {
let rawdata = fs.readFileSync("./src/input/givewellopenphil-urls.txt"); let rawdata = fs.readFileSync("./input/givewellopenphil-urls.txt");
let data = rawdata let data = rawdata
.toString() .toString()
.split("\n") .split("\n")
@ -72,10 +71,12 @@ async function main1() {
// main1() // main1()
async function main2() { async function main2() {
let rawdata = fs.readFileSync("./src/input/givewellopenphil-questions.json"); let rawdata = fs.readFileSync("./input/givewellopenphil-questions.json");
let data = JSON.parse(rawdata); let data = JSON.parse(rawdata);
let dataWithDate = data.map(datum => ({...datum, timestamp: '2021-02-23'})) let dataWithDate = data.map((datum) => ({
...datum,
timestamp: "2021-02-23",
}));
await databaseUpsert({ group: "givewellopenphil", contents: dataWithDate }); await databaseUpsert({ group: "givewellopenphil", contents: dataWithDate });
} }
main2(); main2();

View File

@ -224,7 +224,6 @@ async function goodjudgmentopen_inner(cookie) {
console.log(results); console.log(results);
if (results.length > 0) { if (results.length > 0) {
await databaseUpsert({ contents: results, group: "goodjudmentopen" }); await databaseUpsert({ contents: results, group: "goodjudmentopen" });
} else { } else {
console.log("Not updating results, as process was not signed in"); console.log("Not updating results, as process was not signed in");
} }

View File

@ -1,9 +1,8 @@
/* Imports */ /* Imports */
import axios from "axios"; import axios from "axios";
import fs from "fs";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { databaseUpsert } from "../database/database-wrapper.js"; import { databaseUpsert } from "../database/database-wrapper.js";
import { calculateStars } from "../utils/stars.js";
import toMarkdown from "../utils/toMarkdown.js";
/* Definitions */ /* Definitions */
let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page="; let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page=";

View File

Can't render this file because it contains an unexpected character in line 3 and column 50.

View File

@ -0,0 +1,50 @@
/* Imports */
import fs from "fs";
import { databaseReadWithReadCredentials } from "../database-wrapper.js";
/* Definitions */
/* Utilities */
/* Support functions */
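// Format a forecast's quality indicators as "key: value" pairs joined by "; ",
// for use as a single TSV column.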
let getQualityIndicators = (forecast) =>
Object.entries(forecast.qualityindicators)
.map((entry) => `${entry[0]}: ${entry[1]}`)
.join("; ");
/* Body */
let main = async () => {
let highQualityPlatforms = [
"CSET-foretell",
"Foretold",
"Good Judgment Open",
"Metaculus",
"PredictIt",
"Rootclaim",
];
let json = await databaseReadWithReadCredentials({ group: "combined" });
console.log(json.length);
//let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
//console.log(uniquePlatforms)
let forecastsFromGoodPlatforms = json.filter((forecast) =>
highQualityPlatforms.includes(forecast.platform)
);
let tsv =
"index\ttitle\turl\tqualityindicators\n" +
forecastsFromGoodPlatforms
.map((forecast, index) => {
let row = `${index}\t${forecast.title}\t${
forecast.url
}\t${getQualityIndicators(forecast)}`;
console.log(row);
return row;
})
.join("\n");
//console.log(tsv)
// let string = JSON.stringify(json, null, 2)
fs.writeFileSync("metaforecasts.tsv", tsv);
};
main();

View File

@ -0,0 +1,55 @@
/* Imports */
import fs from "fs";
import { databaseReadWithReadCredentials } from "../database-wrapper.js";
/* Definitions */
/* Utilities */
/* Support functions */
let getQualityIndicators = (forecast) =>
Object.entries(forecast.qualityindicators)
.map((entry) => `${entry[0]}: ${entry[1]}`)
.join("; ");
let shuffleArray = (array) => {
// See: https://stackoverflow.com/questions/2450954/how-to-randomize-shuffle-a-javascript-array
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
return array;
};
/* Body */
let main = async () => {
let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
let json = await databaseReadWithReadCredentials({ group: "combined" });
console.log(json.length);
//let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
//console.log(uniquePlatforms)
let forecastsFromGoodPlatforms = json.filter((forecast) =>
highQualityPlatforms.includes(forecast.platform)
);
let forecastsFromGoodPlatformsShuffled = shuffleArray(
forecastsFromGoodPlatforms
);
let tsv =
"index\ttitle\turl\tqualityindicators\n" +
forecastsFromGoodPlatforms
.map((forecast, index) => {
let row = `${index}\t${forecast.title}\t${
forecast.url
}\t${getQualityIndicators(forecast)}`;
console.log(row);
return row;
})
.join("\n");
//console.log(tsv)
// let string = JSON.stringify(json, null, 2)
fs.writeFileSync("metaforecasts_metaculus_v2.tsv", tsv);
};
main();

View File

@ -0,0 +1,27 @@
import fs from "fs";
export function getSecret(property) {
let answer = 0;
try {
let rawcookie = fs.readFileSync("./input/secrets.json");
let cookie = JSON.parse(rawcookie);
if (cookie[property]) {
answer = cookie[property];
console.log(`Got cookie: ${answer.slice(0, 5)}...`);
}
} catch (error) {
console.log(error);
}
return answer;
}
export async function applyIfSecretExists(cookie, fun) {
if (cookie) {
await fun(cookie);
  } else {
console.log(
`Cannot proceed with ${fun.name} because cookie does not exist`
);
throw new Error(`No cookie for ${fun.name}`);
}
}

View File

@ -1,18 +1,17 @@
## GiveWell ## GiveWell
wget --recursive --no-clobber --html-extension --domains givewell.org --follow-tags=a --reject '*.js,*.css,*.ico,*.txt,*.gif,*.jpg,*.jpeg,*.png,*.mp3,*.mp4,*.pdf,*.tgz,*.flv,*.avi,*.mpeg,*.iso,*.xls,*.xlsx,*.csv,*.doc,*.docx,*.mpa,*mp4' --ignore-tags=img,link,script --header="Accept: text/html" --no-parent https://www.givewell.org wget --recursive --no-clobber --html-extension --domains givewell.org --follow-tags=a --reject '_.js,_.css,_.ico,_.txt,_.gif,_.jpg,_.jpeg,_.png,_.mp3,_.mp4,_.pdf,_.tgz,_.flv,_.avi,_.mpeg,_.iso,_.xls,_.xlsx,_.csv,_.doc,_.docx,_.mpa,\*mp4' --ignore-tags=img,link,script --header="Accept: text/html" --no-parent https://www.givewell.org
grep -ri "Internal forecast" -E "prediction|Prediction|forecast|Forecast" * | sed 's/^/https:\/\/www.givewell.org\//' > searchresults.txt grep -ri "Internal forecast" -E "prediction|Prediction|forecast|Forecast" \* | sed 's/^/https:\/\/www.givewell.org\//' > searchresults.txt
grep -ril "Internal forecast" -E "prediction|Prediction|forecast|Forecast" * > searchresults.txt grep -ril "Internal forecast" -E "prediction|Prediction|forecast|Forecast" \* > searchresults.txt
cat searchresults.txt cat searchresults.txt
cat searchresults.txt | sed 's/^/https:\/\/www.givewell.org\//' > searchresults2.txt cat searchresults.txt | sed 's/^/https:\/\/www.givewell.org\//' > searchresults2.txt
cat searchresults2.txt cat searchresults2.txt
grep -v "print" searchresults2.txt > searchresults3.txt grep -v "print" searchresults2.txt > searchresults3.txt
while read line; do while read line; do
firefox --new-tab "$line" firefox --new-tab "$line"
done < searchresults3.txt done < searchresults3.txt
We are experimenting with recording explicit numerical forecasts of the probability of events related to our decision-making (especially grant-making). The idea behind this is to pull out the implicit predictions that are playing a role in our decisions, and to make it possible for us to look back on how well-calibrated and accurate those predictions were. We are experimenting with recording explicit numerical forecasts of the probability of events related to our decision-making (especially grant-making). The idea behind this is to pull out the implicit predictions that are playing a role in our decisions, and to make it possible for us to look back on how well-calibrated and accurate those predictions were.
@ -32,7 +31,7 @@ Divide by h2, then pull the second which has forecasts
## OpenPhil ## OpenPhil
wget --recursive --no-clobber --html-extension --domains www.openphilanthropy.org --follow-tags=a --reject '*.js,*.css,*.ico,*.txt,*.gif,*.jpg,*.jpeg,*.png,*.mp3,*.mp4,*.pdf,*.tgz,*.flv,*.avi,*.mpeg,*.iso,*.xls,*.xlsx,*.csv,*.doc,*.docx,*.mpa,*mp4' --ignore-tags=img,link,script --header="Accept: text/html" --no-parent https://www.openphilanthropy.org wget --recursive --no-clobber --html-extension --domains www.openphilanthropy.org --follow-tags=a --reject '_.js,_.css,_.ico,_.txt,_.gif,_.jpg,_.jpeg,_.png,_.mp3,_.mp4,_.pdf,_.tgz,_.flv,_.avi,_.mpeg,_.iso,_.xls,_.xlsx,_.csv,_.doc,_.docx,_.mpa,\*mp4' --ignore-tags=img,link,script --header="Accept: text/html" --no-parent https://www.openphilanthropy.org
Find and delete largest files Find and delete largest files
du -a . | sort -n -r | head -n 20 du -a . | sort -n -r | head -n 20
@ -40,8 +39,7 @@ find . -xdev -type f -size +100M
find . -type f -exec du -s {} \; | sort -r -k1,1n | head -n 20 find . -type f -exec du -s {} \; | sort -r -k1,1n | head -n 20
grep -ril -E "Internal forecast" \* > searchresults.txt
grep -ril -E "Internal forecast" * > searchresults.txt
grep -v "print" searchresults.txt > searchresults2.txt grep -v "print" searchresults.txt > searchresults2.txt

View File

@ -0,0 +1,52 @@
/* Imports */
import fs from "fs";
/* Definitions */
let locationData = "../../data/";
/* Body */
let rawdata = fs.readFileSync(
"/home/nuno/Documents/core/software/fresh/js/metaforecasts/metaforecasts-current/data/xrisk-questions-raw.json"
);
let data = JSON.parse(rawdata);
let results = [];
for (let datum of data) {
let probability = datum["probability"];
let description = datum["actualEstimate"]
? `Actual estimate: ${datum["actualEstimate"]}
${datum["description"]}`
: datum["description"];
let author = `${datum["platform"]} (~${datum["date_approx"]})`;
let result = {
title: datum["title"],
url: datum["url"],
platform: "X-risk estimates",
author: author,
description: description,
options: [
{
name: "Yes",
probability: probability,
type: "PROBABILITY",
},
{
name: "No",
probability: 1 - probability,
type: "PROBABILITY",
},
],
timestamp: new Date().toISOString(),
qualityindicators: {
stars: 2, //datum["stars"]
},
};
results.push(result);
}
let string = JSON.stringify(results, null, 2);
fs.writeFileSync(
"/home/nuno/Documents/core/software/fresh/js/metaforecasts/metaforecasts-current/data/xrisk-questions-new.json",
string
);

View File

@ -0,0 +1,36 @@
/* Imports */
import fs from "fs";
/* Definitions */
let locationData = "../../data/";
/* Body */
let rawdata = fs.readFileSync(
"/home/nuno/Documents/core/software/fresh/js/metaforecasts/metaforecasts-mongo/src/input/xrisk-questions.json"
);
let data = JSON.parse(rawdata);
let results = [];
for (let datum of data) {
let result = {
title: datum["title"],
url: datum["url"],
platform: "X-risk estimates",
moreoriginsdata: {
author: datum.author,
},
description: datum.description,
options: datum.options,
timestamp: datum.timestamps,
qualityindicators: {
stars: 2, //datum["stars"]
},
};
results.push(result);
}
let string = JSON.stringify(results, null, 2);
fs.writeFileSync(
"/home/nuno/Documents/core/software/fresh/js/metaforecasts/metaforecasts-mongo/src/input/xrisk-questions-new2.json",
string
);

View File

@ -0,0 +1,57 @@
/* Imports */
import fs from "fs";
import { databaseReadWithReadCredentials } from "../database-wrapper.js";
/* Definitions */
let locationData = "./data/";
/* Body */
// let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
async function main() {
let data = await databaseReadWithReadCredentials({ group: "combined" }); //JSON.parse(rawdata)
let processDescription = (description) => {
if (description == null || description == undefined || description == "") {
return "";
} else {
description =
description == null
? ""
: description
.replaceAll("] (", "](")
.replaceAll(") )", "))")
.replaceAll("( [", "([")
.replaceAll(") ,", "),")
.replaceAll("\n", " ");
if (description.length > 1000) {
return description.slice(0, 1000) + "...";
} else {
return description;
}
}
};
let results = [];
for (let datum of data) {
// do something
let description = processDescription(datum["description"]);
let forecasts = datum["qualityindicators"]
? datum["qualityindicators"].numforecasts
: "unknown";
let stars = datum["qualityindicators"]
? datum["qualityindicators"].stars
: 2;
results.push("Title: " + datum["title"]);
results.push("URL: " + datum["url"]);
results.push("Platform: " + datum["platform"]);
results.push("Description: " + description);
results.push("Number of forecasts: " + forecasts);
results.push("Stars: " + forecasts);
results.push("\n");
}
let string = results.join("\n");
string = string.replaceAll("\n\n", "\n");
fs.writeFileSync("elicit-output.txt", string);
}
main();

View File

@ -0,0 +1,17 @@
/* Imports */
import fs from "fs";
/* Definitions */
let locationData = "./data/";
/* Body */
let rawdata = fs.readFileSync("../data/merged-questions.json");
let data = JSON.parse(rawdata);
let results = [];
for (let datum of data) {
// do something
}
let string = JSON.stringify(results, null, 2);
fs.writeFileSync("../data/output.txt", string);

View File

@ -0,0 +1,12 @@
import fs from "fs";
import axios from "axios";
let elicitEndpoint =
"https://elicit.org/api/v1/binary-questions/csv?binaryQuestions.resolved=false&binaryQuestions.search=&binaryQuestions.sortBy=popularity&predictors=community";
let main = async () => {
let response = await axios.get(elicitEndpoint).then((query) => query.data);
console.log(response);
};
main();

View File

@ -0,0 +1,25 @@
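// Rough estimate of an object's in-memory size, in megabytes, obtained by walking
// its values and summing approximate primitive sizes.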
export function roughSizeOfObject(object) {
var objectList = [];
var stack = [object];
var bytes = 0;
while (stack.length) {
var value = stack.pop();
if (typeof value === "boolean") {
bytes += 4;
} else if (typeof value === "string") {
bytes += value.length * 2;
} else if (typeof value === "number") {
bytes += 8;
} else if (typeof value === "object" && objectList.indexOf(value) === -1) {
objectList.push(value);
for (var i in value) {
stack.push(value[i]);
}
}
}
let megaBytes = bytes / 1024 ** 2;
let megaBytesRounded = Math.round(megaBytes * 10) / 10;
return megaBytesRounded;
}

View File

@ -1,23 +1,25 @@
/* Imports */ /* Imports */
import textVersion from "textversionjs" import textVersion from "textversionjs";
/* Definitions */ /* Definitions */
String.prototype.replaceAll = function replaceAll(search, replace) { return this.split(search).join(replace); } String.prototype.replaceAll = function replaceAll(search, replace) {
return this.split(search).join(replace);
};
var styleConfig = { var styleConfig = {
linkProcess: function(href, linkText){ linkProcess: function (href, linkText) {
let newHref = href ? href.replace(/\(/g, "%28").replace(/\)/g, "%29") : "" let newHref = href ? href.replace(/\(/g, "%28").replace(/\)/g, "%29") : "";
    // Deal correctly in markdown with links that contain parentheses // Deal correctly in markdown with links that contain parentheses
return `[${linkText}](${newHref})`; return `[${linkText}](${newHref})`;
}, },
} };
/* Support functions */ /* Support functions */
/* Body */ /* Body */
export default function toMarkdown(htmlText){ export default function toMarkdown(htmlText) {
let html2 = htmlText.replaceAll(`='`, `="`).replaceAll(`'>`, `">`) let html2 = htmlText.replaceAll(`='`, `="`).replaceAll(`'>`, `">`);
return textVersion(html2, styleConfig); return textVersion(html2, styleConfig);
} }

Some files were not shown because too many files have changed in this diff