feat: code reorganization
parent 3d43aa3b6e
commit 2dcea149d1

100477 data/frontpage.json
File diff suppressed because one or more lines are too long

@@ -1,20 +1,21 @@
{
"id": "Unique string which identifies the question, not optional",
"title": " String, not optional",
"url": " String. A very few number of platforms have a page for more than one prediction.",
"platform": " String, not optional",
"description": " String, optional",
"options": " Optional. Saved as a string. If this is a binary question, it gets converted to having Yes/No options. The probabilities should be probabilities, i.e., normalized to 1 first.",
"options-description": " Optional. Saved as a string. If this is a binary question, it gets converted to having Yes/No options. The probabilities should be probabilities, i.e., normalized to 1 first. Eventually I'll want this to also include distributions.",
"options": [
"description: optional. Some prediction platforms have distributions, rather than probabilities.",
{
"name": "Option 1",
"probability": "Number, 0 to 1",
"type": "for now just 'PROBABILITY', but could include others, like 'DISTRIBUTION'",
"type": "for now just 'PROBABILITY', but could include others, like 'DISTRIBUTION'"
},
{
"name": "Option 2",
"probability": "Number, 0 to 1",
"type": "for now just 'PROBABILITY', but could include others, like 'DISTRIBUTION'",
"type": "for now just 'PROBABILITY', but could include others, like 'DISTRIBUTION'"
}
],
"timestamp": " Timestamp at which metaforecast fetches the probability",

@@ -26,7 +27,7 @@
"tradevolume": "optional",
"volume": "optional",
"address": "optional",
"other properties": "optional",
"other properties": "optional"
},
"extra": {
"field_description": "arbitrary extra information",

@@ -42,5 +43,6 @@
"leader_years": " country.leader_years",
"country_code": " country.country_code",
"country_abb": " country.country_abb",
"superforecastercommentary": " analysis || "",
"superforecastercommentary": "some analysis"
}
}

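For concreteness, a minimal sketch of a question object that follows the schema above. All values are invented for illustration; the id naming convention and the exact quality indicators shown are assumptions, not part of this commit.

// Hypothetical example of a question object in the format described above.
const exampleQuestion = {
  id: "metaculus-1234", // assumed naming convention: platform plus question number
  title: "Will X happen by 2030?",
  url: "https://www.metaculus.com/questions/1234/",
  platform: "Metaculus",
  description: "Optional longer description of the question.",
  options: [
    { name: "Yes", probability: 0.6, type: "PROBABILITY" },
    { name: "No", probability: 0.4, type: "PROBABILITY" }, // probabilities normalized to 1
  ],
  timestamp: "2022-02-10T00:00:00.000Z", // when metaforecast fetched the probability
  qualityindicators: { stars: 3 }, // see the qualityindicators fields above
};
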
@@ -1,6 +1,6 @@
import pkg from 'mongodb';
const { MongoClient } = pkg;
import { getCookie } from "./getCookies.js"
import { getCookie } from "../utils/getCookies.js"

function roughSizeOfObject(object) {
var objectList = [];

@@ -1,9 +1,9 @@

import { mergeEverything } from "./mergeEverything.js"
import { updateHistory } from "./history/updateHistory.js"
import { rebuildAlgoliaDatabase } from "./algolia.js"
import { rebuildAlgoliaDatabase } from "../utils/algolia.js"
import { rebuildNetlifySiteWithNewData } from "./rebuildNetliftySiteWithNewData.js"
import { platformFetchers } from "./platforms.js"
import { platformFetchers } from "../platforms/all-platforms.js"

/* Do everything */
function sleep(ms) {

@@ -1,5 +1,5 @@
import { writeFileSync } from "fs"
import { mongoReadWithReadCredentials, upsert } from "../mongo-wrapper.js"
import { mongoReadWithReadCredentials, upsert } from "../../database/mongo-wrapper.js"
let mongoRead = mongoReadWithReadCredentials
let isEmptyArray = arr => arr.length == 0

@@ -1,4 +1,4 @@
import { mongoRead, upsert } from "../mongo-wrapper.js"
import { mongoRead, upsert } from "../../database/mongo-wrapper.js"

export async function createHistoryForMonth(){
let currentDate = new Date()

@@ -1,5 +1,5 @@
import { mongoRead, upsert } from "./mongo-wrapper.js";
import { platformNames } from "./platforms.js"
import { mongoRead, upsert } from "../database/mongo-wrapper.js";
import { platformNames } from "../platforms/all-platforms.js"
/* Merge everything */
let suffix = "-questions";

@@ -1,14 +1,14 @@
import axios from "axios"
import {getCookie, applyIfCookieExists} from "../utils/getCookies.js"
import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"

export async function rebuildNetlifySiteWithNewData_inner(cookie){
export async function rebuildNetlifySiteWithNewData_inner(cookie) {
let payload = ({});
let response = await axios.post(cookie, payload);
let data = response.data;
console.log(data);
}

export async function rebuildNetlifySiteWithNewData(){
export async function rebuildNetlifySiteWithNewData() {
let cookie = process.env.REBUIDNETLIFYHOOKURL || getCookie("netlify");
await applyIfCookieExists(cookie, rebuildNetlifySiteWithNewData_inner)
}

10 src/index.js

@@ -2,12 +2,12 @@
import fs from 'fs'
import readline from "readline"

import { platformFetchers } from "./utils/platforms.js"
import { mergeEverything } from "./utils/mergeEverything.js"
import { updateHistory } from "./utils/history/updateHistory.js"
import { platformFetchers } from "./platforms/all-platforms.js"
import { mergeEverything } from "./flow/mergeEverything.js"
import { updateHistory } from "./flow/history/updateHistory.js"
import { rebuildAlgoliaDatabase } from "./utils/algolia.js"
import { rebuildNetlifySiteWithNewData } from "./utils/rebuildNetliftySiteWithNewData.js"
import { doEverything, tryCatchTryAgain } from "./utils/doEverything.js"
import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData.js"
import { doEverything, tryCatchTryAgain } from "./flow/doEverything.js"

/* Support functions */
let functions = [...platformFetchers, mergeEverything, updateHistory, rebuildAlgoliaDatabase, rebuildNetlifySiteWithNewData, doEverything]

File diff suppressed because it is too large

3588 src/input/old/givewellopenphil-questions.json (new file)
File diff suppressed because it is too large

52 src/manual/downloadFrontpage.js (new file)

@@ -0,0 +1,52 @@
import fs from "fs";
import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js";

let filename =
  "/home/loki/Documents/core/software/fresh/js/metaforecast/metaforecast-backend/data/frontpage.json";
let shuffle = (array) => {
  // https://stackoverflow.com/questions/2450954/how-to-randomize-shuffle-a-javascript-array
  let currentIndex = array.length,
    randomIndex;

  // While there remain elements to shuffle...
  while (currentIndex != 0) {
    // Pick a remaining element...
    randomIndex = Math.floor(Math.random() * currentIndex);
    currentIndex--;

    // And swap it with the current element.
    [array[currentIndex], array[randomIndex]] = [
      array[randomIndex],
      array[currentIndex],
    ];
  }

  return array;
};

let main = async () => {
  let init = Date.now();

  let json = await mongoReadWithReadCredentials("metaforecasts");

  json = json.filter(
    (forecast) =>
      forecast.qualityindicators &&
      forecast.qualityindicators.stars >= 3 &&
      forecast.options &&
      forecast.options.length > 0 &&
      forecast.platform != "AstralCodexTen"
  );
  json = shuffle(json);

  let string = JSON.stringify(json, null, 2);
  fs.writeFileSync(filename, string);
  console.log(`File downloaded to ${filename}`);

  let end = Date.now();
  let difference = end - init;
  console.log(
    `Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
  );
};
main();

@@ -1,6 +1,6 @@
import fs from "fs"

import { mongoReadWithReadCredentials } from "./mongo-wrapper.js"
import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js"

let main = async () => {
  let json = await mongoReadWithReadCredentials("metaforecasts")

18 src/manual/manualSendToMongo.js (new file)

@@ -0,0 +1,18 @@
import fs from "fs";
import { mongoRead, upsert } from "../database/mongo-wrapper.js";

/* This is necessary for estimize, the database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once. */

let pushManualFiles = ["givewellopenphil"]; // ["estimize", "givewellopenphil", "xrisk"]
let suffixFiles = "-questions.json";
let suffixMongo = "-questions";

let main = async () => {
  for (let file of pushManualFiles) {
    let fileRaw = fs.readFileSync(`./src/input/${file + suffixFiles}`);
    let fileContents = JSON.parse(fileRaw);
    console.log(fileContents);
    await upsert(fileContents, file + suffixMongo);
  }
};
main();

@@ -1,4 +1,4 @@
#!/bin/bash
cd /home/loki/Documents/core/software/fresh/js/metaforecast/metaforecast-backend
date > done.txt
/home/loki/.nvm/versions/node/v16.8.0/bin/node ./src/utils/pullSuperforecastsManually.js >> done.txt
/home/loki/.nvm/versions/node/v16.8.0/bin/node ./src/manual/pullSuperforecastsManually.js >> done.txt

@@ -2,16 +2,16 @@
clear
echo "
Platforms:
[0]: CSET-Foretell
[0]: IFER
[1]: Good Judgment Open
"
read -p "Choose a platform [0/1]: " platform
echo ""
case $platform in
"0" )
echo "Platform: CSET-Foretell"
echo "Copy request headers from https://www.cset-foretell.com/questions"
firefox "https://www.cset-foretell.com/questions"
echo "Platform: INFER"
echo "Copy request headers from https://www.infer-pub.com/"
firefox "https://www.infer-pub.com/"
echo "Copy the request headers to clipboard"
read -p "Press any key when copied: " copied
cookie=$(xclip -selection c -o | {

@@ -30,8 +30,8 @@ case $platform in
echo ""
echo "Cookie found:"
echo "$cookie"
echo "Running: heroku config:set CSETFORETELL_COOKIE='\$cookie'"
heroku config:set CSETFORETELL_COOKIE="$cookie"
echo "Running: heroku config:set INFER_COOKIE='\$cookie'"
heroku config:set INFER_COOKIE="$cookie"
;;
"1" )
echo "Platform: Good Judgment Open"

61 src/platforms/all-platforms.js (new file)

@@ -0,0 +1,61 @@
import { betfair } from "./betfair-fetch.js";
import { fantasyscotus } from "./fantasyscotus-fetch.js";
import { foretold } from "./foretold-fetch.js";
import { goodjudgment } from "./goodjudgment-fetch.js";
import { goodjudgmentopen } from "./goodjudmentopen-fetch.js";
import { infer } from "./infer-fetch.js";
import { kalshi } from "./kalshi-fetch.js";
import { manifoldmarkets } from "./manifoldmarkets-fetch.js";
import { metaculus } from "./metaculus-fetch.js";
import { polymarket } from "./polymarket-fetch.js";
import { predictit } from "./predictit-fetch.js";
import { rootclaim } from "./rootclaim-fetch.js";
import { smarkets } from "./smarkets-fetch.js";
import { wildeford } from "./wildeford-fetch.js";

/* Deprecated
import { astralcodexten } from "../platforms/astralcodexten-fetch.js"
import { coupcast } from "../platforms/coupcast-fetch.js"
import { csetforetell } from "../platforms/csetforetell-fetch.js"
import { elicit } from "../platforms/elicit-fetch.js"
import { estimize } from "../platforms/estimize-fetch.js"
import { hypermind } from "../platforms/hypermind-fetch.js"
import { ladbrokes } from "../platforms/ladbrokes-fetch.js";
import { williamhill } from "../platforms/williamhill-fetch.js";
*/

export const platformFetchers = [
  betfair,
  fantasyscotus,
  foretold,
  goodjudgment,
  goodjudgmentopen,
  infer,
  kalshi,
  manifoldmarkets,
  metaculus,
  polymarket,
  predictit,
  rootclaim,
  smarkets,
  wildeford,
];
export const platformNames = [
  "betfair",
  "fantasyscotus",
  "foretold",
  "givewellopenphil",
  "goodjudgment",
  "goodjudmentopen",
  "infer",
  "kalshi",
  "manifoldmarkets",
  "metaculus",
  "polymarket",
  "predictit",
  "rootclaim",
  "smarkets",
  "wildeford",
  "xrisk",
];
// deprecated: "astralcodexten", "csetforetell", "coupcast", "elicit", "estimize", "hypermind", "ladbrokes", "omen", "williamhill", etc

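For context, these exported fetchers are what src/flow/doEverything.js and src/index.js iterate over. A minimal sketch of that consumption pattern follows; the retry and logging details here are assumptions, not the actual body of doEverything.js.

// Sketch only: names mirror the imports shown in this diff; error handling is simplified.
import { platformFetchers } from "./platforms/all-platforms.js";

async function fetchAllPlatforms() {
  for (let fetcher of platformFetchers) {
    try {
      await fetcher(); // assumed: each platform module exports an async fetch-and-upsert function
    } catch (error) {
      console.log(error); // keep going even if one platform fails
    }
  }
}

fetchAllPlatforms();
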
@@ -2,9 +2,8 @@
import fs from "fs";
import axios from "axios";
import https from "https";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let endpoint = process.env.SECRET_BETFAIR_ENDPOINT;

@@ -3,7 +3,7 @@ import fs from "fs";
import axios from "axios";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let endpoint = "https://example.com/";

@@ -2,7 +2,7 @@
import fs from "fs";
import axios from "axios";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let unixtime = new Date().getTime();

@@ -2,7 +2,7 @@
import fs from "fs";
import axios from "axios";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let graphQLendpoint = "https://api.foretold.io/graphql";

@@ -3,7 +3,7 @@ import fs from "fs";
import axios from "axios";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let locationData = "./data/";

@@ -5,8 +5,8 @@ import fs from "fs";
import { Tabletojson } from "tabletojson";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { hash } from "../utils/hash.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let endpoint = "https://goodjudgment.io/superforecasts/";

@@ -5,7 +5,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
import { Tabletojson } from "tabletojson";
import { calculateStars } from "../utils/stars.js";
import toMarkdown from "../utils/toMarkdown.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let htmlEndPoint = "https://www.gjopen.com/questions?page=";

@@ -4,7 +4,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
import { Tabletojson } from "tabletojson";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let htmlEndPoint = "https://www.infer-pub.com/questions";

@@ -2,7 +2,7 @@
import fs from "fs";
import axios from "axios";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'

@@ -2,7 +2,7 @@
import fs from "fs";
import axios from "axios";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let endpoint = "https://us-central1-mantic-markets.cloudfunctions.net/markets";

@@ -3,7 +3,7 @@ import axios from "axios";
import fs from "fs";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page=";

@@ -2,7 +2,7 @@
import fs from "fs";
import axios from "axios";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let graphQLendpoint =

@@ -3,7 +3,7 @@ import fs from "fs";
import axios from "axios";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Support functions */
async function fetchmarkets() {

@@ -3,7 +3,7 @@ import fs from "fs";
import axios from "axios";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let jsonEndpoint =

@@ -3,7 +3,7 @@ import fs from "fs";
import axios from "axios";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/";

@@ -5,8 +5,9 @@ import { GoogleSpreadsheet } from "google-spreadsheet";
import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { upsert } from "../utils/mongo-wrapper.js";
import { hash } from "../utils/hash.js";
import { upsert } from "../database/mongo-wrapper.js";

/* Definitions */
const SHEET_ID = "1xcgYF7Q0D95TPHLLSgwhWBHFrWZUGJn7yTyAhDR4vi0"; // spreadsheet key is the long id in the sheets URL
const endpoint = `https://docs.google.com/spreadsheets/d/${SHEET_ID}/edit#gid=0`;

@@ -1,9 +1,8 @@
import algoliasearch from 'algoliasearch';
import {getCookie} from "./getCookies.js"
import fs from "fs"

import { mongoReadWithReadCredentials } from "./mongo-wrapper.js"
import { mergeEverythingInner } from './mergeEverything.js';
import {getCookie} from "./getCookies.js"
import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js"
import { mergeEverythingInner } from '../flow/mergeEverything.js';

let cookie = process.env.ALGOLIA_MASTER_API_KEY || getCookie("algolia")
const client = algoliasearch('96UD3NTQ7L', cookie);

@@ -1,42 +0,0 @@
import fs from "fs"

import { mongoReadWithReadCredentials } from "./mongo-wrapper.js"

let filename = '/home/loki/Documents/core/software/fresh/js/metaforecast/metaforecast-backend/data/frontpage.json'
let shuffle = (array) => {
  // https://stackoverflow.com/questions/2450954/how-to-randomize-shuffle-a-javascript-array
  let currentIndex = array.length, randomIndex;

  // While there remain elements to shuffle...
  while (currentIndex != 0) {

    // Pick a remaining element...
    randomIndex = Math.floor(Math.random() * currentIndex);
    currentIndex--;

    // And swap it with the current element.
    [array[currentIndex], array[randomIndex]] = [
      array[randomIndex], array[currentIndex]];
  }

  return array;
}

let main = async () => {
  let init = Date.now()

  let json = await mongoReadWithReadCredentials("metaforecasts")

  json = json.filter(forecast => (forecast.qualityindicators && forecast.qualityindicators.stars >= 3) && (forecast.options && forecast.options.length > 0 && forecast.platform != "AstralCodexTen"))
  json = shuffle(json)

  let string = JSON.stringify(json, null, 2)
  fs.writeFileSync(filename, string);
  console.log(`File downloaded to ${filename}`)

  let end = Date.now()
  let difference = end - init
  console.log(`Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`)

}
main()

4 src/utils/hash.js (new file)

@@ -0,0 +1,4 @@
import crypto from "crypto";

export const hash = (string) =>
  crypto.createHash("sha256").update(string).digest("hex").slice(0, 10);

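A quick usage note: hash() returns the first 10 hex characters of a SHA-256 digest of its input; the goodjudgment and wildeford fetchers import it from ../utils/hash.js. Illustration only, with a made-up input string:

import { hash } from "../utils/hash.js";

let id = hash("some question title or url"); // 10-character hex string, stable for the same input
console.log(id);
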
@@ -1,18 +0,0 @@
import fs from 'fs'
import { mongoRead, upsert } from "./mongo-wrapper.js"

/* This is necessary for estimize, the database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once. */

let pushManualFiles = ["estimize", "givewellopenphil", "xrisk"]
let suffixFiles = "-questions.json"
let suffixMongo = "-questions"

let main = async () => {
  for(let file of pushManualFiles){
    let fileRaw = fs.readFileSync(`./src/input/${file+suffixFiles}`)
    let fileContents = JSON.parse(fileRaw)
    console.log(fileContents)
    await upsert(fileContents, file+suffixMongo)
  }
}
main()

@@ -1,40 +1,47 @@
/* Imports */
import fs from "fs"
import fs from "fs";

/* Definitions */
let locationData = "./data/"

let locationData = "../../input";
/* Body */
let rawdata = fs.readFileSync("../data/givewellopenphil-questions-processed-old-format.json")
let data = JSON.parse(rawdata)
let rawdata = fs.readFileSync(
`${locationData}/givewellopenphil-questions.json`
);
let data = JSON.parse(rawdata);

let results = []
for(let datum of data){
let probability = Math.round(Number(datum["Percentage"].replace("%","")))/100
let result = ({
"title": datum["Title"],
"url": datum["URL"],
"platform": datum["Platform"],
"description": datum["Description"],
"options": [
let results = [];
let counter = 0;
for (let datum of data) {
let id = `givewellopenphil-2021-${counter}`;
counter = counter + 1;
// let probability = Math.round(Number(datum["Percentage"].replace("%", ""))) / 100;
let result = {
id: id,
title: datum["title"],
url: datum["url"],
platform: datum["platform"],
description: datum["description"],
options: datum["options"],
/*[
{
"name": "Yes",
"probability": probability,
"type": "PROBABILITY"
name: "Yes",
probability: probability,
type: "PROBABILITY",
},
{
"name": "No",
"probability": 1-Math.round(probability*100)/100,
"type": "PROBABILITY"
}
name: "No",
probability: 1 - Math.round(probability * 100) / 100,
type: "PROBABILITY",
},
],
"timestamp": "2021-02-23T15∶21∶37.005Z",//new Date().toISOString(),
"qualityindicators": {
"stars": datum["Stars"]
}
})
results.push(result)
*/
timestamp: "2021-02-23T15∶21∶37.005Z", //new Date().toISOString(),
qualityindicators: {
stars: datum.qualityindicators.stars, //datum["stars"],
},
};
results.push(result);
}

let string = JSON.stringify(results,null, 2)
fs.writeFileSync("../data/givewellopenphil-questions-new.json", string)
let string = JSON.stringify(results, null, 2);
fs.writeFileSync(`${locationData}/givewellopenphil-questions-new.json`, string);

@@ -1,48 +0,0 @@
import { betfair } from "../platforms/betfair-fetch.js"
import { fantasyscotus } from "../platforms/fantasyscotus-fetch.js"
import { foretold } from "../platforms/foretold-fetch.js"
import { goodjudgment } from "../platforms/goodjudgment-fetch.js"
import { goodjudgmentopen } from "../platforms/goodjudmentopen-fetch.js"
import { infer } from "../platforms/infer-fetch.js"
import { kalshi } from "../platforms/kalshi-fetch.js"
import { ladbrokes } from "../platforms/ladbrokes-fetch.js"
import { manifoldmarkets } from "../platforms/manifoldmarkets-fetch.js"
import { metaculus } from "../platforms/metaculus-fetch.js"
import { polymarket } from "../platforms/polymarket-fetch.js"
import { predictit } from "../platforms/predictit-fetch.js"
import { rootclaim } from "../platforms/rootclaim-fetch.js"
import { smarkets } from "../platforms/smarkets-fetch.js"
import { wildeford } from "../platforms/wildeford-fetch.js"
import { williamhill } from "../platforms/williamhill-fetch.js"

/* Deprecated
import { astralcodexten } from "../platforms/astralcodexten-fetch.js"
import { csetforetell } from "../platforms/csetforetell-fetch.js"
import { elicit } from "../platforms/elicit-fetch.js"
import { estimize } from "../platforms/estimize-fetch.js"
import { hypermind } from "../platforms/hypermind-fetch.js"
import { coupcast } from "../platforms/coupcast-fetch.js"
*/

export const platformFetchers = [betfair, fantasyscotus, foretold, goodjudgment, goodjudgmentopen, infer, ladbrokes, kalshi, manifoldmarkets, metaculus, polymarket, predictit, rootclaim, smarkets, wildeford, williamhill]
export const platformNames = [
  "betfair",
  "fantasyscotus",
  "foretold",
  "givewellopenphil",
  "goodjudgment",
  "goodjudmentopen",
  "infer",
  "kalshi",
  "ladbrokes",
  "manifoldmarkets",
  "metaculus",
  "polymarket",
  "predictit",
  "rootclaim",
  "smarkets",
  "wildeford",
  "williamhill",
  "xrisk",
];
// deprecated: "astralcodexten", "csetforetell", "coupcast", "elicit", "estimize", "hypermind", "omen", etc