diff --git a/docs/configuration.md b/docs/configuration.md index 8e58241..fc8fb7b 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -14,6 +14,8 @@ There's also a template configuration file in `../env.example`. - `DIGITALOCEAN_POSTGRES`, of the form `postgres://username:password@domain.com:port/configvars`. (Disregard `DIGITALOCEAN_` prefix, you can use any endpoint you like). - `DIGITALOCEAN_POSTGRES_PUBLIC` - `ALGOLIA_MASTER_API_KEY`, a string of 32 hexidecimal characters, like `19b6c2234e50c98d30668659a39e3127` (not an actual key). +- `NEXT_PUBLIC_ALGOLIA_APP_ID`, +- `NEXT_PUBLIC_ALGOLIA_SEARCH_KEY` ## Platform cookies and keys @@ -21,7 +23,7 @@ Most of these are just session cookies, necessary to query INFER (previously CSE Note that not all of these cookies are needed to use all parts of the source code. For instance, to download Polymarket data, one could just interface with the Polymarket code. In particular, the code in this repository contains code to connect with the postgres database using read permissions, which are freely available. -- `GOODJUDGEMENTOPENCOOKIE` +- `GOODJUDGMENTOPENCOOKIE` - `INFER_COOKIE` - `CSETFORETELL_COOKIE`, deprecated, superseded by `INFER_COOKIE`. - `HYPERMINDCOOKIE` diff --git a/env.example b/env.example index 9a2eda2..8145c9d 100644 --- a/env.example +++ b/env.example @@ -1,7 +1,9 @@ #### You can copy this file to `.env` and modify accordingly. #### Documentation can be found in `docs/configuration.md`. -# ALGOLIA_MASTER_API_KEY=19b6c2234e50c98d30668659a39e3127 +# ALGOLIA_MASTER_API_KEY=999988887777666655554444eeeeffff +# NEXT_PUBLIC_ALGOLIA_APP_ID=ABCDE12345 +# NEXT_PUBLIC_ALGOLIA_SEARCH_KEY=0000111122223333aaaabbbbccccdddd # DIGITALOCEAN_POSTGRES=postgresql://...@localhost:5432/...?schema=public # POSTGRES_NO_SSL=1 diff --git a/src/backend/database/pg-wrapper.ts b/src/backend/database/pg-wrapper.ts index cfa91d5..09d2aae 100644 --- a/src/backend/database/pg-wrapper.ts +++ b/src/backend/database/pg-wrapper.ts @@ -1,6 +1,6 @@ import pkg from "pg"; -import { platformNames } from "../platforms/all/platformNames"; +import { platforms } from "../platforms"; import { hash } from "../utils/hash"; import { roughSizeOfObject } from "../utils/roughSize"; @@ -18,7 +18,10 @@ const allowed_year_month_histories = [].concat( allowed_months.map((month) => `${year}_${month}`) ) ); // h2022_01 -const tableNamesWhitelistLatest = ["combined", ...platformNames]; +const tableNamesWhitelistLatest = [ + "combined", + ...platforms.map((platform) => platform.name), +]; const tableNamesWhiteListHistory = [ ...allowed_years, ...allowed_year_month_histories, diff --git a/src/backend/flow/doEverything.ts b/src/backend/flow/doEverything.ts index 47b50ae..c6e0bab 100644 --- a/src/backend/flow/doEverything.ts +++ b/src/backend/flow/doEverything.ts @@ -1,37 +1,14 @@ -import { platformFetchers } from "../platforms/all-platforms"; -import { rebuildAlgoliaDatabase } from "../utils/algolia"; -import { updateHistory } from "./history/updateHistory"; -import { mergeEverything } from "./mergeEverything"; -import { rebuildNetlifySiteWithNewData } from "./rebuildNetliftySiteWithNewData"; +import { platforms } from "../platforms"; +import { executeJobByName } from "./jobs"; /* Do everything */ -function sleep(ms) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export async function tryCatchTryAgain(fun) { - try { - console.log("Initial try"); - await fun(); - } catch (error) { - sleep(10000); - console.log("Second try"); - console.log(error); - try { - await 
fun(); - } catch (error) { - console.log(error); - } - } -} - export async function doEverything() { - let functions = [ - ...platformFetchers, - mergeEverything, - rebuildAlgoliaDatabase, - updateHistory, - rebuildNetlifySiteWithNewData, + let jobNames = [ + ...platforms.map((platform) => platform.name), + "merge", + "algolia", + "history", + "netlify", ]; // Removed Good Judgment from the fetcher, doing it using cron instead because cloudflare blocks the utility on heroku. @@ -47,13 +24,13 @@ export async function doEverything() { console.log(""); console.log(""); - for (let fun of functions) { + for (let name of jobNames) { console.log(""); console.log(""); console.log("****************************"); - console.log(fun.name); + console.log(name); console.log("****************************"); - await tryCatchTryAgain(fun); + await executeJobByName(name); console.log("****************************"); } } diff --git a/src/backend/flow/jobs.ts b/src/backend/flow/jobs.ts new file mode 100644 index 0000000..4d79004 --- /dev/null +++ b/src/backend/flow/jobs.ts @@ -0,0 +1,90 @@ +import { pgInitialize } from "../database/pg-wrapper"; +import { doEverything } from "../flow/doEverything"; +import { updateHistory } from "../flow/history/updateHistory"; +import { mergeEverything } from "../flow/mergeEverything"; +import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData"; +import { rebuildFrontpage } from "../frontpage"; +import { platforms, processPlatform } from "../platforms"; +import { rebuildAlgoliaDatabase } from "../utils/algolia"; + +interface Job { + name: string; + message: string; + run: () => Promise; + separate?: boolean; +} + +export const jobs: Job[] = [ + ...platforms.map((platform) => ({ + name: platform.name, + message: `Download predictions from ${platform.name}`, + run: () => processPlatform(platform), + })), + { + name: "merge", + message: + "Merge tables into one big table (and push the result to a pg database)", + run: mergeEverything, + separate: true, + }, + { + name: "algolia", + message: 'Rebuild algolia database ("index")', + run: rebuildAlgoliaDatabase, + }, + { + name: "history", + message: "Update history", + run: updateHistory, + }, + { + name: "netlify", + message: `Rebuild netlify site with new data`, + run: rebuildNetlifySiteWithNewData, + }, + { + name: "frontpage", + message: "Rebuild frontpage", + run: rebuildFrontpage, + }, + { + name: "all", + message: "All of the above", + run: doEverything, + separate: true, + }, + { + name: "migrate", + message: "Initialize postgres database", + run: pgInitialize, + }, +]; + +function sleep(ms: number) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function tryCatchTryAgain(fun: () => Promise) { + try { + console.log("Initial try"); + await fun(); + } catch (error) { + sleep(10000); + console.log("Second try"); + console.log(error); + try { + await fun(); + } catch (error) { + console.log(error); + } + } +} + +export const executeJobByName = async (option: string) => { + const job = jobs.find((job) => job.name === option); + if (!job) { + console.log(`Error, job ${option} not found`); + } else { + await tryCatchTryAgain(job.run); + } +}; diff --git a/src/backend/flow/mergeEverything.ts b/src/backend/flow/mergeEverything.ts index ad06214..ba50b88 100644 --- a/src/backend/flow/mergeEverything.ts +++ b/src/backend/flow/mergeEverything.ts @@ -1,11 +1,12 @@ import { databaseRead, databaseUpsert } from "../database/database-wrapper"; -import { platformNames } from 
"../platforms/all-platforms"; +import { platforms } from "../platforms"; /* Merge everything */ export async function mergeEverythingInner() { let merged = []; - for (let platformName of platformNames) { + for (let platform of platforms) { + const platformName = platform.name; let json = await databaseRead({ group: platformName }); console.log(`${platformName} has ${json.length} questions\n`); merged = merged.concat(json); diff --git a/src/backend/index.ts b/src/backend/index.ts index 2961419..2d9edfa 100644 --- a/src/backend/index.ts +++ b/src/backend/index.ts @@ -2,50 +2,23 @@ import "dotenv/config"; import readline from "readline"; +import util from "util"; -import { pgInitialize } from "./database/pg-wrapper"; -import { doEverything, tryCatchTryAgain } from "./flow/doEverything"; -import { updateHistory } from "./flow/history/updateHistory"; -import { mergeEverything } from "./flow/mergeEverything"; -import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData"; -import { rebuildFrontpage } from "./frontpage"; -import { platformFetchers } from "./platforms/all-platforms"; -import { rebuildAlgoliaDatabase } from "./utils/algolia"; - -/* Support functions */ -let functions = [ - ...platformFetchers, - mergeEverything, - rebuildAlgoliaDatabase, - updateHistory, - rebuildNetlifySiteWithNewData, - doEverything, - pgInitialize, - rebuildFrontpage, -]; +import { executeJobByName, jobs } from "./flow/jobs"; let generateWhatToDoMessage = () => { - let l = platformFetchers.length; - let messagesForFetchers = platformFetchers.map( - (fun, i) => `[${i}]: Download predictions from ${fun.name}` - ); - let otherMessages = [ - "Merge tables into one big table (and push the result to a pg database)", - `Rebuild algolia database ("index")`, - `Update history`, - `Rebuild netlify site with new data`, - // `\n[${functionNames.length-1}]: Add to history` + - `All of the above`, - `Initialize postgres database`, - "Rebuild frontpage", - ]; - let otherMessagesWithNums = otherMessages.map( - (message, i) => `[${i + l}]: ${message}` - ); + const color = "\x1b[36m"; + const resetColor = "\x1b[0m"; let completeMessages = [ - ...messagesForFetchers, - ...otherMessagesWithNums, - `\nChoose one option, wisely: #`, + ...jobs.map((job) => { + return ( + (job.separate ? 
"\n" : "") + + `[${color}${job.name}${resetColor}]:`.padStart(30) + + " " + + job.message + ); + }), + `\nChoose one option, wisely: `, ].join("\n"); return completeMessages; }; @@ -54,39 +27,24 @@ let whattodoMessage = generateWhatToDoMessage(); /* BODY */ let commandLineUtility = async () => { - let whattodo = async (message, callback) => { + const pickOption = async () => { + if (process.argv.length === 3) { + return process.argv[2]; // e.g., npm run cli polymarket + } + const rl = readline.createInterface({ input: process.stdin, output: process.stdout, }); - rl.question(message, async (answer) => { - rl.close(); - await callback(answer); - }); + + const question = util.promisify(rl.question).bind(rl); + const answer = await question(whattodoMessage); + rl.close(); + return answer; }; - let executeoption = async (option) => { - option = Number(option); - if (option < 0) { - console.log(`Error, ${option} < 0`); - } else if (option < functions.length) { - console.log(`Running: ${functions[option].name}\n`); - await tryCatchTryAgain(functions[option]); - } - process.exit(); - }; - - if (process.argv.length == 3) { - const option = process.argv[2]; // e.g., npm start 15 <- - const optionNum = Number(option); - if (!isNaN(optionNum)) { - await executeoption(optionNum); - } else if (option == "all") { - await executeoption(functions.length - 3); // doEverything - } else { - await whattodo(whattodoMessage, executeoption); - } - } else await whattodo(whattodoMessage, executeoption); + await executeJobByName(await pickOption()); + process.exit(); }; commandLineUtility(); diff --git a/src/backend/manual/manualSendToDb.ts b/src/backend/manual/manualSendToDb.ts deleted file mode 100644 index b66a747..0000000 --- a/src/backend/manual/manualSendToDb.ts +++ /dev/null @@ -1,20 +0,0 @@ -import fs from "fs"; - -import { databaseUpsert } from "../database/database-wrapper"; - -/* This is necessary for estimize, the database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once. 
*/ - -let pushManualFiles = ["givewellopenphil"]; // ["estimize", "givewellopenphil", "xrisk"] -let suffixFiles = "-questions.json"; - -let main = async () => { - for (let file of pushManualFiles) { - let fileRaw = fs.readFileSync(`./input/${file + suffixFiles}`, { - encoding: "utf-8", - }); - let fileContents = JSON.parse(fileRaw); - console.log(fileContents); - await databaseUpsert({ contents: fileContents, group: file }); - } -}; -main(); diff --git a/src/backend/manual/pullSuperforecastsManually.ts b/src/backend/manual/pullSuperforecastsManually.ts index 1a4829a..c1c79fb 100644 --- a/src/backend/manual/pullSuperforecastsManually.ts +++ b/src/backend/manual/pullSuperforecastsManually.ts @@ -1,11 +1,4 @@ -/* Imports */ -import { goodjudgment } from "../platforms/goodjudgment-fetch"; +import { processPlatform } from "../platforms"; +import { goodjudgment } from "../platforms/goodjudgment"; -/* Definitions */ - -/* Utilities */ - -/* Support functions */ - -/* Body */ -goodjudgment(); +processPlatform(goodjudgment); diff --git a/src/backend/platforms/all-platforms.ts b/src/backend/platforms/all-platforms.ts deleted file mode 100644 index 92258b3..0000000 --- a/src/backend/platforms/all-platforms.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { platformFetchers } from "./all/platformFetchers"; -export { platformNames } from "./all/platformNames"; diff --git a/src/backend/platforms/all/platformFetchers.ts b/src/backend/platforms/all/platformFetchers.ts deleted file mode 100644 index 04c1ce5..0000000 --- a/src/backend/platforms/all/platformFetchers.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { betfair } from "../betfair-fetch"; -import { fantasyscotus } from "../fantasyscotus-fetch"; -import { foretold } from "../foretold-fetch"; -import { goodjudgment } from "../goodjudgment-fetch"; -import { goodjudgmentopen } from "../goodjudmentopen-fetch"; -import { infer } from "../infer-fetch"; -import { kalshi } from "../kalshi-fetch"; -import { manifoldmarkets } from "../manifoldmarkets-fetch"; -import { metaculus } from "../metaculus-fetch"; -import { polymarket } from "../polymarket-fetch"; -import { predictit } from "../predictit-fetch"; -import { rootclaim } from "../rootclaim-fetch"; -import { smarkets } from "../smarkets-fetch"; -import { wildeford } from "../wildeford-fetch"; - -/* Deprecated -import { astralcodexten } from "../platforms/astralcodexten-fetch" -import { coupcast } from "../platforms/coupcast-fetch" -import { csetforetell } from "../platforms/csetforetell-fetch" -import { elicit } from "../platforms/elicit-fetch" -import { estimize } from "../platforms/estimize-fetch" -import { hypermind } from "../platforms/hypermind-fetch" -import { ladbrokes } from "../platforms/ladbrokes-fetch"; -import { williamhill } from "../platforms/williamhill-fetch"; -*/ - -export const platformFetchers = [ - betfair, - fantasyscotus, - foretold, - goodjudgment, - goodjudgmentopen, - infer, - kalshi, - manifoldmarkets, - metaculus, - polymarket, - predictit, - rootclaim, - smarkets, - wildeford, -]; diff --git a/src/backend/platforms/all/platformNames.ts b/src/backend/platforms/all/platformNames.ts deleted file mode 100644 index edbe248..0000000 --- a/src/backend/platforms/all/platformNames.ts +++ /dev/null @@ -1,20 +0,0 @@ -// This needs to be its own file to avoid cyclical dependencies. 
-export const platformNames = [ - "betfair", - "fantasyscotus", - "foretold", - "givewellopenphil", - "goodjudgment", - "goodjudmentopen", - "infer", - "kalshi", - "manifoldmarkets", - "metaculus", - "polymarket", - "predictit", - "rootclaim", - "smarkets", - "wildeford", - "xrisk", -]; -// deprecated: "astralcodexten", "csetforetell", "coupcast", "elicit", "estimize", "hypermind", "ladbrokes", "omen", "williamhill", etc diff --git a/src/backend/platforms/betfair-fetch.ts b/src/backend/platforms/betfair.ts similarity index 88% rename from src/backend/platforms/betfair-fetch.ts rename to src/backend/platforms/betfair.ts index ad8a130..867df56 100644 --- a/src/backend/platforms/betfair-fetch.ts +++ b/src/backend/platforms/betfair.ts @@ -1,8 +1,9 @@ /* Imports */ import axios from "axios"; import https from "https"; -import { databaseUpsert } from "../database/database-wrapper"; + import { calculateStars } from "../utils/stars"; +import { Forecast, Platform } from "./"; /* Definitions */ let endpoint = process.env.SECRET_BETFAIR_ENDPOINT; @@ -77,7 +78,7 @@ async function whipIntoShape(data) { async function processPredictions(data) { let predictions = await whipIntoShape(data); // console.log(JSON.stringify(predictions, null, 4)) - let results = predictions.map((prediction) => { + let results: Forecast[] = predictions.map((prediction) => { /* if(Math.floor(Math.random() * 10) % 20 ==0){ console.log(JSON.stringify(prediction, null, 4)) } */ @@ -134,14 +135,11 @@ async function processPredictions(data) { return results; //resultsProcessed } -/* Body */ - -export async function betfair() { - let data = await fetchPredictions(); - let results = await processPredictions(data); // somehow needed - // console.log(results.map(result => ({title: result.title, description: result.description}))) - // let string = JSON.stringify(results, null, 2) - await databaseUpsert({ contents: results, group: "betfair" }); - console.log("Done"); -} -// betfair() +export const betfair: Platform = { + name: "betfair", + async fetcher() { + const data = await fetchPredictions(); + const results = await processPredictions(data); // somehow needed + return results; + }, +}; diff --git a/src/backend/platforms/fantasyscotus-fetch.ts b/src/backend/platforms/fantasyscotus.ts similarity index 93% rename from src/backend/platforms/fantasyscotus-fetch.ts rename to src/backend/platforms/fantasyscotus.ts index 9ed3ff4..d32a32b 100644 --- a/src/backend/platforms/fantasyscotus-fetch.ts +++ b/src/backend/platforms/fantasyscotus.ts @@ -1,8 +1,8 @@ /* Imports */ import axios from "axios"; -import { databaseUpsert } from "../database/database-wrapper"; import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; /* Definitions */ let unixtime = new Date().getTime(); @@ -111,10 +111,11 @@ async function processData(data) { } /* Body */ -export async function fantasyscotus() { - let rawData = await fetchData(); - let results = await processData(rawData); - await databaseUpsert({ contents: results, group: "fantasyscotus" }); - console.log("Done"); -} -//fantasyscotus() +export const fantasyscotus: Platform = { + name: "fantasyscotus", + async fetcher() { + let rawData = await fetchData(); + let results = await processData(rawData); + return results; + }, +}; diff --git a/src/backend/platforms/foretold-fetch.ts b/src/backend/platforms/foretold-fetch.ts deleted file mode 100644 index c970952..0000000 --- a/src/backend/platforms/foretold-fetch.ts +++ /dev/null @@ -1,105 +0,0 @@ -/* Imports */ -import axios from "axios"; 
-import { databaseUpsert } from "../database/database-wrapper"; -import { calculateStars } from "../utils/stars"; - -/* Definitions */ -let graphQLendpoint = "https://api.foretold.io/graphql"; -let highQualityCommunities = [ - "0104d8e8-07e4-464b-8b32-74ef22b49f21", - "c47c6bc8-2c9b-4a83-9583-d1ed80a40fa2", - "cf663021-f87f-4632-ad82-962d889a2d39", - "47ff5c49-9c20-4f3d-bd57-1897c35cd42d", - "b2412a1d-0aa4-4e37-a12a-0aca9e440a96", -]; - -/* Support functions */ -async function fetchAllCommunityQuestions(communityId) { - let response = await axios({ - url: graphQLendpoint, - method: "POST", - headers: { "Content-Type": "application/json" }, - data: JSON.stringify({ - query: ` - query { - measurables( - channelId: "${communityId}", - states: OPEN, - first: 500 - ){ - total - edges{ - node{ - id - name - valueType - measurementCount - previousAggregate{ - value{ - percentage - } - } - } - } - } - } - `, - }), - }) - .then((res) => res.data) - .then((res) => res.data.measurables.edges); - //console.log(response) - return response; -} - -/* Body */ - -export async function foretold() { - let results = []; - for (let community of highQualityCommunities) { - let questions = await fetchAllCommunityQuestions(community); - questions = questions.map((question) => question.node); - questions = questions.filter((question) => question.previousAggregate); // Questions without any predictions - questions.forEach((question) => { - let id = `foretold-${question.id}`; - let options = []; - if (question.valueType == "PERCENTAGE") { - let probability = question.previousAggregate.value.percentage; - options = [ - { - name: "Yes", - probability: probability / 100, - type: "PROBABILITY", - }, - { - name: "No", - probability: 1 - probability / 100, - type: "PROBABILITY", - }, - ]; - } - let result = { - id: id, - title: question.name, - url: `https://www.foretold.io/c/${community}/m/${question.id}`, - platform: "Foretold", - description: "", - options: options, - timestamp: new Date().toISOString(), - qualityindicators: { - numforecasts: Math.floor(Number(question.measurementCount) / 2), - stars: calculateStars("Foretold", {}), - }, - /*liquidity: liquidity.toFixed(2), - tradevolume: tradevolume.toFixed(2), - address: obj.address*/ - }; - // console.log(result) - results.push(result); - }); - } - await databaseUpsert({ contents: results, group: "foretold" }); - - console.log("Done"); -} -// foretold() diff --git a/src/backend/platforms/foretold.ts b/src/backend/platforms/foretold.ts new file mode 100644 index 0000000..b41f655 --- /dev/null +++ b/src/backend/platforms/foretold.ts @@ -0,0 +1,104 @@ +/* Imports */ +import axios from "axios"; + +import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; + +/* Definitions */ +let graphQLendpoint = "https://api.foretold.io/graphql"; +let highQualityCommunities = [ + "0104d8e8-07e4-464b-8b32-74ef22b49f21", + "c47c6bc8-2c9b-4a83-9583-d1ed80a40fa2", + "cf663021-f87f-4632-ad82-962d889a2d39", + "47ff5c49-9c20-4f3d-bd57-1897c35cd42d", + "b2412a1d-0aa4-4e37-a12a-0aca9e440a96", +]; + +/* Support functions */ +async function fetchAllCommunityQuestions(communityId) { + let response = await axios({ + url: graphQLendpoint, + method: "POST", + headers: { "Content-Type": "application/json" }, + data: JSON.stringify({ + query: ` + query { + measurables( + channelId: "${communityId}", + states: OPEN, + first: 500 + ){ + total + edges{ + node{ + id + name + valueType + measurementCount + previousAggregate{ + value{ + percentage + } + } + } + } + } + } + `, + }), + 
}) + .then((res) => res.data) + .then((res) => res.data.measurables.edges); + //console.log(response) + return response; +} + +export const foretold: Platform = { + name: "foretold", + async fetcher() { + let results = []; + for (let community of highQualityCommunities) { + let questions = await fetchAllCommunityQuestions(community); + questions = questions.map((question) => question.node); + questions = questions.filter((question) => question.previousAggregate); // Questions without any predictions + questions.forEach((question) => { + let id = `foretold-${question.id}`; + let options = []; + if (question.valueType == "PERCENTAGE") { + let probability = question.previousAggregate.value.percentage; + options = [ + { + name: "Yes", + probability: probability / 100, + type: "PROBABILITY", + }, + { + name: "No", + probability: 1 - probability / 100, + type: "PROBABILITY", + }, + ]; + } + let result = { + id: id, + title: question.name, + url: `https://www.foretold.io/c/${community}/m/${question.id}`, + platform: "Foretold", + description: "", + options: options, + timestamp: new Date().toISOString(), + qualityindicators: { + numforecasts: Math.floor(Number(question.measurementCount) / 2), + stars: calculateStars("Foretold", {}), + }, + /*liquidity: liquidity.toFixed(2), + tradevolume: tradevolume.toFixed(2), + address: obj.address*/ + }; + // console.log(result) + results.push(result); + }); + } + return results; + }, +}; diff --git a/src/backend/platforms/givewellopenphil-fetch.ts b/src/backend/platforms/givewellopenphil.ts similarity index 74% rename from src/backend/platforms/givewellopenphil-fetch.ts rename to src/backend/platforms/givewellopenphil.ts index 85f2961..31077f4 100644 --- a/src/backend/platforms/givewellopenphil-fetch.ts +++ b/src/backend/platforms/givewellopenphil.ts @@ -4,9 +4,10 @@ import fs from "fs"; import { databaseUpsert } from "../database/database-wrapper"; import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; /* Support functions */ -async function fetchPage(url) { +async function fetchPage(url: string) { let response = await axios({ url: url, method: "GET", @@ -14,7 +15,6 @@ async function fetchPage(url) { "Content-Type": "text/html", }, }).then((res) => res.data); - //console.log(response) return response; } @@ -64,17 +64,20 @@ async function main1() { group: "givewell-questions-unprocessed", }); } -// main1() -async function main2() { - let rawdata = fs.readFileSync("./input/givewellopenphil-questions.json", { - encoding: "utf-8", - }); - let data = JSON.parse(rawdata); - let dataWithDate = data.map((datum) => ({ - ...datum, - timestamp: "2021-02-23", - })); - await databaseUpsert({ group: "givewellopenphil", contents: dataWithDate }); -} -main2(); +export const givewellopenphil: Platform = { + name: "givewellopenphil", + async fetcher() { + // main1() + return; // not necessary to refill the DB every time + const rawdata = fs.readFileSync("./input/givewellopenphil-questions.json", { + encoding: "utf-8", + }); + const data = JSON.parse(rawdata); + const dataWithDate = data.map((datum: any) => ({ + ...datum, + timestamp: "2021-02-23", + })); + return dataWithDate; + }, +}; diff --git a/src/backend/platforms/goodjudgment-fetch.ts b/src/backend/platforms/goodjudgment-fetch.ts deleted file mode 100644 index ace57cc..0000000 --- a/src/backend/platforms/goodjudgment-fetch.ts +++ /dev/null @@ -1,129 +0,0 @@ -/* Imports */ -import axios from "axios"; -import { Tabletojson } from "tabletojson"; -import tunnel from "tunnel"; - -import { 
databaseUpsert } from "../database/database-wrapper"; -import { hash } from "../utils/hash"; -import { calculateStars } from "../utils/stars"; - -/* Definitions */ -let endpoint = "https://goodjudgment.io/superforecasts/"; -String.prototype.replaceAll = function replaceAll(search, replace) { - return this.split(search).join(replace); -}; - -// Tunelling -/* Support functions */ - -/* Body */ -export async function goodjudgment() { - // Proxy fuckery - let proxy; - /* - * try { - proxy = await axios - .get("http://pubproxy.com/api/proxy") - .then((query) => query.data); - console.log(proxy); - } catch (error) { - console.log("Proxy generation failed; using backup proxy instead"); - // hard-coded backup proxy - */ - proxy = { - ip: process.env.BACKUP_PROXY_IP, - port: process.env.BACKUP_PROXY_PORT, - }; - // } - let agent = tunnel.httpsOverHttp({ - proxy: { - host: proxy.ip, - port: proxy.port, - }, - }); - - let content = await axios - .request({ - url: "https://goodjudgment.io/superforecasts/", - method: "get", - headers: { - "User-Agent": "Chrome", - }, - // agent, - // port: 80, - }) - .then((query) => query.data); - - // Processing - let results = []; - let jsonTable = Tabletojson.convert(content, { stripHtmlFromCells: false }); - jsonTable.shift(); // deletes first element - jsonTable.pop(); // deletes last element - // console.log(jsonTable) - for (let table of jsonTable) { - // console.log(table) - let title = table[0]["0"].split("\t\t\t").splice(3)[0]; - if (title != undefined) { - title = title.replaceAll("", ""); - let id = `goodjudgment-${hash(title)}`; - let description = table - .filter((row) => row["0"].includes("BACKGROUND:")) - .map((row) => row["0"]) - .map((text) => - text - .split("BACKGROUND:")[1] - .split("Examples of Superforecaster")[0] - .split("AT A GLANCE")[0] - .replaceAll("\n\n", "\n") - .split("\n") - .slice(3) - .join(" ") - .replaceAll(" ", "") - .replaceAll("
", "") - )[0]; - let options = table - .filter((row) => "4" in row) - .map((row) => ({ - name: row["2"] - .split('')[1] - .replace("", ""), - probability: Number(row["3"].split("%")[0]) / 100, - type: "PROBABILITY", - })); - let analysis = table.filter((row) => - row[0] ? row[0].toLowerCase().includes("commentary") : false - ); - // "Examples of Superforecaster Commentary" / Analysis - // The following is necessary twice, because we want to check if there is an empty list, and then get the first element of the first element of the list. - analysis = analysis ? analysis[0] : ""; - analysis = analysis ? analysis[0] : ""; // not a duplicate - // console.log(analysis) - let standardObj = { - id: id, - title: title, - url: endpoint, - platform: "Good Judgment", - description: description, - options: options, - timestamp: new Date().toISOString(), - qualityindicators: { - stars: calculateStars("Good Judgment", {}), - }, - extra: { - superforecastercommentary: analysis || "", - }, - }; - results.push(standardObj); - } - } - // console.log(results.slice(0,10)) - let string = JSON.stringify(results, null, 2); - console.log(results); - await databaseUpsert({ contents: results, group: "goodjudgment" }); - - console.log( - "Failing is not unexpected; see utils/pullSuperforecastsManually.sh/js" - ); - console.log("Done"); -} -// goodjudgment() diff --git a/src/backend/platforms/goodjudgment.ts b/src/backend/platforms/goodjudgment.ts new file mode 100644 index 0000000..992bee2 --- /dev/null +++ b/src/backend/platforms/goodjudgment.ts @@ -0,0 +1,125 @@ +/* Imports */ +import axios from "axios"; +import { Tabletojson } from "tabletojson"; +import tunnel from "tunnel"; + +import { hash } from "../utils/hash"; +import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; + +/* Definitions */ +let endpoint = "https://goodjudgment.io/superforecasts/"; +String.prototype.replaceAll = function replaceAll(search, replace) { + return this.split(search).join(replace); +}; + +/* Body */ +export const goodjudgment: Platform = { + name: "goodjudgment", + async fetcher() { + // Proxy fuckery + let proxy; + /* + * try { + proxy = await axios + .get("http://pubproxy.com/api/proxy") + .then((query) => query.data); + console.log(proxy); + } catch (error) { + console.log("Proxy generation failed; using backup proxy instead"); + // hard-coded backup proxy + */ + proxy = { + ip: process.env.BACKUP_PROXY_IP, + port: process.env.BACKUP_PROXY_PORT, + }; + // } + let agent = tunnel.httpsOverHttp({ + proxy: { + host: proxy.ip, + port: proxy.port, + }, + }); + + let content = await axios + .request({ + url: "https://goodjudgment.io/superforecasts/", + method: "get", + headers: { + "User-Agent": "Chrome", + }, + // agent, + // port: 80, + }) + .then((query) => query.data); + + // Processing + let results = []; + let jsonTable = Tabletojson.convert(content, { stripHtmlFromCells: false }); + jsonTable.shift(); // deletes first element + jsonTable.pop(); // deletes last element + // console.log(jsonTable) + for (let table of jsonTable) { + // console.log(table) + let title = table[0]["0"].split("\t\t\t").splice(3)[0]; + if (title != undefined) { + title = title.replaceAll("", ""); + let id = `goodjudgment-${hash(title)}`; + let description = table + .filter((row) => row["0"].includes("BACKGROUND:")) + .map((row) => row["0"]) + .map((text) => + text + .split("BACKGROUND:")[1] + .split("Examples of Superforecaster")[0] + .split("AT A GLANCE")[0] + .replaceAll("\n\n", "\n") + .split("\n") + .slice(3) + .join(" ") + 
.replaceAll(" ", "") + .replaceAll("
", "") + )[0]; + let options = table + .filter((row) => "4" in row) + .map((row) => ({ + name: row["2"] + .split('')[1] + .replace("", ""), + probability: Number(row["3"].split("%")[0]) / 100, + type: "PROBABILITY", + })); + let analysis = table.filter((row) => + row[0] ? row[0].toLowerCase().includes("commentary") : false + ); + // "Examples of Superforecaster Commentary" / Analysis + // The following is necessary twice, because we want to check if there is an empty list, and then get the first element of the first element of the list. + analysis = analysis ? analysis[0] : ""; + analysis = analysis ? analysis[0] : ""; // not a duplicate + // console.log(analysis) + let standardObj = { + id: id, + title: title, + url: endpoint, + platform: "Good Judgment", + description: description, + options: options, + timestamp: new Date().toISOString(), + qualityindicators: { + stars: calculateStars("Good Judgment", {}), + }, + extra: { + superforecastercommentary: analysis || "", + }, + }; + results.push(standardObj); + } + } + + console.log( + "Failing is not unexpected; see utils/pullSuperforecastsManually.sh/js" + ); + + return results; + }, +}; diff --git a/src/backend/platforms/goodjudmentopen-fetch.ts b/src/backend/platforms/goodjudmentopen.ts similarity index 94% rename from src/backend/platforms/goodjudmentopen-fetch.ts rename to src/backend/platforms/goodjudmentopen.ts index f09032f..e2ce6b0 100644 --- a/src/backend/platforms/goodjudmentopen-fetch.ts +++ b/src/backend/platforms/goodjudmentopen.ts @@ -2,10 +2,10 @@ import axios from "axios"; import { Tabletojson } from "tabletojson"; -import { databaseUpsert } from "../database/database-wrapper"; import { applyIfSecretExists } from "../utils/getSecrets"; import { calculateStars } from "../utils/stars"; import toMarkdown from "../utils/toMarkdown"; +import { Platform } from "./"; /* Definitions */ let htmlEndPoint = "https://www.gjopen.com/questions?page="; @@ -150,7 +150,7 @@ function isEnd(html) { return isEndBool; } -function sleep(ms) { +function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); } @@ -221,11 +221,10 @@ async function goodjudgmentopen_inner(cookie) { ); } } - console.log(results); - if (results.length > 0) { - await databaseUpsert({ contents: results, group: "goodjudmentopen" }); - } else { + + if (results.length === 0) { console.log("Not updating results, as process was not signed in"); + return; } let end = Date.now(); @@ -233,9 +232,14 @@ async function goodjudgmentopen_inner(cookie) { console.log( `Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.` ); + + return results; } -export async function goodjudgmentopen() { - let cookie = process.env.GOODJUDGMENTOPENCOOKIE; - await applyIfSecretExists(cookie, goodjudgmentopen_inner); -} +export const goodjudmentopen: Platform = { + name: "goodjudmentopen", // note the typo! 
current table name is without `g`, `goodjudmentopen` + async fetcher() { + let cookie = process.env.GOODJUDGMENTOPENCOOKIE; + return await applyIfSecretExists(cookie, goodjudgmentopen_inner); + }, +}; diff --git a/src/backend/platforms/index.ts b/src/backend/platforms/index.ts new file mode 100644 index 0000000..301b13e --- /dev/null +++ b/src/backend/platforms/index.ts @@ -0,0 +1,85 @@ +import { databaseUpsert } from "../database/database-wrapper"; +import { betfair } from "./betfair"; +import { fantasyscotus } from "./fantasyscotus"; +import { foretold } from "./foretold"; +import { givewellopenphil } from "./givewellopenphil"; +import { goodjudgment } from "./goodjudgment"; +import { goodjudmentopen } from "./goodjudmentopen"; +import { infer } from "./infer"; +import { kalshi } from "./kalshi"; +import { manifoldmarkets } from "./manifoldmarkets"; +import { metaculus } from "./metaculus"; +import { polymarket } from "./polymarket"; +import { predictit } from "./predictit"; +import { rootclaim } from "./rootclaim"; +import { smarkets } from "./smarkets"; +import { wildeford } from "./wildeford"; +import { xrisk } from "./xrisk"; + +export interface Forecast { + id: string; + title: string; + url: string; + description: string; + platform: string; + options: any[]; + timestamp: string; + qualityindicators: any; + extra?: any; +} + +// fetcher should return null if platform failed to fetch forecasts for some reason +export type PlatformFetcher = () => Promise; + +export interface Platform { + name: string; + fetcher?: PlatformFetcher; +} + +// draft for the future callback-based streaming/chunking API: +// interface FetchOptions { +// since?: string; // some kind of cursor, Date object or opaque string? +// save: (forecasts: Forecast[]) => Promise; +// } + +// export type PlatformFetcher = (options: FetchOptions) => Promise; + +// interface Platform { +// name: string; +// color?: string; +// longName: string; +// fetcher: PlatformFetcher; +// } + +export const platforms: Platform[] = [ + betfair, + fantasyscotus, + foretold, + givewellopenphil, + goodjudgment, + goodjudmentopen, + infer, + kalshi, + manifoldmarkets, + metaculus, + polymarket, + predictit, + rootclaim, + smarkets, + wildeford, + xrisk, +]; + +export const processPlatform = async (platform: Platform) => { + if (!platform.fetcher) { + console.log(`Platform ${platform.name} doesn't have a fetcher, skipping`); + return; + } + let results = await platform.fetcher(); + if (results && results.length) { + await databaseUpsert({ contents: results, group: platform.name }); + console.log("Done"); + } else { + console.log(`Platform ${platform.name} didn't return any results`); + } +}; diff --git a/src/backend/platforms/infer-fetch.ts b/src/backend/platforms/infer.ts similarity index 96% rename from src/backend/platforms/infer-fetch.ts rename to src/backend/platforms/infer.ts index 952ad76..6a9bfba 100644 --- a/src/backend/platforms/infer-fetch.ts +++ b/src/backend/platforms/infer.ts @@ -2,10 +2,10 @@ import axios from "axios"; import { Tabletojson } from "tabletojson"; -import { databaseUpsert } from "../database/database-wrapper"; import { applyIfSecretExists } from "../utils/getSecrets"; import { calculateStars } from "../utils/stars"; import toMarkdown from "../utils/toMarkdown"; +import { Forecast, Platform } from "./"; /* Definitions */ let htmlEndPoint = "https://www.infer-pub.com/questions"; @@ -182,7 +182,7 @@ function sleep(ms) { async function infer_inner(cookie) { let i = 1; let response = await fetchPage(i, cookie); - let 
results = []; + let results: Forecast[] = []; let init = Date.now(); // console.log("Downloading... This might take a couple of minutes. Results will be shown.") while (!isEnd(response) && isSignedIn(response)) { @@ -263,20 +263,24 @@ async function infer_inner(cookie) { ); } } - if (results.length > 0) { - await databaseUpsert({ contents: results, group: "infer" }); - } else { - console.log("Not updating results, as process was not signed in"); - } let end = Date.now(); let difference = end - init; console.log( `Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.` ); + + if (results.length === 0) { + console.log("Not updating results, as process was not signed in"); + return; + } + return results; } -export async function infer() { - let cookie = process.env.INFER_COOKIE; - await applyIfSecretExists(cookie, infer_inner); -} +export const infer: Platform = { + name: "infer", + async fetcher() { + let cookie = process.env.INFER_COOKIE; + return await applyIfSecretExists(cookie, infer_inner); + }, +}; diff --git a/src/backend/platforms/kalshi-fetch.ts b/src/backend/platforms/kalshi.ts similarity index 88% rename from src/backend/platforms/kalshi-fetch.ts rename to src/backend/platforms/kalshi.ts index d1706c6..212506a 100644 --- a/src/backend/platforms/kalshi-fetch.ts +++ b/src/backend/platforms/kalshi.ts @@ -1,7 +1,8 @@ /* Imports */ import axios from "axios"; -import { databaseUpsert } from "../database/database-wrapper"; + import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; /* Definitions */ let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' @@ -68,12 +69,10 @@ async function processMarkets(markets) { return results; //resultsProcessed } -/* Body */ -export async function kalshi() { - let markets = await fetchAllMarkets(); - let results = await processMarkets(markets); // somehow needed - await databaseUpsert({ contents: results, group: "kalshi" }); - - console.log("Done"); -} -// kalshi() +export const kalshi: Platform = { + name: "kalshi", + fetcher: async function () { + let markets = await fetchAllMarkets(); + return await processMarkets(markets); + }, +}; diff --git a/src/backend/platforms/manifoldmarkets-fetch.ts b/src/backend/platforms/manifoldmarkets.ts similarity index 87% rename from src/backend/platforms/manifoldmarkets-fetch.ts rename to src/backend/platforms/manifoldmarkets.ts index 1a078db..0969243 100644 --- a/src/backend/platforms/manifoldmarkets-fetch.ts +++ b/src/backend/platforms/manifoldmarkets.ts @@ -1,7 +1,8 @@ /* Imports */ import axios from "axios"; -import { databaseUpsert } from "../database/database-wrapper"; + import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; /* Definitions */ let endpoint = "https://manifold.markets/api/v0/markets"; @@ -87,14 +88,12 @@ async function processPredictions(predictions) { return unresolvedResults; //resultsProcessed } -/* Body */ - -export async function manifoldmarkets() { - let data = await fetchData(); - let results = await processPredictions(data); // somehow needed - showStatistics(results); - await databaseUpsert({ contents: results, group: "manifoldmarkets" }); - - console.log("Done"); -} -// manifoldmarkets() +export const manifoldmarkets: Platform = { + name: "manifoldmarkets", + async fetcher() { 
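+    // Fetch every market from the Manifold API, map it into a forecast object,
+    // keep only unresolved markets, and log summary statistics before handing
+    // the results back to processPlatform().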
+ let data = await fetchData(); + let results = await processPredictions(data); // somehow needed + showStatistics(results); + return results; + }, +}; diff --git a/src/backend/platforms/metaculus-fetch.ts b/src/backend/platforms/metaculus-fetch.ts deleted file mode 100644 index 87f6d91..0000000 --- a/src/backend/platforms/metaculus-fetch.ts +++ /dev/null @@ -1,196 +0,0 @@ -/* Imports */ -import axios from "axios"; -import { databaseUpsert } from "../database/database-wrapper"; -import { calculateStars } from "../utils/stars"; -import toMarkdown from "../utils/toMarkdown"; - -/* Definitions */ -let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page="; -let now = new Date().toISOString(); -let DEBUG_MODE = "off"; -let SLEEP_TIME = 5000; -/* Support functions */ -async function fetchMetaculusQuestions(next) { - // Numbers about a given address: how many, how much, at what price, etc. - let response; - let data; - try { - response = await axios({ - url: next, - method: "GET", - headers: { "Content-Type": "application/json" }, - }); - data = response.data; - } catch (error) { - console.log(`Error in async function fetchMetaculusQuestions(next)`); - if (!!error.response.headers["retry-after"]) { - let timeout = error.response.headers["retry-after"]; - console.log(`Timeout: ${timeout}`); - await sleep(Number(timeout) * 1000 + SLEEP_TIME); - } else { - await sleep(SLEEP_TIME); - } - console.log(error); - } finally { - try { - response = await axios({ - url: next, - method: "GET", - headers: { "Content-Type": "application/json" }, - }); - data = response.data; - } catch (error) { - console.log(error); - return { results: [] }; - } - } - // console.log(response) - return data; -} - -function sleep(ms) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -async function fetchMetaculusQuestionDescription(slug) { - try { - let response = await axios({ - method: "get", - url: "https://www.metaculus.com" + slug, - }).then((response) => response.data); - return response; - } catch (error) { - console.log(`Error in: fetchMetaculusQuestionDescription`); - console.log( - `We encountered some error when attempting to fetch a metaculus page. Trying again` - ); - if ( - typeof error.response != "undefined" && - typeof error.response.headers != "undefined" && - typeof error.response.headers["retry-after"] != "undefined" - ) { - let timeout = error.response.headers["retry-after"]; - console.log(`Timeout: ${timeout}`); - await sleep(Number(timeout) * 1000 + SLEEP_TIME); - } else { - await sleep(SLEEP_TIME); - } - try { - let response = await axios({ - method: "get", - url: "https://www.metaculus.com" + slug, - }).then((response) => response.data); - // console.log(response) - return response; - } catch (error) { - console.log( - `We encountered some error when attempting to fetch a metaculus page.` - ); - console.log("Error", error); - throw "Giving up"; - } - } -} - -/* Body */ - -export async function metaculus() { - // let metaculusQuestionsInit = await fetchMetaculusQuestions(1) - // let numQueries = Math.round(Number(metaculusQuestionsInit.count) / 20) - // console.log(`Downloading... This might take a while. 
Total number of queries: ${numQueries}`) - // for (let i = 4; i <= numQueries; i++) { // change numQueries to 10 if one want to just test - let all_questions = []; - let next = "https://www.metaculus.com/api2/questions/"; - let i = 1; - while (next) { - if (i % 20 == 0) { - console.log("Sleeping for 500ms"); - await sleep(SLEEP_TIME); - } - console.log(`\nQuery #${i}`); - let metaculusQuestions = await fetchMetaculusQuestions(next); - let results = metaculusQuestions.results; - let j = false; - for (let result of results) { - if (result.publish_time < now && now < result.resolve_time) { - await sleep(SLEEP_TIME / 2); - let questionPage = await fetchMetaculusQuestionDescription( - result.page_url - ); - if (!questionPage.includes("A public prediction by")) { - // console.log(questionPage) - let descriptionraw = questionPage.split( - `
` - )[1]; //.split(`
`)[1] - let descriptionprocessed1 = descriptionraw.split("
")[0]; - let descriptionprocessed2 = toMarkdown(descriptionprocessed1); - let description = descriptionprocessed2; - - let isbinary = result.possibilities.type == "binary"; - let options = []; - if (isbinary) { - let probability = Number(result.community_prediction.full.q2); - options = [ - { - name: "Yes", - probability: probability, - type: "PROBABILITY", - }, - { - name: "No", - probability: 1 - probability, - type: "PROBABILITY", - }, - ]; - } - let id = `metaculus-${result.id}`; - let interestingInfo = { - id: id, - title: result.title, - url: "https://www.metaculus.com" + result.page_url, - platform: "Metaculus", - description: description, - options: options, - timestamp: new Date().toISOString(), - qualityindicators: { - numforecasts: Number(result.number_of_predictions), - stars: calculateStars("Metaculus", { - numforecasts: result.number_of_predictions, - }), - }, - extra: { - resolution_data: { - publish_time: result.publish_time, - resolution: result.resolution, - close_time: result.close_time, - resolve_time: result.resolve_time, - }, - }, - //"status": result.status, - //"publish_time": result.publish_time, - //"close_time": result.close_time, - //"type": result.possibilities.type, // We want binary ones here. - //"last_activity_time": result.last_activity_time, - }; - if (Number(result.number_of_predictions) >= 10) { - console.log(`- ${interestingInfo.title}`); - all_questions.push(interestingInfo); - if ((!j && i % 20 == 0) || DEBUG_MODE == "on") { - console.log(interestingInfo); - j = true; - } - } - } else { - console.log("- [Skipping public prediction]"); - } - } - } - next = metaculusQuestions.next; - i = i + 1; - } - - await databaseUpsert({ contents: all_questions, group: "metaculus" }); - - console.log("Done"); -} -//metaculus() diff --git a/src/backend/platforms/metaculus.ts b/src/backend/platforms/metaculus.ts new file mode 100644 index 0000000..78369ee --- /dev/null +++ b/src/backend/platforms/metaculus.ts @@ -0,0 +1,195 @@ +/* Imports */ +import axios from "axios"; + +import { calculateStars } from "../utils/stars"; +import toMarkdown from "../utils/toMarkdown"; +import { Platform } from "./"; + +/* Definitions */ +let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page="; +let now = new Date().toISOString(); +let DEBUG_MODE = "off"; +let SLEEP_TIME = 5000; +/* Support functions */ +async function fetchMetaculusQuestions(next) { + // Numbers about a given address: how many, how much, at what price, etc. 
+ let response; + let data; + try { + response = await axios({ + url: next, + method: "GET", + headers: { "Content-Type": "application/json" }, + }); + data = response.data; + } catch (error) { + console.log(`Error in async function fetchMetaculusQuestions(next)`); + if (!!error.response.headers["retry-after"]) { + let timeout = error.response.headers["retry-after"]; + console.log(`Timeout: ${timeout}`); + await sleep(Number(timeout) * 1000 + SLEEP_TIME); + } else { + await sleep(SLEEP_TIME); + } + console.log(error); + } finally { + try { + response = await axios({ + url: next, + method: "GET", + headers: { "Content-Type": "application/json" }, + }); + data = response.data; + } catch (error) { + console.log(error); + return { results: [] }; + } + } + // console.log(response) + return data; +} + +function sleep(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function fetchMetaculusQuestionDescription(slug) { + try { + let response = await axios({ + method: "get", + url: "https://www.metaculus.com" + slug, + }).then((response) => response.data); + return response; + } catch (error) { + console.log(`Error in: fetchMetaculusQuestionDescription`); + console.log( + `We encountered some error when attempting to fetch a metaculus page. Trying again` + ); + if ( + typeof error.response != "undefined" && + typeof error.response.headers != "undefined" && + typeof error.response.headers["retry-after"] != "undefined" + ) { + let timeout = error.response.headers["retry-after"]; + console.log(`Timeout: ${timeout}`); + await sleep(Number(timeout) * 1000 + SLEEP_TIME); + } else { + await sleep(SLEEP_TIME); + } + try { + let response = await axios({ + method: "get", + url: "https://www.metaculus.com" + slug, + }).then((response) => response.data); + // console.log(response) + return response; + } catch (error) { + console.log( + `We encountered some error when attempting to fetch a metaculus page.` + ); + console.log("Error", error); + throw "Giving up"; + } + } +} + +export const metaculus: Platform = { + name: "metaculus", + async fetcher() { + // let metaculusQuestionsInit = await fetchMetaculusQuestions(1) + // let numQueries = Math.round(Number(metaculusQuestionsInit.count) / 20) + // console.log(`Downloading... This might take a while. Total number of queries: ${numQueries}`) + // for (let i = 4; i <= numQueries; i++) { // change numQueries to 10 if one want to just test } + let all_questions = []; + let next = "https://www.metaculus.com/api2/questions/"; + let i = 1; + while (next) { + if (i % 20 == 0) { + console.log("Sleeping for 500ms"); + await sleep(SLEEP_TIME); + } + console.log(`\nQuery #${i}`); + let metaculusQuestions = await fetchMetaculusQuestions(next); + let results = metaculusQuestions.results; + let j = false; + for (let result of results) { + if (result.publish_time < now && now < result.resolve_time) { + await sleep(SLEEP_TIME / 2); + let questionPage = await fetchMetaculusQuestionDescription( + result.page_url + ); + if (!questionPage.includes("A public prediction by")) { + // console.log(questionPage) + let descriptionraw = questionPage.split( + `
` + )[1]; //.split(`
`)[1] + let descriptionprocessed1 = descriptionraw.split("
")[0]; + let descriptionprocessed2 = toMarkdown(descriptionprocessed1); + let description = descriptionprocessed2; + + let isbinary = result.possibilities.type == "binary"; + let options = []; + if (isbinary) { + let probability = Number(result.community_prediction.full.q2); + options = [ + { + name: "Yes", + probability: probability, + type: "PROBABILITY", + }, + { + name: "No", + probability: 1 - probability, + type: "PROBABILITY", + }, + ]; + } + let id = `metaculus-${result.id}`; + let interestingInfo = { + id: id, + title: result.title, + url: "https://www.metaculus.com" + result.page_url, + platform: "Metaculus", + description: description, + options: options, + timestamp: new Date().toISOString(), + qualityindicators: { + numforecasts: Number(result.number_of_predictions), + stars: calculateStars("Metaculus", { + numforecasts: result.number_of_predictions, + }), + }, + extra: { + resolution_data: { + publish_time: result.publish_time, + resolution: result.resolution, + close_time: result.close_time, + resolve_time: result.resolve_time, + }, + }, + //"status": result.status, + //"publish_time": result.publish_time, + //"close_time": result.close_time, + //"type": result.possibilities.type, // We want binary ones here. + //"last_activity_time": result.last_activity_time, + }; + if (Number(result.number_of_predictions) >= 10) { + console.log(`- ${interestingInfo.title}`); + all_questions.push(interestingInfo); + if ((!j && i % 20 == 0) || DEBUG_MODE == "on") { + console.log(interestingInfo); + j = true; + } + } + } else { + console.log("- [Skipping public prediction]"); + } + } + } + next = metaculusQuestions.next; + i = i + 1; + } + + return all_questions; + }, +}; diff --git a/src/backend/platforms/polymarket-fetch.ts b/src/backend/platforms/polymarket-fetch.ts deleted file mode 100644 index d3d5c8f..0000000 --- a/src/backend/platforms/polymarket-fetch.ts +++ /dev/null @@ -1,135 +0,0 @@ -import axios from "axios"; -import { databaseUpsert } from "../database/database-wrapper"; -import { calculateStars } from "../utils/stars"; - -/* Definitions */ -let graphQLendpoint = - "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-5"; // "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-4"// "https://api.thegraph.com/subgraphs/name/tokenunion/polymarket-matic"//"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' -let units = 10 ** 6; - -async function fetchAllContractInfo() { - // for info which the polymarket graphql API - let response = await axios - .get( - "https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&closed=false&_limit=-1" - // "https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&_limit=-1" to get all markets, including closed ones - ) - .then((query) => query.data); - response = response.filter((res) => res.closed != true); - return response; -} - -async function fetchIndividualContractData(marketMakerAddress) { - let daysSinceEra = Math.round(Date.now() / (1000 * 24 * 60 * 60)) - 7; // last week - let response = await axios({ - url: graphQLendpoint, - method: "POST", - headers: { "Content-Type": "application/json" }, - data: JSON.stringify({ - query: ` - { - fixedProductMarketMakers(first: 1000 - where: { - id: "${marketMakerAddress}" - lastActiveDay_gt: ${daysSinceEra} - }){ - id - creator - creationTimestamp - fee - tradesQuantity - 
buysQuantity - sellsQuantity - lastActiveDay - outcomeTokenPrices - outcomeTokenAmounts - liquidityParameter - collateralBuyVolume - collateralSellVolume - conditions { - outcomeSlotCount - } - } - } - `, - }), - }) - .then((res) => res.data) - .then((res) => res.data.fixedProductMarketMakers); - // console.log(response) - return response; -} - -async function fetchAll() { - let results = []; - let webpageEndpointData = await fetchAllContractInfo(); - for (let marketInfo of webpageEndpointData) { - let address = marketInfo.marketMakerAddress; - let addressLowerCase = address.toLowerCase(); - if (marketInfo.outcomes[0] != "Long" || marketInfo.outcomes[1] != "Long") { - let moreMarketAnswer = await fetchIndividualContractData( - addressLowerCase - ); - if (moreMarketAnswer.length > 0) { - let moreMarketInfo = moreMarketAnswer[0]; - let id = `polymarket-${addressLowerCase.slice(0, 10)}`; - // console.log(id); - let numforecasts = Number(moreMarketInfo.tradesQuantity); - let tradevolume = - (Number(moreMarketInfo.collateralBuyVolume) + - Number(moreMarketInfo.collateralSellVolume)) / - units; - let liquidity = Number(moreMarketInfo.liquidityParameter) / units; - // let isbinary = Number(moreMarketInfo.conditions[0].outcomeSlotCount) == 2 - // let percentage = Number(moreMarketInfo.outcomeTokenPrices[0]) * 100 - // let percentageFormatted = isbinary ? (percentage.toFixed(0) + "%") : "none" - let options = []; - for (let outcome in moreMarketInfo.outcomeTokenPrices) { - options.push({ - name: marketInfo.outcomes[outcome], - probability: moreMarketInfo.outcomeTokenPrices[outcome], - type: "PROBABILITY", - }); - } - - let result = { - id: id, - title: marketInfo.question, - url: "https://polymarket.com/market/" + marketInfo.slug, - platform: "PolyMarket", - description: marketInfo.description, - options: options, - timestamp: new Date().toISOString(), - qualityindicators: { - numforecasts: numforecasts.toFixed(0), - liquidity: liquidity.toFixed(2), - tradevolume: tradevolume.toFixed(2), - stars: calculateStars("Polymarket", { - liquidity, - option: options[0], - volume: tradevolume, - }), - }, - extra: { - address: marketInfo.address, - }, - /* - */ - }; - if (marketInfo.category != "Sports") { - // console.log(result) - results.push(result); - } - } - } - } - return results; -} - -export async function polymarket() { - let results = await fetchAll(); - await databaseUpsert({ contents: results, group: "polymarket" }); - - console.log("Done"); -} -// polymarket(); diff --git a/src/backend/platforms/polymarket.ts b/src/backend/platforms/polymarket.ts new file mode 100644 index 0000000..175b93c --- /dev/null +++ b/src/backend/platforms/polymarket.ts @@ -0,0 +1,134 @@ +/* Imports */ +import axios from "axios"; + +import { calculateStars } from "../utils/stars"; +import { Forecast, Platform } from "./"; + +/* Definitions */ +let graphQLendpoint = + "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-5"; // "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-4"// "https://api.thegraph.com/subgraphs/name/tokenunion/polymarket-matic"//"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' +let units = 10 ** 6; + +async function fetchAllContractInfo() { + // for info which the polymarket graphql API + let response = await axios + .get( + 
"https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&closed=false&_limit=-1" + // "https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&_limit=-1" to get all markets, including closed ones + ) + .then((query) => query.data); + response = response.filter((res) => res.closed != true); + return response; +} + +async function fetchIndividualContractData(marketMakerAddress) { + let daysSinceEra = Math.round(Date.now() / (1000 * 24 * 60 * 60)) - 7; // last week + let response = await axios({ + url: graphQLendpoint, + method: "POST", + headers: { "Content-Type": "application/json" }, + data: JSON.stringify({ + query: ` + { + fixedProductMarketMakers(first: 1000 + where: { + id: "${marketMakerAddress}" + lastActiveDay_gt: ${daysSinceEra} + }){ + id + creator + creationTimestamp + fee + tradesQuantity + buysQuantity + sellsQuantity + lastActiveDay + outcomeTokenPrices + outcomeTokenAmounts + liquidityParameter + collateralBuyVolume + collateralSellVolume + conditions { + outcomeSlotCount + } + } + } + `, + }), + }) + .then((res) => res.data) + .then((res) => res.data.fixedProductMarketMakers); + // console.log(response) + return response; +} + +export const polymarket: Platform = { + name: "polymarket", + async fetcher() { + let results: Forecast[] = []; + let webpageEndpointData = await fetchAllContractInfo(); + for (let marketInfo of webpageEndpointData) { + let address = marketInfo.marketMakerAddress; + let addressLowerCase = address.toLowerCase(); + if ( + marketInfo.outcomes[0] != "Long" || + marketInfo.outcomes[1] != "Long" + ) { + let moreMarketAnswer = await fetchIndividualContractData( + addressLowerCase + ); + if (moreMarketAnswer.length > 0) { + let moreMarketInfo = moreMarketAnswer[0]; + let id = `polymarket-${addressLowerCase.slice(0, 10)}`; + // console.log(id); + let numforecasts = Number(moreMarketInfo.tradesQuantity); + let tradevolume = + (Number(moreMarketInfo.collateralBuyVolume) + + Number(moreMarketInfo.collateralSellVolume)) / + units; + let liquidity = Number(moreMarketInfo.liquidityParameter) / units; + // let isbinary = Number(moreMarketInfo.conditions[0].outcomeSlotCount) == 2 + // let percentage = Number(moreMarketInfo.outcomeTokenPrices[0]) * 100 + // let percentageFormatted = isbinary ? 
(percentage.toFixed(0) + "%") : "none" + let options = []; + for (let outcome in moreMarketInfo.outcomeTokenPrices) { + options.push({ + name: marketInfo.outcomes[outcome], + probability: moreMarketInfo.outcomeTokenPrices[outcome], + type: "PROBABILITY", + }); + } + + let result: Forecast = { + id: id, + title: marketInfo.question, + url: "https://polymarket.com/market/" + marketInfo.slug, + platform: "PolyMarket", + description: marketInfo.description, + options: options, + timestamp: new Date().toISOString(), + qualityindicators: { + numforecasts: numforecasts.toFixed(0), + liquidity: liquidity.toFixed(2), + tradevolume: tradevolume.toFixed(2), + stars: calculateStars("Polymarket", { + liquidity, + option: options[0], + volume: tradevolume, + }), + }, + extra: { + address: marketInfo.address, + }, + /* + */ + }; + if (marketInfo.category !== "Sports") { + results.push(result); + } + } + } + } + return results; + }, +}; diff --git a/src/backend/platforms/predictit-fetch.ts b/src/backend/platforms/predictit-fetch.ts deleted file mode 100644 index 852f361..0000000 --- a/src/backend/platforms/predictit-fetch.ts +++ /dev/null @@ -1,112 +0,0 @@ -/* Imports */ -import axios from "axios"; -import { databaseUpsert } from "../database/database-wrapper"; -import { calculateStars } from "../utils/stars"; -import toMarkdown from "../utils/toMarkdown"; - -/* Support functions */ -async function fetchmarkets() { - let response = await axios({ - method: "get", - url: "https://www.predictit.org/api/marketdata/all/", - }); - let openMarkets = response.data.markets.filter( - (market) => market.status == "Open" - ); - return openMarkets; -} - -async function fetchmarketrules(market_id) { - let response = await axios({ - method: "get", - url: "https://www.predictit.org/api/Market/" + market_id, - }); - return response.data.rule; -} - -async function fetchmarketvolumes() { - let response = await axios({ - method: "get", - url: "https://predictit-f497e.firebaseio.com/marketStats.json", - }); - return response.data; -} - -function sleep(ms) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -/* Body */ -export async function predictit() { - let markets = await fetchmarkets(); - let marketVolumes = await fetchmarketvolumes(); - - markets = markets.map((market) => ({ - ...market, - TotalSharesTraded: marketVolumes[market.id]["TotalSharesTraded"], - })); - // console.log(markets) - - let results = []; - for (let market of markets) { - // console.log(market.name) - let id = `predictit-${market.id}`; - let isbinary = market.contracts.length == 1; - await sleep(3000 * (1 + Math.random())); - let descriptionraw = await fetchmarketrules(market.id); - let descriptionprocessed1 = toMarkdown(descriptionraw); - let description = descriptionprocessed1; - let shares_volume = market["TotalSharesTraded"]; - // let percentageFormatted = isbinary ? 
Number(Number(market.contracts[0].lastTradePrice) * 100).toFixed(0) + "%" : "none" - - let options = market.contracts.map((contract) => ({ - name: contract.name, - probability: contract.lastTradePrice, - type: "PROBABILITY", - })); - let totalValue = options - .map((element) => Number(element.probability)) - .reduce((a, b) => a + b, 0); - - if (options.length != 1 && totalValue > 1) { - options = options.map((element) => ({ - ...element, - probability: Number(element.probability) / totalValue, - })); - } else if (options.length == 1) { - let option = options[0]; - let probability = option["probability"]; - options = [ - { - name: "Yes", - probability: probability, - type: "PROBABILITY", - }, - { - name: "No", - probability: 1 - probability, - type: "PROBABILITY", - }, - ]; - } - - let obj = { - id: id, - title: market["name"], - url: market.url, - platform: "PredictIt", - description: description, - options: options, - timestamp: new Date().toISOString(), - qualityindicators: { - stars: calculateStars("PredictIt", {}), - shares_volume: shares_volume, - }, - }; - // console.log(obj) - results.push(obj); - } - await databaseUpsert({ contents: results, group: "predictit" }); - - console.log("Done"); -} diff --git a/src/backend/platforms/predictit.ts b/src/backend/platforms/predictit.ts new file mode 100644 index 0000000..fb0f645 --- /dev/null +++ b/src/backend/platforms/predictit.ts @@ -0,0 +1,115 @@ +/* Imports */ +import axios from "axios"; + +import { calculateStars } from "../utils/stars"; +import toMarkdown from "../utils/toMarkdown"; +import { Platform } from "./"; + +/* Support functions */ +async function fetchmarkets() { + let response = await axios({ + method: "get", + url: "https://www.predictit.org/api/marketdata/all/", + }); + let openMarkets = response.data.markets.filter( + (market) => market.status == "Open" + ); + return openMarkets; +} + +async function fetchmarketrules(market_id) { + let response = await axios({ + method: "get", + url: "https://www.predictit.org/api/Market/" + market_id, + }); + return response.data.rule; +} + +async function fetchmarketvolumes() { + let response = await axios({ + method: "get", + url: "https://predictit-f497e.firebaseio.com/marketStats.json", + }); + return response.data; +} + +function sleep(ms: number) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +/* Body */ +export const predictit: Platform = { + name: "predictit", + async fetcher() { + let markets = await fetchmarkets(); + let marketVolumes = await fetchmarketvolumes(); + + markets = markets.map((market) => ({ + ...market, + TotalSharesTraded: marketVolumes[market.id]["TotalSharesTraded"], + })); + // console.log(markets) + + let results = []; + for (let market of markets) { + // console.log(market.name) + let id = `predictit-${market.id}`; + let isbinary = market.contracts.length == 1; + await sleep(3000 * (1 + Math.random())); + let descriptionraw = await fetchmarketrules(market.id); + let descriptionprocessed1 = toMarkdown(descriptionraw); + let description = descriptionprocessed1; + let shares_volume = market["TotalSharesTraded"]; + // let percentageFormatted = isbinary ? 
Number(Number(market.contracts[0].lastTradePrice) * 100).toFixed(0) + "%" : "none" + + let options = market.contracts.map((contract) => ({ + name: contract.name, + probability: contract.lastTradePrice, + type: "PROBABILITY", + })); + let totalValue = options + .map((element) => Number(element.probability)) + .reduce((a, b) => a + b, 0); + + if (options.length != 1 && totalValue > 1) { + options = options.map((element) => ({ + ...element, + probability: Number(element.probability) / totalValue, + })); + } else if (options.length == 1) { + let option = options[0]; + let probability = option["probability"]; + options = [ + { + name: "Yes", + probability: probability, + type: "PROBABILITY", + }, + { + name: "No", + probability: 1 - probability, + type: "PROBABILITY", + }, + ]; + } + + let obj = { + id: id, + title: market["name"], + url: market.url, + platform: "PredictIt", + description: description, + options: options, + timestamp: new Date().toISOString(), + qualityindicators: { + stars: calculateStars("PredictIt", {}), + shares_volume: shares_volume, + }, + }; + // console.log(obj) + results.push(obj); + } + + return results; + }, +}; diff --git a/src/backend/platforms/rootclaim-fetch.ts b/src/backend/platforms/rootclaim-fetch.ts deleted file mode 100644 index 2a39b4d..0000000 --- a/src/backend/platforms/rootclaim-fetch.ts +++ /dev/null @@ -1,65 +0,0 @@ -/* Imports */ -import axios from "axios"; -import { databaseUpsert } from "../database/database-wrapper"; -import { calculateStars } from "../utils/stars"; -import toMarkdown from "../utils/toMarkdown"; - -/* Definitions */ -let jsonEndpoint = - "https://www.rootclaim.com/main_page_stories?number=100&offset=0"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' - -async function fetchAllRootclaims() { - // for info which the polymarket graphql API - let response = await axios - .get(jsonEndpoint) - .then((response) => response.data); - if (response.length != response[0] + 1) { - console.log(response.length); - console.log(response[0]); - //throw Error("Rootclaim's backend has changed.") - } - response.shift(); - return response; -} - -async function fetchAndProcessData() { - let claims = await fetchAllRootclaims(); - let results = []; - for (let claim of claims) { - let id = `rootclaim-${claim.slug.toLowerCase()}`; - let options = []; - for (let scenario of claim.scenarios) { - //console.log(scenario) - options.push({ - name: toMarkdown(scenario.text).replace("\n", "").replace("'", "'"), - probability: scenario.net_prob / 100, - type: "PROBABILITY", - }); - } - let claimUrlPath = claim.created_at < "2020" ? 
"claims" : "analysis"; - let obj = { - id: id, - title: toMarkdown(claim.question).replace("\n", ""), - url: `https://www.rootclaim.com/${claimUrlPath}/${claim.slug}`, - platform: "Rootclaim", - description: toMarkdown(claim.background).replace("'", "'"), - options: options, - timestamp: new Date().toISOString(), - qualityindicators: { - numforecasts: 1, - stars: calculateStars("Rootclaim", {}), - }, - }; - results.push(obj); - } - return results; -} - -/* Body */ -export async function rootclaim() { - let results = await fetchAndProcessData(); - await databaseUpsert({ contents: results, group: "rootclaim" }); - - console.log("Done"); -} -//rootclaim() diff --git a/src/backend/platforms/rootclaim.ts b/src/backend/platforms/rootclaim.ts new file mode 100644 index 0000000..6520458 --- /dev/null +++ b/src/backend/platforms/rootclaim.ts @@ -0,0 +1,62 @@ +/* Imports */ +import axios from "axios"; + +import { calculateStars } from "../utils/stars"; +import toMarkdown from "../utils/toMarkdown"; +import { Platform } from "./"; + +/* Definitions */ +let jsonEndpoint = + "https://www.rootclaim.com/main_page_stories?number=100&offset=0"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' + +async function fetchAllRootclaims() { + // for info which the polymarket graphql API + let response = await axios + .get(jsonEndpoint) + .then((response) => response.data); + if (response.length != response[0] + 1) { + console.log(response.length); + console.log(response[0]); + //throw Error("Rootclaim's backend has changed.") + } + response.shift(); + return response; +} + +export const rootclaim: Platform = { + name: "rootclaim", + async fetcher() { + let claims = await fetchAllRootclaims(); + let results = []; + for (let claim of claims) { + let id = `rootclaim-${claim.slug.toLowerCase()}`; + let options = []; + for (let scenario of claim.scenarios) { + //console.log(scenario) + options.push({ + name: toMarkdown(scenario.text) + .replace("\n", "") + .replace("'", "'"), + probability: scenario.net_prob / 100, + type: "PROBABILITY", + }); + } + let claimUrlPath = claim.created_at < "2020" ? "claims" : "analysis"; + let obj = { + id: id, + title: toMarkdown(claim.question).replace("\n", ""), + url: `https://www.rootclaim.com/${claimUrlPath}/${claim.slug}`, + platform: "Rootclaim", + description: toMarkdown(claim.background).replace("'", "'"), + options: options, + timestamp: new Date().toISOString(), + qualityindicators: { + numforecasts: 1, + stars: calculateStars("Rootclaim", {}), + }, + }; + results.push(obj); + } + return results; + }, +}; diff --git a/src/backend/platforms/smarkets-fetch.ts b/src/backend/platforms/smarkets-fetch.ts deleted file mode 100644 index fdc234b..0000000 --- a/src/backend/platforms/smarkets-fetch.ts +++ /dev/null @@ -1,180 +0,0 @@ -/* Imports */ -import axios from "axios"; - -import { databaseUpsert } from "../database/database-wrapper"; -import { calculateStars } from "../utils/stars"; - -/* Definitions */ -let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/"; -let VERBOSE = false; -let empty = () => 0; -/* Support functions */ - -async function fetchEvents(url) { - let response = await axios({ - url: htmlEndPointEntrance + url, - method: "GET", - headers: { - "Content-Type": "text/html", - }, - }).then((res) => res.data); - VERBOSE ? 
console.log(response) : empty(); - return response; -} - -async function fetchMarkets(eventid) { - let response = await axios({ - url: `https://api.smarkets.com/v3/events/${eventid}/markets/`, - method: "GET", - headers: { - "Content-Type": "text/json", - }, - }) - .then((res) => res.data) - .then((res) => res.markets); - return response; -} - -async function fetchContracts(marketid) { - let response = await axios({ - url: `https://api.smarkets.com/v3/markets/${marketid}/contracts/`, - method: "GET", - headers: { - "Content-Type": "text/html", - }, - }).then((res) => res.data); - VERBOSE ? console.log(response) : empty(); - return response; -} - -async function fetchPrices(marketid) { - let response = await axios({ - url: `https://api.smarkets.com/v3/markets/${marketid}/last_executed_prices/`, - method: "GET", - headers: { - "Content-Type": "text/html", - }, - }).then((res) => res.data); - VERBOSE ? console.log(response) : empty(); - return response; -} - -/* Body */ - -export async function smarkets() { - let htmlPath = - "?state=new&state=upcoming&state=live&type_domain=politics&type_scope=single_event&with_new_type=true&sort=id&limit=50"; - - let events = []; - while (htmlPath) { - let data = await fetchEvents(htmlPath); - events.push(...data.events); - htmlPath = data.pagination.next_page; - } - VERBOSE ? console.log(events) : empty(); - let markets = []; - for (let event of events) { - VERBOSE ? console.log(Date.now()) : empty(); - VERBOSE ? console.log(event.name) : empty(); - let eventMarkets = await fetchMarkets(event.id); - eventMarkets = eventMarkets.map((market) => ({ - ...market, - slug: event.full_slug, - })); - VERBOSE ? console.log("Markets fetched") : empty(); - VERBOSE ? console.log(event.id) : empty(); - VERBOSE ? console.log(eventMarkets) : empty(); - markets.push(...eventMarkets); - //let lastPrices = await fetchPrices(market.id) - } - VERBOSE ? console.log(markets) : empty(); - - let results = []; - for (let market of markets) { - VERBOSE ? console.log("================") : empty(); - VERBOSE ? console.log("Market: ", market) : empty(); - let id = `smarkets-${market.id}`; - let name = market.name; - - let contracts = await fetchContracts(market.id); - VERBOSE ? console.log("Contracts: ", contracts) : empty(); - let prices = await fetchPrices(market.id); - VERBOSE - ? console.log("Prices: ", prices["last_executed_prices"][market.id]) - : empty(); - - let optionsObj = {}; - for (let contract of contracts["contracts"]) { - optionsObj[contract.id] = { name: contract.name }; - } - for (let price of prices["last_executed_prices"][market.id]) { - optionsObj[price.contract_id] = { - ...optionsObj[price.contract_id], - probability: price.last_executed_price - ? Number(price.last_executed_price) - : null, - type: "PROBABILITY", - }; - } - let options: any[] = Object.values(optionsObj); - // monkey patch the case where there are only two options and only one has traded. - if ( - options.length == 2 && - options.map((option) => option.probability).includes(null) - ) { - let nonNullPrice = - options[0].probability == null - ? options[1].probability - : options[0].probability; - options = options.map((option) => { - let probability = option.probability; - return { - ...option, - probability: probability == null ? 100 - nonNullPrice : probability, - // yes, 100, because prices are not yet normalized. 
- }; - }); - } - - // Normalize normally - let totalValue = options - .map((element) => Number(element.probability)) - .reduce((a, b) => a + b, 0); - - options = options.map((element) => ({ - ...element, - probability: Number(element.probability) / totalValue, - })); - VERBOSE ? console.log(options) : empty(); - - /* - if(contracts["contracts"].length == 2){ - isBinary = true - percentage = ( Number(prices["last_executed_prices"][market.id][0].last_executed_price) + (100 - Number(prices["last_executed_prices"][market.id][1].last_executed_price)) ) / 2 - percentage = Math.round(percentage)+"%" - let contractName = contracts["contracts"][0].name - name = name+ (contractName=="Yes"?'':` (${contracts["contracts"][0].name})`) - } - */ - let result = { - id: id, - title: name, - url: "https://smarkets.com/event/" + market.event_id + market.slug, - platform: "Smarkets", - description: market.description, - options: options, - timestamp: new Date().toISOString(), - qualityindicators: { - stars: calculateStars("Smarkets", {}), - }, - }; - VERBOSE ? console.log(result) : empty(); - results.push(result); - } - VERBOSE ? console.log(results) : empty(); - - await databaseUpsert({ contents: results, group: "smarkets" }); - VERBOSE ? console.log(JSON.stringify(results, null, 4)) : empty(); - VERBOSE ? console.dir(results, { depth: null }) : empty(); -} -//smarkets() diff --git a/src/backend/platforms/smarkets.ts b/src/backend/platforms/smarkets.ts new file mode 100644 index 0000000..4a922de --- /dev/null +++ b/src/backend/platforms/smarkets.ts @@ -0,0 +1,177 @@ +/* Imports */ +import axios from "axios"; + +import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; + +/* Definitions */ +let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/"; +let VERBOSE = false; +let empty = () => 0; +/* Support functions */ + +async function fetchEvents(url) { + let response = await axios({ + url: htmlEndPointEntrance + url, + method: "GET", + headers: { + "Content-Type": "text/html", + }, + }).then((res) => res.data); + VERBOSE ? console.log(response) : empty(); + return response; +} + +async function fetchMarkets(eventid) { + let response = await axios({ + url: `https://api.smarkets.com/v3/events/${eventid}/markets/`, + method: "GET", + headers: { + "Content-Type": "text/json", + }, + }) + .then((res) => res.data) + .then((res) => res.markets); + return response; +} + +async function fetchContracts(marketid) { + let response = await axios({ + url: `https://api.smarkets.com/v3/markets/${marketid}/contracts/`, + method: "GET", + headers: { + "Content-Type": "text/html", + }, + }).then((res) => res.data); + VERBOSE ? console.log(response) : empty(); + return response; +} + +async function fetchPrices(marketid) { + let response = await axios({ + url: `https://api.smarkets.com/v3/markets/${marketid}/last_executed_prices/`, + method: "GET", + headers: { + "Content-Type": "text/html", + }, + }).then((res) => res.data); + VERBOSE ? console.log(response) : empty(); + return response; +} + +export const smarkets: Platform = { + name: "smarkets", + async fetcher() { + let htmlPath = + "?state=new&state=upcoming&state=live&type_domain=politics&type_scope=single_event&with_new_type=true&sort=id&limit=50"; + + let events = []; + while (htmlPath) { + let data = await fetchEvents(htmlPath); + events.push(...data.events); + htmlPath = data.pagination.next_page; + } + VERBOSE ? console.log(events) : empty(); + let markets = []; + for (let event of events) { + VERBOSE ? 
console.log(Date.now()) : empty(); + VERBOSE ? console.log(event.name) : empty(); + let eventMarkets = await fetchMarkets(event.id); + eventMarkets = eventMarkets.map((market) => ({ + ...market, + slug: event.full_slug, + })); + VERBOSE ? console.log("Markets fetched") : empty(); + VERBOSE ? console.log(event.id) : empty(); + VERBOSE ? console.log(eventMarkets) : empty(); + markets.push(...eventMarkets); + //let lastPrices = await fetchPrices(market.id) + } + VERBOSE ? console.log(markets) : empty(); + + let results = []; + for (let market of markets) { + VERBOSE ? console.log("================") : empty(); + VERBOSE ? console.log("Market: ", market) : empty(); + let id = `smarkets-${market.id}`; + let name = market.name; + + let contracts = await fetchContracts(market.id); + VERBOSE ? console.log("Contracts: ", contracts) : empty(); + let prices = await fetchPrices(market.id); + VERBOSE + ? console.log("Prices: ", prices["last_executed_prices"][market.id]) + : empty(); + + let optionsObj = {}; + for (let contract of contracts["contracts"]) { + optionsObj[contract.id] = { name: contract.name }; + } + for (let price of prices["last_executed_prices"][market.id]) { + optionsObj[price.contract_id] = { + ...optionsObj[price.contract_id], + probability: price.last_executed_price + ? Number(price.last_executed_price) + : null, + type: "PROBABILITY", + }; + } + let options: any[] = Object.values(optionsObj); + // monkey patch the case where there are only two options and only one has traded. + if ( + options.length == 2 && + options.map((option) => option.probability).includes(null) + ) { + let nonNullPrice = + options[0].probability == null + ? options[1].probability + : options[0].probability; + options = options.map((option) => { + let probability = option.probability; + return { + ...option, + probability: probability == null ? 100 - nonNullPrice : probability, + // yes, 100, because prices are not yet normalized. + }; + }); + } + + // Normalize normally + let totalValue = options + .map((element) => Number(element.probability)) + .reduce((a, b) => a + b, 0); + + options = options.map((element) => ({ + ...element, + probability: Number(element.probability) / totalValue, + })); + VERBOSE ? console.log(options) : empty(); + + /* + if(contracts["contracts"].length == 2){ + isBinary = true + percentage = ( Number(prices["last_executed_prices"][market.id][0].last_executed_price) + (100 - Number(prices["last_executed_prices"][market.id][1].last_executed_price)) ) / 2 + percentage = Math.round(percentage)+"%" + let contractName = contracts["contracts"][0].name + name = name+ (contractName=="Yes"?'':` (${contracts["contracts"][0].name})`) + } + */ + let result = { + id: id, + title: name, + url: "https://smarkets.com/event/" + market.event_id + market.slug, + platform: "Smarkets", + description: market.description, + options: options, + timestamp: new Date().toISOString(), + qualityindicators: { + stars: calculateStars("Smarkets", {}), + }, + }; + VERBOSE ? console.log(result) : empty(); + results.push(result); + } + VERBOSE ? 
console.log(results) : empty(); + return results; + }, +}; diff --git a/src/backend/platforms/wildeford-fetch.ts b/src/backend/platforms/wildeford.ts similarity index 83% rename from src/backend/platforms/wildeford-fetch.ts rename to src/backend/platforms/wildeford.ts index f3abace..727739a 100644 --- a/src/backend/platforms/wildeford-fetch.ts +++ b/src/backend/platforms/wildeford.ts @@ -1,11 +1,10 @@ /* Imports */ -// import axios from "axios" import { GoogleSpreadsheet } from "google-spreadsheet"; -import { databaseUpsert } from "../database/database-wrapper"; import { applyIfSecretExists } from "../utils/getSecrets"; import { hash } from "../utils/hash"; import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; /* Definitions */ const SHEET_ID = "1xcgYF7Q0D95TPHLLSgwhWBHFrWZUGJn7yTyAhDR4vi0"; // spreadsheet key is the long id in the sheets URL @@ -113,22 +112,17 @@ async function processPredictions(predictions) { uniqueTitles.push(result.title); }); return uniqueResults; - // console.log(results) - // console.log(results.map(result => result.options)) - // processPredictions() } -/* Body */ + export async function wildeford_inner(google_api_key) { let predictions = await fetchGoogleDoc(google_api_key); - let results = await processPredictions(predictions); // somehow needed - // console.log(results.sort((a,b) => (a.title > b.title))) - await databaseUpsert({ contents: results, group: "wildeford" }); - - console.log("Done"); + return await processPredictions(predictions); } -//example() -export async function wildeford() { - const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY; // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey - await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner); -} +export const wildeford: Platform = { + name: "wildeford", + async fetcher() { + const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY; // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey + return await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner); + }, +}; diff --git a/src/backend/platforms/xrisk.ts b/src/backend/platforms/xrisk.ts new file mode 100644 index 0000000..b474b46 --- /dev/null +++ b/src/backend/platforms/xrisk.ts @@ -0,0 +1,15 @@ +import fs from "fs"; + +import { Platform } from "./"; + +export const xrisk: Platform = { + name: "xrisk", + async fetcher() { + return; // not necessary to refill the DB every time + let fileRaw = fs.readFileSync("./input/xrisk-questions.json", { + encoding: "utf-8", + }); + const results = JSON.parse(fileRaw); + return results; + }, +}; diff --git a/src/backend/utils/algolia.ts b/src/backend/utils/algolia.ts index a2bfec0..d881fbe 100644 --- a/src/backend/utils/algolia.ts +++ b/src/backend/utils/algolia.ts @@ -4,13 +4,15 @@ import { databaseReadWithReadCredentials } from "../database/database-wrapper"; import { mergeEverythingInner } from "../flow/mergeEverything"; let cookie = process.env.ALGOLIA_MASTER_API_KEY; -const client = algoliasearch("96UD3NTQ7L", cookie); +const algoliaAppId = process.env.NEXT_PUBLIC_ALGOLIA_APP_ID; +const client = algoliasearch(algoliaAppId, cookie); +console.log(`Initializing algolia index for ${algoliaAppId}`); const index = client.initIndex("metaforecast"); export async function rebuildAlgoliaDatabaseTheHardWay() { console.log("Doing this the hard way"); let records = await mergeEverythingInner(); - records = records.map((record, index) => ({ + records = records.map((record, index: number) => ({ ...record, has_numforecasts: record.numforecasts ? 
true : false,
     objectID: index,
@@ -28,21 +30,23 @@ export async function rebuildAlgoliaDatabaseTheHardWay() {
   }
 }
 
-let getoptionsstringforsearch = (record) => {
+let getoptionsstringforsearch = (record: any) => {
   let result = "";
   if (!!record.options && record.options.length > 0) {
     result = record.options
-      .map((option) => option.name || null)
-      .filter((x) => x != null)
+      .map((option: any) => option.name || null)
+      .filter((x: any) => x != null)
       .join(", ");
   }
   return result;
 };
 
 export async function rebuildAlgoliaDatabaseTheEasyWay() {
-  let records = await databaseReadWithReadCredentials({ group: "combined" });
+  let records: any[] = await databaseReadWithReadCredentials({
+    group: "combined",
+  });
 
-  records = records.map((record, index) => ({
+  records = records.map((record, index: number) => ({
     ...record,
     has_numforecasts: record.numforecasts ? true : false,
     objectID: index,
@@ -50,11 +54,11 @@ export async function rebuildAlgoliaDatabaseTheEasyWay() {
   })); // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/
 
+  console.log(index.appId, index.indexName);
+
   if (index.exists()) {
     console.log("Index exists");
-    index
-      .replaceAllObjects(records, { safe: true })
-      .catch((error) => console.log(error));
+    await index.replaceAllObjects(records, { safe: true });
     console.log(
       `Pushed ${records.length} records. Algolia will update asynchronously`
     );
diff --git a/src/backend/utils/getSecrets.ts b/src/backend/utils/getSecrets.ts
index 5ce5e1b..41e9284 100644
--- a/src/backend/utils/getSecrets.ts
+++ b/src/backend/utils/getSecrets.ts
@@ -1,6 +1,9 @@
-export async function applyIfSecretExists(cookie, fun) {
+export async function applyIfSecretExists<T>(
+  cookie,
+  fun: (...args: any[]) => T
+) {
   if (cookie) {
-    await fun(cookie);
+    return await fun(cookie);
   } else if (!cookie) {
     console.log(
       `Cannot proceed with ${fun.name} because cookie does not exist`
diff --git a/src/backend/utils/misc/process-forecasts-from-xrisk.ts b/src/backend/utils/misc/process-forecasts-from-xrisk.ts
index 357bd78..5ea0c97 100644
--- a/src/backend/utils/misc/process-forecasts-from-xrisk.ts
+++ b/src/backend/utils/misc/process-forecasts-from-xrisk.ts
@@ -5,10 +5,9 @@ import fs from "fs";
 let locationData = "../../data/";
 
 /* Body */
-let rawdata = fs.readFileSync(
-  "/home/nuno/Documents/core/software/fresh/js/metaforecasts/metaforecasts-mongo/src/input/xrisk-questions.json",
-  { encoding: "utf-8" }
-);
+let rawdata = fs.readFileSync("./input/xrisk-questions.json", {
+  encoding: "utf-8",
+});
 let data = JSON.parse(rawdata);
 
 let results = [];
diff --git a/src/web/worker/searchWithAlgolia.ts b/src/web/worker/searchWithAlgolia.ts
index 7d2ada5..89d6704 100644
--- a/src/web/worker/searchWithAlgolia.ts
+++ b/src/web/worker/searchWithAlgolia.ts
@@ -1,6 +1,9 @@
 import algoliasearch from "algoliasearch";
 
-const client = algoliasearch("96UD3NTQ7L", "618dbd0092971388cfd43aac1ae5f1f5"); // Only search.
+const client = algoliasearch(
+  process.env.NEXT_PUBLIC_ALGOLIA_APP_ID,
+  process.env.NEXT_PUBLIC_ALGOLIA_SEARCH_KEY
+);
 const index = client.initIndex("metaforecast");
 
 let buildFilter = ({