Merge pull request #34 from QURIresearch/refactor-platforms
feat: Refactor platforms
Commit: 9fc8a7de2a
@ -14,6 +14,8 @@ There's also a template configuration file in `../env.example`.
- `DIGITALOCEAN_POSTGRES`, of the form `postgres://username:password@domain.com:port/configvars`. (Disregard the `DIGITALOCEAN_` prefix; you can use any endpoint you like; see the sketch after this list.)
- `DIGITALOCEAN_POSTGRES_PUBLIC`
- `ALGOLIA_MASTER_API_KEY`, a string of 32 hexadecimal characters, like `19b6c2234e50c98d30668659a39e3127` (not an actual key).
- `NEXT_PUBLIC_ALGOLIA_APP_ID`
- `NEXT_PUBLIC_ALGOLIA_SEARCH_KEY`
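As a hedged illustration of how the postgres variable might be consumed (not part of this PR): the `pg` client and the `POSTGRES_NO_SSL` flag below are the ones already used in this repository; everything else, including the function name, is illustrative.

```ts
// Minimal sketch: open a pool against whatever endpoint DIGITALOCEAN_POSTGRES points to.
import pkg from "pg";
const { Pool } = pkg;

const pool = new Pool({
  connectionString: process.env.DIGITALOCEAN_POSTGRES,
  // Setting POSTGRES_NO_SSL=1 would disable SSL, e.g. for a local database.
  ssl: process.env.POSTGRES_NO_SSL ? false : { rejectUnauthorized: false },
});

export async function checkConnection() {
  const { rows } = await pool.query("SELECT 1 AS ok"); // trivial smoke test
  console.log(rows); // [ { ok: 1 } ]
}
```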

## Platform cookies and keys

@ -21,7 +23,7 @@ Most of these are just session cookies, necessary to query INFER (previously CSE

Note that not all of these cookies are needed to use all parts of the source code. For instance, to download Polymarket data you only need the Polymarket-specific code; in particular, this repository includes code that connects to the relevant postgres database with read permissions, which are freely available. A short excerpt showing how one of these cookies is consumed follows the list below.

- `GOODJUDGEMENTOPENCOOKIE`
- `GOODJUDGMENTOPENCOOKIE`
- `INFER_COOKIE`
- `CSETFORETELL_COOKIE`, deprecated, superseded by `INFER_COOKIE`.
- `HYPERMINDCOOKIE`
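For a sense of how these cookies are consumed, the INFER fetcher in this PR reads its cookie from the environment and, judging by the helper's name, only runs the fetch when the secret is present (excerpt from `src/backend/platforms/infer.ts` below):

```ts
export const infer: Platform = {
  name: "infer",
  async fetcher() {
    let cookie = process.env.INFER_COOKIE;
    return await applyIfSecretExists(cookie, infer_inner);
  },
};
```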
@ -1,7 +1,9 @@
#### You can copy this file to `.env` and modify accordingly.
#### Documentation can be found in `docs/configuration.md`.

# ALGOLIA_MASTER_API_KEY=19b6c2234e50c98d30668659a39e3127
# ALGOLIA_MASTER_API_KEY=999988887777666655554444eeeeffff
# NEXT_PUBLIC_ALGOLIA_APP_ID=ABCDE12345
# NEXT_PUBLIC_ALGOLIA_SEARCH_KEY=0000111122223333aaaabbbbccccdddd

# DIGITALOCEAN_POSTGRES=postgresql://...@localhost:5432/...?schema=public
# POSTGRES_NO_SSL=1
@ -1,6 +1,6 @@
|
||||||
import pkg from "pg";
|
import pkg from "pg";
|
||||||
|
|
||||||
import { platformNames } from "../platforms/all/platformNames";
|
import { platforms } from "../platforms";
|
||||||
import { hash } from "../utils/hash";
|
import { hash } from "../utils/hash";
|
||||||
import { roughSizeOfObject } from "../utils/roughSize";
|
import { roughSizeOfObject } from "../utils/roughSize";
|
||||||
|
|
||||||
|
@ -18,7 +18,10 @@ const allowed_year_month_histories = [].concat(
|
||||||
allowed_months.map((month) => `${year}_${month}`)
|
allowed_months.map((month) => `${year}_${month}`)
|
||||||
)
|
)
|
||||||
); // h2022_01
|
); // h2022_01
|
||||||
const tableNamesWhitelistLatest = ["combined", ...platformNames];
|
const tableNamesWhitelistLatest = [
|
||||||
|
"combined",
|
||||||
|
...platforms.map((platform) => platform.name),
|
||||||
|
];
|
||||||
const tableNamesWhiteListHistory = [
|
const tableNamesWhiteListHistory = [
|
||||||
...allowed_years,
|
...allowed_years,
|
||||||
...allowed_year_month_histories,
|
...allowed_year_month_histories,
|
||||||
|
|
|
@ -1,37 +1,14 @@
|
||||||
import { platformFetchers } from "../platforms/all-platforms";
|
import { platforms } from "../platforms";
|
||||||
import { rebuildAlgoliaDatabase } from "../utils/algolia";
|
import { executeJobByName } from "./jobs";
|
||||||
import { updateHistory } from "./history/updateHistory";
|
|
||||||
import { mergeEverything } from "./mergeEverything";
|
|
||||||
import { rebuildNetlifySiteWithNewData } from "./rebuildNetliftySiteWithNewData";
|
|
||||||
|
|
||||||
/* Do everything */
|
/* Do everything */
|
||||||
function sleep(ms) {
|
|
||||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function tryCatchTryAgain(fun) {
|
|
||||||
try {
|
|
||||||
console.log("Initial try");
|
|
||||||
await fun();
|
|
||||||
} catch (error) {
|
|
||||||
sleep(10000);
|
|
||||||
console.log("Second try");
|
|
||||||
console.log(error);
|
|
||||||
try {
|
|
||||||
await fun();
|
|
||||||
} catch (error) {
|
|
||||||
console.log(error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function doEverything() {
|
export async function doEverything() {
|
||||||
let functions = [
|
let jobNames = [
|
||||||
...platformFetchers,
|
...platforms.map((platform) => platform.name),
|
||||||
mergeEverything,
|
"merge",
|
||||||
rebuildAlgoliaDatabase,
|
"algolia",
|
||||||
updateHistory,
|
"history",
|
||||||
rebuildNetlifySiteWithNewData,
|
"netlify",
|
||||||
];
|
];
|
||||||
// Removed Good Judgment from the fetcher, doing it using cron instead because cloudflare blocks the utility on heroku.
|
// Removed Good Judgment from the fetcher, doing it using cron instead because cloudflare blocks the utility on heroku.
|
||||||
|
|
||||||
|
@ -47,13 +24,13 @@ export async function doEverything() {
|
||||||
console.log("");
|
console.log("");
|
||||||
console.log("");
|
console.log("");
|
||||||
|
|
||||||
for (let fun of functions) {
|
for (let name of jobNames) {
|
||||||
console.log("");
|
console.log("");
|
||||||
console.log("");
|
console.log("");
|
||||||
console.log("****************************");
|
console.log("****************************");
|
||||||
console.log(fun.name);
|
console.log(name);
|
||||||
console.log("****************************");
|
console.log("****************************");
|
||||||
await tryCatchTryAgain(fun);
|
await executeJobByName(name);
|
||||||
console.log("****************************");
|
console.log("****************************");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
src/backend/flow/jobs.ts (new file, 90 lines)
|
@ -0,0 +1,90 @@
|
||||||
|
import { pgInitialize } from "../database/pg-wrapper";
|
||||||
|
import { doEverything } from "../flow/doEverything";
|
||||||
|
import { updateHistory } from "../flow/history/updateHistory";
|
||||||
|
import { mergeEverything } from "../flow/mergeEverything";
|
||||||
|
import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData";
|
||||||
|
import { rebuildFrontpage } from "../frontpage";
|
||||||
|
import { platforms, processPlatform } from "../platforms";
|
||||||
|
import { rebuildAlgoliaDatabase } from "../utils/algolia";
|
||||||
|
|
||||||
|
interface Job {
|
||||||
|
name: string;
|
||||||
|
message: string;
|
||||||
|
run: () => Promise<void>;
|
||||||
|
separate?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const jobs: Job[] = [
|
||||||
|
...platforms.map((platform) => ({
|
||||||
|
name: platform.name,
|
||||||
|
message: `Download predictions from ${platform.name}`,
|
||||||
|
run: () => processPlatform(platform),
|
||||||
|
})),
|
||||||
|
{
|
||||||
|
name: "merge",
|
||||||
|
message:
|
||||||
|
"Merge tables into one big table (and push the result to a pg database)",
|
||||||
|
run: mergeEverything,
|
||||||
|
separate: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "algolia",
|
||||||
|
message: 'Rebuild algolia database ("index")',
|
||||||
|
run: rebuildAlgoliaDatabase,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "history",
|
||||||
|
message: "Update history",
|
||||||
|
run: updateHistory,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "netlify",
|
||||||
|
message: `Rebuild netlify site with new data`,
|
||||||
|
run: rebuildNetlifySiteWithNewData,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "frontpage",
|
||||||
|
message: "Rebuild frontpage",
|
||||||
|
run: rebuildFrontpage,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "all",
|
||||||
|
message: "All of the above",
|
||||||
|
run: doEverything,
|
||||||
|
separate: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "migrate",
|
||||||
|
message: "Initialize postgres database",
|
||||||
|
run: pgInitialize,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
function sleep(ms: number) {
|
||||||
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
|
}
|
||||||
|
|
||||||
|
async function tryCatchTryAgain(fun: () => Promise<void>) {
|
||||||
|
try {
|
||||||
|
console.log("Initial try");
|
||||||
|
await fun();
|
||||||
|
} catch (error) {
|
||||||
|
sleep(10000);
|
||||||
|
console.log("Second try");
|
||||||
|
console.log(error);
|
||||||
|
try {
|
||||||
|
await fun();
|
||||||
|
} catch (error) {
|
||||||
|
console.log(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const executeJobByName = async (option: string) => {
|
||||||
|
const job = jobs.find((job) => job.name === option);
|
||||||
|
if (!job) {
|
||||||
|
console.log(`Error, job ${option} not found`);
|
||||||
|
} else {
|
||||||
|
await tryCatchTryAgain(job.run);
|
||||||
|
}
|
||||||
|
};
|
|
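Each job above can then be triggered by name through `executeJobByName`. A minimal driver sketch follows; it is hypothetical and not part of this PR, the import path depends on where such a script would live, and the job names are taken from the `jobs` list above.

```ts
// Hypothetical driver script, not part of this PR.
import { executeJobByName } from "./src/backend/flow/jobs";

async function main() {
  await executeJobByName("metaculus"); // run one platform fetcher job
  await executeJobByName("algolia"); // then rebuild the Algolia index
}

main();
```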
@ -1,11 +1,12 @@
|
||||||
import { databaseRead, databaseUpsert } from "../database/database-wrapper";
|
import { databaseRead, databaseUpsert } from "../database/database-wrapper";
|
||||||
import { platformNames } from "../platforms/all-platforms";
|
import { platforms } from "../platforms";
|
||||||
|
|
||||||
/* Merge everything */
|
/* Merge everything */
|
||||||
|
|
||||||
export async function mergeEverythingInner() {
|
export async function mergeEverythingInner() {
|
||||||
let merged = [];
|
let merged = [];
|
||||||
for (let platformName of platformNames) {
|
for (let platform of platforms) {
|
||||||
|
const platformName = platform.name;
|
||||||
let json = await databaseRead({ group: platformName });
|
let json = await databaseRead({ group: platformName });
|
||||||
console.log(`${platformName} has ${json.length} questions\n`);
|
console.log(`${platformName} has ${json.length} questions\n`);
|
||||||
merged = merged.concat(json);
|
merged = merged.concat(json);
|
||||||
|
|
|
@ -2,50 +2,23 @@
|
||||||
import "dotenv/config";
|
import "dotenv/config";
|
||||||
|
|
||||||
import readline from "readline";
|
import readline from "readline";
|
||||||
|
import util from "util";
|
||||||
|
|
||||||
import { pgInitialize } from "./database/pg-wrapper";
|
import { executeJobByName, jobs } from "./flow/jobs";
|
||||||
import { doEverything, tryCatchTryAgain } from "./flow/doEverything";
|
|
||||||
import { updateHistory } from "./flow/history/updateHistory";
|
|
||||||
import { mergeEverything } from "./flow/mergeEverything";
|
|
||||||
import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData";
|
|
||||||
import { rebuildFrontpage } from "./frontpage";
|
|
||||||
import { platformFetchers } from "./platforms/all-platforms";
|
|
||||||
import { rebuildAlgoliaDatabase } from "./utils/algolia";
|
|
||||||
|
|
||||||
/* Support functions */
|
|
||||||
let functions = [
|
|
||||||
...platformFetchers,
|
|
||||||
mergeEverything,
|
|
||||||
rebuildAlgoliaDatabase,
|
|
||||||
updateHistory,
|
|
||||||
rebuildNetlifySiteWithNewData,
|
|
||||||
doEverything,
|
|
||||||
pgInitialize,
|
|
||||||
rebuildFrontpage,
|
|
||||||
];
|
|
||||||
|
|
||||||
let generateWhatToDoMessage = () => {
|
let generateWhatToDoMessage = () => {
|
||||||
let l = platformFetchers.length;
|
const color = "\x1b[36m";
|
||||||
let messagesForFetchers = platformFetchers.map(
|
const resetColor = "\x1b[0m";
|
||||||
(fun, i) => `[${i}]: Download predictions from ${fun.name}`
|
|
||||||
);
|
|
||||||
let otherMessages = [
|
|
||||||
"Merge tables into one big table (and push the result to a pg database)",
|
|
||||||
`Rebuild algolia database ("index")`,
|
|
||||||
`Update history`,
|
|
||||||
`Rebuild netlify site with new data`,
|
|
||||||
// `\n[${functionNames.length-1}]: Add to history` +
|
|
||||||
`All of the above`,
|
|
||||||
`Initialize postgres database`,
|
|
||||||
"Rebuild frontpage",
|
|
||||||
];
|
|
||||||
let otherMessagesWithNums = otherMessages.map(
|
|
||||||
(message, i) => `[${i + l}]: ${message}`
|
|
||||||
);
|
|
||||||
let completeMessages = [
|
let completeMessages = [
|
||||||
...messagesForFetchers,
|
...jobs.map((job) => {
|
||||||
...otherMessagesWithNums,
|
return (
|
||||||
`\nChoose one option, wisely: #`,
|
(job.separate ? "\n" : "") +
|
||||||
|
`[${color}${job.name}${resetColor}]:`.padStart(30) +
|
||||||
|
" " +
|
||||||
|
job.message
|
||||||
|
);
|
||||||
|
}),
|
||||||
|
`\nChoose one option, wisely: `,
|
||||||
].join("\n");
|
].join("\n");
|
||||||
return completeMessages;
|
return completeMessages;
|
||||||
};
|
};
|
||||||
|
@ -54,39 +27,24 @@ let whattodoMessage = generateWhatToDoMessage();
|
||||||
|
|
||||||
/* BODY */
|
/* BODY */
|
||||||
let commandLineUtility = async () => {
|
let commandLineUtility = async () => {
|
||||||
let whattodo = async (message, callback) => {
|
const pickOption = async () => {
|
||||||
|
if (process.argv.length === 3) {
|
||||||
|
return process.argv[2]; // e.g., npm run cli polymarket
|
||||||
|
}
|
||||||
|
|
||||||
const rl = readline.createInterface({
|
const rl = readline.createInterface({
|
||||||
input: process.stdin,
|
input: process.stdin,
|
||||||
output: process.stdout,
|
output: process.stdout,
|
||||||
});
|
});
|
||||||
rl.question(message, async (answer) => {
|
|
||||||
|
const question = util.promisify(rl.question).bind(rl);
|
||||||
|
const answer = await question(whattodoMessage);
|
||||||
rl.close();
|
rl.close();
|
||||||
await callback(answer);
|
return answer;
|
||||||
});
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let executeoption = async (option) => {
|
await executeJobByName(await pickOption());
|
||||||
option = Number(option);
|
|
||||||
if (option < 0) {
|
|
||||||
console.log(`Error, ${option} < 0`);
|
|
||||||
} else if (option < functions.length) {
|
|
||||||
console.log(`Running: ${functions[option].name}\n`);
|
|
||||||
await tryCatchTryAgain(functions[option]);
|
|
||||||
}
|
|
||||||
process.exit();
|
process.exit();
|
||||||
};
|
};
|
||||||
|
|
||||||
if (process.argv.length == 3) {
|
|
||||||
const option = process.argv[2]; // e.g., npm start 15 <-
|
|
||||||
const optionNum = Number(option);
|
|
||||||
if (!isNaN(optionNum)) {
|
|
||||||
await executeoption(optionNum);
|
|
||||||
} else if (option == "all") {
|
|
||||||
await executeoption(functions.length - 3); // doEverything
|
|
||||||
} else {
|
|
||||||
await whattodo(whattodoMessage, executeoption);
|
|
||||||
}
|
|
||||||
} else await whattodo(whattodoMessage, executeoption);
|
|
||||||
};
|
|
||||||
|
|
||||||
commandLineUtility();
|
commandLineUtility();
|
||||||
|
|
|
@ -1,20 +0,0 @@
|
||||||
import fs from "fs";
|
|
||||||
|
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
|
|
||||||
/* This is necessary for estimize, the database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once. */
|
|
||||||
|
|
||||||
let pushManualFiles = ["givewellopenphil"]; // ["estimize", "givewellopenphil", "xrisk"]
|
|
||||||
let suffixFiles = "-questions.json";
|
|
||||||
|
|
||||||
let main = async () => {
|
|
||||||
for (let file of pushManualFiles) {
|
|
||||||
let fileRaw = fs.readFileSync(`./input/${file + suffixFiles}`, {
|
|
||||||
encoding: "utf-8",
|
|
||||||
});
|
|
||||||
let fileContents = JSON.parse(fileRaw);
|
|
||||||
console.log(fileContents);
|
|
||||||
await databaseUpsert({ contents: fileContents, group: file });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
main();
|
|
|
@ -1,11 +1,4 @@
|
||||||
/* Imports */
|
import { processPlatform } from "../platforms";
|
||||||
import { goodjudgment } from "../platforms/goodjudgment-fetch";
|
import { goodjudgment } from "../platforms/goodjudgment";
|
||||||
|
|
||||||
/* Definitions */
|
processPlatform(goodjudgment);
|
||||||
|
|
||||||
/* Utilities */
|
|
||||||
|
|
||||||
/* Support functions */
|
|
||||||
|
|
||||||
/* Body */
|
|
||||||
goodjudgment();
|
|
||||||
|
|
|
@ -1,2 +0,0 @@
|
||||||
export { platformFetchers } from "./all/platformFetchers";
|
|
||||||
export { platformNames } from "./all/platformNames";
|
|
|
@ -1,42 +0,0 @@
|
||||||
import { betfair } from "../betfair-fetch";
|
|
||||||
import { fantasyscotus } from "../fantasyscotus-fetch";
|
|
||||||
import { foretold } from "../foretold-fetch";
|
|
||||||
import { goodjudgment } from "../goodjudgment-fetch";
|
|
||||||
import { goodjudgmentopen } from "../goodjudmentopen-fetch";
|
|
||||||
import { infer } from "../infer-fetch";
|
|
||||||
import { kalshi } from "../kalshi-fetch";
|
|
||||||
import { manifoldmarkets } from "../manifoldmarkets-fetch";
|
|
||||||
import { metaculus } from "../metaculus-fetch";
|
|
||||||
import { polymarket } from "../polymarket-fetch";
|
|
||||||
import { predictit } from "../predictit-fetch";
|
|
||||||
import { rootclaim } from "../rootclaim-fetch";
|
|
||||||
import { smarkets } from "../smarkets-fetch";
|
|
||||||
import { wildeford } from "../wildeford-fetch";
|
|
||||||
|
|
||||||
/* Deprecated
|
|
||||||
import { astralcodexten } from "../platforms/astralcodexten-fetch"
|
|
||||||
import { coupcast } from "../platforms/coupcast-fetch"
|
|
||||||
import { csetforetell } from "../platforms/csetforetell-fetch"
|
|
||||||
import { elicit } from "../platforms/elicit-fetch"
|
|
||||||
import { estimize } from "../platforms/estimize-fetch"
|
|
||||||
import { hypermind } from "../platforms/hypermind-fetch"
|
|
||||||
import { ladbrokes } from "../platforms/ladbrokes-fetch";
|
|
||||||
import { williamhill } from "../platforms/williamhill-fetch";
|
|
||||||
*/
|
|
||||||
|
|
||||||
export const platformFetchers = [
|
|
||||||
betfair,
|
|
||||||
fantasyscotus,
|
|
||||||
foretold,
|
|
||||||
goodjudgment,
|
|
||||||
goodjudgmentopen,
|
|
||||||
infer,
|
|
||||||
kalshi,
|
|
||||||
manifoldmarkets,
|
|
||||||
metaculus,
|
|
||||||
polymarket,
|
|
||||||
predictit,
|
|
||||||
rootclaim,
|
|
||||||
smarkets,
|
|
||||||
wildeford,
|
|
||||||
];
|
|
|
@ -1,20 +0,0 @@
|
||||||
// This needs to be its own file to avoid cyclical dependencies.
|
|
||||||
export const platformNames = [
|
|
||||||
"betfair",
|
|
||||||
"fantasyscotus",
|
|
||||||
"foretold",
|
|
||||||
"givewellopenphil",
|
|
||||||
"goodjudgment",
|
|
||||||
"goodjudmentopen",
|
|
||||||
"infer",
|
|
||||||
"kalshi",
|
|
||||||
"manifoldmarkets",
|
|
||||||
"metaculus",
|
|
||||||
"polymarket",
|
|
||||||
"predictit",
|
|
||||||
"rootclaim",
|
|
||||||
"smarkets",
|
|
||||||
"wildeford",
|
|
||||||
"xrisk",
|
|
||||||
];
|
|
||||||
// deprecated: "astralcodexten", "csetforetell", "coupcast", "elicit", "estimize", "hypermind", "ladbrokes", "omen", "williamhill", etc
|
|
|
@ -1,8 +1,9 @@
|
||||||
/* Imports */
|
/* Imports */
|
||||||
import axios from "axios";
|
import axios from "axios";
|
||||||
import https from "https";
|
import https from "https";
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { calculateStars } from "../utils/stars";
|
import { calculateStars } from "../utils/stars";
|
||||||
|
import { Forecast, Platform } from "./";
|
||||||
|
|
||||||
/* Definitions */
|
/* Definitions */
|
||||||
let endpoint = process.env.SECRET_BETFAIR_ENDPOINT;
|
let endpoint = process.env.SECRET_BETFAIR_ENDPOINT;
|
||||||
|
@ -77,7 +78,7 @@ async function whipIntoShape(data) {
|
||||||
async function processPredictions(data) {
|
async function processPredictions(data) {
|
||||||
let predictions = await whipIntoShape(data);
|
let predictions = await whipIntoShape(data);
|
||||||
// console.log(JSON.stringify(predictions, null, 4))
|
// console.log(JSON.stringify(predictions, null, 4))
|
||||||
let results = predictions.map((prediction) => {
|
let results: Forecast[] = predictions.map((prediction) => {
|
||||||
/* if(Math.floor(Math.random() * 10) % 20 ==0){
|
/* if(Math.floor(Math.random() * 10) % 20 ==0){
|
||||||
console.log(JSON.stringify(prediction, null, 4))
|
console.log(JSON.stringify(prediction, null, 4))
|
||||||
} */
|
} */
|
||||||
|
@ -134,14 +135,11 @@ async function processPredictions(data) {
|
||||||
return results; //resultsProcessed
|
return results; //resultsProcessed
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Body */
|
export const betfair: Platform = {
|
||||||
|
name: "betfair",
|
||||||
export async function betfair() {
|
async fetcher() {
|
||||||
let data = await fetchPredictions();
|
const data = await fetchPredictions();
|
||||||
let results = await processPredictions(data); // somehow needed
|
const results = await processPredictions(data); // somehow needed
|
||||||
// console.log(results.map(result => ({title: result.title, description: result.description})))
|
return results;
|
||||||
// let string = JSON.stringify(results, null, 2)
|
},
|
||||||
await databaseUpsert({ contents: results, group: "betfair" });
|
};
|
||||||
console.log("Done");
|
|
||||||
}
|
|
||||||
// betfair()
|
|
|
@ -1,8 +1,8 @@
|
||||||
/* Imports */
|
/* Imports */
|
||||||
import axios from "axios";
|
import axios from "axios";
|
||||||
|
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { calculateStars } from "../utils/stars";
|
import { calculateStars } from "../utils/stars";
|
||||||
|
import { Platform } from "./";
|
||||||
|
|
||||||
/* Definitions */
|
/* Definitions */
|
||||||
let unixtime = new Date().getTime();
|
let unixtime = new Date().getTime();
|
||||||
|
@ -111,10 +111,11 @@ async function processData(data) {
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Body */
|
/* Body */
|
||||||
export async function fantasyscotus() {
|
export const fantasyscotus: Platform = {
|
||||||
|
name: "fantasyscotus",
|
||||||
|
async fetcher() {
|
||||||
let rawData = await fetchData();
|
let rawData = await fetchData();
|
||||||
let results = await processData(rawData);
|
let results = await processData(rawData);
|
||||||
await databaseUpsert({ contents: results, group: "fantasyscotus" });
|
return results;
|
||||||
console.log("Done");
|
},
|
||||||
}
|
};
|
||||||
//fantasyscotus()
|
|
|
@ -1,105 +0,0 @@
|
||||||
/* Imports */
|
|
||||||
import axios from "axios";
|
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { calculateStars } from "../utils/stars";
|
|
||||||
|
|
||||||
/* Definitions */
|
|
||||||
let graphQLendpoint = "https://api.foretold.io/graphql";
|
|
||||||
let highQualityCommunities = [
|
|
||||||
"0104d8e8-07e4-464b-8b32-74ef22b49f21",
|
|
||||||
"c47c6bc8-2c9b-4a83-9583-d1ed80a40fa2",
|
|
||||||
"cf663021-f87f-4632-ad82-962d889a2d39",
|
|
||||||
"47ff5c49-9c20-4f3d-bd57-1897c35cd42d",
|
|
||||||
"b2412a1d-0aa4-4e37-a12a-0aca9e440a96",
|
|
||||||
];
|
|
||||||
|
|
||||||
/* Support functions */
|
|
||||||
async function fetchAllCommunityQuestions(communityId) {
|
|
||||||
let response = await axios({
|
|
||||||
url: graphQLendpoint,
|
|
||||||
method: "POST",
|
|
||||||
headers: { "Content-Type": "application/json" },
|
|
||||||
data: JSON.stringify({
|
|
||||||
query: `
|
|
||||||
query {
|
|
||||||
measurables(
|
|
||||||
channelId: "${communityId}",
|
|
||||||
states: OPEN,
|
|
||||||
first: 500
|
|
||||||
){
|
|
||||||
total
|
|
||||||
edges{
|
|
||||||
node{
|
|
||||||
id
|
|
||||||
name
|
|
||||||
valueType
|
|
||||||
measurementCount
|
|
||||||
previousAggregate{
|
|
||||||
value{
|
|
||||||
percentage
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`,
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
.then((res) => res.data)
|
|
||||||
.then((res) => res.data.measurables.edges);
|
|
||||||
//console.log(response)
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Body */
|
|
||||||
|
|
||||||
export async function foretold() {
|
|
||||||
let results = [];
|
|
||||||
for (let community of highQualityCommunities) {
|
|
||||||
let questions = await fetchAllCommunityQuestions(community);
|
|
||||||
questions = questions.map((question) => question.node);
|
|
||||||
questions = questions.filter((question) => question.previousAggregate); // Questions without any predictions
|
|
||||||
questions.forEach((question) => {
|
|
||||||
let id = `foretold-${question.id}`;
|
|
||||||
let options = [];
|
|
||||||
if (question.valueType == "PERCENTAGE") {
|
|
||||||
let probability = question.previousAggregate.value.percentage;
|
|
||||||
options = [
|
|
||||||
{
|
|
||||||
name: "Yes",
|
|
||||||
probability: probability / 100,
|
|
||||||
type: "PROBABILITY",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "No",
|
|
||||||
probability: 1 - probability / 100,
|
|
||||||
type: "PROBABILITY",
|
|
||||||
},
|
|
||||||
];
|
|
||||||
}
|
|
||||||
let result = {
|
|
||||||
id: id,
|
|
||||||
title: question.name,
|
|
||||||
url: `https://www.foretold.io/c/${community}/m/${question.id}`,
|
|
||||||
platform: "Foretold",
|
|
||||||
description: "",
|
|
||||||
options: options,
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
qualityindicators: {
|
|
||||||
numforecasts: Math.floor(Number(question.measurementCount) / 2),
|
|
||||||
stars: calculateStars("Foretold", {}),
|
|
||||||
},
|
|
||||||
/*liquidity: liquidity.toFixed(2),
|
|
||||||
tradevolume: tradevolume.toFixed(2),
|
|
||||||
address: obj.address*/
|
|
||||||
};
|
|
||||||
// console.log(result)
|
|
||||||
results.push(result);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
await databaseUpsert({ contents: results, group: "foretold" });
|
|
||||||
|
|
||||||
console.log("Done");
|
|
||||||
}
|
|
||||||
// foretold()
|
|
src/backend/platforms/foretold.ts (new file, 104 lines)
|
@ -0,0 +1,104 @@
|
||||||
|
/* Imports */
|
||||||
|
import axios from "axios";
|
||||||
|
|
||||||
|
import { calculateStars } from "../utils/stars";
|
||||||
|
import { Platform } from "./";
|
||||||
|
|
||||||
|
/* Definitions */
|
||||||
|
let graphQLendpoint = "https://api.foretold.io/graphql";
|
||||||
|
let highQualityCommunities = [
|
||||||
|
"0104d8e8-07e4-464b-8b32-74ef22b49f21",
|
||||||
|
"c47c6bc8-2c9b-4a83-9583-d1ed80a40fa2",
|
||||||
|
"cf663021-f87f-4632-ad82-962d889a2d39",
|
||||||
|
"47ff5c49-9c20-4f3d-bd57-1897c35cd42d",
|
||||||
|
"b2412a1d-0aa4-4e37-a12a-0aca9e440a96",
|
||||||
|
];
|
||||||
|
|
||||||
|
/* Support functions */
|
||||||
|
async function fetchAllCommunityQuestions(communityId) {
|
||||||
|
let response = await axios({
|
||||||
|
url: graphQLendpoint,
|
||||||
|
method: "POST",
|
||||||
|
headers: { "Content-Type": "application/json" },
|
||||||
|
data: JSON.stringify({
|
||||||
|
query: `
|
||||||
|
query {
|
||||||
|
measurables(
|
||||||
|
channelId: "${communityId}",
|
||||||
|
states: OPEN,
|
||||||
|
first: 500
|
||||||
|
){
|
||||||
|
total
|
||||||
|
edges{
|
||||||
|
node{
|
||||||
|
id
|
||||||
|
name
|
||||||
|
valueType
|
||||||
|
measurementCount
|
||||||
|
previousAggregate{
|
||||||
|
value{
|
||||||
|
percentage
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
.then((res) => res.data)
|
||||||
|
.then((res) => res.data.measurables.edges);
|
||||||
|
//console.log(response)
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const foretold: Platform = {
|
||||||
|
name: "foretold",
|
||||||
|
async fetcher() {
|
||||||
|
let results = [];
|
||||||
|
for (let community of highQualityCommunities) {
|
||||||
|
let questions = await fetchAllCommunityQuestions(community);
|
||||||
|
questions = questions.map((question) => question.node);
|
||||||
|
questions = questions.filter((question) => question.previousAggregate); // Questions without any predictions
|
||||||
|
questions.forEach((question) => {
|
||||||
|
let id = `foretold-${question.id}`;
|
||||||
|
let options = [];
|
||||||
|
if (question.valueType == "PERCENTAGE") {
|
||||||
|
let probability = question.previousAggregate.value.percentage;
|
||||||
|
options = [
|
||||||
|
{
|
||||||
|
name: "Yes",
|
||||||
|
probability: probability / 100,
|
||||||
|
type: "PROBABILITY",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "No",
|
||||||
|
probability: 1 - probability / 100,
|
||||||
|
type: "PROBABILITY",
|
||||||
|
},
|
||||||
|
];
|
||||||
|
}
|
||||||
|
let result = {
|
||||||
|
id: id,
|
||||||
|
title: question.name,
|
||||||
|
url: `https://www.foretold.io/c/${community}/m/${question.id}`,
|
||||||
|
platform: "Foretold",
|
||||||
|
description: "",
|
||||||
|
options: options,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
qualityindicators: {
|
||||||
|
numforecasts: Math.floor(Number(question.measurementCount) / 2),
|
||||||
|
stars: calculateStars("Foretold", {}),
|
||||||
|
},
|
||||||
|
/*liquidity: liquidity.toFixed(2),
|
||||||
|
tradevolume: tradevolume.toFixed(2),
|
||||||
|
address: obj.address*/
|
||||||
|
};
|
||||||
|
// console.log(result)
|
||||||
|
results.push(result);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
},
|
||||||
|
};
|
|
@ -4,9 +4,10 @@ import fs from "fs";
|
||||||
|
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
import { databaseUpsert } from "../database/database-wrapper";
|
||||||
import { calculateStars } from "../utils/stars";
|
import { calculateStars } from "../utils/stars";
|
||||||
|
import { Platform } from "./";
|
||||||
|
|
||||||
/* Support functions */
|
/* Support functions */
|
||||||
async function fetchPage(url) {
|
async function fetchPage(url: string) {
|
||||||
let response = await axios({
|
let response = await axios({
|
||||||
url: url,
|
url: url,
|
||||||
method: "GET",
|
method: "GET",
|
||||||
|
@ -14,7 +15,6 @@ async function fetchPage(url) {
|
||||||
"Content-Type": "text/html",
|
"Content-Type": "text/html",
|
||||||
},
|
},
|
||||||
}).then((res) => res.data);
|
}).then((res) => res.data);
|
||||||
//console.log(response)
|
|
||||||
return response;
|
return response;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -64,17 +64,20 @@ async function main1() {
|
||||||
group: "givewell-questions-unprocessed",
|
group: "givewell-questions-unprocessed",
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
// main1()
|
|
||||||
|
|
||||||
async function main2() {
|
export const givewellopenphil: Platform = {
|
||||||
let rawdata = fs.readFileSync("./input/givewellopenphil-questions.json", {
|
name: "givewellopenphil",
|
||||||
|
async fetcher() {
|
||||||
|
// main1()
|
||||||
|
return; // not necessary to refill the DB every time
|
||||||
|
const rawdata = fs.readFileSync("./input/givewellopenphil-questions.json", {
|
||||||
encoding: "utf-8",
|
encoding: "utf-8",
|
||||||
});
|
});
|
||||||
let data = JSON.parse(rawdata);
|
const data = JSON.parse(rawdata);
|
||||||
let dataWithDate = data.map((datum) => ({
|
const dataWithDate = data.map((datum: any) => ({
|
||||||
...datum,
|
...datum,
|
||||||
timestamp: "2021-02-23",
|
timestamp: "2021-02-23",
|
||||||
}));
|
}));
|
||||||
await databaseUpsert({ group: "givewellopenphil", contents: dataWithDate });
|
return dataWithDate;
|
||||||
}
|
},
|
||||||
main2();
|
};
|
|
@ -1,129 +0,0 @@
|
||||||
/* Imports */
|
|
||||||
import axios from "axios";
|
|
||||||
import { Tabletojson } from "tabletojson";
|
|
||||||
import tunnel from "tunnel";
|
|
||||||
|
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { hash } from "../utils/hash";
|
|
||||||
import { calculateStars } from "../utils/stars";
|
|
||||||
|
|
||||||
/* Definitions */
|
|
||||||
let endpoint = "https://goodjudgment.io/superforecasts/";
|
|
||||||
String.prototype.replaceAll = function replaceAll(search, replace) {
|
|
||||||
return this.split(search).join(replace);
|
|
||||||
};
|
|
||||||
|
|
||||||
// Tunelling
|
|
||||||
/* Support functions */
|
|
||||||
|
|
||||||
/* Body */
|
|
||||||
export async function goodjudgment() {
|
|
||||||
// Proxy fuckery
|
|
||||||
let proxy;
|
|
||||||
/*
|
|
||||||
* try {
|
|
||||||
proxy = await axios
|
|
||||||
.get("http://pubproxy.com/api/proxy")
|
|
||||||
.then((query) => query.data);
|
|
||||||
console.log(proxy);
|
|
||||||
} catch (error) {
|
|
||||||
console.log("Proxy generation failed; using backup proxy instead");
|
|
||||||
// hard-coded backup proxy
|
|
||||||
*/
|
|
||||||
proxy = {
|
|
||||||
ip: process.env.BACKUP_PROXY_IP,
|
|
||||||
port: process.env.BACKUP_PROXY_PORT,
|
|
||||||
};
|
|
||||||
// }
|
|
||||||
let agent = tunnel.httpsOverHttp({
|
|
||||||
proxy: {
|
|
||||||
host: proxy.ip,
|
|
||||||
port: proxy.port,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
let content = await axios
|
|
||||||
.request({
|
|
||||||
url: "https://goodjudgment.io/superforecasts/",
|
|
||||||
method: "get",
|
|
||||||
headers: {
|
|
||||||
"User-Agent": "Chrome",
|
|
||||||
},
|
|
||||||
// agent,
|
|
||||||
// port: 80,
|
|
||||||
})
|
|
||||||
.then((query) => query.data);
|
|
||||||
|
|
||||||
// Processing
|
|
||||||
let results = [];
|
|
||||||
let jsonTable = Tabletojson.convert(content, { stripHtmlFromCells: false });
|
|
||||||
jsonTable.shift(); // deletes first element
|
|
||||||
jsonTable.pop(); // deletes last element
|
|
||||||
// console.log(jsonTable)
|
|
||||||
for (let table of jsonTable) {
|
|
||||||
// console.log(table)
|
|
||||||
let title = table[0]["0"].split("\t\t\t").splice(3)[0];
|
|
||||||
if (title != undefined) {
|
|
||||||
title = title.replaceAll("</a>", "");
|
|
||||||
let id = `goodjudgment-${hash(title)}`;
|
|
||||||
let description = table
|
|
||||||
.filter((row) => row["0"].includes("BACKGROUND:"))
|
|
||||||
.map((row) => row["0"])
|
|
||||||
.map((text) =>
|
|
||||||
text
|
|
||||||
.split("BACKGROUND:")[1]
|
|
||||||
.split("Examples of Superforecaster")[0]
|
|
||||||
.split("AT A GLANCE")[0]
|
|
||||||
.replaceAll("\n\n", "\n")
|
|
||||||
.split("\n")
|
|
||||||
.slice(3)
|
|
||||||
.join(" ")
|
|
||||||
.replaceAll(" ", "")
|
|
||||||
.replaceAll("<br> ", "")
|
|
||||||
)[0];
|
|
||||||
let options = table
|
|
||||||
.filter((row) => "4" in row)
|
|
||||||
.map((row) => ({
|
|
||||||
name: row["2"]
|
|
||||||
.split('<span class="qTitle">')[1]
|
|
||||||
.replace("</span>", ""),
|
|
||||||
probability: Number(row["3"].split("%")[0]) / 100,
|
|
||||||
type: "PROBABILITY",
|
|
||||||
}));
|
|
||||||
let analysis = table.filter((row) =>
|
|
||||||
row[0] ? row[0].toLowerCase().includes("commentary") : false
|
|
||||||
);
|
|
||||||
// "Examples of Superforecaster Commentary" / Analysis
|
|
||||||
// The following is necessary twice, because we want to check if there is an empty list, and then get the first element of the first element of the list.
|
|
||||||
analysis = analysis ? analysis[0] : "";
|
|
||||||
analysis = analysis ? analysis[0] : ""; // not a duplicate
|
|
||||||
// console.log(analysis)
|
|
||||||
let standardObj = {
|
|
||||||
id: id,
|
|
||||||
title: title,
|
|
||||||
url: endpoint,
|
|
||||||
platform: "Good Judgment",
|
|
||||||
description: description,
|
|
||||||
options: options,
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
qualityindicators: {
|
|
||||||
stars: calculateStars("Good Judgment", {}),
|
|
||||||
},
|
|
||||||
extra: {
|
|
||||||
superforecastercommentary: analysis || "",
|
|
||||||
},
|
|
||||||
};
|
|
||||||
results.push(standardObj);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// console.log(results.slice(0,10))
|
|
||||||
let string = JSON.stringify(results, null, 2);
|
|
||||||
console.log(results);
|
|
||||||
await databaseUpsert({ contents: results, group: "goodjudgment" });
|
|
||||||
|
|
||||||
console.log(
|
|
||||||
"Failing is not unexpected; see utils/pullSuperforecastsManually.sh/js"
|
|
||||||
);
|
|
||||||
console.log("Done");
|
|
||||||
}
|
|
||||||
// goodjudgment()
|
|
src/backend/platforms/goodjudgment.ts (new file, 125 lines)
|
@ -0,0 +1,125 @@
|
||||||
|
/* Imports */
|
||||||
|
import axios from "axios";
|
||||||
|
import { Tabletojson } from "tabletojson";
|
||||||
|
import tunnel from "tunnel";
|
||||||
|
|
||||||
|
import { hash } from "../utils/hash";
|
||||||
|
import { calculateStars } from "../utils/stars";
|
||||||
|
import { Platform } from "./";
|
||||||
|
|
||||||
|
/* Definitions */
|
||||||
|
let endpoint = "https://goodjudgment.io/superforecasts/";
|
||||||
|
String.prototype.replaceAll = function replaceAll(search, replace) {
|
||||||
|
return this.split(search).join(replace);
|
||||||
|
};
|
||||||
|
|
||||||
|
/* Body */
|
||||||
|
export const goodjudgment: Platform = {
|
||||||
|
name: "goodjudgment",
|
||||||
|
async fetcher() {
|
||||||
|
// Proxy fuckery
|
||||||
|
let proxy;
|
||||||
|
/*
|
||||||
|
* try {
|
||||||
|
proxy = await axios
|
||||||
|
.get("http://pubproxy.com/api/proxy")
|
||||||
|
.then((query) => query.data);
|
||||||
|
console.log(proxy);
|
||||||
|
} catch (error) {
|
||||||
|
console.log("Proxy generation failed; using backup proxy instead");
|
||||||
|
// hard-coded backup proxy
|
||||||
|
*/
|
||||||
|
proxy = {
|
||||||
|
ip: process.env.BACKUP_PROXY_IP,
|
||||||
|
port: process.env.BACKUP_PROXY_PORT,
|
||||||
|
};
|
||||||
|
// }
|
||||||
|
let agent = tunnel.httpsOverHttp({
|
||||||
|
proxy: {
|
||||||
|
host: proxy.ip,
|
||||||
|
port: proxy.port,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
let content = await axios
|
||||||
|
.request({
|
||||||
|
url: "https://goodjudgment.io/superforecasts/",
|
||||||
|
method: "get",
|
||||||
|
headers: {
|
||||||
|
"User-Agent": "Chrome",
|
||||||
|
},
|
||||||
|
// agent,
|
||||||
|
// port: 80,
|
||||||
|
})
|
||||||
|
.then((query) => query.data);
|
||||||
|
|
||||||
|
// Processing
|
||||||
|
let results = [];
|
||||||
|
let jsonTable = Tabletojson.convert(content, { stripHtmlFromCells: false });
|
||||||
|
jsonTable.shift(); // deletes first element
|
||||||
|
jsonTable.pop(); // deletes last element
|
||||||
|
// console.log(jsonTable)
|
||||||
|
for (let table of jsonTable) {
|
||||||
|
// console.log(table)
|
||||||
|
let title = table[0]["0"].split("\t\t\t").splice(3)[0];
|
||||||
|
if (title != undefined) {
|
||||||
|
title = title.replaceAll("</a>", "");
|
||||||
|
let id = `goodjudgment-${hash(title)}`;
|
||||||
|
let description = table
|
||||||
|
.filter((row) => row["0"].includes("BACKGROUND:"))
|
||||||
|
.map((row) => row["0"])
|
||||||
|
.map((text) =>
|
||||||
|
text
|
||||||
|
.split("BACKGROUND:")[1]
|
||||||
|
.split("Examples of Superforecaster")[0]
|
||||||
|
.split("AT A GLANCE")[0]
|
||||||
|
.replaceAll("\n\n", "\n")
|
||||||
|
.split("\n")
|
||||||
|
.slice(3)
|
||||||
|
.join(" ")
|
||||||
|
.replaceAll(" ", "")
|
||||||
|
.replaceAll("<br> ", "")
|
||||||
|
)[0];
|
||||||
|
let options = table
|
||||||
|
.filter((row) => "4" in row)
|
||||||
|
.map((row) => ({
|
||||||
|
name: row["2"]
|
||||||
|
.split('<span class="qTitle">')[1]
|
||||||
|
.replace("</span>", ""),
|
||||||
|
probability: Number(row["3"].split("%")[0]) / 100,
|
||||||
|
type: "PROBABILITY",
|
||||||
|
}));
|
||||||
|
let analysis = table.filter((row) =>
|
||||||
|
row[0] ? row[0].toLowerCase().includes("commentary") : false
|
||||||
|
);
|
||||||
|
// "Examples of Superforecaster Commentary" / Analysis
|
||||||
|
// The following is necessary twice, because we want to check if there is an empty list, and then get the first element of the first element of the list.
|
||||||
|
analysis = analysis ? analysis[0] : "";
|
||||||
|
analysis = analysis ? analysis[0] : ""; // not a duplicate
|
||||||
|
// console.log(analysis)
|
||||||
|
let standardObj = {
|
||||||
|
id: id,
|
||||||
|
title: title,
|
||||||
|
url: endpoint,
|
||||||
|
platform: "Good Judgment",
|
||||||
|
description: description,
|
||||||
|
options: options,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
qualityindicators: {
|
||||||
|
stars: calculateStars("Good Judgment", {}),
|
||||||
|
},
|
||||||
|
extra: {
|
||||||
|
superforecastercommentary: analysis || "",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
results.push(standardObj);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
"Failing is not unexpected; see utils/pullSuperforecastsManually.sh/js"
|
||||||
|
);
|
||||||
|
|
||||||
|
return results;
|
||||||
|
},
|
||||||
|
};
|
|
@ -2,10 +2,10 @@
|
||||||
import axios from "axios";
|
import axios from "axios";
|
||||||
import { Tabletojson } from "tabletojson";
|
import { Tabletojson } from "tabletojson";
|
||||||
|
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { applyIfSecretExists } from "../utils/getSecrets";
|
import { applyIfSecretExists } from "../utils/getSecrets";
|
||||||
import { calculateStars } from "../utils/stars";
|
import { calculateStars } from "../utils/stars";
|
||||||
import toMarkdown from "../utils/toMarkdown";
|
import toMarkdown from "../utils/toMarkdown";
|
||||||
|
import { Platform } from "./";
|
||||||
|
|
||||||
/* Definitions */
|
/* Definitions */
|
||||||
let htmlEndPoint = "https://www.gjopen.com/questions?page=";
|
let htmlEndPoint = "https://www.gjopen.com/questions?page=";
|
||||||
|
@ -150,7 +150,7 @@ function isEnd(html) {
|
||||||
return isEndBool;
|
return isEndBool;
|
||||||
}
|
}
|
||||||
|
|
||||||
function sleep(ms) {
|
function sleep(ms: number) {
|
||||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -221,11 +221,10 @@ async function goodjudgmentopen_inner(cookie) {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
console.log(results);
|
|
||||||
if (results.length > 0) {
|
if (results.length === 0) {
|
||||||
await databaseUpsert({ contents: results, group: "goodjudmentopen" });
|
|
||||||
} else {
|
|
||||||
console.log("Not updating results, as process was not signed in");
|
console.log("Not updating results, as process was not signed in");
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
let end = Date.now();
|
let end = Date.now();
|
||||||
|
@ -233,9 +232,14 @@ async function goodjudgmentopen_inner(cookie) {
|
||||||
console.log(
|
console.log(
|
||||||
`Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
|
`Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
|
||||||
);
|
);
|
||||||
|
|
||||||
|
return results;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function goodjudgmentopen() {
|
export const goodjudmentopen: Platform = {
|
||||||
|
name: "goodjudmentopen", // note the typo! current table name is without `g`, `goodjudmentopen`
|
||||||
|
async fetcher() {
|
||||||
let cookie = process.env.GOODJUDGMENTOPENCOOKIE;
|
let cookie = process.env.GOODJUDGMENTOPENCOOKIE;
|
||||||
await applyIfSecretExists(cookie, goodjudgmentopen_inner);
|
return await applyIfSecretExists(cookie, goodjudgmentopen_inner);
|
||||||
}
|
},
|
||||||
|
};
|
src/backend/platforms/index.ts (new file, 85 lines)
|
@ -0,0 +1,85 @@
|
||||||
|
import { databaseUpsert } from "../database/database-wrapper";
|
||||||
|
import { betfair } from "./betfair";
|
||||||
|
import { fantasyscotus } from "./fantasyscotus";
|
||||||
|
import { foretold } from "./foretold";
|
||||||
|
import { givewellopenphil } from "./givewellopenphil";
|
||||||
|
import { goodjudgment } from "./goodjudgment";
|
||||||
|
import { goodjudmentopen } from "./goodjudmentopen";
|
||||||
|
import { infer } from "./infer";
|
||||||
|
import { kalshi } from "./kalshi";
|
||||||
|
import { manifoldmarkets } from "./manifoldmarkets";
|
||||||
|
import { metaculus } from "./metaculus";
|
||||||
|
import { polymarket } from "./polymarket";
|
||||||
|
import { predictit } from "./predictit";
|
||||||
|
import { rootclaim } from "./rootclaim";
|
||||||
|
import { smarkets } from "./smarkets";
|
||||||
|
import { wildeford } from "./wildeford";
|
||||||
|
import { xrisk } from "./xrisk";
|
||||||
|
|
||||||
|
export interface Forecast {
|
||||||
|
id: string;
|
||||||
|
title: string;
|
||||||
|
url: string;
|
||||||
|
description: string;
|
||||||
|
platform: string;
|
||||||
|
options: any[];
|
||||||
|
timestamp: string;
|
||||||
|
qualityindicators: any;
|
||||||
|
extra?: any;
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetcher should return null if platform failed to fetch forecasts for some reason
|
||||||
|
export type PlatformFetcher = () => Promise<Forecast[] | null>;
|
||||||
|
|
||||||
|
export interface Platform {
|
||||||
|
name: string;
|
||||||
|
fetcher?: PlatformFetcher;
|
||||||
|
}
|
||||||
|
|
||||||
|
// draft for the future callback-based streaming/chunking API:
|
||||||
|
// interface FetchOptions {
|
||||||
|
// since?: string; // some kind of cursor, Date object or opaque string?
|
||||||
|
// save: (forecasts: Forecast[]) => Promise<void>;
|
||||||
|
// }
|
||||||
|
|
||||||
|
// export type PlatformFetcher = (options: FetchOptions) => Promise<void>;
|
||||||
|
|
||||||
|
// interface Platform {
|
||||||
|
// name: string;
|
||||||
|
// color?: string;
|
||||||
|
// longName: string;
|
||||||
|
// fetcher: PlatformFetcher;
|
||||||
|
// }
|
||||||
|
|
||||||
|
export const platforms: Platform[] = [
|
||||||
|
betfair,
|
||||||
|
fantasyscotus,
|
||||||
|
foretold,
|
||||||
|
givewellopenphil,
|
||||||
|
goodjudgment,
|
||||||
|
goodjudmentopen,
|
||||||
|
infer,
|
||||||
|
kalshi,
|
||||||
|
manifoldmarkets,
|
||||||
|
metaculus,
|
||||||
|
polymarket,
|
||||||
|
predictit,
|
||||||
|
rootclaim,
|
||||||
|
smarkets,
|
||||||
|
wildeford,
|
||||||
|
xrisk,
|
||||||
|
];
|
||||||
|
|
||||||
|
export const processPlatform = async (platform: Platform) => {
|
||||||
|
if (!platform.fetcher) {
|
||||||
|
console.log(`Platform ${platform.name} doesn't have a fetcher, skipping`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let results = await platform.fetcher();
|
||||||
|
if (results && results.length) {
|
||||||
|
await databaseUpsert({ contents: results, group: platform.name });
|
||||||
|
console.log("Done");
|
||||||
|
} else {
|
||||||
|
console.log(`Platform ${platform.name} didn't return any results`);
|
||||||
|
}
|
||||||
|
};
|
|
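To make the contract above concrete, here is a hedged sketch of what a new platform module could look like. Everything named `example`, and every field value, is purely illustrative and not part of this PR; the shape follows the `Forecast` and `Platform` interfaces defined in this file.

```ts
// Hypothetical platform module (illustration only, not part of this PR).
import { Forecast, Platform } from "./";

export const example: Platform = {
  name: "example",
  async fetcher() {
    const forecasts: Forecast[] = [
      {
        id: "example-1",
        title: "Will this illustrative question resolve Yes?",
        url: "https://example.com/questions/1",
        description: "",
        platform: "Example",
        options: [
          { name: "Yes", probability: 0.6, type: "PROBABILITY" },
          { name: "No", probability: 0.4, type: "PROBABILITY" },
        ],
        timestamp: new Date().toISOString(),
        qualityindicators: { stars: 2 }, // illustrative value
      },
    ];
    return forecasts; // return null instead to signal a failed fetch
  },
};

// Registering it would mean adding `example` to the `platforms` array above;
// `processPlatform(example)` would then upsert the results under group "example".
```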
@ -2,10 +2,10 @@
|
||||||
import axios from "axios";
|
import axios from "axios";
|
||||||
import { Tabletojson } from "tabletojson";
|
import { Tabletojson } from "tabletojson";
|
||||||
|
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { applyIfSecretExists } from "../utils/getSecrets";
|
import { applyIfSecretExists } from "../utils/getSecrets";
|
||||||
import { calculateStars } from "../utils/stars";
|
import { calculateStars } from "../utils/stars";
|
||||||
import toMarkdown from "../utils/toMarkdown";
|
import toMarkdown from "../utils/toMarkdown";
|
||||||
|
import { Forecast, Platform } from "./";
|
||||||
|
|
||||||
/* Definitions */
|
/* Definitions */
|
||||||
let htmlEndPoint = "https://www.infer-pub.com/questions";
|
let htmlEndPoint = "https://www.infer-pub.com/questions";
|
||||||
|
@ -182,7 +182,7 @@ function sleep(ms) {
|
||||||
async function infer_inner(cookie) {
|
async function infer_inner(cookie) {
|
||||||
let i = 1;
|
let i = 1;
|
||||||
let response = await fetchPage(i, cookie);
|
let response = await fetchPage(i, cookie);
|
||||||
let results = [];
|
let results: Forecast[] = [];
|
||||||
let init = Date.now();
|
let init = Date.now();
|
||||||
// console.log("Downloading... This might take a couple of minutes. Results will be shown.")
|
// console.log("Downloading... This might take a couple of minutes. Results will be shown.")
|
||||||
while (!isEnd(response) && isSignedIn(response)) {
|
while (!isEnd(response) && isSignedIn(response)) {
|
||||||
|
@ -263,20 +263,24 @@ async function infer_inner(cookie) {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (results.length > 0) {
|
|
||||||
await databaseUpsert({ contents: results, group: "infer" });
|
|
||||||
} else {
|
|
||||||
console.log("Not updating results, as process was not signed in");
|
|
||||||
}
|
|
||||||
|
|
||||||
let end = Date.now();
|
let end = Date.now();
|
||||||
let difference = end - init;
|
let difference = end - init;
|
||||||
console.log(
|
console.log(
|
||||||
`Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
|
`Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
|
||||||
);
|
);
|
||||||
|
|
||||||
|
if (results.length === 0) {
|
||||||
|
console.log("Not updating results, as process was not signed in");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return results;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function infer() {
|
export const infer: Platform = {
|
||||||
|
name: "infer",
|
||||||
|
async fetcher() {
|
||||||
let cookie = process.env.INFER_COOKIE;
|
let cookie = process.env.INFER_COOKIE;
|
||||||
await applyIfSecretExists(cookie, infer_inner);
|
return await applyIfSecretExists(cookie, infer_inner);
|
||||||
}
|
},
|
||||||
|
};
|
|
@ -1,7 +1,8 @@
|
||||||
/* Imports */
|
/* Imports */
|
||||||
import axios from "axios";
|
import axios from "axios";
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { calculateStars } from "../utils/stars";
|
import { calculateStars } from "../utils/stars";
|
||||||
|
import { Platform } from "./";
|
||||||
|
|
||||||
/* Definitions */
|
/* Definitions */
|
||||||
let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'
|
let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'
|
||||||
|
@ -68,12 +69,10 @@ async function processMarkets(markets) {
|
||||||
return results; //resultsProcessed
|
return results; //resultsProcessed
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Body */
|
export const kalshi: Platform = {
|
||||||
export async function kalshi() {
|
name: "kalshi",
|
||||||
|
fetcher: async function () {
|
||||||
let markets = await fetchAllMarkets();
|
let markets = await fetchAllMarkets();
|
||||||
let results = await processMarkets(markets); // somehow needed
|
return await processMarkets(markets);
|
||||||
await databaseUpsert({ contents: results, group: "kalshi" });
|
},
|
||||||
|
};
|
||||||
console.log("Done");
|
|
||||||
}
|
|
||||||
// kalshi()
|
|
|
@ -1,7 +1,8 @@
|
||||||
/* Imports */
|
/* Imports */
|
||||||
import axios from "axios";
|
import axios from "axios";
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { calculateStars } from "../utils/stars";
|
import { calculateStars } from "../utils/stars";
|
||||||
|
import { Platform } from "./";
|
||||||
|
|
||||||
/* Definitions */
|
/* Definitions */
|
||||||
let endpoint = "https://manifold.markets/api/v0/markets";
|
let endpoint = "https://manifold.markets/api/v0/markets";
|
||||||
|
@ -87,14 +88,12 @@ async function processPredictions(predictions) {
|
||||||
return unresolvedResults; //resultsProcessed
|
return unresolvedResults; //resultsProcessed
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Body */
|
export const manifoldmarkets: Platform = {
|
||||||
|
name: "manifoldmarkets",
|
||||||
export async function manifoldmarkets() {
|
async fetcher() {
|
||||||
let data = await fetchData();
|
let data = await fetchData();
|
||||||
let results = await processPredictions(data); // somehow needed
|
let results = await processPredictions(data); // somehow needed
|
||||||
showStatistics(results);
|
showStatistics(results);
|
||||||
await databaseUpsert({ contents: results, group: "manifoldmarkets" });
|
return results;
|
||||||
|
},
|
||||||
console.log("Done");
|
};
|
||||||
}
|
|
||||||
// manifoldmarkets()
|
|
|
@ -1,196 +0,0 @@
|
||||||
/* Imports */
|
|
||||||
import axios from "axios";
|
|
||||||
import { databaseUpsert } from "../database/database-wrapper";
|
|
||||||
import { calculateStars } from "../utils/stars";
|
|
||||||
import toMarkdown from "../utils/toMarkdown";
|
|
||||||
|
|
||||||
/* Definitions */
|
|
||||||
let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page=";
|
|
||||||
let now = new Date().toISOString();
|
|
||||||
let DEBUG_MODE = "off";
|
|
||||||
let SLEEP_TIME = 5000;
|
|
||||||
/* Support functions */
|
|
||||||
async function fetchMetaculusQuestions(next) {
|
|
||||||
// Numbers about a given address: how many, how much, at what price, etc.
|
|
||||||
let response;
|
|
||||||
let data;
|
|
||||||
try {
|
|
||||||
    response = await axios({
      url: next,
      method: "GET",
      headers: { "Content-Type": "application/json" },
    });
    data = response.data;
  } catch (error) {
    console.log(`Error in async function fetchMetaculusQuestions(next)`);
    if (!!error.response.headers["retry-after"]) {
      let timeout = error.response.headers["retry-after"];
      console.log(`Timeout: ${timeout}`);
      await sleep(Number(timeout) * 1000 + SLEEP_TIME);
    } else {
      await sleep(SLEEP_TIME);
    }
    console.log(error);
  } finally {
    try {
      response = await axios({
        url: next,
        method: "GET",
        headers: { "Content-Type": "application/json" },
      });
      data = response.data;
    } catch (error) {
      console.log(error);
      return { results: [] };
    }
  }
  // console.log(response)
  return data;
}

function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

async function fetchMetaculusQuestionDescription(slug) {
  try {
    let response = await axios({
      method: "get",
      url: "https://www.metaculus.com" + slug,
    }).then((response) => response.data);
    return response;
  } catch (error) {
    console.log(`Error in: fetchMetaculusQuestionDescription`);
    console.log(
      `We encountered some error when attempting to fetch a metaculus page. Trying again`
    );
    if (
      typeof error.response != "undefined" &&
      typeof error.response.headers != "undefined" &&
      typeof error.response.headers["retry-after"] != "undefined"
    ) {
      let timeout = error.response.headers["retry-after"];
      console.log(`Timeout: ${timeout}`);
      await sleep(Number(timeout) * 1000 + SLEEP_TIME);
    } else {
      await sleep(SLEEP_TIME);
    }
    try {
      let response = await axios({
        method: "get",
        url: "https://www.metaculus.com" + slug,
      }).then((response) => response.data);
      // console.log(response)
      return response;
    } catch (error) {
      console.log(
        `We encountered some error when attempting to fetch a metaculus page.`
      );
      console.log("Error", error);
      throw "Giving up";
    }
  }
}

/* Body */

export async function metaculus() {
  // let metaculusQuestionsInit = await fetchMetaculusQuestions(1)
  // let numQueries = Math.round(Number(metaculusQuestionsInit.count) / 20)
  // console.log(`Downloading... This might take a while. Total number of queries: ${numQueries}`)
  // for (let i = 4; i <= numQueries; i++) { // change numQueries to 10 if one want to just test
  let all_questions = [];
  let next = "https://www.metaculus.com/api2/questions/";
  let i = 1;
  while (next) {
    if (i % 20 == 0) {
      console.log("Sleeping for 500ms");
      await sleep(SLEEP_TIME);
    }
    console.log(`\nQuery #${i}`);
    let metaculusQuestions = await fetchMetaculusQuestions(next);
    let results = metaculusQuestions.results;
    let j = false;
    for (let result of results) {
      if (result.publish_time < now && now < result.resolve_time) {
        await sleep(SLEEP_TIME / 2);
        let questionPage = await fetchMetaculusQuestionDescription(
          result.page_url
        );
        if (!questionPage.includes("A public prediction by")) {
          // console.log(questionPage)
          let descriptionraw = questionPage.split(
            `<div class="content" ng-bind-html-compile="qctrl.question.description_html">`
          )[1]; //.split(`<div class="question__content">`)[1]
          let descriptionprocessed1 = descriptionraw.split("</div>")[0];
          let descriptionprocessed2 = toMarkdown(descriptionprocessed1);
          let description = descriptionprocessed2;

          let isbinary = result.possibilities.type == "binary";
          let options = [];
          if (isbinary) {
            let probability = Number(result.community_prediction.full.q2);
            options = [
              {
                name: "Yes",
                probability: probability,
                type: "PROBABILITY",
              },
              {
                name: "No",
                probability: 1 - probability,
                type: "PROBABILITY",
              },
            ];
          }
          let id = `metaculus-${result.id}`;
          let interestingInfo = {
            id: id,
            title: result.title,
            url: "https://www.metaculus.com" + result.page_url,
            platform: "Metaculus",
            description: description,
            options: options,
            timestamp: new Date().toISOString(),
            qualityindicators: {
              numforecasts: Number(result.number_of_predictions),
              stars: calculateStars("Metaculus", {
                numforecasts: result.number_of_predictions,
              }),
            },
            extra: {
              resolution_data: {
                publish_time: result.publish_time,
                resolution: result.resolution,
                close_time: result.close_time,
                resolve_time: result.resolve_time,
              },
            },
            //"status": result.status,
            //"publish_time": result.publish_time,
            //"close_time": result.close_time,
            //"type": result.possibilities.type, // We want binary ones here.
            //"last_activity_time": result.last_activity_time,
          };
          if (Number(result.number_of_predictions) >= 10) {
            console.log(`- ${interestingInfo.title}`);
            all_questions.push(interestingInfo);
            if ((!j && i % 20 == 0) || DEBUG_MODE == "on") {
              console.log(interestingInfo);
              j = true;
            }
          }
        } else {
          console.log("- [Skipping public prediction]");
        }
      }
    }
    next = metaculusQuestions.next;
    i = i + 1;
  }

  await databaseUpsert({ contents: all_questions, group: "metaculus" });

  console.log("Done");
}
//metaculus()
195  src/backend/platforms/metaculus.ts  (new file)
@@ -0,0 +1,195 @@
/* Imports */
import axios from "axios";

import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";
import { Platform } from "./";

/* Definitions */
let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page=";
let now = new Date().toISOString();
let DEBUG_MODE = "off";
let SLEEP_TIME = 5000;
/* Support functions */
async function fetchMetaculusQuestions(next) {
  // Numbers about a given address: how many, how much, at what price, etc.
  let response;
  let data;
  try {
    response = await axios({
      url: next,
      method: "GET",
      headers: { "Content-Type": "application/json" },
    });
    data = response.data;
  } catch (error) {
    console.log(`Error in async function fetchMetaculusQuestions(next)`);
    if (!!error.response.headers["retry-after"]) {
      let timeout = error.response.headers["retry-after"];
      console.log(`Timeout: ${timeout}`);
      await sleep(Number(timeout) * 1000 + SLEEP_TIME);
    } else {
      await sleep(SLEEP_TIME);
    }
    console.log(error);
  } finally {
    try {
      response = await axios({
        url: next,
        method: "GET",
        headers: { "Content-Type": "application/json" },
      });
      data = response.data;
    } catch (error) {
      console.log(error);
      return { results: [] };
    }
  }
  // console.log(response)
  return data;
}

function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

async function fetchMetaculusQuestionDescription(slug) {
  try {
    let response = await axios({
      method: "get",
      url: "https://www.metaculus.com" + slug,
    }).then((response) => response.data);
    return response;
  } catch (error) {
    console.log(`Error in: fetchMetaculusQuestionDescription`);
    console.log(
      `We encountered some error when attempting to fetch a metaculus page. Trying again`
    );
    if (
      typeof error.response != "undefined" &&
      typeof error.response.headers != "undefined" &&
      typeof error.response.headers["retry-after"] != "undefined"
    ) {
      let timeout = error.response.headers["retry-after"];
      console.log(`Timeout: ${timeout}`);
      await sleep(Number(timeout) * 1000 + SLEEP_TIME);
    } else {
      await sleep(SLEEP_TIME);
    }
    try {
      let response = await axios({
        method: "get",
        url: "https://www.metaculus.com" + slug,
      }).then((response) => response.data);
      // console.log(response)
      return response;
    } catch (error) {
      console.log(
        `We encountered some error when attempting to fetch a metaculus page.`
      );
      console.log("Error", error);
      throw "Giving up";
    }
  }
}

export const metaculus: Platform = {
  name: "metaculus",
  async fetcher() {
    // let metaculusQuestionsInit = await fetchMetaculusQuestions(1)
    // let numQueries = Math.round(Number(metaculusQuestionsInit.count) / 20)
    // console.log(`Downloading... This might take a while. Total number of queries: ${numQueries}`)
    // for (let i = 4; i <= numQueries; i++) { // change numQueries to 10 if one want to just test }
    let all_questions = [];
    let next = "https://www.metaculus.com/api2/questions/";
    let i = 1;
    while (next) {
      if (i % 20 == 0) {
        console.log("Sleeping for 500ms");
        await sleep(SLEEP_TIME);
      }
      console.log(`\nQuery #${i}`);
      let metaculusQuestions = await fetchMetaculusQuestions(next);
      let results = metaculusQuestions.results;
      let j = false;
      for (let result of results) {
        if (result.publish_time < now && now < result.resolve_time) {
          await sleep(SLEEP_TIME / 2);
          let questionPage = await fetchMetaculusQuestionDescription(
            result.page_url
          );
          if (!questionPage.includes("A public prediction by")) {
            // console.log(questionPage)
            let descriptionraw = questionPage.split(
              `<div class="content" ng-bind-html-compile="qctrl.question.description_html">`
            )[1]; //.split(`<div class="question__content">`)[1]
            let descriptionprocessed1 = descriptionraw.split("</div>")[0];
            let descriptionprocessed2 = toMarkdown(descriptionprocessed1);
            let description = descriptionprocessed2;

            let isbinary = result.possibilities.type == "binary";
            let options = [];
            if (isbinary) {
              let probability = Number(result.community_prediction.full.q2);
              options = [
                {
                  name: "Yes",
                  probability: probability,
                  type: "PROBABILITY",
                },
                {
                  name: "No",
                  probability: 1 - probability,
                  type: "PROBABILITY",
                },
              ];
            }
            let id = `metaculus-${result.id}`;
            let interestingInfo = {
              id: id,
              title: result.title,
              url: "https://www.metaculus.com" + result.page_url,
              platform: "Metaculus",
              description: description,
              options: options,
              timestamp: new Date().toISOString(),
              qualityindicators: {
                numforecasts: Number(result.number_of_predictions),
                stars: calculateStars("Metaculus", {
                  numforecasts: result.number_of_predictions,
                }),
              },
              extra: {
                resolution_data: {
                  publish_time: result.publish_time,
                  resolution: result.resolution,
                  close_time: result.close_time,
                  resolve_time: result.resolve_time,
                },
              },
              //"status": result.status,
              //"publish_time": result.publish_time,
              //"close_time": result.close_time,
              //"type": result.possibilities.type, // We want binary ones here.
              //"last_activity_time": result.last_activity_time,
            };
            if (Number(result.number_of_predictions) >= 10) {
              console.log(`- ${interestingInfo.title}`);
              all_questions.push(interestingInfo);
              if ((!j && i % 20 == 0) || DEBUG_MODE == "on") {
                console.log(interestingInfo);
                j = true;
              }
            }
          } else {
            console.log("- [Skipping public prediction]");
          }
        }
      }
      next = metaculusQuestions.next;
      i = i + 1;
    }

    return all_questions;
  },
};
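Note: the `Platform` type that these new modules import from `src/backend/platforms/index.ts` is not shown in this diff. Judging only from how it is used above, it is presumably something close to the following sketch; the exact field names and types other than `name` and `fetcher` are an assumption.

// Hypothetical sketch of the types assumed by the new platform modules.
// The real definitions live in src/backend/platforms/index.ts, which is
// not part of this excerpt.
interface Forecast {
  id: string;
  title: string;
  url: string;
  platform: string;
  description: string;
  options: { name?: string; probability?: number; type: "PROBABILITY" }[];
  timestamp: string;
  qualityindicators: { stars: number; [key: string]: unknown };
  extra?: unknown;
}

interface Platform {
  name: string; // lowercase key, e.g. "metaculus"
  fetcher: () => Promise<Forecast[] | void>; // returns questions, or nothing to skip
}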
@@ -1,135 +0,0 @@
import axios from "axios";
import { databaseUpsert } from "../database/database-wrapper";
import { calculateStars } from "../utils/stars";

/* Definitions */
let graphQLendpoint =
  "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-5"; // "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-4"// "https://api.thegraph.com/subgraphs/name/tokenunion/polymarket-matic"//"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'
let units = 10 ** 6;

async function fetchAllContractInfo() {
  // for info which the polymarket graphql API
  let response = await axios
    .get(
      "https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&closed=false&_limit=-1"
      // "https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&_limit=-1" to get all markets, including closed ones
    )
    .then((query) => query.data);
  response = response.filter((res) => res.closed != true);
  return response;
}

async function fetchIndividualContractData(marketMakerAddress) {
  let daysSinceEra = Math.round(Date.now() / (1000 * 24 * 60 * 60)) - 7; // last week
  let response = await axios({
    url: graphQLendpoint,
    method: "POST",
    headers: { "Content-Type": "application/json" },
    data: JSON.stringify({
      query: `
      {
        fixedProductMarketMakers(first: 1000
        where: {
          id: "${marketMakerAddress}"
          lastActiveDay_gt: ${daysSinceEra}
        }){
          id
          creator
          creationTimestamp
          fee
          tradesQuantity
          buysQuantity
          sellsQuantity
          lastActiveDay
          outcomeTokenPrices
          outcomeTokenAmounts
          liquidityParameter
          collateralBuyVolume
          collateralSellVolume
          conditions {
            outcomeSlotCount
          }
        }
      }
      `,
    }),
  })
    .then((res) => res.data)
    .then((res) => res.data.fixedProductMarketMakers);
  // console.log(response)
  return response;
}

async function fetchAll() {
  let results = [];
  let webpageEndpointData = await fetchAllContractInfo();
  for (let marketInfo of webpageEndpointData) {
    let address = marketInfo.marketMakerAddress;
    let addressLowerCase = address.toLowerCase();
    if (marketInfo.outcomes[0] != "Long" || marketInfo.outcomes[1] != "Long") {
      let moreMarketAnswer = await fetchIndividualContractData(
        addressLowerCase
      );
      if (moreMarketAnswer.length > 0) {
        let moreMarketInfo = moreMarketAnswer[0];
        let id = `polymarket-${addressLowerCase.slice(0, 10)}`;
        // console.log(id);
        let numforecasts = Number(moreMarketInfo.tradesQuantity);
        let tradevolume =
          (Number(moreMarketInfo.collateralBuyVolume) +
            Number(moreMarketInfo.collateralSellVolume)) /
          units;
        let liquidity = Number(moreMarketInfo.liquidityParameter) / units;
        // let isbinary = Number(moreMarketInfo.conditions[0].outcomeSlotCount) == 2
        // let percentage = Number(moreMarketInfo.outcomeTokenPrices[0]) * 100
        // let percentageFormatted = isbinary ? (percentage.toFixed(0) + "%") : "none"
        let options = [];
        for (let outcome in moreMarketInfo.outcomeTokenPrices) {
          options.push({
            name: marketInfo.outcomes[outcome],
            probability: moreMarketInfo.outcomeTokenPrices[outcome],
            type: "PROBABILITY",
          });
        }

        let result = {
          id: id,
          title: marketInfo.question,
          url: "https://polymarket.com/market/" + marketInfo.slug,
          platform: "PolyMarket",
          description: marketInfo.description,
          options: options,
          timestamp: new Date().toISOString(),
          qualityindicators: {
            numforecasts: numforecasts.toFixed(0),
            liquidity: liquidity.toFixed(2),
            tradevolume: tradevolume.toFixed(2),
            stars: calculateStars("Polymarket", {
              liquidity,
              option: options[0],
              volume: tradevolume,
            }),
          },
          extra: {
            address: marketInfo.address,
          },
          /*
          */
        };
        if (marketInfo.category != "Sports") {
          // console.log(result)
          results.push(result);
        }
      }
    }
  }
  return results;
}

export async function polymarket() {
  let results = await fetchAll();
  await databaseUpsert({ contents: results, group: "polymarket" });

  console.log("Done");
}
// polymarket();
134  src/backend/platforms/polymarket.ts  (new file)
@@ -0,0 +1,134 @@
/* Imports */
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Forecast, Platform } from "./";

/* Definitions */
let graphQLendpoint =
  "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-5"; // "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-4"// "https://api.thegraph.com/subgraphs/name/tokenunion/polymarket-matic"//"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'
let units = 10 ** 6;

async function fetchAllContractInfo() {
  // for info which the polymarket graphql API
  let response = await axios
    .get(
      "https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&closed=false&_limit=-1"
      // "https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&_limit=-1" to get all markets, including closed ones
    )
    .then((query) => query.data);
  response = response.filter((res) => res.closed != true);
  return response;
}

async function fetchIndividualContractData(marketMakerAddress) {
  let daysSinceEra = Math.round(Date.now() / (1000 * 24 * 60 * 60)) - 7; // last week
  let response = await axios({
    url: graphQLendpoint,
    method: "POST",
    headers: { "Content-Type": "application/json" },
    data: JSON.stringify({
      query: `
      {
        fixedProductMarketMakers(first: 1000
        where: {
          id: "${marketMakerAddress}"
          lastActiveDay_gt: ${daysSinceEra}
        }){
          id
          creator
          creationTimestamp
          fee
          tradesQuantity
          buysQuantity
          sellsQuantity
          lastActiveDay
          outcomeTokenPrices
          outcomeTokenAmounts
          liquidityParameter
          collateralBuyVolume
          collateralSellVolume
          conditions {
            outcomeSlotCount
          }
        }
      }
      `,
    }),
  })
    .then((res) => res.data)
    .then((res) => res.data.fixedProductMarketMakers);
  // console.log(response)
  return response;
}

export const polymarket: Platform = {
  name: "polymarket",
  async fetcher() {
    let results: Forecast[] = [];
    let webpageEndpointData = await fetchAllContractInfo();
    for (let marketInfo of webpageEndpointData) {
      let address = marketInfo.marketMakerAddress;
      let addressLowerCase = address.toLowerCase();
      if (
        marketInfo.outcomes[0] != "Long" ||
        marketInfo.outcomes[1] != "Long"
      ) {
        let moreMarketAnswer = await fetchIndividualContractData(
          addressLowerCase
        );
        if (moreMarketAnswer.length > 0) {
          let moreMarketInfo = moreMarketAnswer[0];
          let id = `polymarket-${addressLowerCase.slice(0, 10)}`;
          // console.log(id);
          let numforecasts = Number(moreMarketInfo.tradesQuantity);
          let tradevolume =
            (Number(moreMarketInfo.collateralBuyVolume) +
              Number(moreMarketInfo.collateralSellVolume)) /
            units;
          let liquidity = Number(moreMarketInfo.liquidityParameter) / units;
          // let isbinary = Number(moreMarketInfo.conditions[0].outcomeSlotCount) == 2
          // let percentage = Number(moreMarketInfo.outcomeTokenPrices[0]) * 100
          // let percentageFormatted = isbinary ? (percentage.toFixed(0) + "%") : "none"
          let options = [];
          for (let outcome in moreMarketInfo.outcomeTokenPrices) {
            options.push({
              name: marketInfo.outcomes[outcome],
              probability: moreMarketInfo.outcomeTokenPrices[outcome],
              type: "PROBABILITY",
            });
          }

          let result: Forecast = {
            id: id,
            title: marketInfo.question,
            url: "https://polymarket.com/market/" + marketInfo.slug,
            platform: "PolyMarket",
            description: marketInfo.description,
            options: options,
            timestamp: new Date().toISOString(),
            qualityindicators: {
              numforecasts: numforecasts.toFixed(0),
              liquidity: liquidity.toFixed(2),
              tradevolume: tradevolume.toFixed(2),
              stars: calculateStars("Polymarket", {
                liquidity,
                option: options[0],
                volume: tradevolume,
              }),
            },
            extra: {
              address: marketInfo.address,
            },
            /*
            */
          };
          if (marketInfo.category !== "Sports") {
            results.push(result);
          }
        }
      }
    }
    return results;
  },
};
@@ -1,112 +0,0 @@
/* Imports */
import axios from "axios";
import { databaseUpsert } from "../database/database-wrapper";
import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";

/* Support functions */
async function fetchmarkets() {
  let response = await axios({
    method: "get",
    url: "https://www.predictit.org/api/marketdata/all/",
  });
  let openMarkets = response.data.markets.filter(
    (market) => market.status == "Open"
  );
  return openMarkets;
}

async function fetchmarketrules(market_id) {
  let response = await axios({
    method: "get",
    url: "https://www.predictit.org/api/Market/" + market_id,
  });
  return response.data.rule;
}

async function fetchmarketvolumes() {
  let response = await axios({
    method: "get",
    url: "https://predictit-f497e.firebaseio.com/marketStats.json",
  });
  return response.data;
}

function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

/* Body */
export async function predictit() {
  let markets = await fetchmarkets();
  let marketVolumes = await fetchmarketvolumes();

  markets = markets.map((market) => ({
    ...market,
    TotalSharesTraded: marketVolumes[market.id]["TotalSharesTraded"],
  }));
  // console.log(markets)

  let results = [];
  for (let market of markets) {
    // console.log(market.name)
    let id = `predictit-${market.id}`;
    let isbinary = market.contracts.length == 1;
    await sleep(3000 * (1 + Math.random()));
    let descriptionraw = await fetchmarketrules(market.id);
    let descriptionprocessed1 = toMarkdown(descriptionraw);
    let description = descriptionprocessed1;
    let shares_volume = market["TotalSharesTraded"];
    // let percentageFormatted = isbinary ? Number(Number(market.contracts[0].lastTradePrice) * 100).toFixed(0) + "%" : "none"

    let options = market.contracts.map((contract) => ({
      name: contract.name,
      probability: contract.lastTradePrice,
      type: "PROBABILITY",
    }));
    let totalValue = options
      .map((element) => Number(element.probability))
      .reduce((a, b) => a + b, 0);

    if (options.length != 1 && totalValue > 1) {
      options = options.map((element) => ({
        ...element,
        probability: Number(element.probability) / totalValue,
      }));
    } else if (options.length == 1) {
      let option = options[0];
      let probability = option["probability"];
      options = [
        {
          name: "Yes",
          probability: probability,
          type: "PROBABILITY",
        },
        {
          name: "No",
          probability: 1 - probability,
          type: "PROBABILITY",
        },
      ];
    }

    let obj = {
      id: id,
      title: market["name"],
      url: market.url,
      platform: "PredictIt",
      description: description,
      options: options,
      timestamp: new Date().toISOString(),
      qualityindicators: {
        stars: calculateStars("PredictIt", {}),
        shares_volume: shares_volume,
      },
    };
    // console.log(obj)
    results.push(obj);
  }
  await databaseUpsert({ contents: results, group: "predictit" });

  console.log("Done");
}
115  src/backend/platforms/predictit.ts  (new file)
@@ -0,0 +1,115 @@
/* Imports */
import axios from "axios";

import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";
import { Platform } from "./";

/* Support functions */
async function fetchmarkets() {
  let response = await axios({
    method: "get",
    url: "https://www.predictit.org/api/marketdata/all/",
  });
  let openMarkets = response.data.markets.filter(
    (market) => market.status == "Open"
  );
  return openMarkets;
}

async function fetchmarketrules(market_id) {
  let response = await axios({
    method: "get",
    url: "https://www.predictit.org/api/Market/" + market_id,
  });
  return response.data.rule;
}

async function fetchmarketvolumes() {
  let response = await axios({
    method: "get",
    url: "https://predictit-f497e.firebaseio.com/marketStats.json",
  });
  return response.data;
}

function sleep(ms: number) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

/* Body */
export const predictit: Platform = {
  name: "predictit",
  async fetcher() {
    let markets = await fetchmarkets();
    let marketVolumes = await fetchmarketvolumes();

    markets = markets.map((market) => ({
      ...market,
      TotalSharesTraded: marketVolumes[market.id]["TotalSharesTraded"],
    }));
    // console.log(markets)

    let results = [];
    for (let market of markets) {
      // console.log(market.name)
      let id = `predictit-${market.id}`;
      let isbinary = market.contracts.length == 1;
      await sleep(3000 * (1 + Math.random()));
      let descriptionraw = await fetchmarketrules(market.id);
      let descriptionprocessed1 = toMarkdown(descriptionraw);
      let description = descriptionprocessed1;
      let shares_volume = market["TotalSharesTraded"];
      // let percentageFormatted = isbinary ? Number(Number(market.contracts[0].lastTradePrice) * 100).toFixed(0) + "%" : "none"

      let options = market.contracts.map((contract) => ({
        name: contract.name,
        probability: contract.lastTradePrice,
        type: "PROBABILITY",
      }));
      let totalValue = options
        .map((element) => Number(element.probability))
        .reduce((a, b) => a + b, 0);

      if (options.length != 1 && totalValue > 1) {
        options = options.map((element) => ({
          ...element,
          probability: Number(element.probability) / totalValue,
        }));
      } else if (options.length == 1) {
        let option = options[0];
        let probability = option["probability"];
        options = [
          {
            name: "Yes",
            probability: probability,
            type: "PROBABILITY",
          },
          {
            name: "No",
            probability: 1 - probability,
            type: "PROBABILITY",
          },
        ];
      }

      let obj = {
        id: id,
        title: market["name"],
        url: market.url,
        platform: "PredictIt",
        description: description,
        options: options,
        timestamp: new Date().toISOString(),
        qualityindicators: {
          stars: calculateStars("PredictIt", {}),
          shares_volume: shares_volume,
        },
      };
      // console.log(obj)
      results.push(obj);
    }

    return results;
  },
};
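A quick illustration of the normalization step in the new predictit.ts above, using made-up prices: when a multi-contract market's last-trade prices add up to more than 1, each price is divided by their total; a single-contract market is instead expanded into explicit Yes/No options.

// Made-up numbers, only to illustrate the normalization in predictit.ts.
const prices = [0.62, 0.3, 0.2]; // adds up to 1.12
const total = prices.reduce((a, b) => a + b, 0);
const normalized = prices.map((p) => p / total);
// normalized ≈ [0.554, 0.268, 0.179], which adds up to 1.
// A single contract last trading at 0.62 becomes:
// [{ name: "Yes", probability: 0.62 }, { name: "No", probability: 0.38 }]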
@@ -1,65 +0,0 @@
/* Imports */
import axios from "axios";
import { databaseUpsert } from "../database/database-wrapper";
import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";

/* Definitions */
let jsonEndpoint =
  "https://www.rootclaim.com/main_page_stories?number=100&offset=0"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'

async function fetchAllRootclaims() {
  // for info which the polymarket graphql API
  let response = await axios
    .get(jsonEndpoint)
    .then((response) => response.data);
  if (response.length != response[0] + 1) {
    console.log(response.length);
    console.log(response[0]);
    //throw Error("Rootclaim's backend has changed.")
  }
  response.shift();
  return response;
}

async function fetchAndProcessData() {
  let claims = await fetchAllRootclaims();
  let results = [];
  for (let claim of claims) {
    let id = `rootclaim-${claim.slug.toLowerCase()}`;
    let options = [];
    for (let scenario of claim.scenarios) {
      //console.log(scenario)
      options.push({
        name: toMarkdown(scenario.text).replace("\n", "").replace("&#39;", "'"),
        probability: scenario.net_prob / 100,
        type: "PROBABILITY",
      });
    }
    let claimUrlPath = claim.created_at < "2020" ? "claims" : "analysis";
    let obj = {
      id: id,
      title: toMarkdown(claim.question).replace("\n", ""),
      url: `https://www.rootclaim.com/${claimUrlPath}/${claim.slug}`,
      platform: "Rootclaim",
      description: toMarkdown(claim.background).replace("&#39;", "'"),
      options: options,
      timestamp: new Date().toISOString(),
      qualityindicators: {
        numforecasts: 1,
        stars: calculateStars("Rootclaim", {}),
      },
    };
    results.push(obj);
  }
  return results;
}

/* Body */
export async function rootclaim() {
  let results = await fetchAndProcessData();
  await databaseUpsert({ contents: results, group: "rootclaim" });

  console.log("Done");
}
//rootclaim()
62  src/backend/platforms/rootclaim.ts  (new file)
@@ -0,0 +1,62 @@
/* Imports */
import axios from "axios";

import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";
import { Platform } from "./";

/* Definitions */
let jsonEndpoint =
  "https://www.rootclaim.com/main_page_stories?number=100&offset=0"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'

async function fetchAllRootclaims() {
  // for info which the polymarket graphql API
  let response = await axios
    .get(jsonEndpoint)
    .then((response) => response.data);
  if (response.length != response[0] + 1) {
    console.log(response.length);
    console.log(response[0]);
    //throw Error("Rootclaim's backend has changed.")
  }
  response.shift();
  return response;
}

export const rootclaim: Platform = {
  name: "rootclaim",
  async fetcher() {
    let claims = await fetchAllRootclaims();
    let results = [];
    for (let claim of claims) {
      let id = `rootclaim-${claim.slug.toLowerCase()}`;
      let options = [];
      for (let scenario of claim.scenarios) {
        //console.log(scenario)
        options.push({
          name: toMarkdown(scenario.text)
            .replace("\n", "")
            .replace("&#39;", "'"),
          probability: scenario.net_prob / 100,
          type: "PROBABILITY",
        });
      }
      let claimUrlPath = claim.created_at < "2020" ? "claims" : "analysis";
      let obj = {
        id: id,
        title: toMarkdown(claim.question).replace("\n", ""),
        url: `https://www.rootclaim.com/${claimUrlPath}/${claim.slug}`,
        platform: "Rootclaim",
        description: toMarkdown(claim.background).replace("&#39;", "'"),
        options: options,
        timestamp: new Date().toISOString(),
        qualityindicators: {
          numforecasts: 1,
          stars: calculateStars("Rootclaim", {}),
        },
      };
      results.push(obj);
    }
    return results;
  },
};
@@ -1,180 +0,0 @@
/* Imports */
import axios from "axios";

import { databaseUpsert } from "../database/database-wrapper";
import { calculateStars } from "../utils/stars";

/* Definitions */
let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/";
let VERBOSE = false;
let empty = () => 0;
/* Support functions */

async function fetchEvents(url) {
  let response = await axios({
    url: htmlEndPointEntrance + url,
    method: "GET",
    headers: {
      "Content-Type": "text/html",
    },
  }).then((res) => res.data);
  VERBOSE ? console.log(response) : empty();
  return response;
}

async function fetchMarkets(eventid) {
  let response = await axios({
    url: `https://api.smarkets.com/v3/events/${eventid}/markets/`,
    method: "GET",
    headers: {
      "Content-Type": "text/json",
    },
  })
    .then((res) => res.data)
    .then((res) => res.markets);
  return response;
}

async function fetchContracts(marketid) {
  let response = await axios({
    url: `https://api.smarkets.com/v3/markets/${marketid}/contracts/`,
    method: "GET",
    headers: {
      "Content-Type": "text/html",
    },
  }).then((res) => res.data);
  VERBOSE ? console.log(response) : empty();
  return response;
}

async function fetchPrices(marketid) {
  let response = await axios({
    url: `https://api.smarkets.com/v3/markets/${marketid}/last_executed_prices/`,
    method: "GET",
    headers: {
      "Content-Type": "text/html",
    },
  }).then((res) => res.data);
  VERBOSE ? console.log(response) : empty();
  return response;
}

/* Body */

export async function smarkets() {
  let htmlPath =
    "?state=new&state=upcoming&state=live&type_domain=politics&type_scope=single_event&with_new_type=true&sort=id&limit=50";

  let events = [];
  while (htmlPath) {
    let data = await fetchEvents(htmlPath);
    events.push(...data.events);
    htmlPath = data.pagination.next_page;
  }
  VERBOSE ? console.log(events) : empty();
  let markets = [];
  for (let event of events) {
    VERBOSE ? console.log(Date.now()) : empty();
    VERBOSE ? console.log(event.name) : empty();
    let eventMarkets = await fetchMarkets(event.id);
    eventMarkets = eventMarkets.map((market) => ({
      ...market,
      slug: event.full_slug,
    }));
    VERBOSE ? console.log("Markets fetched") : empty();
    VERBOSE ? console.log(event.id) : empty();
    VERBOSE ? console.log(eventMarkets) : empty();
    markets.push(...eventMarkets);
    //let lastPrices = await fetchPrices(market.id)
  }
  VERBOSE ? console.log(markets) : empty();

  let results = [];
  for (let market of markets) {
    VERBOSE ? console.log("================") : empty();
    VERBOSE ? console.log("Market: ", market) : empty();
    let id = `smarkets-${market.id}`;
    let name = market.name;

    let contracts = await fetchContracts(market.id);
    VERBOSE ? console.log("Contracts: ", contracts) : empty();
    let prices = await fetchPrices(market.id);
    VERBOSE
      ? console.log("Prices: ", prices["last_executed_prices"][market.id])
      : empty();

    let optionsObj = {};
    for (let contract of contracts["contracts"]) {
      optionsObj[contract.id] = { name: contract.name };
    }
    for (let price of prices["last_executed_prices"][market.id]) {
      optionsObj[price.contract_id] = {
        ...optionsObj[price.contract_id],
        probability: price.last_executed_price
          ? Number(price.last_executed_price)
          : null,
        type: "PROBABILITY",
      };
    }
    let options: any[] = Object.values(optionsObj);
    // monkey patch the case where there are only two options and only one has traded.
    if (
      options.length == 2 &&
      options.map((option) => option.probability).includes(null)
    ) {
      let nonNullPrice =
        options[0].probability == null
          ? options[1].probability
          : options[0].probability;
      options = options.map((option) => {
        let probability = option.probability;
        return {
          ...option,
          probability: probability == null ? 100 - nonNullPrice : probability,
          // yes, 100, because prices are not yet normalized.
        };
      });
    }

    // Normalize normally
    let totalValue = options
      .map((element) => Number(element.probability))
      .reduce((a, b) => a + b, 0);

    options = options.map((element) => ({
      ...element,
      probability: Number(element.probability) / totalValue,
    }));
    VERBOSE ? console.log(options) : empty();

    /*
    if(contracts["contracts"].length == 2){
      isBinary = true
      percentage = ( Number(prices["last_executed_prices"][market.id][0].last_executed_price) + (100 - Number(prices["last_executed_prices"][market.id][1].last_executed_price)) ) / 2
      percentage = Math.round(percentage)+"%"
      let contractName = contracts["contracts"][0].name
      name = name+ (contractName=="Yes"?'':` (${contracts["contracts"][0].name})`)
    }
    */
    let result = {
      id: id,
      title: name,
      url: "https://smarkets.com/event/" + market.event_id + market.slug,
      platform: "Smarkets",
      description: market.description,
      options: options,
      timestamp: new Date().toISOString(),
      qualityindicators: {
        stars: calculateStars("Smarkets", {}),
      },
    };
    VERBOSE ? console.log(result) : empty();
    results.push(result);
  }
  VERBOSE ? console.log(results) : empty();

  await databaseUpsert({ contents: results, group: "smarkets" });
  VERBOSE ? console.log(JSON.stringify(results, null, 4)) : empty();
  VERBOSE ? console.dir(results, { depth: null }) : empty();
}
//smarkets()
177  src/backend/platforms/smarkets.ts  (new file)
@@ -0,0 +1,177 @@
/* Imports */
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Platform } from "./";

/* Definitions */
let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/";
let VERBOSE = false;
let empty = () => 0;
/* Support functions */

async function fetchEvents(url) {
  let response = await axios({
    url: htmlEndPointEntrance + url,
    method: "GET",
    headers: {
      "Content-Type": "text/html",
    },
  }).then((res) => res.data);
  VERBOSE ? console.log(response) : empty();
  return response;
}

async function fetchMarkets(eventid) {
  let response = await axios({
    url: `https://api.smarkets.com/v3/events/${eventid}/markets/`,
    method: "GET",
    headers: {
      "Content-Type": "text/json",
    },
  })
    .then((res) => res.data)
    .then((res) => res.markets);
  return response;
}

async function fetchContracts(marketid) {
  let response = await axios({
    url: `https://api.smarkets.com/v3/markets/${marketid}/contracts/`,
    method: "GET",
    headers: {
      "Content-Type": "text/html",
    },
  }).then((res) => res.data);
  VERBOSE ? console.log(response) : empty();
  return response;
}

async function fetchPrices(marketid) {
  let response = await axios({
    url: `https://api.smarkets.com/v3/markets/${marketid}/last_executed_prices/`,
    method: "GET",
    headers: {
      "Content-Type": "text/html",
    },
  }).then((res) => res.data);
  VERBOSE ? console.log(response) : empty();
  return response;
}

export const smarkets: Platform = {
  name: "smarkets",
  async fetcher() {
    let htmlPath =
      "?state=new&state=upcoming&state=live&type_domain=politics&type_scope=single_event&with_new_type=true&sort=id&limit=50";

    let events = [];
    while (htmlPath) {
      let data = await fetchEvents(htmlPath);
      events.push(...data.events);
      htmlPath = data.pagination.next_page;
    }
    VERBOSE ? console.log(events) : empty();
    let markets = [];
    for (let event of events) {
      VERBOSE ? console.log(Date.now()) : empty();
      VERBOSE ? console.log(event.name) : empty();
      let eventMarkets = await fetchMarkets(event.id);
      eventMarkets = eventMarkets.map((market) => ({
        ...market,
        slug: event.full_slug,
      }));
      VERBOSE ? console.log("Markets fetched") : empty();
      VERBOSE ? console.log(event.id) : empty();
      VERBOSE ? console.log(eventMarkets) : empty();
      markets.push(...eventMarkets);
      //let lastPrices = await fetchPrices(market.id)
    }
    VERBOSE ? console.log(markets) : empty();

    let results = [];
    for (let market of markets) {
      VERBOSE ? console.log("================") : empty();
      VERBOSE ? console.log("Market: ", market) : empty();
      let id = `smarkets-${market.id}`;
      let name = market.name;

      let contracts = await fetchContracts(market.id);
      VERBOSE ? console.log("Contracts: ", contracts) : empty();
      let prices = await fetchPrices(market.id);
      VERBOSE
        ? console.log("Prices: ", prices["last_executed_prices"][market.id])
        : empty();

      let optionsObj = {};
      for (let contract of contracts["contracts"]) {
        optionsObj[contract.id] = { name: contract.name };
      }
      for (let price of prices["last_executed_prices"][market.id]) {
        optionsObj[price.contract_id] = {
          ...optionsObj[price.contract_id],
          probability: price.last_executed_price
            ? Number(price.last_executed_price)
            : null,
          type: "PROBABILITY",
        };
      }
      let options: any[] = Object.values(optionsObj);
      // monkey patch the case where there are only two options and only one has traded.
      if (
        options.length == 2 &&
        options.map((option) => option.probability).includes(null)
      ) {
        let nonNullPrice =
          options[0].probability == null
            ? options[1].probability
            : options[0].probability;
        options = options.map((option) => {
          let probability = option.probability;
          return {
            ...option,
            probability: probability == null ? 100 - nonNullPrice : probability,
            // yes, 100, because prices are not yet normalized.
          };
        });
      }

      // Normalize normally
      let totalValue = options
        .map((element) => Number(element.probability))
        .reduce((a, b) => a + b, 0);

      options = options.map((element) => ({
        ...element,
        probability: Number(element.probability) / totalValue,
      }));
      VERBOSE ? console.log(options) : empty();

      /*
      if(contracts["contracts"].length == 2){
        isBinary = true
        percentage = ( Number(prices["last_executed_prices"][market.id][0].last_executed_price) + (100 - Number(prices["last_executed_prices"][market.id][1].last_executed_price)) ) / 2
        percentage = Math.round(percentage)+"%"
        let contractName = contracts["contracts"][0].name
        name = name+ (contractName=="Yes"?'':` (${contracts["contracts"][0].name})`)
      }
      */
      let result = {
        id: id,
        title: name,
        url: "https://smarkets.com/event/" + market.event_id + market.slug,
        platform: "Smarkets",
        description: market.description,
        options: options,
        timestamp: new Date().toISOString(),
        qualityindicators: {
          stars: calculateStars("Smarkets", {}),
        },
      };
      VERBOSE ? console.log(result) : empty();
      results.push(result);
    }
    VERBOSE ? console.log(results) : empty();
    return results;
  },
};
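The "monkey patch" in smarkets.ts above covers two-option markets where only one side has ever traded: the untraded side is filled in as 100 minus the traded price (Smarkets prices are still on a 0–100 scale at that point), and only then does the usual normalization divide by the total. A made-up example:

// Made-up prices, only to illustrate the two-option patch in smarkets.ts.
let options = [
  { name: "Yes", probability: 66, type: "PROBABILITY" },
  { name: "No", probability: null, type: "PROBABILITY" }, // never traded
];
// Patch: the untraded side becomes 100 - 66 = 34.
// Normalization: 66 / (66 + 34) = 0.66 and 34 / 100 = 0.34.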
@@ -1,11 +1,10 @@
 /* Imports */
-// import axios from "axios"
 import { GoogleSpreadsheet } from "google-spreadsheet";
-import { databaseUpsert } from "../database/database-wrapper";
 import { applyIfSecretExists } from "../utils/getSecrets";
 import { hash } from "../utils/hash";
 import { calculateStars } from "../utils/stars";
+import { Platform } from "./";
 
 /* Definitions */
 const SHEET_ID = "1xcgYF7Q0D95TPHLLSgwhWBHFrWZUGJn7yTyAhDR4vi0"; // spreadsheet key is the long id in the sheets URL
@@ -113,22 +112,17 @@ async function processPredictions(predictions) {
     uniqueTitles.push(result.title);
   });
   return uniqueResults;
-  // console.log(results)
-  // console.log(results.map(result => result.options))
-  // processPredictions()
 }
-/* Body */
 export async function wildeford_inner(google_api_key) {
   let predictions = await fetchGoogleDoc(google_api_key);
-  let results = await processPredictions(predictions); // somehow needed
-  // console.log(results.sort((a,b) => (a.title > b.title)))
-  await databaseUpsert({ contents: results, group: "wildeford" });
-
-  console.log("Done");
+  return await processPredictions(predictions);
 }
-//example()
 
-export async function wildeford() {
+export const wildeford: Platform = {
+  name: "wildeford",
+  async fetcher() {
   const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY; // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey
-  await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner);
-}
+    return await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner);
+  },
+};
15  src/backend/platforms/xrisk.ts  (new file)
@@ -0,0 +1,15 @@
import fs from "fs";

import { Platform } from "./";

export const xrisk: Platform = {
  name: "xrisk",
  async fetcher() {
    return; // not necessary to refill the DB every time
    let fileRaw = fs.readFileSync("./input/xrisk-questions.json", {
      encoding: "utf-8",
    });
    const results = JSON.parse(fileRaw);
    return results;
  },
};
@@ -4,13 +4,15 @@ import { databaseReadWithReadCredentials } from "../database/database-wrapper";
 import { mergeEverythingInner } from "../flow/mergeEverything";
 
 let cookie = process.env.ALGOLIA_MASTER_API_KEY;
-const client = algoliasearch("96UD3NTQ7L", cookie);
+const algoliaAppId = process.env.NEXT_PUBLIC_ALGOLIA_APP_ID;
+const client = algoliasearch(algoliaAppId, cookie);
+console.log(`Initializing algolia index for ${algoliaAppId}`);
 const index = client.initIndex("metaforecast");
 
 export async function rebuildAlgoliaDatabaseTheHardWay() {
   console.log("Doing this the hard way");
   let records = await mergeEverythingInner();
-  records = records.map((record, index) => ({
+  records = records.map((record, index: number) => ({
     ...record,
     has_numforecasts: record.numforecasts ? true : false,
     objectID: index,
@@ -28,21 +30,23 @@ export async function rebuildAlgoliaDatabaseTheHardWay() {
   }
 }
 
-let getoptionsstringforsearch = (record) => {
+let getoptionsstringforsearch = (record: any) => {
   let result = "";
   if (!!record.options && record.options.length > 0) {
     result = record.options
-      .map((option) => option.name || null)
-      .filter((x) => x != null)
+      .map((option: any) => option.name || null)
+      .filter((x: any) => x != null)
       .join(", ");
   }
   return result;
 };
 
 export async function rebuildAlgoliaDatabaseTheEasyWay() {
-  let records = await databaseReadWithReadCredentials({ group: "combined" });
+  let records: any[] = await databaseReadWithReadCredentials({
+    group: "combined",
+  });
 
-  records = records.map((record, index) => ({
+  records = records.map((record, index: number) => ({
     ...record,
     has_numforecasts: record.numforecasts ? true : false,
     objectID: index,
@@ -50,11 +54,11 @@ export async function rebuildAlgoliaDatabaseTheEasyWay() {
   }));
   // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/
 
+  console.log(index.appId, index.indexName);
+
   if (index.exists()) {
     console.log("Index exists");
-    index
-      .replaceAllObjects(records, { safe: true })
-      .catch((error) => console.log(error));
+    await index.replaceAllObjects(records, { safe: true });
     console.log(
       `Pushed ${records.length} records. Algolia will update asynchronously`
     );
@@ -1,6 +1,9 @@
-export async function applyIfSecretExists(cookie, fun) {
+export async function applyIfSecretExists<T>(
+  cookie,
+  fun: (...args: any[]) => T
+) {
   if (cookie) {
-    await fun(cookie);
+    return await fun(cookie);
   } else if (!cookie) {
     console.log(
       `Cannot proceed with ${fun.name} because cookie does not exist`
@@ -5,10 +5,9 @@ import fs from "fs";
 let locationData = "../../data/";
 
 /* Body */
-let rawdata = fs.readFileSync(
-  "/home/nuno/Documents/core/software/fresh/js/metaforecasts/metaforecasts-mongo/src/input/xrisk-questions.json",
-  { encoding: "utf-8" }
-);
+let rawdata = fs.readFileSync("./input/xrisk-questions.json", {
+  encoding: "utf-8",
+});
 let data = JSON.parse(rawdata);
 
 let results = [];
@@ -1,6 +1,9 @@
 import algoliasearch from "algoliasearch";
 
-const client = algoliasearch("96UD3NTQ7L", "618dbd0092971388cfd43aac1ae5f1f5"); // Only search.
+const client = algoliasearch(
+  process.env.NEXT_PUBLIC_ALGOLIA_APP_ID,
+  process.env.NEXT_PUBLIC_ALGOLIA_SEARCH_KEY
+);
 const index = client.initIndex("metaforecast");
 
 let buildFilter = ({