refactor: strict typescript
also:
- save history simultaneously with question data
- update squiggle
- minor refactorings
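For context, a minimal sketch of the "save history simultaneously with question data" behavior (assumptions: the Prisma `history` model mirrors `question` plus an `idref` column, as used in processPlatform further down in this diff; the helper name is illustrative only, not part of the commit):

// Sketch only (not part of the diff). Writes question rows and a history
// snapshot of the same rows in the same run.
import { Question } from "@prisma/client";
import { prisma } from "../database/prisma";

async function saveWithHistory(questions: Question[]) {
  // persist the question data itself
  for (const q of questions) {
    await prisma.question.upsert({
      where: { id: q.id },
      create: q,
      update: q,
    });
  }
  // snapshot the same rows into the history table, pointing back via idref
  await prisma.history.createMany({
    data: questions.map((q) => ({ ...q, idref: q.id })),
  });
}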
This commit is contained in:
parent da03fa8804
commit f37a49e398
734 package-lock.json (generated)
File diff suppressed because it is too large
|
@ -33,15 +33,18 @@
|
|||
"@pothos/plugin-prisma": "^3.4.0",
|
||||
"@pothos/plugin-relay": "^3.10.0",
|
||||
"@prisma/client": "^3.11.1",
|
||||
"@quri/squiggle-lang": "^0.2.8",
|
||||
"@tailwindcss/forms": "^0.4.0",
|
||||
"@tailwindcss/typography": "^0.5.1",
|
||||
"@types/chroma-js": "^2.1.3",
|
||||
"@types/dom-to-image": "^2.6.4",
|
||||
"@types/google-spreadsheet": "^3.2.1",
|
||||
"@types/jsdom": "^16.2.14",
|
||||
"@types/nprogress": "^0.2.0",
|
||||
"@types/react": "^17.0.39",
|
||||
"@types/react-copy-to-clipboard": "^5.0.2",
|
||||
"@types/textversionjs": "^1.1.1",
|
||||
"@types/tunnel": "^0.0.3",
|
||||
"airtable": "^0.11.1",
|
||||
"algoliasearch": "^4.10.3",
|
||||
"autoprefixer": "^10.1.0",
|
||||
|
@ -88,7 +91,6 @@
|
|||
"react-safe": "^1.3.0",
|
||||
"react-select": "^5.2.2",
|
||||
"remark-gfm": "^3.0.1",
|
||||
"squiggle-experimental": "^0.1.9",
|
||||
"tabletojson": "^2.0.4",
|
||||
"tailwindcss": "^3.0.22",
|
||||
"textversionjs": "^1.1.3",
|
||||
|
|
|
@ -3,14 +3,7 @@ import { executeJobByName } from "./jobs";
|
|||
|
||||
/* Do everything */
|
||||
export async function doEverything() {
|
||||
let jobNames = [
|
||||
...platforms.map((platform) => platform.name),
|
||||
"merge",
|
||||
"algolia",
|
||||
"history",
|
||||
"netlify",
|
||||
];
|
||||
// Removed Good Judgment from the fetcher, doing it using cron instead because cloudflare blocks the utility on heroku.
|
||||
let jobNames = [...platforms.map((platform) => platform.name), "algolia"];
|
||||
|
||||
console.log("");
|
||||
console.log("");
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
import { prisma } from "../../database/prisma";
|
||||
|
||||
export async function updateHistory() {
|
||||
const questions = await prisma.question.findMany({});
|
||||
await prisma.history.createMany({
|
||||
data: questions.map((q) => ({
|
||||
...q,
|
||||
idref: q.id,
|
||||
})),
|
||||
});
|
||||
}
|
|
@ -1,9 +1,8 @@
|
|||
import { doEverything } from "../flow/doEverything";
|
||||
import { updateHistory } from "../flow/history/updateHistory";
|
||||
import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData";
|
||||
import { rebuildFrontpage } from "../frontpage";
|
||||
import { platforms, processPlatform } from "../platforms";
|
||||
import { rebuildAlgoliaDatabase } from "../utils/algolia";
|
||||
import { sleep } from "../utils/sleep";
|
||||
|
||||
interface Job {
|
||||
name: string;
|
||||
|
@ -23,16 +22,6 @@ export const jobs: Job[] = [
|
|||
message: 'Rebuild algolia database ("index")',
|
||||
run: rebuildAlgoliaDatabase,
|
||||
},
|
||||
{
|
||||
name: "history",
|
||||
message: "Update history",
|
||||
run: updateHistory,
|
||||
},
|
||||
{
|
||||
name: "netlify",
|
||||
message: `Rebuild netlify site with new data`,
|
||||
run: rebuildNetlifySiteWithNewData,
|
||||
},
|
||||
{
|
||||
name: "frontpage",
|
||||
message: "Rebuild frontpage",
|
||||
|
@ -46,10 +35,6 @@ export const jobs: Job[] = [
|
|||
},
|
||||
];
|
||||
|
||||
function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function tryCatchTryAgain(fun: () => Promise<void>) {
|
||||
try {
|
||||
console.log("Initial try");
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
import axios from "axios";
|
||||
|
||||
import { applyIfSecretExists } from "../utils/getSecrets";
|
||||
|
||||
async function rebuildNetlifySiteWithNewData_inner(cookie: string) {
|
||||
let payload = {};
|
||||
let response = await axios.post(cookie, payload);
|
||||
let data = response.data;
|
||||
console.log(data);
|
||||
}
|
||||
|
||||
export async function rebuildNetlifySiteWithNewData() {
|
||||
const cookie = process.env.REBUIDNETLIFYHOOKURL || "";
|
||||
await applyIfSecretExists(cookie, rebuildNetlifySiteWithNewData_inner);
|
||||
}
|
|
@ -2,11 +2,10 @@
|
|||
import "dotenv/config";
|
||||
|
||||
import readline from "readline";
|
||||
import util from "util";
|
||||
|
||||
import { executeJobByName, jobs } from "./flow/jobs";
|
||||
|
||||
let generateWhatToDoMessage = () => {
|
||||
const generateWhatToDoMessage = () => {
|
||||
const color = "\x1b[36m";
|
||||
const resetColor = "\x1b[0m";
|
||||
let completeMessages = [
|
||||
|
@ -23,10 +22,10 @@ let generateWhatToDoMessage = () => {
|
|||
return completeMessages;
|
||||
};
|
||||
|
||||
let whattodoMessage = generateWhatToDoMessage();
|
||||
const whattodoMessage = generateWhatToDoMessage();
|
||||
|
||||
/* BODY */
|
||||
let commandLineUtility = async () => {
|
||||
const commandLineUtility = async () => {
|
||||
const pickOption = async () => {
|
||||
if (process.argv.length === 3) {
|
||||
return process.argv[2]; // e.g., npm run cli polymarket
|
||||
|
@ -37,9 +36,15 @@ let commandLineUtility = async () => {
|
|||
output: process.stdout,
|
||||
});
|
||||
|
||||
const question = util.promisify(rl.question).bind(rl);
|
||||
const question = (query: string) => {
|
||||
return new Promise((resolve: (s: string) => void) => {
|
||||
rl.question(query, resolve);
|
||||
});
|
||||
};
|
||||
|
||||
const answer = await question(whattodoMessage);
|
||||
rl.close();
|
||||
|
||||
return answer;
|
||||
};
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ async function fetchData() {
|
|||
return response;
|
||||
}
|
||||
|
||||
async function processPredictions(predictions) {
|
||||
async function processPredictions(predictions: any[]) {
|
||||
let results = await predictions.map((prediction) => {
|
||||
const id = `${platformName}-${prediction.id}`;
|
||||
const probability = prediction.probability;
|
||||
|
|
|
@ -27,7 +27,7 @@ const arraysEqual = (a: string[], b: string[]) => {
|
|||
return true;
|
||||
};
|
||||
|
||||
const mergeRunners = (runnerCatalog, runnerBook) => {
|
||||
const mergeRunners = (runnerCatalog: any, runnerBook: any) => {
|
||||
let keys = Object.keys(runnerCatalog);
|
||||
let result = [];
|
||||
for (let key of keys) {
|
||||
|
@ -45,16 +45,13 @@ async function fetchPredictions() {
|
|||
const response = await axios({
|
||||
url: endpoint,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "text/html",
|
||||
},
|
||||
httpsAgent: agent,
|
||||
}).then((response) => response.data);
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
async function whipIntoShape(data) {
|
||||
async function whipIntoShape(data: any) {
|
||||
let catalogues = data.market_catalogues;
|
||||
let books = data.market_books;
|
||||
let keys1 = Object.keys(catalogues).sort();
|
||||
|
@ -78,7 +75,7 @@ async function whipIntoShape(data) {
|
|||
return results;
|
||||
}
|
||||
|
||||
async function processPredictions(data) {
|
||||
async function processPredictions(data: any) {
|
||||
let predictions = await whipIntoShape(data);
|
||||
// console.log(JSON.stringify(predictions, null, 4))
|
||||
let results: FetchedQuestion[] = predictions.map((prediction) => {
|
||||
|
@ -87,13 +84,17 @@ async function processPredictions(data) {
|
|||
} */
|
||||
let id = `${platformName}-${prediction.marketId}`;
|
||||
let normalizationFactor = prediction.options
|
||||
.filter((option) => option.status == "ACTIVE" && option.totalMatched > 0)
|
||||
.map((option) => option.lastPriceTraded)
|
||||
.map((x) => 1 / x)
|
||||
.reduce((a, b) => a + b, 0);
|
||||
.filter(
|
||||
(option: any) => option.status == "ACTIVE" && option.totalMatched > 0
|
||||
)
|
||||
.map((option: any) => option.lastPriceTraded)
|
||||
.map((x: any) => 1 / x)
|
||||
.reduce((a: any, b: any) => a + b, 0);
|
||||
let options = prediction.options
|
||||
.filter((option) => option.status == "ACTIVE" && option.totalMatched > 0)
|
||||
.map((option) => ({
|
||||
.filter(
|
||||
(option: any) => option.status == "ACTIVE" && option.totalMatched > 0
|
||||
)
|
||||
.map((option: any) => ({
|
||||
name: option.runnerName,
|
||||
probability:
|
||||
option.lastPriceTraded != 0
|
||||
|
@ -142,7 +143,7 @@ export const betfair: Platform = {
|
|||
color: "#3d674a",
|
||||
async fetcher() {
|
||||
const data = await fetchPredictions();
|
||||
const results = await processPredictions(data); // somehow needed
|
||||
const results = await processPredictions(data);
|
||||
return results;
|
||||
},
|
||||
calculateStars(data) {
|
||||
|
|
|
@ -29,7 +29,7 @@ async function fetchData() {
|
|||
return response;
|
||||
}
|
||||
|
||||
async function getPredictionsData(caseUrl) {
|
||||
async function getPredictionsData(caseUrl: string) {
|
||||
let newCaseUrl = `https://fantasyscotus.net/user-predictions${caseUrl}?filterscount=0&groupscount=0&sortdatafield=username&sortorder=asc&pagenum=0&pagesize=20&recordstartindex=0&recordendindex=20&_=${unixtime}`;
|
||||
//console.log(newCaseUrl)
|
||||
let predictions = await axios({
|
||||
|
@ -49,7 +49,7 @@ async function getPredictionsData(caseUrl) {
|
|||
}).then((res) => res.data);
|
||||
|
||||
let predictionsAffirm = predictions.filter(
|
||||
(prediction) => prediction.percent_affirm > 50
|
||||
(prediction: any) => prediction.percent_affirm > 50
|
||||
);
|
||||
//console.log(predictions)
|
||||
//console.log(predictionsAffirm.length/predictions.length)
|
||||
|
@ -61,7 +61,7 @@ async function getPredictionsData(caseUrl) {
|
|||
};
|
||||
}
|
||||
|
||||
async function processData(data) {
|
||||
async function processData(data: any) {
|
||||
let events = data.object_list;
|
||||
let historicalPercentageCorrect = data.stats.pcnt_correct;
|
||||
let historicalProbabilityCorrect =
|
||||
|
|
|
@ -18,8 +18,10 @@ let highQualityCommunities = [
|
|||
];
|
||||
|
||||
/* Support functions */
|
||||
async function fetchAllCommunityQuestions(communityId) {
|
||||
let response = await axios({
|
||||
async function fetchAllCommunityQuestions(communityId: string) {
|
||||
// TODO - fetch foretold graphql schema to type the result properly?
|
||||
// (should be doable with graphql-code-generator, why not)
|
||||
const response = await axios({
|
||||
url: graphQLendpoint,
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
|
@ -30,10 +32,10 @@ async function fetchAllCommunityQuestions(communityId) {
|
|||
channelId: "${communityId}",
|
||||
states: OPEN,
|
||||
first: 500
|
||||
){
|
||||
) {
|
||||
total
|
||||
edges{
|
||||
node{
|
||||
edges {
|
||||
node {
|
||||
id
|
||||
name
|
||||
valueType
|
||||
|
@ -52,8 +54,8 @@ async function fetchAllCommunityQuestions(communityId) {
|
|||
})
|
||||
.then((res) => res.data)
|
||||
.then((res) => res.data.measurables.edges);
|
||||
//console.log(response)
|
||||
return response;
|
||||
|
||||
return response as any[];
|
||||
}
|
||||
|
||||
export const foretold: Platform = {
|
||||
|
@ -67,11 +69,11 @@ export const foretold: Platform = {
|
|||
questions = questions.map((question) => question.node);
|
||||
questions = questions.filter((question) => question.previousAggregate); // Questions without any predictions
|
||||
questions.forEach((question) => {
|
||||
let id = `${platformName}-${question.id}`;
|
||||
const id = `${platformName}-${question.id}`;
|
||||
|
||||
let options: FetchedQuestion["options"] = [];
|
||||
if (question.valueType == "PERCENTAGE") {
|
||||
let probability = question.previousAggregate.value.percentage;
|
||||
const probability = question.previousAggregate.value.percentage;
|
||||
options = [
|
||||
{
|
||||
name: "Yes",
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
/* Imports */
|
||||
import axios from "axios";
|
||||
import { Tabletojson } from "tabletojson";
|
||||
import tunnel from "tunnel";
|
||||
|
||||
import { average } from "../../utils";
|
||||
import { hash } from "../utils/hash";
|
||||
|
@ -18,7 +17,7 @@ export const goodjudgment: Platform = {
|
|||
color: "#7d4f1b",
|
||||
async fetcher() {
|
||||
// Proxy fuckery
|
||||
let proxy;
|
||||
// let proxy;
|
||||
/*
|
||||
* try {
|
||||
proxy = await axios
|
||||
|
@ -29,19 +28,19 @@ export const goodjudgment: Platform = {
|
|||
console.log("Proxy generation failed; using backup proxy instead");
|
||||
// hard-coded backup proxy
|
||||
*/
|
||||
proxy = {
|
||||
ip: process.env.BACKUP_PROXY_IP,
|
||||
port: process.env.BACKUP_PROXY_PORT,
|
||||
};
|
||||
// }
|
||||
let agent = tunnel.httpsOverHttp({
|
||||
proxy: {
|
||||
host: proxy.ip,
|
||||
port: proxy.port,
|
||||
},
|
||||
});
|
||||
// proxy = {
|
||||
// ip: process.env.BACKUP_PROXY_IP,
|
||||
// port: process.env.BACKUP_PROXY_PORT,
|
||||
// };
|
||||
// // }
|
||||
// let agent = tunnel.httpsOverHttp({
|
||||
// proxy: {
|
||||
// host: proxy.ip,
|
||||
// port: proxy.port,
|
||||
// },
|
||||
// });
|
||||
|
||||
let content = await axios
|
||||
const content = await axios
|
||||
.request({
|
||||
url: "https://goodjudgment.io/superforecasts/",
|
||||
method: "get",
|
||||
|
@ -58,17 +57,16 @@ export const goodjudgment: Platform = {
|
|||
let jsonTable = Tabletojson.convert(content, { stripHtmlFromCells: false });
|
||||
jsonTable.shift(); // deletes first element
|
||||
jsonTable.pop(); // deletes last element
|
||||
// console.log(jsonTable)
|
||||
|
||||
for (let table of jsonTable) {
|
||||
// console.log(table)
|
||||
let title = table[0]["0"].split("\t\t\t").splice(3)[0];
|
||||
if (title != undefined) {
|
||||
title = title.replaceAll("</a>", "");
|
||||
let id = `${platformName}-${hash(title)}`;
|
||||
let description = table
|
||||
.filter((row) => row["0"].includes("BACKGROUND:"))
|
||||
.map((row) => row["0"])
|
||||
.map((text) =>
|
||||
const id = `${platformName}-${hash(title)}`;
|
||||
const description = table
|
||||
.filter((row: any) => row["0"].includes("BACKGROUND:"))
|
||||
.map((row: any) => row["0"])
|
||||
.map((text: any) =>
|
||||
text
|
||||
.split("BACKGROUND:")[1]
|
||||
.split("Examples of Superforecaster")[0]
|
||||
|
@ -80,16 +78,16 @@ export const goodjudgment: Platform = {
|
|||
.replaceAll(" ", "")
|
||||
.replaceAll("<br> ", "")
|
||||
)[0];
|
||||
let options = table
|
||||
.filter((row) => "4" in row)
|
||||
.map((row) => ({
|
||||
const options = table
|
||||
.filter((row: any) => "4" in row)
|
||||
.map((row: any) => ({
|
||||
name: row["2"]
|
||||
.split('<span class="qTitle">')[1]
|
||||
.replace("</span>", ""),
|
||||
probability: Number(row["3"].split("%")[0]) / 100,
|
||||
type: "PROBABILITY",
|
||||
}));
|
||||
let analysis = table.filter((row) =>
|
||||
let analysis = table.filter((row: any) =>
|
||||
row[0] ? row[0].toLowerCase().includes("commentary") : false
|
||||
);
|
||||
// "Examples of Superforecaster Commentary" / Analysis
|
||||
|
|
|
@ -4,6 +4,7 @@ import { Tabletojson } from "tabletojson";
|
|||
|
||||
import { average } from "../../utils";
|
||||
import { applyIfSecretExists } from "../utils/getSecrets";
|
||||
import { sleep } from "../utils/sleep";
|
||||
import toMarkdown from "../utils/toMarkdown";
|
||||
import { FetchedQuestion, Platform } from "./";
|
||||
|
||||
|
@ -23,11 +24,10 @@ const id = () => 0;
|
|||
/* Support functions */
|
||||
|
||||
async function fetchPage(page: number, cookie: string) {
|
||||
let response = await axios({
|
||||
const response: string = await axios({
|
||||
url: htmlEndPoint + page,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "text/html",
|
||||
Cookie: cookie,
|
||||
},
|
||||
}).then((res) => res.data);
|
||||
|
@ -36,11 +36,10 @@ async function fetchPage(page: number, cookie: string) {
|
|||
}
|
||||
|
||||
async function fetchStats(questionUrl: string, cookie: string) {
|
||||
let response = await axios({
|
||||
let response: string = await axios({
|
||||
url: questionUrl + "/stats",
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "text/html",
|
||||
Cookie: cookie,
|
||||
Referer: questionUrl,
|
||||
},
|
||||
|
@ -74,7 +73,7 @@ async function fetchStats(questionUrl: string, cookie: string) {
|
|||
let optionsHtmlElement = "<table" + response.split("tbody")[1] + "table>";
|
||||
let tablesAsJson = Tabletojson.convert(optionsHtmlElement);
|
||||
let firstTable = tablesAsJson[0];
|
||||
options = firstTable.map((element) => ({
|
||||
options = firstTable.map((element: any) => ({
|
||||
name: element["0"],
|
||||
probability: Number(element["1"].replace("%", "")) / 100,
|
||||
type: "PROBABILITY",
|
||||
|
@ -133,7 +132,7 @@ function isSignedIn(html: string) {
|
|||
return isSignedInBool;
|
||||
}
|
||||
|
||||
function reachedEnd(html) {
|
||||
function reachedEnd(html: string) {
|
||||
let reachedEndBool = html.includes("No questions match your filter");
|
||||
if (reachedEndBool) {
|
||||
//console.log(html)
|
||||
|
@ -142,10 +141,6 @@ function reachedEnd(html) {
|
|||
return reachedEndBool;
|
||||
}
|
||||
|
||||
function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
/* Body */
|
||||
|
||||
async function goodjudgmentopen_inner(cookie: string) {
|
||||
|
@ -176,7 +171,11 @@ async function goodjudgmentopen_inner(cookie: string) {
|
|||
}
|
||||
}
|
||||
let questionNumRegex = new RegExp("questions/([0-9]+)");
|
||||
let questionNum = url.match(questionNumRegex)[1]; //.split("questions/")[1].split("-")[0];
|
||||
const questionNumMatch = url.match(questionNumRegex);
|
||||
if (!questionNumMatch) {
|
||||
throw new Error(`Couldn't find question num in ${url}`);
|
||||
}
|
||||
let questionNum = questionNumMatch[1];
|
||||
let id = `${platformName}-${questionNum}`;
|
||||
let question = {
|
||||
id: id,
|
||||
|
|
|
@ -2,8 +2,8 @@ import axios from "axios";
|
|||
|
||||
import { Question } from "@prisma/client";
|
||||
|
||||
import { AlgoliaQuestion } from "../../backend/utils/algolia";
|
||||
import { prisma } from "../database/prisma";
|
||||
import { AlgoliaQuestion } from "../utils/algolia";
|
||||
import { FetchedQuestion, Platform, prepareQuestion } from "./";
|
||||
|
||||
/* Definitions */
|
||||
|
@ -12,7 +12,7 @@ const searchEndpoint =
|
|||
|
||||
const apiEndpoint = "https://guesstimate.herokuapp.com";
|
||||
|
||||
const modelToQuestion = (model: any): Question => {
|
||||
const modelToQuestion = (model: any): ReturnType<typeof prepareQuestion> => {
|
||||
const { description } = model;
|
||||
// const description = model.description
|
||||
// ? model.description.replace(/\n/g, " ").replace(/ /g, " ")
|
||||
|
@ -77,12 +77,11 @@ async function search(query: string): Promise<AlgoliaQuestion[]> {
|
|||
const fetchQuestion = async (id: number): Promise<Question> => {
|
||||
const response = await axios({ url: `${apiEndpoint}/spaces/${id}` });
|
||||
let q = modelToQuestion(response.data);
|
||||
q = await prisma.question.upsert({
|
||||
return await prisma.question.upsert({
|
||||
where: { id: q.id },
|
||||
create: q,
|
||||
update: q,
|
||||
});
|
||||
return q;
|
||||
};
|
||||
|
||||
export const guesstimate: Platform & {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { Question } from "@prisma/client";
|
||||
|
||||
import { QuestionOption } from "../../common/types";
|
||||
import { prisma } from "../database/prisma";
|
||||
import { betfair } from "./betfair";
|
||||
import { fantasyscotus } from "./fantasyscotus";
|
||||
|
@ -45,11 +46,7 @@ export type FetchedQuestion = Omit<
|
|||
> & {
|
||||
timestamp?: Date;
|
||||
extra?: object; // required in DB but annoying to return empty; also this is slightly stricter than Prisma's JsonValue
|
||||
options: {
|
||||
name?: string;
|
||||
probability?: number;
|
||||
type: "PROBABILITY";
|
||||
}[]; // stronger type than Prisma's JsonValue
|
||||
options: QuestionOption[]; // stronger type than Prisma's JsonValue
|
||||
qualityindicators: Omit<QualityIndicators, "stars">; // slightly stronger type than Prisma's JsonValue
|
||||
};
|
||||
|
||||
|
@ -92,10 +89,23 @@ export const platforms: Platform[] = [
|
|||
xrisk,
|
||||
];
|
||||
|
||||
// Typing notes:
|
||||
// There's a difference between prisma's Question type (type returned from `find` and `findMany`) and its input types due to JsonValue vs InputJsonValue mismatch.
|
||||
// On the other hand, we can't use Prisma.QuestionUpdateInput or Prisma.QuestionCreateManyInput either, because we use this question in guesstimate's code for preparing questions from guesstimate models...
|
||||
// So here we build a new type which should be ok to use both in place of prisma's Question type and as an input to its update or create methods.
|
||||
type PreparedQuestion = Omit<
|
||||
Question,
|
||||
"extra" | "qualityindicators" | "options"
|
||||
> & {
|
||||
extra: NonNullable<Question["extra"]>;
|
||||
qualityindicators: NonNullable<Question["qualityindicators"]>;
|
||||
options: NonNullable<Question["options"]>;
|
||||
};
|
||||
|
||||
export const prepareQuestion = (
|
||||
q: FetchedQuestion,
|
||||
platform: Platform
|
||||
): Question => {
|
||||
): PreparedQuestion => {
|
||||
return {
|
||||
extra: {},
|
||||
timestamp: new Date(),
|
||||
|
@ -131,8 +141,8 @@ export const processPlatform = async (platform: Platform) => {
|
|||
const fetchedIdsSet = new Set(fetchedIds);
|
||||
const oldIdsSet = new Set(oldIds);
|
||||
|
||||
const createdQuestions: Question[] = [];
|
||||
const updatedQuestions: Question[] = [];
|
||||
const createdQuestions: PreparedQuestion[] = [];
|
||||
const updatedQuestions: PreparedQuestion[] = [];
|
||||
const deletedIds = oldIds.filter((id) => !fetchedIdsSet.has(id));
|
||||
|
||||
for (const q of fetchedQuestions.map((q) => prepareQuestion(q, platform))) {
|
||||
|
@ -163,6 +173,13 @@ export const processPlatform = async (platform: Platform) => {
|
|||
},
|
||||
});
|
||||
|
||||
await prisma.history.createMany({
|
||||
data: [...createdQuestions, ...updatedQuestions].map((q) => ({
|
||||
...q,
|
||||
idref: q.id,
|
||||
})),
|
||||
});
|
||||
|
||||
console.log(
|
||||
`Done, ${deletedIds.length} deleted, ${updatedQuestions.length} updated, ${createdQuestions.length} created`
|
||||
);
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
/* Imports */
|
||||
import axios from "axios";
|
||||
|
||||
import { FullQuestionOption } from "../../common/types";
|
||||
import { average } from "../../utils";
|
||||
import { applyIfSecretExists } from "../utils/getSecrets";
|
||||
import { measureTime } from "../utils/measureTime";
|
||||
import { sleep } from "../utils/sleep";
|
||||
import toMarkdown from "../utils/toMarkdown";
|
||||
import { FetchedQuestion, Platform } from "./";
|
||||
|
||||
|
@ -16,20 +18,20 @@ const SLEEP_TIME_EXTRA = 2000;
|
|||
|
||||
/* Support functions */
|
||||
|
||||
function cleanDescription(text) {
|
||||
function cleanDescription(text: string) {
|
||||
let md = toMarkdown(text);
|
||||
let result = md.replaceAll("---", "-").replaceAll(" ", " ");
|
||||
return result;
|
||||
}
|
||||
|
||||
async function fetchPage(page, cookie) {
|
||||
async function fetchPage(page: number, cookie: string) {
|
||||
console.log(`Page #${page}`);
|
||||
if (page == 1) {
|
||||
cookie = cookie.split(";")[0]; // Interesting that it otherwise doesn't work :(
|
||||
}
|
||||
let urlEndpoint = `${htmlEndPoint}/?page=${page}`;
|
||||
console.log(urlEndpoint);
|
||||
let response = await axios({
|
||||
const response: string = await axios({
|
||||
url: urlEndpoint,
|
||||
method: "GET",
|
||||
headers: {
|
||||
|
@ -41,8 +43,8 @@ async function fetchPage(page, cookie) {
|
|||
return response;
|
||||
}
|
||||
|
||||
async function fetchStats(questionUrl, cookie) {
|
||||
let response = await axios({
|
||||
async function fetchStats(questionUrl: string, cookie: string) {
|
||||
let response: string = await axios({
|
||||
url: questionUrl + "/stats",
|
||||
method: "GET",
|
||||
headers: {
|
||||
|
@ -56,7 +58,7 @@ async function fetchStats(questionUrl, cookie) {
|
|||
throw Error("Not logged in");
|
||||
}
|
||||
// Init
|
||||
let options = [];
|
||||
let options: FullQuestionOption[] = [];
|
||||
|
||||
// Parse the embedded json
|
||||
let htmlElements = response.split("\n");
|
||||
|
@ -81,7 +83,7 @@ async function fetchStats(questionUrl, cookie) {
|
|||
questionType.includes("Forecast::Question") ||
|
||||
!questionType.includes("Forecast::MultiTimePeriodQuestion")
|
||||
) {
|
||||
options = firstEmbeddedJson.question.answers.map((answer) => ({
|
||||
options = firstEmbeddedJson.question.answers.map((answer: any) => ({
|
||||
name: answer.name,
|
||||
probability: answer.normalized_probability,
|
||||
type: "PROBABILITY",
|
||||
|
@ -91,12 +93,11 @@ async function fetchStats(questionUrl, cookie) {
|
|||
options[0].probability > 1
|
||||
? 1 - options[0].probability / 100
|
||||
: 1 - options[0].probability;
|
||||
let optionNo = {
|
||||
options.push({
|
||||
name: "No",
|
||||
probability: probabilityNo,
|
||||
type: "PROBABILITY",
|
||||
};
|
||||
options.push(optionNo);
|
||||
});
|
||||
}
|
||||
}
|
||||
let result = {
|
||||
|
@ -112,7 +113,7 @@ async function fetchStats(questionUrl, cookie) {
|
|||
return result;
|
||||
}
|
||||
|
||||
function isSignedIn(html) {
|
||||
function isSignedIn(html: string) {
|
||||
let isSignedInBool = !(
|
||||
html.includes("You need to sign in or sign up before continuing") ||
|
||||
html.includes("Sign up")
|
||||
|
@ -124,7 +125,7 @@ function isSignedIn(html) {
|
|||
return isSignedInBool;
|
||||
}
|
||||
|
||||
function reachedEnd(html) {
|
||||
function reachedEnd(html: string) {
|
||||
let reachedEndBool = html.includes("No questions match your filter");
|
||||
if (reachedEndBool) {
|
||||
//console.log(html)
|
||||
|
@ -133,10 +134,6 @@ function reachedEnd(html) {
|
|||
return reachedEndBool;
|
||||
}
|
||||
|
||||
function sleep(ms) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
/* Body */
|
||||
|
||||
async function infer_inner(cookie: string) {
|
||||
|
@ -169,14 +166,18 @@ async function infer_inner(cookie: string) {
|
|||
await sleep(Math.random() * SLEEP_TIME_RANDOM + SLEEP_TIME_EXTRA); // don't be as noticeable
|
||||
|
||||
try {
|
||||
let moreinfo = await fetchStats(url, cookie);
|
||||
let questionNumRegex = new RegExp("questions/([0-9]+)");
|
||||
let questionNum = url.match(questionNumRegex)[1]; //.split("questions/")[1].split("-")[0];
|
||||
let id = `${platformName}-${questionNum}`;
|
||||
const moreinfo = await fetchStats(url, cookie);
|
||||
const questionNumRegex = new RegExp("questions/([0-9]+)");
|
||||
const questionNumMatch = url.match(questionNumRegex);
|
||||
if (!questionNumMatch) {
|
||||
throw new Error(`Couldn't find question num in ${url}`);
|
||||
}
|
||||
let questionNum = questionNumMatch[1];
|
||||
const id = `${platformName}-${questionNum}`;
|
||||
let question: FetchedQuestion = {
|
||||
id: id,
|
||||
title: title,
|
||||
url: url,
|
||||
id,
|
||||
title,
|
||||
url,
|
||||
...moreinfo,
|
||||
};
|
||||
console.log(JSON.stringify(question, null, 4));
|
||||
|
@ -231,7 +232,7 @@ export const infer: Platform = {
|
|||
color: "#223900",
|
||||
async fetcher() {
|
||||
let cookie = process.env.INFER_COOKIE;
|
||||
return await applyIfSecretExists(cookie, infer_inner);
|
||||
return (await applyIfSecretExists(cookie, infer_inner)) || null;
|
||||
},
|
||||
calculateStars(data) {
|
||||
let nuno = () => 2;
|
||||
|
|
|
@ -6,18 +6,17 @@ import { FetchedQuestion, Platform } from "./";
|
|||
|
||||
/* Definitions */
|
||||
const platformName = "kalshi";
|
||||
let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'
|
||||
let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/";
|
||||
|
||||
async function fetchAllMarkets() {
|
||||
// for info which the polymarket graphql API
|
||||
let response = await axios
|
||||
.get(jsonEndpoint)
|
||||
.then((response) => response.data.markets);
|
||||
// console.log(response)
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
async function processMarkets(markets) {
|
||||
async function processMarkets(markets: any[]) {
|
||||
let dateNow = new Date().toISOString();
|
||||
// console.log(markets)
|
||||
markets = markets.filter((market) => market.close_date > dateNow);
|
||||
|
|
|
@ -6,7 +6,7 @@ import { FetchedQuestion, Platform } from "./";
|
|||
|
||||
/* Definitions */
|
||||
const platformName = "manifold";
|
||||
let endpoint = "https://manifold.markets/api/v0/markets";
|
||||
const endpoint = "https://manifold.markets/api/v0/markets";
|
||||
// See https://manifoldmarkets.notion.site/Manifold-Markets-API-5e7d0aef4dcf452bb04b319e178fabc5
|
||||
|
||||
/* Support functions */
|
||||
|
@ -43,8 +43,8 @@ function showStatistics(results: FetchedQuestion[]) {
|
|||
);
|
||||
}
|
||||
|
||||
async function processPredictions(predictions) {
|
||||
let results: FetchedQuestion[] = await predictions.map((prediction) => {
|
||||
function processPredictions(predictions: any[]): FetchedQuestion[] {
|
||||
let results: FetchedQuestion[] = predictions.map((prediction) => {
|
||||
let id = `${platformName}-${prediction.id}`; // oops, doesn't match platform name
|
||||
let probability = prediction.probability;
|
||||
let options: FetchedQuestion["options"] = [
|
||||
|
@ -90,7 +90,7 @@ export const manifold: Platform = {
|
|||
color: "#793466",
|
||||
async fetcher() {
|
||||
let data = await fetchData();
|
||||
let results = await processPredictions(data); // somehow needed
|
||||
let results = processPredictions(data); // somehow needed
|
||||
showStatistics(results);
|
||||
return results;
|
||||
},
|
||||
|
|
|
@ -2,17 +2,18 @@
|
|||
import axios from "axios";
|
||||
|
||||
import { average } from "../../utils";
|
||||
import { sleep } from "../utils/sleep";
|
||||
import toMarkdown from "../utils/toMarkdown";
|
||||
import { FetchedQuestion, Platform } from "./";
|
||||
|
||||
/* Definitions */
|
||||
const platformName = "metaculus";
|
||||
let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page=";
|
||||
let now = new Date().toISOString();
|
||||
let DEBUG_MODE = "off";
|
||||
let SLEEP_TIME = 5000;
|
||||
|
||||
/* Support functions */
|
||||
async function fetchMetaculusQuestions(next) {
|
||||
async function fetchMetaculusQuestions(next: string) {
|
||||
// Numbers about a given address: how many, how much, at what price, etc.
|
||||
let response;
|
||||
let data;
|
||||
|
@ -25,14 +26,16 @@ async function fetchMetaculusQuestions(next) {
|
|||
data = response.data;
|
||||
} catch (error) {
|
||||
console.log(`Error in async function fetchMetaculusQuestions(next)`);
|
||||
if (!!error.response.headers["retry-after"]) {
|
||||
let timeout = error.response.headers["retry-after"];
|
||||
console.log(error);
|
||||
if (axios.isAxiosError(error)) {
|
||||
if (error.response?.headers["retry-after"]) {
|
||||
const timeout = error.response.headers["retry-after"];
|
||||
console.log(`Timeout: ${timeout}`);
|
||||
await sleep(Number(timeout) * 1000 + SLEEP_TIME);
|
||||
} else {
|
||||
await sleep(SLEEP_TIME);
|
||||
}
|
||||
console.log(error);
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
response = await axios({
|
||||
|
@ -50,11 +53,7 @@ async function fetchMetaculusQuestions(next) {
|
|||
return data;
|
||||
}
|
||||
|
||||
function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function fetchMetaculusQuestionDescription(slug) {
|
||||
async function fetchMetaculusQuestionDescription(slug: string) {
|
||||
try {
|
||||
let response = await axios({
|
||||
method: "get",
|
||||
|
@ -67,11 +66,12 @@ async function fetchMetaculusQuestionDescription(slug) {
|
|||
`We encountered some error when attempting to fetch a metaculus page. Trying again`
|
||||
);
|
||||
if (
|
||||
axios.isAxiosError(error) &&
|
||||
typeof error.response != "undefined" &&
|
||||
typeof error.response.headers != "undefined" &&
|
||||
typeof error.response.headers["retry-after"] != "undefined"
|
||||
) {
|
||||
let timeout = error.response.headers["retry-after"];
|
||||
const timeout = error.response.headers["retry-after"];
|
||||
console.log(`Timeout: ${timeout}`);
|
||||
await sleep(Number(timeout) * 1000 + SLEEP_TIME);
|
||||
} else {
|
||||
|
@ -190,6 +190,7 @@ export const metaculus: Platform = {
|
|||
|
||||
return all_questions;
|
||||
},
|
||||
|
||||
calculateStars(data) {
|
||||
const { numforecasts } = data.qualityindicators;
|
||||
let nuno = () =>
|
||||
|
|
|
@ -6,8 +6,8 @@ import { FetchedQuestion, Platform } from "./";
|
|||
|
||||
/* Definitions */
|
||||
const platformName = "polymarket";
|
||||
let graphQLendpoint =
|
||||
"https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-5"; // "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-4"// "https://api.thegraph.com/subgraphs/name/tokenunion/polymarket-matic"//"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'
|
||||
const graphQLendpoint =
|
||||
"https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-5";
|
||||
let units = 10 ** 6;
|
||||
|
||||
async function fetchAllContractInfo() {
|
||||
|
@ -18,11 +18,11 @@ async function fetchAllContractInfo() {
|
|||
// "https://strapi-matic.poly.market/markets?active=true&_sort=volume:desc&_limit=-1" to get all markets, including closed ones
|
||||
)
|
||||
.then((query) => query.data);
|
||||
response = response.filter((res) => res.closed != true);
|
||||
response = response.filter((res: any) => res.closed != true);
|
||||
return response;
|
||||
}
|
||||
|
||||
async function fetchIndividualContractData(marketMakerAddress) {
|
||||
async function fetchIndividualContractData(marketMakerAddress: string) {
|
||||
let daysSinceEra = Math.round(Date.now() / (1000 * 24 * 60 * 60)) - 7; // last week
|
||||
let response = await axios({
|
||||
url: graphQLendpoint,
|
||||
|
@ -59,7 +59,7 @@ async function fetchIndividualContractData(marketMakerAddress) {
|
|||
})
|
||||
.then((res) => res.data)
|
||||
.then((res) => res.data.fixedProductMarketMakers);
|
||||
// console.log(response)
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
|
@ -93,7 +93,7 @@ export const polymarket: Platform = {
|
|||
// let isbinary = Number(moreMarketInfo.conditions[0].outcomeSlotCount) == 2
|
||||
// let percentage = Number(moreMarketInfo.outcomeTokenPrices[0]) * 100
|
||||
// let percentageFormatted = isbinary ? (percentage.toFixed(0) + "%") : "none"
|
||||
let options = [];
|
||||
let options: FetchedQuestion["options"] = [];
|
||||
for (let outcome in moreMarketInfo.outcomeTokenPrices) {
|
||||
options.push({
|
||||
name: String(marketInfo.outcomes[outcome]),
|
||||
|
@ -107,7 +107,7 @@ export const polymarket: Platform = {
|
|||
title: marketInfo.question,
|
||||
url: "https://polymarket.com/market/" + marketInfo.slug,
|
||||
description: marketInfo.description,
|
||||
options: options,
|
||||
options,
|
||||
qualityindicators: {
|
||||
numforecasts: numforecasts.toFixed(0),
|
||||
liquidity: liquidity.toFixed(2),
|
||||
|
|
|
@ -1,24 +1,25 @@
|
|||
import axios from "axios";
|
||||
|
||||
import { average } from "../../utils";
|
||||
import { sleep } from "../utils/sleep";
|
||||
import toMarkdown from "../utils/toMarkdown";
|
||||
import { FetchedQuestion, Platform } from "./";
|
||||
|
||||
const platformName = "predictit";
|
||||
|
||||
/* Support functions */
|
||||
async function fetchmarkets() {
|
||||
let response = await axios({
|
||||
async function fetchmarkets(): Promise<any[]> {
|
||||
const response = await axios({
|
||||
method: "get",
|
||||
url: "https://www.predictit.org/api/marketdata/all/",
|
||||
});
|
||||
let openMarkets = response.data.markets.filter(
|
||||
(market) => market.status == "Open"
|
||||
const openMarkets = response.data.markets.filter(
|
||||
(market: any) => market.status == "Open"
|
||||
);
|
||||
return openMarkets;
|
||||
}
|
||||
|
||||
async function fetchmarketrules(market_id) {
|
||||
async function fetchmarketrules(market_id: string | number) {
|
||||
let response = await axios({
|
||||
method: "get",
|
||||
url: "https://www.predictit.org/api/Market/" + market_id,
|
||||
|
@ -34,10 +35,6 @@ async function fetchmarketvolumes() {
|
|||
return response.data;
|
||||
}
|
||||
|
||||
function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
/* Body */
|
||||
export const predictit: Platform = {
|
||||
name: platformName,
|
||||
|
@ -65,13 +62,15 @@ export const predictit: Platform = {
|
|||
let shares_volume = market["TotalSharesTraded"];
|
||||
// let percentageFormatted = isbinary ? Number(Number(market.contracts[0].lastTradePrice) * 100).toFixed(0) + "%" : "none"
|
||||
|
||||
let options = market.contracts.map((contract) => ({
|
||||
name: contract.name,
|
||||
probability: contract.lastTradePrice,
|
||||
let options: FetchedQuestion["options"] = (market.contracts as any[]).map(
|
||||
(contract) => ({
|
||||
name: String(contract.name),
|
||||
probability: Number(contract.lastTradePrice),
|
||||
type: "PROBABILITY",
|
||||
}));
|
||||
})
|
||||
);
|
||||
let totalValue = options
|
||||
.map((element) => Number(element.probability))
|
||||
.map((element: any) => Number(element.probability))
|
||||
.reduce((a, b) => a + b, 0);
|
||||
|
||||
if (options.length != 1 && totalValue > 1) {
|
||||
|
@ -81,7 +80,7 @@ export const predictit: Platform = {
|
|||
}));
|
||||
} else if (options.length == 1) {
|
||||
let option = options[0];
|
||||
let probability = option["probability"];
|
||||
let probability = option.probability;
|
||||
options = [
|
||||
{
|
||||
name: "Yes",
|
||||
|
@ -90,7 +89,7 @@ export const predictit: Platform = {
|
|||
},
|
||||
{
|
||||
name: "No",
|
||||
probability: 1 - probability,
|
||||
probability: 1 - (probability || 0),
|
||||
type: "PROBABILITY",
|
||||
},
|
||||
];
|
||||
|
|
|
@ -55,7 +55,7 @@ export const rootclaim: Platform = {
|
|||
for (const claim of claims) {
|
||||
const id = `${platformName}-${claim.slug.toLowerCase()}`;
|
||||
|
||||
let options = [];
|
||||
let options: FetchedQuestion["options"] = [];
|
||||
for (let scenario of claim.scenarios) {
|
||||
options.push({
|
||||
name: toMarkdown(scenario.name || scenario.text)
|
||||
|
@ -76,7 +76,7 @@ export const rootclaim: Platform = {
|
|||
title: toMarkdown(claim.question).replace("\n", ""),
|
||||
url,
|
||||
description: toMarkdown(description).replace("'", "'"),
|
||||
options: options,
|
||||
options,
|
||||
qualityindicators: {
|
||||
numforecasts: 1,
|
||||
},
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import axios from "axios";
|
||||
|
||||
import { QuestionOption } from "../../common/types";
|
||||
import { average } from "../../utils";
|
||||
import { FetchedQuestion, Platform } from "./";
|
||||
|
||||
|
@ -7,57 +8,51 @@ import { FetchedQuestion, Platform } from "./";
|
|||
const platformName = "smarkets";
|
||||
let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/";
|
||||
let VERBOSE = false;
|
||||
let empty = () => 0;
|
||||
|
||||
/* Support functions */
|
||||
|
||||
async function fetchEvents(url) {
|
||||
let response = await axios({
|
||||
async function fetchEvents(url: string) {
|
||||
const response = await axios({
|
||||
url: htmlEndPointEntrance + url,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "text/html",
|
||||
},
|
||||
}).then((res) => res.data);
|
||||
VERBOSE ? console.log(response) : empty();
|
||||
VERBOSE && console.log(response);
|
||||
return response;
|
||||
}
|
||||
|
||||
async function fetchMarkets(eventid) {
|
||||
let response = await axios({
|
||||
async function fetchMarkets(eventid: string) {
|
||||
const response = await axios({
|
||||
url: `https://api.smarkets.com/v3/events/${eventid}/markets/`,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "text/json",
|
||||
},
|
||||
})
|
||||
.then((res) => res.data)
|
||||
.then((res) => res.markets);
|
||||
return response;
|
||||
}
|
||||
|
||||
async function fetchContracts(marketid) {
|
||||
let response = await axios({
|
||||
async function fetchContracts(marketid: string) {
|
||||
const response = await axios({
|
||||
url: `https://api.smarkets.com/v3/markets/${marketid}/contracts/`,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "text/html",
|
||||
},
|
||||
}).then((res) => res.data);
|
||||
VERBOSE ? console.log(response) : empty();
|
||||
return response;
|
||||
VERBOSE && console.log(response);
|
||||
|
||||
if (!(response.contracts instanceof Array)) {
|
||||
throw new Error("Invalid response while fetching contracts");
|
||||
}
|
||||
return response.contracts as any[];
|
||||
}
|
||||
|
||||
async function fetchPrices(marketid) {
|
||||
let response = await axios({
|
||||
async function fetchPrices(marketid: string) {
|
||||
const response = await axios({
|
||||
url: `https://api.smarkets.com/v3/markets/${marketid}/last_executed_prices/`,
|
||||
method: "GET",
|
||||
headers: {
|
||||
"Content-Type": "text/html",
|
||||
},
|
||||
}).then((res) => res.data);
|
||||
VERBOSE ? console.log(response) : empty();
|
||||
return response;
|
||||
VERBOSE && console.log(response);
|
||||
if (!response.last_executed_prices) {
|
||||
throw new Error("Invalid response while fetching prices");
|
||||
}
|
||||
return response.last_executed_prices;
|
||||
}
|
||||
|
||||
export const smarkets: Platform = {
|
||||
|
@ -70,77 +65,91 @@ export const smarkets: Platform = {
|
|||
|
||||
let events = [];
|
||||
while (htmlPath) {
|
||||
let data = await fetchEvents(htmlPath);
|
||||
const data = await fetchEvents(htmlPath);
|
||||
events.push(...data.events);
|
||||
htmlPath = data.pagination.next_page;
|
||||
}
|
||||
VERBOSE ? console.log(events) : empty();
|
||||
VERBOSE && console.log(events);
|
||||
|
||||
let markets = [];
|
||||
for (let event of events) {
|
||||
VERBOSE ? console.log(Date.now()) : empty();
|
||||
VERBOSE ? console.log(event.name) : empty();
|
||||
for (const event of events) {
|
||||
VERBOSE && console.log(Date.now());
|
||||
VERBOSE && console.log(event.name);
|
||||
|
||||
let eventMarkets = await fetchMarkets(event.id);
|
||||
eventMarkets = eventMarkets.map((market) => ({
|
||||
eventMarkets = eventMarkets.map((market: any) => ({
|
||||
...market,
|
||||
// smarkets doesn't have separate urls for different markets in a single event
|
||||
// we could use anchors (e.g. https://smarkets.com/event/886716/politics/uk/uk-party-leaders/next-conservative-leader#contract-collapse-9815728-control), but it's unclear if they aren't going to change
|
||||
slug: event.full_slug,
|
||||
}));
|
||||
VERBOSE ? console.log("Markets fetched") : empty();
|
||||
VERBOSE ? console.log(event.id) : empty();
|
||||
VERBOSE ? console.log(eventMarkets) : empty();
|
||||
VERBOSE && console.log("Markets fetched");
|
||||
VERBOSE && console.log(event.id);
|
||||
VERBOSE && console.log(eventMarkets);
|
||||
markets.push(...eventMarkets);
|
||||
//let lastPrices = await fetchPrices(market.id)
|
||||
}
|
||||
VERBOSE ? console.log(markets) : empty();
|
||||
VERBOSE && console.log(markets);
|
||||
|
||||
let results = [];
|
||||
for (let market of markets) {
|
||||
VERBOSE ? console.log("================") : empty();
|
||||
VERBOSE ? console.log("Market: ", market) : empty();
|
||||
let id = `${platformName}-${market.id}`;
|
||||
let name = market.name;
|
||||
VERBOSE && console.log("================");
|
||||
VERBOSE && console.log("Market: ", market);
|
||||
|
||||
let contracts = await fetchContracts(market.id);
|
||||
VERBOSE ? console.log("Contracts: ", contracts) : empty();
|
||||
VERBOSE && console.log("Contracts: ", contracts);
|
||||
let prices = await fetchPrices(market.id);
|
||||
VERBOSE
|
||||
? console.log("Prices: ", prices["last_executed_prices"][market.id])
|
||||
: empty();
|
||||
VERBOSE && console.log("Prices: ", prices[market.id]);
|
||||
|
||||
let optionsObj = {};
|
||||
for (let contract of contracts["contracts"]) {
|
||||
optionsObj[contract.id] = { name: contract.name };
|
||||
let optionsObj: {
|
||||
[k: string]: QuestionOption;
|
||||
} = {};
|
||||
|
||||
const contractIdToName = Object.fromEntries(
|
||||
contracts.map((c) => [c.id as string, c.name as string])
|
||||
);
|
||||
|
||||
for (const price of prices[market.id]) {
|
||||
const contractName = contractIdToName[price.contract_id];
|
||||
if (!contractName) {
|
||||
console.warn(
|
||||
`Couldn't find contract ${price.contract_id} in contracts data, skipping`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
for (let price of prices["last_executed_prices"][market.id]) {
|
||||
optionsObj[price.contract_id] = {
|
||||
...optionsObj[price.contract_id],
|
||||
name: contractName,
|
||||
probability: price.last_executed_price
|
||||
? Number(price.last_executed_price)
|
||||
: null,
|
||||
: undefined,
|
||||
type: "PROBABILITY",
|
||||
};
|
||||
}
|
||||
let options: any[] = Object.values(optionsObj);
|
||||
let options: QuestionOption[] = Object.values(optionsObj);
|
||||
// monkey patch the case where there are only two options and only one has traded.
|
||||
if (
|
||||
options.length == 2 &&
|
||||
options.map((option) => option.probability).includes(null)
|
||||
options.map((option) => option.probability).includes(undefined)
|
||||
) {
|
||||
let nonNullPrice =
|
||||
const nonNullPrice =
|
||||
options[0].probability == null
|
||||
? options[1].probability
|
||||
: options[0].probability;
|
||||
|
||||
if (nonNullPrice != null) {
|
||||
options = options.map((option) => {
|
||||
let probability = option.probability;
|
||||
return {
|
||||
...option,
|
||||
probability: probability == null ? 100 - nonNullPrice : probability,
|
||||
probability:
|
||||
probability == null ? 100 - nonNullPrice : probability,
|
||||
// yes, 100, because prices are not yet normalized.
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize normally
|
||||
let totalValue = options
|
||||
const totalValue = options
|
||||
.map((element) => Number(element.probability))
|
||||
.reduce((a, b) => a + b, 0);
|
||||
|
||||
|
@ -148,30 +157,32 @@ export const smarkets: Platform = {
|
|||
...element,
|
||||
probability: Number(element.probability) / totalValue,
|
||||
}));
|
||||
VERBOSE ? console.log(options) : empty();
|
||||
VERBOSE && console.log(options);
|
||||
|
||||
/*
|
||||
if(contracts["contracts"].length == 2){
|
||||
if(contracts.length == 2){
|
||||
isBinary = true
|
||||
percentage = ( Number(prices["last_executed_prices"][market.id][0].last_executed_price) + (100 - Number(prices["last_executed_prices"][market.id][1].last_executed_price)) ) / 2
|
||||
percentage = ( Number(prices[market.id][0].last_executed_price) + (100 - Number(prices[market.id][1].last_executed_price)) ) / 2
|
||||
percentage = Math.round(percentage)+"%"
|
||||
let contractName = contracts["contracts"][0].name
|
||||
name = name+ (contractName=="Yes"?'':` (${contracts["contracts"][0].name})`)
|
||||
let contractName = contracts[0].name
|
||||
name = name+ (contractName=="Yes"?'':` (${contracts[0].name})`)
|
||||
}
|
||||
*/
|
||||
let result: FetchedQuestion = {
|
||||
id: id,
|
||||
title: name,
|
||||
const id = `${platformName}-${market.id}`;
|
||||
const title = market.name;
|
||||
const result: FetchedQuestion = {
|
||||
id,
|
||||
title,
|
||||
url: "https://smarkets.com/event/" + market.event_id + market.slug,
|
||||
description: market.description,
|
||||
options: options,
|
||||
options,
|
||||
timestamp: new Date(),
|
||||
qualityindicators: {},
|
||||
};
|
||||
VERBOSE ? console.log(result) : empty();
|
||||
VERBOSE && console.log(result);
|
||||
results.push(result);
|
||||
}
|
||||
VERBOSE ? console.log(results) : empty();
|
||||
VERBOSE && console.log(results);
|
||||
return results;
|
||||
},
|
||||
calculateStars(data) {
|
||||
|
|
|
@ -13,7 +13,7 @@ const endpoint = `https://docs.google.com/spreadsheets/d/${SHEET_ID}/edit#gid=0`
|
|||
// https://docs.google.com/spreadsheets/d/1xcgYF7Q0D95TPHLLSgwhWBHFrWZUGJn7yTyAhDR4vi0/edit#gid=0
|
||||
/* Support functions */
|
||||
|
||||
const formatRow = (row) => {
|
||||
const formatRow = (row: string[]) => {
|
||||
let colNames = [
|
||||
"Prediction Date",
|
||||
"Prediction",
|
||||
|
@ -23,15 +23,15 @@ const formatRow = (row) => {
|
|||
"Prediction Right?",
|
||||
"Brier Score",
|
||||
"Notes",
|
||||
];
|
||||
let result = {};
|
||||
row.forEach((col, i) => {
|
||||
] as const;
|
||||
let result: Partial<{ [k in typeof colNames[number]]: string }> = {};
|
||||
row.forEach((col: string, i) => {
|
||||
result[colNames[i]] = col;
|
||||
});
|
||||
return result;
|
||||
return result as Required<typeof result>;
|
||||
};
|
||||
|
||||
async function fetchGoogleDoc(google_api_key) {
|
||||
async function fetchGoogleDoc(google_api_key: string) {
|
||||
// https://gist.github.com/micalevisk/9bc831bd4b3e5a3f62b9810330129c59
|
||||
let results = [];
|
||||
const doc = new GoogleSpreadsheet(SHEET_ID);
|
||||
|
@ -41,7 +41,7 @@ async function fetchGoogleDoc(google_api_key) {
|
|||
console.log(">>", doc.title);
|
||||
|
||||
const sheet = doc.sheetsByIndex[0];
|
||||
const rows = await sheet.getRows({ offset: 0 });
|
||||
const rows = await sheet.getRows();
|
||||
|
||||
console.log("# " + rows[0]._sheet.headerValues.join(","));
|
||||
let isEnd = false;
|
||||
|
@ -68,7 +68,9 @@ async function fetchGoogleDoc(google_api_key) {
|
|||
return results;
|
||||
}
|
||||
|
||||
async function processPredictions(predictions) {
|
||||
async function processPredictions(
|
||||
predictions: Awaited<ReturnType<typeof fetchGoogleDoc>>
|
||||
) {
|
||||
let currentPredictions = predictions.filter(
|
||||
(prediction) => prediction["Actual"] == "Unknown"
|
||||
);
|
||||
|
@ -101,8 +103,8 @@ async function processPredictions(predictions) {
|
|||
});
|
||||
|
||||
results = results.reverse();
|
||||
let uniqueTitles = [];
|
||||
let uniqueResults = [];
|
||||
let uniqueTitles: string[] = [];
|
||||
let uniqueResults: FetchedQuestion[] = [];
|
||||
results.forEach((result) => {
|
||||
if (!uniqueTitles.includes(result.title)) uniqueResults.push(result);
|
||||
uniqueTitles.push(result.title);
|
||||
|
@ -110,7 +112,7 @@ async function processPredictions(predictions) {
|
|||
return uniqueResults;
|
||||
}
|
||||
|
||||
export async function wildeford_inner(google_api_key) {
|
||||
export async function wildeford_inner(google_api_key: string) {
|
||||
let predictions = await fetchGoogleDoc(google_api_key);
|
||||
return await processPredictions(predictions);
|
||||
}
|
||||
|
@ -121,7 +123,7 @@ export const wildeford: Platform = {
|
|||
color: "#984158",
|
||||
async fetcher() {
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY; // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey
|
||||
return await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner);
|
||||
return (await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner)) || null;
|
||||
},
|
||||
calculateStars(data) {
|
||||
let nuno = () => 3;
|
||||
|
|
|
@ -14,8 +14,8 @@ export const xrisk: Platform = {
|
|||
let fileRaw = fs.readFileSync("./input/xrisk-questions.json", {
|
||||
encoding: "utf-8",
|
||||
});
|
||||
let results = JSON.parse(fileRaw);
|
||||
results = results.map((item) => {
|
||||
let parsedData = JSON.parse(fileRaw);
|
||||
const results = parsedData.map((item: any) => {
|
||||
item.extra = item.moreoriginsdata;
|
||||
delete item.moreoriginsdata;
|
||||
return {
|
||||
|
|
|
@ -44,7 +44,7 @@ export async function rebuildAlgoliaDatabase() {
|
|||
})
|
||||
);
|
||||
|
||||
if (index.exists()) {
|
||||
if (await index.exists()) {
|
||||
console.log("Index exists");
|
||||
await index.replaceAllObjects(records, { safe: true });
|
||||
console.log(
|
||||
|
|
|
@ -1,51 +0,0 @@
|
|||
/* Imports */
|
||||
import fs from "fs";
|
||||
|
||||
import { prisma } from "../../database/prisma";
|
||||
|
||||
/* Definitions */
|
||||
|
||||
/* Utilities */
|
||||
|
||||
/* Support functions */
|
||||
const getQualityIndicators = (question) =>
|
||||
Object.entries(question.qualityindicators)
|
||||
.map((entry) => `${entry[0]}: ${entry[1]}`)
|
||||
.join("; ");
|
||||
|
||||
/* Body */
|
||||
|
||||
const main = async () => {
|
||||
let highQualityPlatforms = [
|
||||
"CSET-foretell",
|
||||
"Foretold",
|
||||
"Good Judgment Open",
|
||||
"Metaculus",
|
||||
"PredictIt",
|
||||
"Rootclaim",
|
||||
];
|
||||
const json = await prisma.question.findMany({});
|
||||
console.log(json.length);
|
||||
//let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
|
||||
//console.log(uniquePlatforms)
|
||||
|
||||
const questionsFromGoodPlatforms = json.filter((question) =>
|
||||
highQualityPlatforms.includes(question.platform)
|
||||
);
|
||||
const tsv =
|
||||
"index\ttitle\turl\tqualityindicators\n" +
|
||||
questionsFromGoodPlatforms
|
||||
.map((question, index) => {
|
||||
let row = `${index}\t${question.title}\t${
|
||||
question.url
|
||||
}\t${getQualityIndicators(question)}`;
|
||||
console.log(row);
|
||||
return row;
|
||||
})
|
||||
.join("\n");
|
||||
//console.log(tsv)
|
||||
|
||||
// let string = JSON.stringify(json, null, 2)
|
||||
fs.writeFileSync("metaforecasts.tsv", tsv);
|
||||
};
|
||||
main();
|
|
@ -1,48 +0,0 @@
|
|||
/* Imports */
|
||||
import fs from "fs";
|
||||
|
||||
import { shuffleArray } from "../../../utils";
|
||||
import { prisma } from "../../database/prisma";
|
||||
|
||||
/* Definitions */
|
||||
|
||||
/* Utilities */
|
||||
|
||||
/* Support functions */
|
||||
let getQualityIndicators = (question) =>
|
||||
Object.entries(question.qualityindicators)
|
||||
.map((entry) => `${entry[0]}: ${entry[1]}`)
|
||||
.join("; ");
|
||||
|
||||
/* Body */
|
||||
|
||||
let main = async () => {
|
||||
let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
|
||||
let json = await prisma.question.findMany({});
|
||||
console.log(json.length);
|
||||
//let uniquePlatforms = [...new Set(json.map(question => question.platform))]
|
||||
//console.log(uniquePlatforms)
|
||||
|
||||
let questionsFromGoodPlatforms = json.filter((question) =>
|
||||
highQualityPlatforms.includes(question.platform)
|
||||
);
|
||||
let questionsFromGoodPlatformsShuffled = shuffleArray(
|
||||
questionsFromGoodPlatforms
|
||||
);
|
||||
let tsv =
|
||||
"index\ttitle\turl\tqualityindicators\n" +
|
||||
questionsFromGoodPlatforms
|
||||
.map((question, index) => {
|
||||
let row = `${index}\t${question.title}\t${
|
||||
question.url
|
||||
}\t${getQualityIndicators(question)}`;
|
||||
console.log(row);
|
||||
return row;
|
||||
})
|
||||
.join("\n");
|
||||
//console.log(tsv)
|
||||
|
||||
// let string = JSON.stringify(json, null, 2)
|
||||
fs.writeFileSync("metaforecasts_metaculus_v2.tsv", tsv);
|
||||
};
|
||||
main();
|
|
@ -11,7 +11,7 @@ let locationData = "./data/";
|
|||
// let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
|
||||
async function main() {
|
||||
const data = await prisma.question.findMany({});
|
||||
const processDescription = (description) => {
|
||||
const processDescription = (description: string | null | undefined) => {
|
||||
if (description == null || description == undefined || description == "") {
|
||||
return "";
|
||||
} else {
|
||||
|
|
|
@ -10,7 +10,7 @@ let rawdata = fs.readFileSync("../data/merged-questions.json", {
|
|||
});
|
||||
let data = JSON.parse(rawdata);
|
||||
|
||||
let results = [];
|
||||
let results: any[] = [];
|
||||
for (let datum of data) {
|
||||
// do something
|
||||
}
|
||||
|
|
3 src/backend/utils/sleep.ts (new file)
|
@ -0,0 +1,3 @@
|
|||
export function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
22 src/common/types.ts (new file)
|
@ -0,0 +1,22 @@
|
|||
import { QuestionFragment } from "../web/fragments.generated";
|
||||
|
||||
// this type is good both for backend (e.g. FetchedQuestion["options"]) and for graphql shapes
|
||||
export type QuestionOption = {
|
||||
name?: string;
|
||||
probability?: number;
|
||||
type: "PROBABILITY";
|
||||
};
|
||||
|
||||
export type FullQuestionOption = Exclude<
|
||||
QuestionOption,
|
||||
"name" | "probability"
|
||||
> & {
|
||||
name: NonNullable<QuestionOption["name"]>;
|
||||
probability: NonNullable<QuestionOption["probability"]>;
|
||||
};
|
||||
|
||||
export const isFullQuestionOption = (
|
||||
option: QuestionOption | QuestionFragment["options"][0]
|
||||
): option is FullQuestionOption => {
|
||||
return option.name != null && option.probability != null;
|
||||
};
|
|
@ -1,5 +1,6 @@
|
|||
import { NextApiRequest, NextApiResponse } from "next/types";
|
||||
import { runMePlease } from "squiggle-experimental/dist/index.js";
|
||||
|
||||
import { run } from "@quri/squiggle-lang";
|
||||
|
||||
export default async function handler(
|
||||
req: NextApiRequest,
|
||||
|
@ -24,6 +25,6 @@ $ curl -X POST -H "Content-Type: application/json" -d '{"model": "1 to 4"}'
|
|||
});
|
||||
} else {
|
||||
console.log(body.model);
|
||||
res.status(200).send(runMePlease(body.model));
|
||||
res.status(200).send(run(body.model));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import { addDays, startOfDay, startOfToday, startOfTomorrow } from "date-fns";
|
||||
|
||||
import { isFullQuestionOption } from "../../../../common/types";
|
||||
import { QuestionWithHistoryFragment } from "../../../fragments.generated";
|
||||
import { isQuestionBinary } from "../../../utils";
|
||||
import { isFullQuestionOption } from "../../utils";
|
||||
|
||||
export type ChartSeries = { x: Date; y: number; name: string }[];
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import { FullQuestionOption, isFullQuestionOption } from "../../../common/types";
|
||||
import { QuestionFragment } from "../../fragments.generated";
|
||||
import { isQuestionBinary } from "../../utils";
|
||||
import { formatProbability, FullQuestionOption, isFullQuestionOption } from "../utils";
|
||||
import { formatProbability } from "../utils";
|
||||
|
||||
const textColor = (probability: number) => {
|
||||
if (probability < 0.03) {
|
||||
|
|
|
@ -8,20 +8,3 @@ export const formatProbability = (probability: number) => {
|
|||
: percentage.toFixed(0) + "%";
|
||||
return percentageCapped;
|
||||
};
|
||||
|
||||
import { QuestionFragment } from "../fragments.generated";
|
||||
|
||||
export type QuestionOption = QuestionFragment["options"][0];
|
||||
export type FullQuestionOption = Exclude<
|
||||
QuestionOption,
|
||||
"name" | "probability"
|
||||
> & {
|
||||
name: NonNullable<QuestionOption["name"]>;
|
||||
probability: NonNullable<QuestionOption["probability"]>;
|
||||
};
|
||||
|
||||
export const isFullQuestionOption = (
|
||||
option: QuestionOption
|
||||
): option is FullQuestionOption => {
|
||||
return option.name != null && option.probability != null;
|
||||
};
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
"dom.iterable",
|
||||
"esnext"
|
||||
],
|
||||
"strict": false,
|
||||
"strict": true,
|
||||
"noEmit": true,
|
||||
"incremental": true,
|
||||
"moduleResolution": "node",
|
||||
|
|