From 4eeab9c8614b6c8d63344678f5f404fe02d2e6ef Mon Sep 17 00:00:00 2001 From: Vyacheslav Matyukhin Date: Wed, 30 Mar 2022 13:53:22 +0300 Subject: [PATCH 1/5] WIP: no schemas --- src/backend/database/database-wrapper.ts | 35 +--- src/backend/database/pg-wrapper.ts | 189 +++++++-------------- src/backend/flow/history/updateHistory.ts | 11 +- src/backend/flow/mergeEverything.ts | 6 +- src/backend/frontpage.ts | 9 +- src/pages/api/create-dashboard-from-ids.ts | 1 - src/pages/api/dashboard-by-id.ts | 2 - src/pages/api/questions.ts | 2 +- 8 files changed, 74 insertions(+), 181 deletions(-) diff --git a/src/backend/database/database-wrapper.ts b/src/backend/database/database-wrapper.ts index c96088c..7ca2679 100644 --- a/src/backend/database/database-wrapper.ts +++ b/src/backend/database/database-wrapper.ts @@ -1,43 +1,18 @@ import { pgRead, pgReadWithReadCredentials, pgUpsert } from "./pg-wrapper"; -const dateUpToYear = () => new Date().toISOString().slice(0, 4); -const dateUpToMonth = () => - new Date().toISOString().slice(0, 7).replace("-", "_"); - export async function databaseUpsert({ contents, group }) { // No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear. // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){ - switch (group) { - case "combined": - await pgUpsert({ contents, schema: "latest", tableName: "combined" }); - break; - case "history": - await pgUpsert({ - contents, - schema: "history", - tableName: `h${dateUpToYear()}`, - }); - await pgUpsert({ - contents, - schema: "history", - tableName: `h${dateUpToMonth()}`, - }); - break; - default: - await pgUpsert({ contents, schema: "latest", tableName: group }); - } + const tableName = group === "history" ? "h2022" : group; + await pgUpsert({ contents, tableName }); } const readWithReader = async ( group: string, - reader: (opts: { schema: string; tableName: string }) => Promise + reader: (opts: { tableName: string }) => Promise ) => { - const schema = group === "history" ? "history" : "latest"; - const tableName = group === "history" ? `h${dateUpToMonth()}` : group; - const response = await reader({ - schema, - tableName, - }); + const tableName = group === "history" ? 
"h2022" : group; + const response = await reader({ tableName }); console.log("Postgres: "); console.log(response.slice(0, 2)); diff --git a/src/backend/database/pg-wrapper.ts b/src/backend/database/pg-wrapper.ts index 0b21c9b..90b129a 100644 --- a/src/backend/database/pg-wrapper.ts +++ b/src/backend/database/pg-wrapper.ts @@ -6,7 +6,6 @@ import { measureTime } from "../utils/measureTime"; import { roughSizeOfObject } from "../utils/roughSize"; // Definitions -const schemas = ["latest", "history"]; const year = Number(new Date().toISOString().slice(0, 4)); const allowed_years = [year, year + 1].map((year) => `h${year}`); // tables can't begin with number const allowed_months = [...Array(12).keys()] @@ -25,12 +24,10 @@ const tableNamesWhiteListHistory = [ ...allowed_years, ...allowed_year_month_histories, ]; -const createFullName = (schemaName, namesArray) => - namesArray.map((name) => `${schemaName}.${name}`); const tableWhiteList = [ - ...createFullName("latest", tableNamesWhitelistLatest), - ...createFullName("history", tableNamesWhiteListHistory), - "latest.dashboards", + ...tableNamesWhitelistLatest, + ...tableNamesWhiteListHistory, + "dashboards", ]; /* Postgres database connection code */ @@ -80,12 +77,11 @@ export const runPgCommand = async ({ }; // Initialize -let dropTable = (schema: string, table: string) => - `DROP TABLE IF EXISTS ${schema}.${table}`; -let createIndex = (schema: string, table: string) => - `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`; -let createUniqueIndex = (schema: string, table: string) => - `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`; +let dropTable = (table: string) => `DROP TABLE IF EXISTS ${table}`; +let createIndex = (table: string) => + `CREATE INDEX ${table}_id_index ON ${table} (id);`; +let createUniqueIndex = (table: string) => + `CREATE UNIQUE INDEX ${table}_id_index ON ${table} (id);`; async function pgInitializeScaffolding() { async function setPermissionsForPublicUser() { @@ -97,38 +93,30 @@ async function pgInitializeScaffolding() { await runPgCommand({ command, pool: readWritePool }); } - let buildGrantSelectForSchema = (schema: string) => - `GRANT SELECT ON ALL TABLES IN SCHEMA ${schema} TO public_read_only_user`; - for (let schema of schemas) { - await runPgCommand({ - command: buildGrantSelectForSchema(schema), - pool: readWritePool, - }); - } + await runPgCommand({ + command: + "GRANT SELECT ON ALL TABLES IN SCHEMA public TO public_read_only_user", + pool: readWritePool, + }); - let alterDefaultPrivilegesForSchema = (schema: string) => - `ALTER DEFAULT PRIVILEGES IN SCHEMA ${schema} GRANT SELECT ON TABLES TO public_read_only_user`; - for (let schema of schemas) { - await runPgCommand({ - command: alterDefaultPrivilegesForSchema(schema), - pool: readWritePool, - }); - } + await runPgCommand({ + command: + "ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO public_read_only_user", + pool: readWritePool, + }); } let YOLO = false; if (YOLO) { console.log("Create schemas"); - for (let schema of schemas) { - await runPgCommand({ - command: `CREATE SCHEMA IF NOT EXISTS ${schema}`, - pool: readWritePool, - }); - } + await runPgCommand({ + command: `CREATE SCHEMA IF NOT EXISTS public`, + pool: readWritePool, + }); console.log(""); console.log("Set search path"); await runPgCommand({ - command: `SET search_path TO ${schemas.join(",")},public;`, + command: `SET search_path TO public;`, pool: readWritePool, }); console.log(""); @@ -143,10 +131,7 @@ async function 
pgInitializeScaffolding() { } } -let buildMetaforecastTable = ( - schema: string, - table: string -) => `CREATE TABLE ${schema}.${table} ( +let buildMetaforecastTable = (table: string) => `CREATE TABLE ${table} ( id text, title text, url text, @@ -166,23 +151,15 @@ async function pgInitializeLatest() { let schema = "latest"; for (let table of tableNamesWhitelistLatest) { await runPgCommand({ - command: dropTable(schema, table), + command: dropTable(schema), pool: readWritePool, }); await runPgCommand({ - command: buildMetaforecastTable(schema, table), + command: buildMetaforecastTable(schema), pool: readWritePool, }); - /* - if (schema == "history") { - await runPgCommand({ - command: createIndex(schema, table), - pool: readWritePool, - }); - } else { - */ await runPgCommand({ - command: createUniqueIndex(schema, table), + command: createUniqueIndex(schema), pool: readWritePool, }); //} @@ -197,7 +174,7 @@ async function pgInitializeLatest() { async function pgInitializeDashboards() { let buildDashboard = () => - `CREATE TABLE latest.dashboards ( + `CREATE TABLE dashboards ( id text, title text, description text, @@ -208,23 +185,10 @@ async function pgInitializeDashboards() { );`; let YOLO = false; if (YOLO) { - await runPgCommand({ - command: `CREATE SCHEMA IF NOT EXISTS history;`, - pool: readWritePool, - }); - console.log(""); - - console.log("Set search path"); - await runPgCommand({ - command: `SET search_path TO ${schemas.join(",")},public;`, - pool: readWritePool, - }); - console.log(""); - console.log("Create dashboard table and its index"); await runPgCommand({ - command: dropTable("latest", "dashboards"), + command: dropTable("dashboards"), pool: readWritePool, }); @@ -234,7 +198,7 @@ async function pgInitializeDashboards() { }); await runPgCommand({ - command: createUniqueIndex("latest", "dashboards"), + command: createUniqueIndex("dashboards"), pool: readWritePool, }); console.log(""); @@ -245,10 +209,7 @@ async function pgInitializeDashboards() { } } -let buildHistoryTable = ( - schema: string, - table: string -) => `CREATE TABLE ${schema}.${table} ( +let buildHistoryTable = (table: string) => `CREATE TABLE ${table} ( id text, title text, url text, @@ -264,41 +225,21 @@ export async function pgInitializeHistories() { let YOLO = false; if (YOLO) { console.log("Drop all previous history tables (Danger!)"); - await runPgCommand({ - command: `DROP SCHEMA history CASCADE;`, - pool: readWritePool, - }); - console.log(""); - - console.log("Create schemas"); - for (let schema of schemas) { - await runPgCommand({ - command: `CREATE SCHEMA IF NOT EXISTS ${schema}`, - pool: readWritePool, - }); - } - console.log(""); - - console.log("Set search path"); - await runPgCommand({ - command: `SET search_path TO ${schemas.join(",")},public;`, - pool: readWritePool, - }); + console.log("TODO - drop history tables"); // hope we won't need it until we get proper migrations console.log(""); console.log("Create tables & their indexes"); - let schema = "history"; for (let table of tableNamesWhiteListHistory) { await runPgCommand({ - command: dropTable(schema, table), + command: dropTable(table), pool: readWritePool, }); await runPgCommand({ - command: buildHistoryTable(schema, table), + command: buildHistoryTable(table), pool: readWritePool, }); await runPgCommand({ - command: createIndex(schema, table), // Not unique!! + command: createIndex(table), // Not unique!! 
pool: readWritePool, }); } @@ -314,11 +255,11 @@ async function pgInitializeFrontpage() { let YOLO = false; if (YOLO) { await runPgCommand({ - command: dropTable("latest", "frontpage"), + command: dropTable("frontpage"), pool: readWritePool, }); await runPgCommand({ - command: `CREATE TABLE latest.frontpage ( + command: `CREATE TABLE frontpage ( id serial primary key, frontpage_full jsonb, frontpage_sliced jsonb @@ -342,64 +283,51 @@ export async function pgInitialize() { // Read async function pgReadWithPool({ - schema, tableName, pool, }: { - schema: string; tableName: string; pool: Pool; }) { - if (tableWhiteList.includes(`${schema}.${tableName}`)) { - let command = `SELECT * from ${schema}.${tableName}`; + if (tableWhiteList.includes(tableName)) { + let command = `SELECT * from ${tableName}`; let response = await runPgCommand({ command, pool }); let results = response.results; return results; } else { throw Error( - `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections` + `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } } -export async function pgRead({ - schema, - tableName, -}: { - schema: string; - tableName: string; -}) { - return await pgReadWithPool({ schema, tableName, pool: readWritePool }); +export async function pgRead({ tableName }: { tableName: string }) { + return await pgReadWithPool({ tableName, pool: readWritePool }); } export async function pgReadWithReadCredentials({ - schema, tableName, }: { - schema: string; tableName: string; }) { // currently does not work. /* return await pgReadWithPool({ - schema, tableName, pool: readOnlyPool, }); */ - return await pgReadWithPool({ schema, tableName, pool: readWritePool }); + return await pgReadWithPool({ tableName, pool: readWritePool }); } export async function pgGetByIds({ ids, - schema, table, }: { ids: string[]; - schema: string; table: string; }) { let idstring = `( ${ids.map((id: string) => `'${id}'`).join(", ")} )`; // (1, 2, 3) - let command = `SELECT * from ${schema}.${table} where id in ${idstring}`; + let command = `SELECT * from ${table} where id in ${idstring}`; // see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn let response = await runPgCommand({ command, pool: readWritePool }); let results = response.results; @@ -409,23 +337,21 @@ export async function pgGetByIds({ export async function pgBulkInsert({ data, - schema, tableName, client, }: { data: Forecast[]; - schema: string; tableName: string; client: PoolClient; }) { - if (!tableWhiteList.includes(`${schema}.${tableName}`)) { + if (!tableWhiteList.includes(tableName)) { throw Error( - `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections` + `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } const generateQuery = (rows: number) => { - let text = `INSERT INTO ${schema}.${tableName} VALUES`; + let text = `INSERT INTO ${tableName} VALUES`; const cols = 10; const parts: string[] = []; for (let r = 0; r < rows; r++) { @@ -478,9 +404,9 @@ export async function pgBulkInsert({ } } -export async function pgInsertIntoDashboard({ datum, schema, tableName }) { - if (tableWhiteList.includes(`${schema}.${tableName}`)) { - let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`; +export async function pgInsertIntoDashboard({ datum, tableName }) { + if (tableWhiteList.includes(tableName)) { + let text = `INSERT INTO ${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`; let timestamp = 
datum.timestamp || new Date().toISOString(); timestamp = timestamp.slice(0, 19).replace("T", " "); let values = [ @@ -505,7 +431,7 @@ export async function pgInsertIntoDashboard({ datum, schema, tableName }) { return result; } else { throw Error( - `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections` + `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } } @@ -532,16 +458,15 @@ pgInsertIntoDashboard({ ], creator: "Nuño Sempere", }, - schema: "latest", tableName: "dashboards", }); */ -export async function pgUpsert({ contents, schema, tableName }) { - if (!tableWhiteList.includes(`${schema}.${tableName}`)) { +export async function pgUpsert({ contents, tableName }) { + if (!tableWhiteList.includes(tableName)) { console.log("tableWhiteList:"); console.log(tableWhiteList); throw Error( - `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections` + `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } @@ -553,7 +478,7 @@ export async function pgUpsert({ contents, schema, tableName }) { client.query(`DELETE FROM latest.${tableName}`); } console.log( - `Upserting ${contents.length} rows into postgres table ${schema}.${tableName}.` + `Upserting ${contents.length} rows into postgres table ${tableName}.` ); console.log( `Expected to take ${Number((contents.length * 831.183) / 4422).toFixed( @@ -563,13 +488,13 @@ export async function pgUpsert({ contents, schema, tableName }) { )} minutes` ); - await pgBulkInsert({ data: contents, schema, tableName, client }); + await pgBulkInsert({ data: contents, tableName, client }); console.log( `Inserted ${ contents.length } rows with approximate cummulative size ${roughSizeOfObject( contents - )} MB into ${schema}.${tableName}.` + )} MB into ${tableName}.` ); console.log("Sample: "); diff --git a/src/backend/flow/history/updateHistory.ts b/src/backend/flow/history/updateHistory.ts index b4733dc..6c09bcf 100644 --- a/src/backend/flow/history/updateHistory.ts +++ b/src/backend/flow/history/updateHistory.ts @@ -1,12 +1,9 @@ -import { - databaseReadWithReadCredentials, - databaseUpsert, -} from "../../database/database-wrapper"; +import { pgReadWithReadCredentials, pgUpsert } from "../../database/pg-wrapper"; export async function updateHistory() { - let latest = await databaseReadWithReadCredentials({ group: "combined" }); - await databaseUpsert({ + let latest = await pgReadWithReadCredentials({ tableName: "combined" }); + await pgUpsert({ contents: latest, - group: "history", + tableName: "h2022", }); } diff --git a/src/backend/flow/mergeEverything.ts b/src/backend/flow/mergeEverything.ts index ba50b88..20c8dad 100644 --- a/src/backend/flow/mergeEverything.ts +++ b/src/backend/flow/mergeEverything.ts @@ -1,4 +1,4 @@ -import { databaseRead, databaseUpsert } from "../database/database-wrapper"; +import { pgRead, pgUpsert } from "../database/pg-wrapper"; import { platforms } from "../platforms"; /* Merge everything */ @@ -7,7 +7,7 @@ export async function mergeEverythingInner() { let merged = []; for (let platform of platforms) { const platformName = platform.name; - let json = await databaseRead({ group: platformName }); + let json = await pgRead({ tableName: platformName }); console.log(`${platformName} has ${json.length} questions\n`); merged = merged.concat(json); } @@ -23,6 +23,6 @@ export async function mergeEverythingInner() { export async function mergeEverything() { let merged = await mergeEverythingInner(); - await databaseUpsert({ 
contents: merged, group: "combined" }); + await pgUpsert({ contents: merged, tableName: "combined" }); console.log("Done"); } diff --git a/src/backend/frontpage.ts b/src/backend/frontpage.ts index 2ba4318..aed0fbc 100644 --- a/src/backend/frontpage.ts +++ b/src/backend/frontpage.ts @@ -3,7 +3,7 @@ import { pgRead, readWritePool } from "./database/pg-wrapper"; export async function getFrontpageRaw() { const client = await readWritePool.connect(); const res = await client.query( - "SELECT frontpage_sliced FROM latest.frontpage ORDER BY id DESC LIMIT 1" + "SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1" ); if (!res.rows.length) return []; console.log(res.rows[0].frontpage_sliced); @@ -13,7 +13,7 @@ export async function getFrontpageRaw() { export async function getFrontpageFullRaw() { const client = await readWritePool.connect(); const res = await client.query( - "SELECT frontpage_full FROM latest.frontpage ORDER BY id DESC LIMIT 1" + "SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1" ); if (!res.rows.length) return []; console.log(res.rows[0]); @@ -38,14 +38,13 @@ export async function getFrontpage() { export async function rebuildFrontpage() { const frontpageFull = await pgRead({ - schema: "latest", tableName: "combined", }); const client = await readWritePool.connect(); const frontpageSliced = ( await client.query(` - SELECT * FROM latest.combined + SELECT * FROM combined WHERE (qualityindicators->>'stars')::int >= 3 AND description != '' @@ -56,7 +55,7 @@ export async function rebuildFrontpage() { const start = Date.now(); await client.query( - "INSERT INTO latest.frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)", + "INSERT INTO frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)", [JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)] ); diff --git a/src/pages/api/create-dashboard-from-ids.ts b/src/pages/api/create-dashboard-from-ids.ts index 3b06f27..a782be2 100644 --- a/src/pages/api/create-dashboard-from-ids.ts +++ b/src/pages/api/create-dashboard-from-ids.ts @@ -27,7 +27,6 @@ export default async function handler( creator: body.creator || "", extra: [], }, - schema: "latest", tableName: "dashboards", }); res.status(200).send({ diff --git a/src/pages/api/dashboard-by-id.ts b/src/pages/api/dashboard-by-id.ts index e89d5c3..366c234 100644 --- a/src/pages/api/dashboard-by-id.ts +++ b/src/pages/api/dashboard-by-id.ts @@ -16,7 +16,6 @@ export default async function handler( console.log(id); let dashboardItemArray = await pgGetByIds({ ids: [id], - schema: "latest", table: "dashboards", }); if (!!dashboardItemArray && dashboardItemArray.length > 0) { @@ -24,7 +23,6 @@ export default async function handler( console.log(dashboardItem); let dashboardContents = await pgGetByIds({ ids: dashboardItem.contents, - schema: "latest", table: "combined", }); res.status(200).send({ diff --git a/src/pages/api/questions.ts b/src/pages/api/questions.ts index 4d925ad..f06b6cf 100644 --- a/src/pages/api/questions.ts +++ b/src/pages/api/questions.ts @@ -6,7 +6,7 @@ export default async function handler( req: NextApiRequest, res: NextApiResponse ) { - let allQuestions = await pgRead({ schema: "latest", tableName: "combined" }); + let allQuestions = await pgRead({ tableName: "combined" }); console.log(allQuestions.map((element) => element.title).slice(0, 5)); console.log("..."); res.status(200).json(allQuestions); From a1ba23e340f754fd65651174f06a71bd30fbede5 Mon Sep 17 00:00:00 2001 From: Vyacheslav Matyukhin Date: Thu, 31 Mar 2022 13:00:09 +0300 Subject: 
[PATCH 2/5] feat: data in public schema; merge history tables --- src/backend/database/database-wrapper.ts | 30 --- src/backend/database/pg-wrapper.ts | 175 +++++++----------- src/backend/flow/history/updateHistory.ts | 3 +- src/backend/flow/mergeEverything.ts | 2 +- src/backend/manual/manualDownload.ts | 4 +- src/backend/manual/noSchemaMigrate.ts | 38 ++++ src/backend/platforms/example-fetch.ts | 19 +- src/backend/platforms/givewellopenphil.ts | 9 +- src/backend/platforms/index.ts | 8 +- src/backend/utils/algolia.ts | 6 +- .../pullForecastsToCSVForRating.ts | 4 +- .../pullMetaculusForecastsToCSVForRating.ts | 4 +- .../misc/process-forecasts-into-elicit.ts | 4 +- src/pages/api/create-dashboard-from-ids.ts | 1 - tsconfig.json | 3 +- 15 files changed, 136 insertions(+), 174 deletions(-) delete mode 100644 src/backend/database/database-wrapper.ts create mode 100644 src/backend/manual/noSchemaMigrate.ts diff --git a/src/backend/database/database-wrapper.ts b/src/backend/database/database-wrapper.ts deleted file mode 100644 index 7ca2679..0000000 --- a/src/backend/database/database-wrapper.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { pgRead, pgReadWithReadCredentials, pgUpsert } from "./pg-wrapper"; - -export async function databaseUpsert({ contents, group }) { - // No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear. - // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){ - const tableName = group === "history" ? "h2022" : group; - await pgUpsert({ contents, tableName }); -} - -const readWithReader = async ( - group: string, - reader: (opts: { tableName: string }) => Promise -) => { - const tableName = group === "history" ? "h2022" : group; - const response = await reader({ tableName }); - - console.log("Postgres: "); - console.log(response.slice(0, 2)); - console.log(""); - - return response; -}; - -export async function databaseRead({ group }) { - return await readWithReader(group, pgRead); -} - -export async function databaseReadWithReadCredentials({ group }) { - return await readWithReader(group, pgReadWithReadCredentials); -} diff --git a/src/backend/database/pg-wrapper.ts b/src/backend/database/pg-wrapper.ts index 90b129a..567ce1e 100644 --- a/src/backend/database/pg-wrapper.ts +++ b/src/backend/database/pg-wrapper.ts @@ -5,30 +5,11 @@ import { hash } from "../utils/hash"; import { measureTime } from "../utils/measureTime"; import { roughSizeOfObject } from "../utils/roughSize"; -// Definitions -const year = Number(new Date().toISOString().slice(0, 4)); -const allowed_years = [year, year + 1].map((year) => `h${year}`); // tables can't begin with number -const allowed_months = [...Array(12).keys()] - .map((x) => x + 1) - .map((x) => (String(x).length == 1 ? 
`0${x}` : x)); -const allowed_year_month_histories = [].concat( - ...allowed_years.map((year) => - allowed_months.map((month) => `${year}_${month}`) - ) -); // h2022_01 -const tableNamesWhitelistLatest = [ - "combined", - ...platforms.map((platform) => platform.name), -]; -const tableNamesWhiteListHistory = [ - ...allowed_years, - ...allowed_year_month_histories, -]; -const tableWhiteList = [ - ...tableNamesWhitelistLatest, - ...tableNamesWhiteListHistory, - "dashboards", -]; +const platformTableNames = platforms.map((platform) => platform.name); + +const forecastTableNames = [...platformTableNames, "combined", "history"]; + +const allTableNames = [...forecastTableNames, "dashboards", "frontpage"]; /* Postgres database connection code */ const databaseURL = process.env.DIGITALOCEAN_POSTGRES; @@ -107,20 +88,6 @@ async function pgInitializeScaffolding() { } let YOLO = false; if (YOLO) { - console.log("Create schemas"); - await runPgCommand({ - command: `CREATE SCHEMA IF NOT EXISTS public`, - pool: readWritePool, - }); - console.log(""); - - console.log("Set search path"); - await runPgCommand({ - command: `SET search_path TO public;`, - pool: readWritePool, - }); - console.log(""); - console.log("Set public user permissions"); await setPermissionsForPublicUser(); console.log(""); @@ -148,21 +115,19 @@ async function pgInitializeLatest() { let YOLO = false; if (YOLO) { console.log("Create tables & their indexes"); - let schema = "latest"; - for (let table of tableNamesWhitelistLatest) { + for (const table of platformTableNames) { await runPgCommand({ - command: dropTable(schema), + command: dropTable(table), pool: readWritePool, }); await runPgCommand({ - command: buildMetaforecastTable(schema), + command: buildMetaforecastTable(table), pool: readWritePool, }); await runPgCommand({ - command: createUniqueIndex(schema), + command: createUniqueIndex(table), pool: readWritePool, }); - //} } console.log(""); } else { @@ -224,25 +189,19 @@ let buildHistoryTable = (table: string) => `CREATE TABLE ${table} ( export async function pgInitializeHistories() { let YOLO = false; if (YOLO) { - console.log("Drop all previous history tables (Danger!)"); - console.log("TODO - drop history tables"); // hope we won't need it until we get proper migrations - console.log(""); - - console.log("Create tables & their indexes"); - for (let table of tableNamesWhiteListHistory) { - await runPgCommand({ - command: dropTable(table), - pool: readWritePool, - }); - await runPgCommand({ - command: buildHistoryTable(table), - pool: readWritePool, - }); - await runPgCommand({ - command: createIndex(table), // Not unique!! - pool: readWritePool, - }); - } + console.log("Create history table & index"); + await runPgCommand({ + command: dropTable("history"), + pool: readWritePool, + }); + await runPgCommand({ + command: buildHistoryTable("history"), + pool: readWritePool, + }); + await runPgCommand({ + command: createIndex("history"), // Not unique!! 
+ pool: readWritePool, + }); console.log(""); } else { console.log( @@ -289,16 +248,15 @@ async function pgReadWithPool({ tableName: string; pool: Pool; }) { - if (tableWhiteList.includes(tableName)) { - let command = `SELECT * from ${tableName}`; - let response = await runPgCommand({ command, pool }); - let results = response.results; - return results; - } else { + if (!allTableNames.includes(tableName)) { throw Error( `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } + let command = `SELECT * from ${tableName}`; + let response = await runPgCommand({ command, pool }); + let results = response.results; + return results; } export async function pgRead({ tableName }: { tableName: string }) { @@ -344,7 +302,7 @@ export async function pgBulkInsert({ tableName: string; client: PoolClient; }) { - if (!tableWhiteList.includes(tableName)) { + if (!forecastTableNames.includes(tableName)) { throw Error( `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); @@ -404,36 +362,30 @@ export async function pgBulkInsert({ } } -export async function pgInsertIntoDashboard({ datum, tableName }) { - if (tableWhiteList.includes(tableName)) { - let text = `INSERT INTO ${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`; - let timestamp = datum.timestamp || new Date().toISOString(); - timestamp = timestamp.slice(0, 19).replace("T", " "); - let values = [ - hash(JSON.stringify(datum.contents)), - datum.title || "", - datum.description || "", - JSON.stringify(datum.contents || []), - timestamp, // fixed - datum.creator || "", - JSON.stringify(datum.extra || []), - ]; - const client = await readWritePool.connect(); - let result; - try { - result = await client.query(text, values); - } catch (error) { - console.log(error); - } finally { - client.release(); - } - // console.log(result) - return result; - } else { - throw Error( - `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` - ); +export async function pgInsertIntoDashboard({ datum }) { + let text = `INSERT INTO dashboards VALUES($1, $2, $3, $4, $5, $6, $7)`; + let timestamp = datum.timestamp || new Date().toISOString(); + timestamp = timestamp.slice(0, 19).replace("T", " "); + let values = [ + hash(JSON.stringify(datum.contents)), + datum.title || "", + datum.description || "", + JSON.stringify(datum.contents || []), + timestamp, // fixed + datum.creator || "", + JSON.stringify(datum.extra || []), + ]; + const client = await readWritePool.connect(); + let result; + try { + result = await client.query(text, values); + } catch (error) { + console.log(error); + } finally { + client.release(); } + // console.log(result) + return result; } /* For reference id text, @@ -461,10 +413,16 @@ pgInsertIntoDashboard({ tableName: "dashboards", }); */ -export async function pgUpsert({ contents, tableName }) { - if (!tableWhiteList.includes(tableName)) { - console.log("tableWhiteList:"); - console.log(tableWhiteList); +export async function pgUpsert({ + contents, + tableName, + replace, +}: { + contents: Forecast[]; + tableName: string; + replace: boolean; +}) { + if (!forecastTableNames.includes(tableName)) { throw Error( `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); @@ -474,19 +432,12 @@ export async function pgUpsert({ contents, tableName }) { const client = await readWritePool.connect(); try { await client.query("BEGIN"); - if (schema === "latest") { - client.query(`DELETE FROM latest.${tableName}`); + if (replace) { + client.query(`DELETE FROM 
${tableName}`); } console.log( `Upserting ${contents.length} rows into postgres table ${tableName}.` ); - console.log( - `Expected to take ${Number((contents.length * 831.183) / 4422).toFixed( - 2 - )} seconds or ${Number((contents.length * 13.85305) / 4422).toFixed( - 2 - )} minutes` - ); await pgBulkInsert({ data: contents, tableName, client }); console.log( diff --git a/src/backend/flow/history/updateHistory.ts b/src/backend/flow/history/updateHistory.ts index 6c09bcf..dfb5387 100644 --- a/src/backend/flow/history/updateHistory.ts +++ b/src/backend/flow/history/updateHistory.ts @@ -4,6 +4,7 @@ export async function updateHistory() { let latest = await pgReadWithReadCredentials({ tableName: "combined" }); await pgUpsert({ contents: latest, - tableName: "h2022", + tableName: "history", + replace: false, }); } diff --git a/src/backend/flow/mergeEverything.ts b/src/backend/flow/mergeEverything.ts index 20c8dad..c29cb8a 100644 --- a/src/backend/flow/mergeEverything.ts +++ b/src/backend/flow/mergeEverything.ts @@ -23,6 +23,6 @@ export async function mergeEverythingInner() { export async function mergeEverything() { let merged = await mergeEverythingInner(); - await pgUpsert({ contents: merged, tableName: "combined" }); + await pgUpsert({ contents: merged, tableName: "combined", replace: true }); console.log("Done"); } diff --git a/src/backend/manual/manualDownload.ts b/src/backend/manual/manualDownload.ts index d6422ab..d722f81 100644 --- a/src/backend/manual/manualDownload.ts +++ b/src/backend/manual/manualDownload.ts @@ -2,10 +2,10 @@ import "dotenv/config"; import fs from "fs"; -import { databaseReadWithReadCredentials } from "../database/database-wrapper"; +import { pgReadWithReadCredentials } from "../database/pg-wrapper"; let main = async () => { - let json = await databaseReadWithReadCredentials({ group: "combined" }); + let json = await pgReadWithReadCredentials({ tableName: "combined" }); let string = JSON.stringify(json, null, 2); let filename = "metaforecasts.json"; fs.writeFileSync(filename, string); diff --git a/src/backend/manual/noSchemaMigrate.ts b/src/backend/manual/noSchemaMigrate.ts new file mode 100644 index 0000000..534d71b --- /dev/null +++ b/src/backend/manual/noSchemaMigrate.ts @@ -0,0 +1,38 @@ +import "dotenv/config"; + +import { readWritePool } from "../database/pg-wrapper"; +import { platforms } from "../platforms"; + +const migrate = async () => { + const client = await readWritePool.connect(); + + const execQuery = async (q: string) => { + console.log(q); + await client.query(q); + }; + + try { + await client.query("BEGIN"); + const copyTable = async (from: string, to: string) => { + await execQuery(`DROP TABLE IF EXISTS ${to}`); + await execQuery(`CREATE TABLE ${to} (LIKE ${from} INCLUDING ALL)`); + await execQuery(`INSERT INTO ${to} SELECT * FROM ${from}`); + }; + + for (const platform of platforms) { + await copyTable(`latest.${platform.name}`, platform.name); + } + await copyTable("latest.dashboards", "dashboards"); + await copyTable("latest.combined", "combined"); + await copyTable("latest.frontpage", "frontpage"); + await copyTable("history.h2022", "history"); + await client.query("COMMIT"); + } catch (e) { + await client.query("ROLLBACK"); + throw e; + } finally { + client.release(); + } +}; + +migrate(); diff --git a/src/backend/platforms/example-fetch.ts b/src/backend/platforms/example-fetch.ts index 5e85051..3d6c5aa 100644 --- a/src/backend/platforms/example-fetch.ts +++ b/src/backend/platforms/example-fetch.ts @@ -1,8 +1,8 @@ /* Imports */ import axios 
from "axios"; -import { databaseUpsert } from "../database/database-wrapper"; import { calculateStars } from "../utils/stars"; +import { Platform } from "./"; /* Definitions */ let endpoint = "https://example.com/"; @@ -59,12 +59,11 @@ async function processPredictions(predictions) { /* Body */ -export async function example() { - let data = await fetchData(); - let results = await processPredictions(data); // somehow needed - // console.log(results) - // let string = JSON.stringify(results, null, 2) - await databaseUpsert({ contents: results, group: "example" }); - console.log("Done"); -} -//example() +export const example: Platform = { + name: "example", + async fetcher() { + let data = await fetchData(); + let results = await processPredictions(data); // somehow needed + return results; + }, +}; diff --git a/src/backend/platforms/givewellopenphil.ts b/src/backend/platforms/givewellopenphil.ts index 31077f4..561cba2 100644 --- a/src/backend/platforms/givewellopenphil.ts +++ b/src/backend/platforms/givewellopenphil.ts @@ -2,7 +2,6 @@ import axios from "axios"; import fs from "fs"; -import { databaseUpsert } from "../database/database-wrapper"; import { calculateStars } from "../utils/stars"; import { Platform } from "./"; @@ -59,10 +58,10 @@ async function main1() { // console.log(result) results.push(result); } - await databaseUpsert({ - contents: results, - group: "givewell-questions-unprocessed", - }); + // await databaseUpsert({ + // contents: results, + // group: "givewell-questions-unprocessed", + // }); } export const givewellopenphil: Platform = { diff --git a/src/backend/platforms/index.ts b/src/backend/platforms/index.ts index 3d28c14..49ac05c 100644 --- a/src/backend/platforms/index.ts +++ b/src/backend/platforms/index.ts @@ -1,4 +1,4 @@ -import { databaseUpsert } from "../database/database-wrapper"; +import { pgUpsert } from "../database/pg-wrapper"; import { betfair } from "./betfair"; import { fantasyscotus } from "./fantasyscotus"; import { foretold } from "./foretold"; @@ -112,7 +112,11 @@ export const processPlatform = async (platform: Platform) => { } let results = await platform.fetcher(); if (results && results.length) { - await databaseUpsert({ contents: results, group: platform.name }); + await pgUpsert({ + contents: results, + tableName: platform.name, + replace: true, + }); console.log("Done"); } else { console.log(`Platform ${platform.name} didn't return any results`); diff --git a/src/backend/utils/algolia.ts b/src/backend/utils/algolia.ts index 32a138c..f46b7c6 100644 --- a/src/backend/utils/algolia.ts +++ b/src/backend/utils/algolia.ts @@ -1,6 +1,6 @@ import algoliasearch from "algoliasearch"; -import { databaseReadWithReadCredentials } from "../database/database-wrapper"; +import { pgReadWithReadCredentials } from "../database/pg-wrapper"; import { mergeEverythingInner } from "../flow/mergeEverything"; let cookie = process.env.ALGOLIA_MASTER_API_KEY; @@ -41,8 +41,8 @@ let getoptionsstringforsearch = (record: any) => { }; export async function rebuildAlgoliaDatabaseTheEasyWay() { - let records: any[] = await databaseReadWithReadCredentials({ - group: "combined", + let records: any[] = await pgReadWithReadCredentials({ + tableName: "combined", }); records = records.map((record, index: number) => ({ diff --git a/src/backend/utils/evaluations/pullForecastsToCSVForRating.ts b/src/backend/utils/evaluations/pullForecastsToCSVForRating.ts index 944649d..b4e8530 100644 --- a/src/backend/utils/evaluations/pullForecastsToCSVForRating.ts +++ 
b/src/backend/utils/evaluations/pullForecastsToCSVForRating.ts @@ -1,7 +1,7 @@ /* Imports */ import fs from "fs"; -import { databaseReadWithReadCredentials } from "../../database/database-wrapper"; +import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; /* Definitions */ @@ -24,7 +24,7 @@ let main = async () => { "PredictIt", "Rootclaim", ]; - let json = await databaseReadWithReadCredentials({ group: "combined" }); + let json = await pgReadWithReadCredentials({ tableName: "combined" }); console.log(json.length); //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //console.log(uniquePlatforms) diff --git a/src/backend/utils/evaluations/pullMetaculusForecastsToCSVForRating.ts b/src/backend/utils/evaluations/pullMetaculusForecastsToCSVForRating.ts index 943852f..fd3726a 100644 --- a/src/backend/utils/evaluations/pullMetaculusForecastsToCSVForRating.ts +++ b/src/backend/utils/evaluations/pullMetaculusForecastsToCSVForRating.ts @@ -1,7 +1,7 @@ /* Imports */ import fs from "fs"; -import { databaseReadWithReadCredentials } from "../../database/database-wrapper"; +import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; /* Definitions */ @@ -26,7 +26,7 @@ let shuffleArray = (array) => { let main = async () => { let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim'] - let json = await databaseReadWithReadCredentials({ group: "combined" }); + let json = await pgReadWithReadCredentials({ tableName: "combined" }); console.log(json.length); //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //console.log(uniquePlatforms) diff --git a/src/backend/utils/misc/process-forecasts-into-elicit.ts b/src/backend/utils/misc/process-forecasts-into-elicit.ts index e423365..8279f27 100644 --- a/src/backend/utils/misc/process-forecasts-into-elicit.ts +++ b/src/backend/utils/misc/process-forecasts-into-elicit.ts @@ -1,7 +1,7 @@ /* Imports */ import fs from "fs"; -import { databaseReadWithReadCredentials } from "../../database/database-wrapper"; +import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; /* Definitions */ let locationData = "./data/"; @@ -9,7 +9,7 @@ let locationData = "./data/"; /* Body */ // let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src async function main() { - let data = await databaseReadWithReadCredentials({ group: "combined" }); //JSON.parse(rawdata) + let data = await pgReadWithReadCredentials({ tableName: "combined" }); //JSON.parse(rawdata) let processDescription = (description) => { if (description == null || description == undefined || description == "") { return ""; diff --git a/src/pages/api/create-dashboard-from-ids.ts b/src/pages/api/create-dashboard-from-ids.ts index a782be2..73c4d9e 100644 --- a/src/pages/api/create-dashboard-from-ids.ts +++ b/src/pages/api/create-dashboard-from-ids.ts @@ -27,7 +27,6 @@ export default async function handler( creator: body.creator || "", extra: [], }, - tableName: "dashboards", }); res.status(200).send({ dashboardId: id, diff --git a/tsconfig.json b/tsconfig.json index 42bfacc..246f41d 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -17,7 +17,8 @@ "incremental": true, "moduleResolution": "node", "resolveJsonModule": true, - "isolatedModules": true + "isolatedModules": true, + "allowJs": true }, "include": [ "next-env.d.ts", From 72db637972b1d6b9cb360d2c201ebc48bb746916 Mon Sep 17 00:00:00 2001 From: Vyacheslav Matyukhin 
Date: Fri, 1 Apr 2022 23:24:35 +0300 Subject: [PATCH 3/5] feat: new db structure; platform labels --- .gitignore | 9 +- src/backend/database/pg-wrapper.ts | 49 ++++--- src/backend/flow/history/updateHistory.ts | 3 +- src/backend/flow/jobs.ts | 8 -- src/backend/flow/mergeEverything.ts | 28 ---- src/backend/frontpage.ts | 25 +--- src/backend/manual/manualDownload.ts | 2 +- src/backend/manual/noSchemaMigrate.ts | 64 +++++++++- src/backend/platforms/betfair.ts | 12 +- src/backend/platforms/example-fetch.ts | 4 +- src/backend/platforms/fantasyscotus.ts | 18 ++- src/backend/platforms/foretold.ts | 13 +- src/backend/platforms/givewellopenphil.ts | 10 +- src/backend/platforms/goodjudgment.ts | 11 +- ...goodjudmentopen.ts => goodjudgmentopen.ts} | 12 +- src/backend/platforms/index.ts | 16 ++- src/backend/platforms/infer.ts | 9 +- src/backend/platforms/kalshi.ts | 11 +- .../{manifoldmarkets.ts => manifold.ts} | 11 +- src/backend/platforms/metaculus.ts | 11 +- src/backend/platforms/polymarket.ts | 11 +- src/backend/platforms/predictit.ts | 13 +- src/backend/platforms/rootclaim.ts | 18 +-- src/backend/platforms/smarkets.ts | 13 +- src/backend/platforms/wildeford.ts | 11 +- src/backend/platforms/xrisk.ts | 14 +- src/backend/utils/algolia.ts | 32 ++--- .../pullForecastsToCSVForRating.ts | 2 +- .../pullMetaculusForecastsToCSVForRating.ts | 2 +- .../misc/process-forecasts-into-elicit.ts | 2 +- src/backend/utils/stars.ts | 94 +++++++------- src/pages/api/all-forecasts.ts | 4 +- src/pages/api/create-dashboard-from-ids.ts | 4 +- src/pages/api/dashboard-by-id.ts | 2 +- src/pages/api/frontpage.ts | 4 +- src/pages/api/questions.ts | 2 +- src/pages/capture.tsx | 10 +- src/pages/index.tsx | 10 +- src/pages/secretEmbed.tsx | 17 +-- src/web/display/dashboardCreator.tsx | 2 +- src/web/display/displayForecasts.tsx | 120 +++++++++++------- .../display/displayOneForecastForCapture.tsx | 8 +- src/web/display/multiSelectPlatforms.tsx | 45 ++++++- src/web/platforms.ts | 63 +-------- src/web/search/CommonDisplay.tsx | 109 ++++++---------- src/web/search/anySearchPage.tsx | 85 +++++++++++-- src/web/worker/searchAccordingToQueryData.ts | 56 ++++---- src/web/worker/searchGuesstimate.ts | 87 ++++++------- src/web/worker/searchWithAlgolia.ts | 48 +++---- 49 files changed, 637 insertions(+), 577 deletions(-) delete mode 100644 src/backend/flow/mergeEverything.ts rename src/backend/platforms/{goodjudmentopen.ts => goodjudgmentopen.ts} (96%) rename src/backend/platforms/{manifoldmarkets.ts => manifold.ts} (90%) diff --git a/.gitignore b/.gitignore index 0efe581..ea40f98 100644 --- a/.gitignore +++ b/.gitignore @@ -26,12 +26,6 @@ npm-debug.log* yarn-debug.log* yarn-error.log* -# local env files -.env.local -.env.development.local -.env.test.local -.env.production.local - # vercel .vercel @@ -41,5 +35,4 @@ package-lock.json ## use yarn.lock instead # Local Netlify folder .netlify -/.env -/.env.production +/.env* diff --git a/src/backend/database/pg-wrapper.ts b/src/backend/database/pg-wrapper.ts index 567ce1e..73f8369 100644 --- a/src/backend/database/pg-wrapper.ts +++ b/src/backend/database/pg-wrapper.ts @@ -1,13 +1,11 @@ import { Pool, PoolClient } from "pg"; -import { Forecast, platforms } from "../platforms"; +import { Forecast } from "../platforms"; import { hash } from "../utils/hash"; import { measureTime } from "../utils/measureTime"; import { roughSizeOfObject } from "../utils/roughSize"; -const platformTableNames = platforms.map((platform) => platform.name); - -const forecastTableNames = [...platformTableNames, "combined", 
"history"]; +const forecastTableNames = ["questions", "history"]; const allTableNames = [...forecastTableNames, "dashboards", "frontpage"]; @@ -111,28 +109,27 @@ let buildMetaforecastTable = (table: string) => `CREATE TABLE ${table} ( extra json );`; -async function pgInitializeLatest() { +async function pgInitializeQuestions() { let YOLO = false; if (YOLO) { console.log("Create tables & their indexes"); - for (const table of platformTableNames) { - await runPgCommand({ - command: dropTable(table), - pool: readWritePool, - }); - await runPgCommand({ - command: buildMetaforecastTable(table), - pool: readWritePool, - }); - await runPgCommand({ - command: createUniqueIndex(table), - pool: readWritePool, - }); - } + const table = "questions"; + await runPgCommand({ + command: dropTable(table), + pool: readWritePool, + }); + await runPgCommand({ + command: buildMetaforecastTable(table), + pool: readWritePool, + }); + await runPgCommand({ + command: createUniqueIndex(table), + pool: readWritePool, + }); console.log(""); } else { console.log( - "pgInitializeLatest: This command is dangerous, set YOLO to true in the code to invoke it" + "pgInitializeQuestions: This command is dangerous, set YOLO to true in the code to invoke it" ); } } @@ -234,7 +231,7 @@ async function pgInitializeFrontpage() { export async function pgInitialize() { await pgInitializeScaffolding(); - await pgInitializeLatest(); + await pgInitializeQuestions(); await pgInitializeHistories(); await pgInitializeDashboards(); await pgInitializeFrontpage(); @@ -416,11 +413,11 @@ pgInsertIntoDashboard({ export async function pgUpsert({ contents, tableName, - replace, + replacePlatform, }: { contents: Forecast[]; tableName: string; - replace: boolean; + replacePlatform?: string; }) { if (!forecastTableNames.includes(tableName)) { throw Error( @@ -432,8 +429,10 @@ export async function pgUpsert({ const client = await readWritePool.connect(); try { await client.query("BEGIN"); - if (replace) { - client.query(`DELETE FROM ${tableName}`); + if (replacePlatform) { + await client.query(`DELETE FROM ${tableName} WHERE platform = $1`, [ + replacePlatform, + ]); } console.log( `Upserting ${contents.length} rows into postgres table ${tableName}.` diff --git a/src/backend/flow/history/updateHistory.ts b/src/backend/flow/history/updateHistory.ts index dfb5387..8132390 100644 --- a/src/backend/flow/history/updateHistory.ts +++ b/src/backend/flow/history/updateHistory.ts @@ -1,10 +1,9 @@ import { pgReadWithReadCredentials, pgUpsert } from "../../database/pg-wrapper"; export async function updateHistory() { - let latest = await pgReadWithReadCredentials({ tableName: "combined" }); + let latest = await pgReadWithReadCredentials({ tableName: "questions" }); await pgUpsert({ contents: latest, tableName: "history", - replace: false, }); } diff --git a/src/backend/flow/jobs.ts b/src/backend/flow/jobs.ts index 4d79004..7da315c 100644 --- a/src/backend/flow/jobs.ts +++ b/src/backend/flow/jobs.ts @@ -1,7 +1,6 @@ import { pgInitialize } from "../database/pg-wrapper"; import { doEverything } from "../flow/doEverything"; import { updateHistory } from "../flow/history/updateHistory"; -import { mergeEverything } from "../flow/mergeEverything"; import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData"; import { rebuildFrontpage } from "../frontpage"; import { platforms, processPlatform } from "../platforms"; @@ -20,13 +19,6 @@ export const jobs: Job[] = [ message: `Download predictions from ${platform.name}`, run: () => 
processPlatform(platform), })), - { - name: "merge", - message: - "Merge tables into one big table (and push the result to a pg database)", - run: mergeEverything, - separate: true, - }, { name: "algolia", message: 'Rebuild algolia database ("index")', diff --git a/src/backend/flow/mergeEverything.ts b/src/backend/flow/mergeEverything.ts deleted file mode 100644 index c29cb8a..0000000 --- a/src/backend/flow/mergeEverything.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { pgRead, pgUpsert } from "../database/pg-wrapper"; -import { platforms } from "../platforms"; - -/* Merge everything */ - -export async function mergeEverythingInner() { - let merged = []; - for (let platform of platforms) { - const platformName = platform.name; - let json = await pgRead({ tableName: platformName }); - console.log(`${platformName} has ${json.length} questions\n`); - merged = merged.concat(json); - } - let mergedprocessed = merged.map((element) => ({ - ...element, - optionsstringforsearch: element.options - .map((option) => option.name) - .join(", "), - })); - console.log(`In total, there are ${mergedprocessed.length} questions`); - return mergedprocessed; -} - -export async function mergeEverything() { - let merged = await mergeEverythingInner(); - await pgUpsert({ contents: merged, tableName: "combined", replace: true }); - console.log("Done"); -} diff --git a/src/backend/frontpage.ts b/src/backend/frontpage.ts index aed0fbc..b339a4b 100644 --- a/src/backend/frontpage.ts +++ b/src/backend/frontpage.ts @@ -1,6 +1,7 @@ import { pgRead, readWritePool } from "./database/pg-wrapper"; +import { Forecast } from "./platforms"; -export async function getFrontpageRaw() { +export async function getFrontpage(): Promise { const client = await readWritePool.connect(); const res = await client.query( "SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1" @@ -10,7 +11,7 @@ export async function getFrontpageRaw() { return res.rows[0].frontpage_sliced; } -export async function getFrontpageFullRaw() { +export async function getFrontpageFull(): Promise { const client = await readWritePool.connect(); const res = await client.query( "SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1" @@ -20,31 +21,15 @@ export async function getFrontpageFullRaw() { return res.rows[0].frontpage_full; } -export async function getFrontpage() { - let frontPageForecastsCompatibleWithFuse = []; - try { - let data = await getFrontpageRaw(); - frontPageForecastsCompatibleWithFuse = data.map((result) => ({ - item: result, - score: 0, - })); - return frontPageForecastsCompatibleWithFuse; - } catch (error) { - console.log(error); - } finally { - return frontPageForecastsCompatibleWithFuse; - } -} - export async function rebuildFrontpage() { const frontpageFull = await pgRead({ - tableName: "combined", + tableName: "questions", }); const client = await readWritePool.connect(); const frontpageSliced = ( await client.query(` - SELECT * FROM combined + SELECT * FROM questions WHERE (qualityindicators->>'stars')::int >= 3 AND description != '' diff --git a/src/backend/manual/manualDownload.ts b/src/backend/manual/manualDownload.ts index d722f81..5b174cb 100644 --- a/src/backend/manual/manualDownload.ts +++ b/src/backend/manual/manualDownload.ts @@ -5,7 +5,7 @@ import fs from "fs"; import { pgReadWithReadCredentials } from "../database/pg-wrapper"; let main = async () => { - let json = await pgReadWithReadCredentials({ tableName: "combined" }); + let json = await pgReadWithReadCredentials({ tableName: "questions" }); let string = 
JSON.stringify(json, null, 2); let filename = "metaforecasts.json"; fs.writeFileSync(filename, string); diff --git a/src/backend/manual/noSchemaMigrate.ts b/src/backend/manual/noSchemaMigrate.ts index 534d71b..eca83b0 100644 --- a/src/backend/manual/noSchemaMigrate.ts +++ b/src/backend/manual/noSchemaMigrate.ts @@ -1,7 +1,6 @@ import "dotenv/config"; import { readWritePool } from "../database/pg-wrapper"; -import { platforms } from "../platforms"; const migrate = async () => { const client = await readWritePool.connect(); @@ -11,6 +10,25 @@ const migrate = async () => { await client.query(q); }; + const platformTitleToName = { + Betfair: "betfair", + FantasySCOTUS: "fantasyscotus", + Foretold: "foretold", + "GiveWell/OpenPhilanthropy": "givewellopenphil", + "Good Judgment": "goodjudgement", + "Good Judgment Open": "goodjudgmentopen", + Infer: "infer", + Kalshi: "kalshi", + "Manifold Markets": "manifold", + Metaculus: "metaculus", + "Peter Wildeford": "wildeford", + PolyMarket: "polymarket", + PredictIt: "predictit", + Rootclaim: "rootclaim", + Smarkets: "smarkets", + "X-risk estimates": "xrisk", + }; + try { await client.query("BEGIN"); const copyTable = async (from: string, to: string) => { @@ -19,13 +37,49 @@ const migrate = async () => { await execQuery(`INSERT INTO ${to} SELECT * FROM ${from}`); }; - for (const platform of platforms) { - await copyTable(`latest.${platform.name}`, platform.name); - } await copyTable("latest.dashboards", "dashboards"); - await copyTable("latest.combined", "combined"); + await copyTable("latest.combined", "questions"); await copyTable("latest.frontpage", "frontpage"); await copyTable("history.h2022", "history"); + + for (const [title, name] of Object.entries(platformTitleToName)) { + console.log(`Updating ${title} -> ${name}`); + for (const table of ["questions", "history"]) { + await client.query( + `UPDATE ${table} SET platform=$1 WHERE platform=$2`, + [name, title] + ); + } + } + + console.log("Fixing GJOpen ids in questions and history"); + for (const table of ["questions", "history"]) { + await client.query( + `UPDATE ${table} SET id=REPLACE(id, 'goodjudmentopen-', 'goodjudgmentopen-') WHERE id LIKE 'goodjudmentopen-%'` + ); + } + + const fixId = (id: string) => + id.replace("goodjudmentopen-", "goodjudgmentopen-"); + + console.log( + "Please rebuild frontpage manually - current version includes invalid GJOpen and xrisk ids" + ); + + const updateDashboards = async () => { + const res = await client.query("SELECT id, contents FROM dashboards"); + for (const row of res.rows) { + let { id, contents } = row; + contents = contents.map(fixId); + await client.query( + "UPDATE dashboards SET contents = $1 WHERE id = $2", + [JSON.stringify(contents), id] + ); + } + }; + console.log("Updating dashboards"); + await updateDashboards(); + await client.query("COMMIT"); } catch (e) { await client.query("ROLLBACK"); diff --git a/src/backend/platforms/betfair.ts b/src/backend/platforms/betfair.ts index 867df56..e7a1b1e 100644 --- a/src/backend/platforms/betfair.ts +++ b/src/backend/platforms/betfair.ts @@ -5,6 +5,8 @@ import https from "https"; import { calculateStars } from "../utils/stars"; import { Forecast, Platform } from "./"; +const platformName = "betfair"; + /* Definitions */ let endpoint = process.env.SECRET_BETFAIR_ENDPOINT; @@ -121,12 +123,14 @@ async function processPredictions(data) { id: id, title: title, url: `https://www.betfair.com/exchange/plus/politics/market/${prediction.marketId}`, - platform: "Betfair", + platform: platformName, description: 
description, options: options, timestamp: new Date().toISOString(), qualityindicators: { - stars: calculateStars("Betfair", { volume: prediction.totalMatched }), + stars: calculateStars(platformName, { + volume: prediction.totalMatched, + }), volume: prediction.totalMatched, }, }; @@ -136,7 +140,9 @@ async function processPredictions(data) { } export const betfair: Platform = { - name: "betfair", + name: platformName, + label: "Betfair", + color: "#3d674a", async fetcher() { const data = await fetchPredictions(); const results = await processPredictions(data); // somehow needed diff --git a/src/backend/platforms/example-fetch.ts b/src/backend/platforms/example-fetch.ts index 3d6c5aa..7c2e9ac 100644 --- a/src/backend/platforms/example-fetch.ts +++ b/src/backend/platforms/example-fetch.ts @@ -23,7 +23,7 @@ async function fetchData() { async function processPredictions(predictions) { let results = await predictions.map((prediction) => { - let id = `platform-${prediction.id}`; + let id = `example-${prediction.id}`; let probability = prediction.probability; let options = [ { @@ -61,6 +61,8 @@ async function processPredictions(predictions) { export const example: Platform = { name: "example", + label: "Example platform", + color: "#ff0000", async fetcher() { let data = await fetchData(); let results = await processPredictions(data); // somehow needed diff --git a/src/backend/platforms/fantasyscotus.ts b/src/backend/platforms/fantasyscotus.ts index d32a32b..9b0470e 100644 --- a/src/backend/platforms/fantasyscotus.ts +++ b/src/backend/platforms/fantasyscotus.ts @@ -2,7 +2,9 @@ import axios from "axios"; import { calculateStars } from "../utils/stars"; -import { Platform } from "./"; +import { Forecast, Platform } from "./"; + +const platformName = "fantasyscotus"; /* Definitions */ let unixtime = new Date().getTime(); @@ -65,19 +67,19 @@ async function processData(data) { let historicalPercentageCorrect = data.stats.pcnt_correct; let historicalProbabilityCorrect = Number(historicalPercentageCorrect.replace("%", "")) / 100; - let results = []; + let results: Forecast[] = []; for (let event of events) { if (event.accuracy == "") { - let id = `fantasyscotus-${event.id}`; + let id = `${platformName}-${event.id}`; // if the thing hasn't already resolved let predictionData = await getPredictionsData(event.docket_url); let pAffirm = predictionData.proportionAffirm; //let trackRecord = event.prediction.includes("Affirm") ? 
historicalProbabilityCorrect : 1-historicalProbabilityCorrect - let eventObject = { + let eventObject: Forecast = { id: id, title: `In ${event.short_name}, the SCOTUS will affirm the lower court's decision`, url: `https://fantasyscotus.net/user-predictions${event.docket_url}`, - platform: "FantasySCOTUS", + platform: platformName, description: `${(pAffirm * 100).toFixed(2)}% (${ predictionData.numAffirm } out of ${ @@ -100,7 +102,7 @@ async function processData(data) { timestamp: new Date().toISOString(), qualityindicators: { numforecasts: Number(predictionData.numForecasts), - stars: calculateStars("FantasySCOTUS", {}), + stars: calculateStars(platformName, {}), }, }; results.push(eventObject); @@ -112,7 +114,9 @@ async function processData(data) { /* Body */ export const fantasyscotus: Platform = { - name: "fantasyscotus", + name: platformName, + label: "FantasySCOTUS", + color: "#231149", async fetcher() { let rawData = await fetchData(); let results = await processData(rawData); diff --git a/src/backend/platforms/foretold.ts b/src/backend/platforms/foretold.ts index b41f655..92a27bd 100644 --- a/src/backend/platforms/foretold.ts +++ b/src/backend/platforms/foretold.ts @@ -5,6 +5,9 @@ import { calculateStars } from "../utils/stars"; import { Platform } from "./"; /* Definitions */ + +const platformName = "foretold"; + let graphQLendpoint = "https://api.foretold.io/graphql"; let highQualityCommunities = [ "0104d8e8-07e4-464b-8b32-74ef22b49f21", @@ -54,7 +57,9 @@ async function fetchAllCommunityQuestions(communityId) { } export const foretold: Platform = { - name: "foretold", + name: platformName, + label: "Foretold", + color: "#62520b", async fetcher() { let results = []; for (let community of highQualityCommunities) { @@ -62,7 +67,7 @@ export const foretold: Platform = { questions = questions.map((question) => question.node); questions = questions.filter((question) => question.previousAggregate); // Questions without any predictions questions.forEach((question) => { - let id = `foretold-${question.id}`; + let id = `${platformName}-${question.id}`; let options = []; if (question.valueType == "PERCENTAGE") { let probability = question.previousAggregate.value.percentage; @@ -83,13 +88,13 @@ export const foretold: Platform = { id: id, title: question.name, url: `https://www.foretold.io/c/${community}/m/${question.id}`, - platform: "Foretold", + platform: platformName, description: "", options: options, timestamp: new Date().toISOString(), qualityindicators: { numforecasts: Math.floor(Number(question.measurementCount) / 2), - stars: calculateStars("Foretold", {}), + stars: calculateStars(platformName, {}), }, /*liquidity: liquidity.toFixed(2), tradevolume: tradevolume.toFixed(2), diff --git a/src/backend/platforms/givewellopenphil.ts b/src/backend/platforms/givewellopenphil.ts index 561cba2..13aa8ae 100644 --- a/src/backend/platforms/givewellopenphil.ts +++ b/src/backend/platforms/givewellopenphil.ts @@ -5,6 +5,8 @@ import fs from "fs"; import { calculateStars } from "../utils/stars"; import { Platform } from "./"; +const platformName = "givewellopenphil"; + /* Support functions */ async function fetchPage(url: string) { let response = await axios({ @@ -48,11 +50,11 @@ async function main1() { let result = { title: title, url: url, - platform: "GiveWell", + platform: platformName, description: description, timestamp: new Date().toISOString(), qualityindicators: { - stars: calculateStars("GiveWell/OpenPhilanthropy", {}), + stars: calculateStars(platformName, {}), }, }; // Note: This requires 
some processing afterwards // console.log(result) @@ -65,7 +67,9 @@ async function main1() { } export const givewellopenphil: Platform = { - name: "givewellopenphil", + name: platformName, + label: "GiveWell/OpenPhilanthropy", + color: "#32407e", async fetcher() { // main1() return; // not necessary to refill the DB every time diff --git a/src/backend/platforms/goodjudgment.ts b/src/backend/platforms/goodjudgment.ts index 992bee2..17227fa 100644 --- a/src/backend/platforms/goodjudgment.ts +++ b/src/backend/platforms/goodjudgment.ts @@ -8,6 +8,7 @@ import { calculateStars } from "../utils/stars"; import { Platform } from "./"; /* Definitions */ +const platformName = "goodjudgment"; let endpoint = "https://goodjudgment.io/superforecasts/"; String.prototype.replaceAll = function replaceAll(search, replace) { return this.split(search).join(replace); @@ -15,7 +16,9 @@ String.prototype.replaceAll = function replaceAll(search, replace) { /* Body */ export const goodjudgment: Platform = { - name: "goodjudgment", + name: platformName, + label: "Good Judgment", + color: "#7d4f1b", async fetcher() { // Proxy fuckery let proxy; @@ -64,7 +67,7 @@ export const goodjudgment: Platform = { let title = table[0]["0"].split("\t\t\t").splice(3)[0]; if (title != undefined) { title = title.replaceAll("", ""); - let id = `goodjudgment-${hash(title)}`; + let id = `${platformName}-${hash(title)}`; let description = table .filter((row) => row["0"].includes("BACKGROUND:")) .map((row) => row["0"]) @@ -101,12 +104,12 @@ export const goodjudgment: Platform = { id: id, title: title, url: endpoint, - platform: "Good Judgment", + platform: platformName, description: description, options: options, timestamp: new Date().toISOString(), qualityindicators: { - stars: calculateStars("Good Judgment", {}), + stars: calculateStars(platformName, {}), }, extra: { superforecastercommentary: analysis || "", diff --git a/src/backend/platforms/goodjudmentopen.ts b/src/backend/platforms/goodjudgmentopen.ts similarity index 96% rename from src/backend/platforms/goodjudmentopen.ts rename to src/backend/platforms/goodjudgmentopen.ts index e2ce6b0..7b970f8 100644 --- a/src/backend/platforms/goodjudmentopen.ts +++ b/src/backend/platforms/goodjudgmentopen.ts @@ -8,6 +8,8 @@ import toMarkdown from "../utils/toMarkdown"; import { Platform } from "./"; /* Definitions */ +const platformName = "goodjudgmentopen"; + let htmlEndPoint = "https://www.gjopen.com/questions?page="; let annoyingPromptUrls = [ "https://www.gjopen.com/questions/1933-what-forecasting-questions-should-we-ask-what-questions-would-you-like-to-forecast-on-gjopen", @@ -185,12 +187,12 @@ async function goodjudgmentopen_inner(cookie) { } let questionNumRegex = new RegExp("questions/([0-9]+)"); let questionNum = url.match(questionNumRegex)[1]; //.split("questions/")[1].split("-")[0]; - let id = `goodjudmentopen-${questionNum}`; + let id = `${platformName}-${questionNum}`; let question = { id: id, title: title, url: url, - platform: "Good Judgment Open", + platform: platformName, ...moreinfo, }; if (j % 30 == 0 || DEBUG_MODE == "on") { @@ -236,8 +238,10 @@ async function goodjudgmentopen_inner(cookie) { return results; } -export const goodjudmentopen: Platform = { - name: "goodjudmentopen", // note the typo! 
current table name is without `g`, `goodjudmentopen` +export const goodjudgmentopen: Platform = { + name: platformName, + label: "Good Judgment Open", + color: "#002455", async fetcher() { let cookie = process.env.GOODJUDGMENTOPENCOOKIE; return await applyIfSecretExists(cookie, goodjudgmentopen_inner); diff --git a/src/backend/platforms/index.ts b/src/backend/platforms/index.ts index 49ac05c..38068aa 100644 --- a/src/backend/platforms/index.ts +++ b/src/backend/platforms/index.ts @@ -4,10 +4,10 @@ import { fantasyscotus } from "./fantasyscotus"; import { foretold } from "./foretold"; import { givewellopenphil } from "./givewellopenphil"; import { goodjudgment } from "./goodjudgment"; -import { goodjudmentopen } from "./goodjudmentopen"; +import { goodjudgmentopen } from "./goodjudgmentopen"; import { infer } from "./infer"; import { kalshi } from "./kalshi"; -import { manifoldmarkets } from "./manifoldmarkets"; +import { manifold } from "./manifold"; import { metaculus } from "./metaculus"; import { polymarket } from "./polymarket"; import { predictit } from "./predictit"; @@ -67,7 +67,9 @@ export interface Forecast { export type PlatformFetcher = () => Promise; export interface Platform { - name: string; + name: string; // short name for ids and `platform` db column, e.g. "xrisk" + label: string; // longer name for displaying on frontend etc., e.g. "X-risk estimates" + color: string; // used on frontend fetcher?: PlatformFetcher; } @@ -92,10 +94,10 @@ export const platforms: Platform[] = [ foretold, givewellopenphil, goodjudgment, - goodjudmentopen, + goodjudgmentopen, infer, kalshi, - manifoldmarkets, + manifold, metaculus, polymarket, predictit, @@ -114,8 +116,8 @@ export const processPlatform = async (platform: Platform) => { if (results && results.length) { await pgUpsert({ contents: results, - tableName: platform.name, - replace: true, + tableName: "questions", + replacePlatform: platform.name, }); console.log("Done"); } else { diff --git a/src/backend/platforms/infer.ts b/src/backend/platforms/infer.ts index 6a9bfba..5a9301f 100644 --- a/src/backend/platforms/infer.ts +++ b/src/backend/platforms/infer.ts @@ -8,6 +8,7 @@ import toMarkdown from "../utils/toMarkdown"; import { Forecast, Platform } from "./"; /* Definitions */ +const platformName = "infer"; let htmlEndPoint = "https://www.infer-pub.com/questions"; String.prototype.replaceAll = function replaceAll(search, replace) { return this.split(search).join(replace); @@ -145,7 +146,7 @@ async function fetchStats(questionUrl, cookie) { qualityindicators: { numforecasts: Number(numforecasts), numforecasters: Number(numforecasters), - stars: calculateStars("Infer", { numforecasts }), + stars: calculateStars(platformName, { numforecasts }), }, }; @@ -218,7 +219,7 @@ async function infer_inner(cookie) { let moreinfo = await fetchStats(url, cookie); let questionNumRegex = new RegExp("questions/([0-9]+)"); let questionNum = url.match(questionNumRegex)[1]; //.split("questions/")[1].split("-")[0]; - let id = `infer-${questionNum}`; + let id = `${platformName}-${questionNum}`; let question = { id: id, title: title, @@ -278,7 +279,9 @@ async function infer_inner(cookie) { } export const infer: Platform = { - name: "infer", + name: platformName, + label: "Infer", + color: "#223900", async fetcher() { let cookie = process.env.INFER_COOKIE; return await applyIfSecretExists(cookie, infer_inner); diff --git a/src/backend/platforms/kalshi.ts b/src/backend/platforms/kalshi.ts index 212506a..c785a6a 100644 --- a/src/backend/platforms/kalshi.ts +++ 
b/src/backend/platforms/kalshi.ts @@ -5,6 +5,7 @@ import { calculateStars } from "../utils/stars"; import { Platform } from "./"; /* Definitions */ +const platformName = "kalshi"; let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' async function fetchAllMarkets() { @@ -34,17 +35,17 @@ async function processMarkets(markets) { type: "PROBABILITY", }, ]; - let id = `kalshi-${market.id}`; + let id = `${platformName}-${market.id}`; let result = { id: id, title: market.title.replaceAll("*", ""), url: `https://kalshi.com/markets/${market.ticker_name}`, - platform: "Kalshi", + platform: platformName, description: `${market.settle_details}. The resolution source is: ${market.ranged_group_name} (${market.settle_source_url})`, options: options, timestamp: new Date().toISOString(), qualityindicators: { - stars: calculateStars("Kalshi", { + stars: calculateStars(platformName, { shares_volume: market.volume, interest: market.open_interest, }), @@ -70,7 +71,9 @@ async function processMarkets(markets) { } export const kalshi: Platform = { - name: "kalshi", + name: platformName, + label: "Kalshi", + color: "#615691", fetcher: async function () { let markets = await fetchAllMarkets(); return await processMarkets(markets); diff --git a/src/backend/platforms/manifoldmarkets.ts b/src/backend/platforms/manifold.ts similarity index 90% rename from src/backend/platforms/manifoldmarkets.ts rename to src/backend/platforms/manifold.ts index 0969243..9368013 100644 --- a/src/backend/platforms/manifoldmarkets.ts +++ b/src/backend/platforms/manifold.ts @@ -5,6 +5,7 @@ import { calculateStars } from "../utils/stars"; import { Platform } from "./"; /* Definitions */ +const platformName = "manifold"; let endpoint = "https://manifold.markets/api/v0/markets"; // See https://manifoldmarkets.notion.site/Manifold-Markets-API-5e7d0aef4dcf452bb04b319e178fabc5 @@ -44,7 +45,7 @@ function showStatistics(results) { async function processPredictions(predictions) { let results = await predictions.map((prediction) => { - let id = `manifold-${prediction.id}`; + let id = `manifold-${prediction.id}`; // oops, doesn't match platform name let probability = prediction.probability; let options = [ { @@ -67,7 +68,7 @@ async function processPredictions(predictions) { options: options, timestamp: new Date().toISOString(), qualityindicators: { - stars: calculateStars("Manifold Markets", { + stars: calculateStars(platformName, { volume7Days: prediction.volume7Days, volume24Hours: prediction.volume24Hours, pool: prediction.pool, @@ -88,8 +89,10 @@ async function processPredictions(predictions) { return unresolvedResults; //resultsProcessed } -export const manifoldmarkets: Platform = { - name: "manifoldmarkets", +export const manifold: Platform = { + name: platformName, + label: "Manifold Markets", + color: "#793466", async fetcher() { let data = await fetchData(); let results = await processPredictions(data); // somehow needed diff --git a/src/backend/platforms/metaculus.ts b/src/backend/platforms/metaculus.ts index 78369ee..f04b12f 100644 --- a/src/backend/platforms/metaculus.ts +++ b/src/backend/platforms/metaculus.ts @@ -6,6 +6,7 @@ import toMarkdown from "../utils/toMarkdown"; import { Platform } from "./"; /* Definitions */ +const platformName = "metaculus"; let jsonEndPoint = 
"https://www.metaculus.com/api2/questions/?page="; let now = new Date().toISOString(); let DEBUG_MODE = "off"; @@ -94,7 +95,9 @@ async function fetchMetaculusQuestionDescription(slug) { } export const metaculus: Platform = { - name: "metaculus", + name: platformName, + label: "Metaculus", + color: "#006669", async fetcher() { // let metaculusQuestionsInit = await fetchMetaculusQuestions(1) // let numQueries = Math.round(Number(metaculusQuestionsInit.count) / 20) @@ -144,18 +147,18 @@ export const metaculus: Platform = { }, ]; } - let id = `metaculus-${result.id}`; + let id = `${platformName}-${result.id}`; let interestingInfo = { id: id, title: result.title, url: "https://www.metaculus.com" + result.page_url, - platform: "Metaculus", + platform: platformName, description: description, options: options, timestamp: new Date().toISOString(), qualityindicators: { numforecasts: Number(result.number_of_predictions), - stars: calculateStars("Metaculus", { + stars: calculateStars(platformName, { numforecasts: result.number_of_predictions, }), }, diff --git a/src/backend/platforms/polymarket.ts b/src/backend/platforms/polymarket.ts index 175b93c..c5e24ab 100644 --- a/src/backend/platforms/polymarket.ts +++ b/src/backend/platforms/polymarket.ts @@ -5,6 +5,7 @@ import { calculateStars } from "../utils/stars"; import { Forecast, Platform } from "./"; /* Definitions */ +const platformName = "polymarket"; let graphQLendpoint = "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-5"; // "https://api.thegraph.com/subgraphs/name/polymarket/matic-markets-4"// "https://api.thegraph.com/subgraphs/name/tokenunion/polymarket-matic"//"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' let units = 10 ** 6; @@ -63,7 +64,9 @@ async function fetchIndividualContractData(marketMakerAddress) { } export const polymarket: Platform = { - name: "polymarket", + name: platformName, + label: "PolyMarket", + color: "#00314e", async fetcher() { let results: Forecast[] = []; let webpageEndpointData = await fetchAllContractInfo(); @@ -79,7 +82,7 @@ export const polymarket: Platform = { ); if (moreMarketAnswer.length > 0) { let moreMarketInfo = moreMarketAnswer[0]; - let id = `polymarket-${addressLowerCase.slice(0, 10)}`; + let id = `${platformName}-${addressLowerCase.slice(0, 10)}`; // console.log(id); let numforecasts = Number(moreMarketInfo.tradesQuantity); let tradevolume = @@ -103,7 +106,7 @@ export const polymarket: Platform = { id: id, title: marketInfo.question, url: "https://polymarket.com/market/" + marketInfo.slug, - platform: "PolyMarket", + platform: platformName, description: marketInfo.description, options: options, timestamp: new Date().toISOString(), @@ -111,7 +114,7 @@ export const polymarket: Platform = { numforecasts: numforecasts.toFixed(0), liquidity: liquidity.toFixed(2), tradevolume: tradevolume.toFixed(2), - stars: calculateStars("Polymarket", { + stars: calculateStars(platformName, { liquidity, option: options[0], volume: tradevolume, diff --git a/src/backend/platforms/predictit.ts b/src/backend/platforms/predictit.ts index fb0f645..55e44cc 100644 --- a/src/backend/platforms/predictit.ts +++ b/src/backend/platforms/predictit.ts @@ -1,10 +1,11 @@ -/* Imports */ import axios from "axios"; import { calculateStars } from "../utils/stars"; import toMarkdown from "../utils/toMarkdown"; import { Platform } from "./"; +const 
platformName = "predictit"; + /* Support functions */ async function fetchmarkets() { let response = await axios({ @@ -39,7 +40,9 @@ function sleep(ms: number) { /* Body */ export const predictit: Platform = { - name: "predictit", + name: platformName, + label: "PredictIt", + color: "#460c00", async fetcher() { let markets = await fetchmarkets(); let marketVolumes = await fetchmarketvolumes(); @@ -53,7 +56,7 @@ export const predictit: Platform = { let results = []; for (let market of markets) { // console.log(market.name) - let id = `predictit-${market.id}`; + let id = `${platformName}-${market.id}`; let isbinary = market.contracts.length == 1; await sleep(3000 * (1 + Math.random())); let descriptionraw = await fetchmarketrules(market.id); @@ -97,12 +100,12 @@ export const predictit: Platform = { id: id, title: market["name"], url: market.url, - platform: "PredictIt", + platform: platformName, description: description, options: options, timestamp: new Date().toISOString(), qualityindicators: { - stars: calculateStars("PredictIt", {}), + stars: calculateStars(platformName, {}), shares_volume: shares_volume, }, }; diff --git a/src/backend/platforms/rootclaim.ts b/src/backend/platforms/rootclaim.ts index 6520458..1a23f31 100644 --- a/src/backend/platforms/rootclaim.ts +++ b/src/backend/platforms/rootclaim.ts @@ -1,11 +1,11 @@ -/* Imports */ import axios from "axios"; import { calculateStars } from "../utils/stars"; import toMarkdown from "../utils/toMarkdown"; -import { Platform } from "./"; +import { Forecast, Platform } from "./"; /* Definitions */ +const platformName = "rootclaim"; let jsonEndpoint = "https://www.rootclaim.com/main_page_stories?number=100&offset=0"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' @@ -24,12 +24,14 @@ async function fetchAllRootclaims() { } export const rootclaim: Platform = { - name: "rootclaim", + name: platformName, + label: "Rootclaim", + color: "#0d1624", async fetcher() { let claims = await fetchAllRootclaims(); - let results = []; + let results: Forecast[] = []; for (let claim of claims) { - let id = `rootclaim-${claim.slug.toLowerCase()}`; + let id = `${platformName}-${claim.slug.toLowerCase()}`; let options = []; for (let scenario of claim.scenarios) { //console.log(scenario) @@ -42,17 +44,17 @@ export const rootclaim: Platform = { }); } let claimUrlPath = claim.created_at < "2020" ? 
"claims" : "analysis"; - let obj = { + let obj: Forecast = { id: id, title: toMarkdown(claim.question).replace("\n", ""), url: `https://www.rootclaim.com/${claimUrlPath}/${claim.slug}`, - platform: "Rootclaim", + platform: platformName, description: toMarkdown(claim.background).replace("'", "'"), options: options, timestamp: new Date().toISOString(), qualityindicators: { numforecasts: 1, - stars: calculateStars("Rootclaim", {}), + stars: calculateStars(platformName, {}), }, }; results.push(obj); diff --git a/src/backend/platforms/smarkets.ts b/src/backend/platforms/smarkets.ts index 4a922de..4be7a54 100644 --- a/src/backend/platforms/smarkets.ts +++ b/src/backend/platforms/smarkets.ts @@ -1,13 +1,14 @@ -/* Imports */ import axios from "axios"; import { calculateStars } from "../utils/stars"; import { Platform } from "./"; /* Definitions */ +const platformName = "smarkets"; let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/"; let VERBOSE = false; let empty = () => 0; + /* Support functions */ async function fetchEvents(url) { @@ -60,7 +61,9 @@ async function fetchPrices(marketid) { } export const smarkets: Platform = { - name: "smarkets", + name: platformName, + label: "Smarkets", + color: "#6f5b41", async fetcher() { let htmlPath = "?state=new&state=upcoming&state=live&type_domain=politics&type_scope=single_event&with_new_type=true&sort=id&limit=50"; @@ -93,7 +96,7 @@ export const smarkets: Platform = { for (let market of markets) { VERBOSE ? console.log("================") : empty(); VERBOSE ? console.log("Market: ", market) : empty(); - let id = `smarkets-${market.id}`; + let id = `${platformName}-${market.id}`; let name = market.name; let contracts = await fetchContracts(market.id); @@ -160,12 +163,12 @@ export const smarkets: Platform = { id: id, title: name, url: "https://smarkets.com/event/" + market.event_id + market.slug, - platform: "Smarkets", + platform: platformName, description: market.description, options: options, timestamp: new Date().toISOString(), qualityindicators: { - stars: calculateStars("Smarkets", {}), + stars: calculateStars(platformName, {}), }, }; VERBOSE ? 
console.log(result) : empty(); diff --git a/src/backend/platforms/wildeford.ts b/src/backend/platforms/wildeford.ts index 727739a..e628516 100644 --- a/src/backend/platforms/wildeford.ts +++ b/src/backend/platforms/wildeford.ts @@ -7,6 +7,7 @@ import { calculateStars } from "../utils/stars"; import { Platform } from "./"; /* Definitions */ +const platformName = "wildeford"; const SHEET_ID = "1xcgYF7Q0D95TPHLLSgwhWBHFrWZUGJn7yTyAhDR4vi0"; // spreadsheet key is the long id in the sheets URL const endpoint = `https://docs.google.com/spreadsheets/d/${SHEET_ID}/edit#gid=0`; // https://docs.google.com/spreadsheets/d/1xcgYF7Q0D95TPHLLSgwhWBHFrWZUGJn7yTyAhDR4vi0/edit#gid=0 @@ -73,7 +74,7 @@ async function processPredictions(predictions) { ); let results = currentPredictions.map((prediction) => { let title = prediction["Prediction"].replace(" [update]", ""); - let id = `wildeford-${hash(title)}`; + let id = `${platformName}-${hash(title)}`; let probability = Number(prediction["Odds"].replace("%", "")) / 100; let options = [ { @@ -91,14 +92,14 @@ async function processPredictions(predictions) { id: id, title: title, url: prediction["url"], - platform: "Peter Wildeford", + platform: platformName, description: prediction["Notes"] || "", options: options, timestamp: new Date( Date.parse(prediction["Prediction Date"] + "Z") ).toISOString(), qualityindicators: { - stars: calculateStars("Peter Wildeford", null), + stars: calculateStars(platformName, null), }, }; return result; @@ -120,7 +121,9 @@ export async function wildeford_inner(google_api_key) { } export const wildeford: Platform = { - name: "wildeford", + name: platformName, + label: "Peter Wildeford", + color: "#984158", async fetcher() { const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY; // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey return await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner); diff --git a/src/backend/platforms/xrisk.ts b/src/backend/platforms/xrisk.ts index b474b46..e4affaf 100644 --- a/src/backend/platforms/xrisk.ts +++ b/src/backend/platforms/xrisk.ts @@ -1,15 +1,25 @@ import fs from "fs"; +import { hash } from "../utils/hash"; import { Platform } from "./"; +const platformName = "xrisk"; + export const xrisk: Platform = { name: "xrisk", + label: "X-risk estimates", + color: "#272600", async fetcher() { - return; // not necessary to refill the DB every time + // return; // not necessary to refill the DB every time let fileRaw = fs.readFileSync("./input/xrisk-questions.json", { encoding: "utf-8", }); - const results = JSON.parse(fileRaw); + let results = JSON.parse(fileRaw); + results = results.map((item) => ({ + ...item, + id: `${platformName}-${hash(item.title + " | " + item.url)}`, // some titles are non-unique, but title+url pair is always unique + platform: platformName, + })); return results; }, }; diff --git a/src/backend/utils/algolia.ts b/src/backend/utils/algolia.ts index f46b7c6..1cc3bb4 100644 --- a/src/backend/utils/algolia.ts +++ b/src/backend/utils/algolia.ts @@ -1,34 +1,13 @@ import algoliasearch from "algoliasearch"; import { pgReadWithReadCredentials } from "../database/pg-wrapper"; -import { mergeEverythingInner } from "../flow/mergeEverything"; +import { platforms } from "../platforms"; let cookie = process.env.ALGOLIA_MASTER_API_KEY; const algoliaAppId = process.env.NEXT_PUBLIC_ALGOLIA_APP_ID; const client = algoliasearch(algoliaAppId, cookie); const index = client.initIndex("metaforecast"); -export async function rebuildAlgoliaDatabaseTheHardWay() { - console.log("Doing 
this the hard way"); - let records = await mergeEverythingInner(); - records = records.map((record, index: number) => ({ - ...record, - has_numforecasts: record.numforecasts ? true : false, - objectID: index, - })); - // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/ - - if (index.exists()) { - console.log("Index exists"); - index - .replaceAllObjects(records, { safe: true }) - .catch((error) => console.log(error)); - console.log( - `Pushed ${records.length} records. Algolia will update asynchronously` - ); - } -} - let getoptionsstringforsearch = (record: any) => { let result = ""; if (!!record.options && record.options.length > 0) { @@ -42,11 +21,16 @@ let getoptionsstringforsearch = (record: any) => { export async function rebuildAlgoliaDatabaseTheEasyWay() { let records: any[] = await pgReadWithReadCredentials({ - tableName: "combined", + tableName: "questions", }); + const platformNameToLabel = Object.fromEntries( + platforms.map((platform) => [platform.name, platform.label]) + ); + records = records.map((record, index: number) => ({ ...record, + platformLabel: platformNameToLabel[record.platform] || record.platform, has_numforecasts: record.numforecasts ? true : false, objectID: index, optionsstringforsearch: getoptionsstringforsearch(record), @@ -62,4 +46,4 @@ export async function rebuildAlgoliaDatabaseTheEasyWay() { } } -export const rebuildAlgoliaDatabase = rebuildAlgoliaDatabaseTheEasyWay; //rebuildAlgoliaDatabaseTheHardWay +export const rebuildAlgoliaDatabase = rebuildAlgoliaDatabaseTheEasyWay; diff --git a/src/backend/utils/evaluations/pullForecastsToCSVForRating.ts b/src/backend/utils/evaluations/pullForecastsToCSVForRating.ts index b4e8530..6f68afc 100644 --- a/src/backend/utils/evaluations/pullForecastsToCSVForRating.ts +++ b/src/backend/utils/evaluations/pullForecastsToCSVForRating.ts @@ -24,7 +24,7 @@ let main = async () => { "PredictIt", "Rootclaim", ]; - let json = await pgReadWithReadCredentials({ tableName: "combined" }); + let json = await pgReadWithReadCredentials({ tableName: "questions" }); console.log(json.length); //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //console.log(uniquePlatforms) diff --git a/src/backend/utils/evaluations/pullMetaculusForecastsToCSVForRating.ts b/src/backend/utils/evaluations/pullMetaculusForecastsToCSVForRating.ts index fd3726a..7394638 100644 --- a/src/backend/utils/evaluations/pullMetaculusForecastsToCSVForRating.ts +++ b/src/backend/utils/evaluations/pullMetaculusForecastsToCSVForRating.ts @@ -26,7 +26,7 @@ let shuffleArray = (array) => { let main = async () => { let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim'] - let json = await pgReadWithReadCredentials({ tableName: "combined" }); + let json = await pgReadWithReadCredentials({ tableName: "questions" }); console.log(json.length); //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //console.log(uniquePlatforms) diff --git a/src/backend/utils/misc/process-forecasts-into-elicit.ts b/src/backend/utils/misc/process-forecasts-into-elicit.ts index 8279f27..7724bbb 100644 --- a/src/backend/utils/misc/process-forecasts-into-elicit.ts +++ b/src/backend/utils/misc/process-forecasts-into-elicit.ts @@ -9,7 +9,7 @@ let locationData = "./data/"; /* Body */ // let rawdata = fs.readFileSync("./data/merged-questions.json") // run 
from topmost folder, not from src async function main() { - let data = await pgReadWithReadCredentials({ tableName: "combined" }); //JSON.parse(rawdata) + let data = await pgReadWithReadCredentials({ tableName: "questions" }); //JSON.parse(rawdata) let processDescription = (description) => { if (description == null || description == undefined || description == "") { return ""; diff --git a/src/backend/utils/stars.ts b/src/backend/utils/stars.ts index 7b174d7..69a57f9 100644 --- a/src/backend/utils/stars.ts +++ b/src/backend/utils/stars.ts @@ -105,7 +105,7 @@ function calculateStarsGiveWellOpenPhil(data) { return starsInteger; } -function calculateStarsGoodJudment(data) { +function calculateStarsGoodJudgment(data) { let nuno = (data) => 4; let eli = (data) => 4; let misha = (data) => 3.5; @@ -114,7 +114,7 @@ function calculateStarsGoodJudment(data) { return starsInteger; } -function calculateStarsGoodJudmentOpen(data) { +function calculateStarsGoodJudgmentOpen(data) { let nuno = (data) => (data.numforecasts > 100 ? 3 : 2); let eli = (data) => 3; let misha = (data) => @@ -173,7 +173,7 @@ function calculateStarsLadbrokes(data) { return starsInteger; } -function calculateStarsManifoldMarkets(data) { +function calculateStarsManifold(data) { let nuno = (data) => data.volume7Days > 250 || (data.pool > 500 && data.volume7Days > 100) ? 2 @@ -268,15 +268,56 @@ function calculateStarsWilliamHill(data) { return starsInteger; } -export function calculateStars(platform, data) { +export function calculateStars(platform: string, data) { let stars = 2; switch (platform) { + case "betfair": + stars = calculateStarsBetfair(data); + break; + case "infer": + stars = calculateStarsInfer(data); + break; + case "foretold": + stars = calculateStarsForetold(data); + break; + case "givewellopenphil": + stars = calculateStarsGiveWellOpenPhil(data); + break; + case "goodjudgment": + stars = calculateStarsGoodJudgment(data); + break; + case "goodjudgmentopen": + stars = calculateStarsGoodJudgmentOpen(data); + break; + case "kalshi": + stars = calculateStarsKalshi(data); + break; + case "manifold": + stars = calculateStarsManifold(data); + break; + case "metaculus": + stars = calculateStarsMetaculus(data); + break; + case "polymarket": + stars = calculateStarsPolymarket(data); + break; + case "predictit": + stars = calculateStarsPredictIt(data); + break; + case "rootclaim": + stars = calculateStarsRootclaim(data); + break; + case "smarkets": + stars = calculateStarsSmarkets(data); + break; + case "wildeford": + stars = calculateStarsWildeford(data); + break; + + // deprecated case "AstralCodexTen": stars = calculateStarsAstralCodexTen(data); break; - case "Betfair": - stars = calculateStarsBetfair(data); - break; case "CoupCast": stars = calculateStarsCoupCast(data); break; @@ -289,54 +330,15 @@ export function calculateStars(platform, data) { case "Estimize": stars = calculateStarsEstimize(data); break; - case "Foretold": - stars = calculateStarsForetold(data); - break; - case "GiveWell/OpenPhilanthropy": - stars = calculateStarsGiveWellOpenPhil(data); - break; - case "Good Judgment": - stars = calculateStarsGoodJudment(data); - break; - case "Good Judgment Open": - stars = calculateStarsGoodJudmentOpen(data); - break; case "Hypermind": stars = calculateStarsHypermind(data); break; - case "Infer": - stars = calculateStarsInfer(data); - break; - case "Kalshi": - stars = calculateStarsKalshi(data); - break; case "Ladbrokes": stars = calculateStarsLadbrokes(data); break; - case "Manifold Markets": - stars = 
calculateStarsManifoldMarkets(data); - break; - case "Metaculus": - stars = calculateStarsMetaculus(data); - break; case "Omen": stars = calculateStarsOmen(data); break; - case "Polymarket": - stars = calculateStarsPolymarket(data); - break; - case "PredictIt": - stars = calculateStarsPredictIt(data); - break; - case "Rootclaim": - stars = calculateStarsRootclaim(data); - break; - case "Smarkets": - stars = calculateStarsSmarkets(data); - break; - case "Peter Wildeford": - stars = calculateStarsWildeford(data); - break; case "WilliamHill": stars = calculateStarsWilliamHill(data); break; diff --git a/src/pages/api/all-forecasts.ts b/src/pages/api/all-forecasts.ts index 09e4668..f10c55e 100644 --- a/src/pages/api/all-forecasts.ts +++ b/src/pages/api/all-forecasts.ts @@ -1,12 +1,12 @@ import { NextApiRequest, NextApiResponse } from "next/types"; -import { getFrontpageFullRaw } from "../../backend/frontpage"; +import { getFrontpageFull } from "../../backend/frontpage"; export default async function handler( req: NextApiRequest, res: NextApiResponse ) { - let frontpageFull = await getFrontpageFullRaw(); + let frontpageFull = await getFrontpageFull(); console.log(frontpageFull.map((element) => element.title).slice(0, 5)); console.log("..."); res.status(200).json(frontpageFull); diff --git a/src/pages/api/create-dashboard-from-ids.ts b/src/pages/api/create-dashboard-from-ids.ts index 73c4d9e..be4ec52 100644 --- a/src/pages/api/create-dashboard-from-ids.ts +++ b/src/pages/api/create-dashboard-from-ids.ts @@ -1,7 +1,7 @@ -import crypto from "crypto"; import { NextApiRequest, NextApiResponse } from "next/types"; import { pgInsertIntoDashboard } from "../../backend/database/pg-wrapper"; +import { hash } from "../../backend/utils/hash"; export default async function handler( req: NextApiRequest, @@ -14,8 +14,6 @@ export default async function handler( let body = req.body; console.log(body); - const hash = (s: string) => - crypto.createHash("sha256").update(s).digest("hex").slice(0, 10); try { let id = hash(JSON.stringify(body.ids)); let pgResponse = await pgInsertIntoDashboard({ diff --git a/src/pages/api/dashboard-by-id.ts b/src/pages/api/dashboard-by-id.ts index 366c234..c79734f 100644 --- a/src/pages/api/dashboard-by-id.ts +++ b/src/pages/api/dashboard-by-id.ts @@ -23,7 +23,7 @@ export default async function handler( console.log(dashboardItem); let dashboardContents = await pgGetByIds({ ids: dashboardItem.contents, - table: "combined", + table: "questions", }); res.status(200).send({ dashboardContents, diff --git a/src/pages/api/frontpage.ts b/src/pages/api/frontpage.ts index 713ae95..d4a3b96 100644 --- a/src/pages/api/frontpage.ts +++ b/src/pages/api/frontpage.ts @@ -1,12 +1,12 @@ import { NextApiRequest, NextApiResponse } from "next/types"; -import { getFrontpageRaw } from "../../backend/frontpage"; +import { getFrontpage } from "../../backend/frontpage"; export default async function handler( req: NextApiRequest, res: NextApiResponse ) { - let frontpageElements = await getFrontpageRaw(); + let frontpageElements = await getFrontpage(); console.log(frontpageElements.map((element) => element.title).slice(0, 5)); console.log("..."); res.status(200).json(frontpageElements); diff --git a/src/pages/api/questions.ts b/src/pages/api/questions.ts index f06b6cf..837e009 100644 --- a/src/pages/api/questions.ts +++ b/src/pages/api/questions.ts @@ -6,7 +6,7 @@ export default async function handler( req: NextApiRequest, res: NextApiResponse ) { - let allQuestions = await pgRead({ tableName: "combined" }); + 
let allQuestions = await pgRead({ tableName: "questions" }); console.log(allQuestions.map((element) => element.title).slice(0, 5)); console.log("..."); res.status(200).json(allQuestions); diff --git a/src/pages/capture.tsx b/src/pages/capture.tsx index 279d86a..6cd6905 100644 --- a/src/pages/capture.tsx +++ b/src/pages/capture.tsx @@ -8,17 +8,11 @@ import CommonDisplay from "../web/search/CommonDisplay"; export { getServerSideProps } from "../web/search/anySearchPage"; -const CapturePage: NextPage = ({ - defaultResults, - initialResults, - initialQueryParameters, -}) => { +const CapturePage: NextPage = (props) => { return ( = ({ - defaultResults, - initialResults, - initialQueryParameters, -}) => { +const IndexPage: NextPage = (props) => { return ( {result ? displayForecast({ - ...result.item, - score: result.score, + ...result, showTimeStamp: true, expandFooterToFullWidth: true, }) diff --git a/src/web/display/dashboardCreator.tsx b/src/web/display/dashboardCreator.tsx index 67779b2..d678963 100644 --- a/src/web/display/dashboardCreator.tsx +++ b/src/web/display/dashboardCreator.tsx @@ -3,7 +3,7 @@ import React, { useState } from "react"; let exampleInput = `{ "title": "Random example", "description": "Just a random description of a random example", - "ids": [ "metaculus-372", "goodjudmentopen-2244", "metaculus-7550", "kalshi-09d060ee-b184-4167-b86b-d773e56b4162", "wildeford-5d1a04e1a8", "metaculus-2817" ], + "ids": [ "metaculus-372", "goodjudgmentopen-2244", "metaculus-7550", "kalshi-09d060ee-b184-4167-b86b-d773e56b4162", "wildeford-5d1a04e1a8", "metaculus-2817" ], "creator": "Peter Parker" }`; diff --git a/src/web/display/displayForecasts.tsx b/src/web/display/displayForecasts.tsx index 32d1ca7..6d07933 100644 --- a/src/web/display/displayForecasts.tsx +++ b/src/web/display/displayForecasts.tsx @@ -3,6 +3,8 @@ import React from "react"; import { FaRegClipboard } from "react-icons/fa"; import ReactMarkdown from "react-markdown"; +import { FrontendForecast } from "../platforms"; + /* Definitions */ /* Support functions */ @@ -350,9 +352,15 @@ let checkIfDisplayTimeStampAtBottom = (qualityIndicators) => { } }; -let getCurrencySymbolIfNeeded = ({ indicator, platform }) => { +let getCurrencySymbolIfNeeded = ({ + indicator, + platform, +}: { + indicator: any; + platform: string; +}) => { let indicatorsWhichNeedCurrencySymbol = ["Volume", "Interest", "Liquidity"]; - let dollarPlatforms = ["PredictIt", "Kalshi", "PolyMarket"]; + let dollarPlatforms = ["predictit", "kalshi", "polymarket"]; if (indicatorsWhichNeedCurrencySymbol.includes(indicator)) { if (dollarPlatforms.includes(platform)) { return "$"; @@ -461,7 +469,13 @@ let showFirstQualityIndicator = ({ } }; -let displayQualityIndicators = ({ +const displayQualityIndicators: React.FC<{ + numforecasts: number; + timestamp: number; + showTimeStamp: boolean; + qualityindicators: any; + platform: string; // id string - e.g. "goodjudgment", not "Good Judgment" +}> = ({ numforecasts, timestamp, showTimeStamp, @@ -504,6 +518,7 @@ let displayQualityIndicators = ({ let forecastFooter = ({ stars, platform, + platformLabel, numforecasts, qualityindicators, timestamp, @@ -532,7 +547,7 @@ let forecastFooter = ({ expandFooterToFullWidth ? "place-self-center" : "self-center" } col-span-1 font-bold ${debuggingWithBackground ? 
"bg-red-100" : ""}`} > - {platform + {platformLabel .replace("Good Judgment Open", "GJOpen") .replace("OpenPhilanthropy", "OpenPhil") .replace("AstralCodexTen", "ACX") @@ -559,22 +574,30 @@ let forecastFooter = ({ /* Body */ -export function displayForecast({ - id, - title, - url, - platform, - author, - description, - options, - qualityindicators, - timestamp, - visualization, - score, +interface SingleProps { + forecast: FrontendForecast; + showTimeStamp: boolean; + expandFooterToFullWidth: boolean; + showIdToggle?: boolean; +} + +export const displayForecast: React.FC = ({ + forecast: { + id, + title, + url, + platform, + platformLabel, + description, + options, + qualityindicators, + timestamp, + visualization, + }, showTimeStamp, expandFooterToFullWidth, showIdToggle, -}) { +}) => { // const [isJustCopiedSignalVisible, setIsJustCopiedSignalVisible] = useState(false) const isJustCopiedSignalVisible = false; @@ -588,7 +611,7 @@ export function displayForecast({
@@ -657,7 +680,7 @@ export function displayForecast({ {(options.length != 2 || (options[0].name != "Yes" && options[0].name != "No")) && ( <> -
+
{formatForecastOptions(options)}
@@ -680,13 +703,13 @@ export function displayForecast({ )} - {platform !== "Guesstimate" && options.length < 3 && ( -
+ {platform !== "guesstimate" && options.length < 3 && ( +
{displayMarkdown(description)}
)} - {platform === "Guesstimate" && ( + {platform === "guesstimate" && ( {`Last updated: ${timestamp ? timestamp.slice(0, 10) : "unknown"}`}
-
+
{forecastFooter({ stars: qualityindicators.stars, - platform: author || platform, + platform: platform, + platformLabel: platformLabel || platform, // author || platformLabel, numforecasts: qualityindicators.numforecasts, qualityindicators, timestamp, @@ -718,30 +742,40 @@ export function displayForecast({
); +}; + +interface Props { + results: FrontendForecast[]; + numDisplay: number; + showIdToggle: boolean; } -export default function displayForecasts({ +const DisplayForecasts: React.FC = ({ results, numDisplay, showIdToggle, -}) { - return !!results && !!results.slice ? ( - results.slice(0, numDisplay).map((fuseSearchResult) => { - /*let displayWithMetaculusCapture = +}) => { + if (!results) { + return <>; + } + return ( + <> + {results.slice(0, numDisplay).map((result) => { + /*let displayWithMetaculusCapture = fuseSearchResult.item.platform == "Metaculus" ? metaculusEmbed(fuseSearchResult.item) : displayForecast({ ...fuseSearchResult.item }); */ - let display = displayForecast({ - ...fuseSearchResult.item, - score: fuseSearchResult.score, - showTimeStamp: false, - expandFooterToFullWidth: false, - showIdToggle, - }); - return display; - }) - ) : ( - <> + const display = displayForecast({ + forecast: result, + showTimeStamp: false, + expandFooterToFullWidth: false, + showIdToggle, + }); + return display; + })} + ); -} +}; + +export default DisplayForecasts; diff --git a/src/web/display/displayOneForecastForCapture.tsx b/src/web/display/displayOneForecastForCapture.tsx index 147e621..72a97f4 100644 --- a/src/web/display/displayOneForecastForCapture.tsx +++ b/src/web/display/displayOneForecastForCapture.tsx @@ -2,16 +2,16 @@ import domtoimage from "dom-to-image"; // https://github.com/tsayen/dom-to-image import { useEffect, useRef, useState } from "react"; import { CopyToClipboard } from "react-copy-to-clipboard"; +import { FrontendForecast } from "../platforms"; import { uploadToImgur } from "../worker/uploadToImgur"; import { displayForecast } from "./displayForecasts"; -function displayOneForecastInner(result, containerRef) { +function displayOneForecastInner(result: FrontendForecast, containerRef) { return (
{result ? displayForecast({ - ...result.item, - score: result.score, + forecast: result, showTimeStamp: true, expandFooterToFullWidth: true, }) @@ -170,7 +170,7 @@ let generateMetaculusSource = (result, hasDisplayBeenCaptured) => { }; interface Props { - result: any; + result: FrontendForecast; } const DisplayOneForecast: React.FC = ({ result }) => { diff --git a/src/web/display/multiSelectPlatforms.tsx b/src/web/display/multiSelectPlatforms.tsx index 23ac069..21feb0d 100644 --- a/src/web/display/multiSelectPlatforms.tsx +++ b/src/web/display/multiSelectPlatforms.tsx @@ -2,7 +2,7 @@ import chroma from "chroma-js"; import React from "react"; import Select from "react-select"; -import { platformsWithLabels } from "../platforms"; +import { PlatformConfig } from "../platforms"; const colourStyles = { control: (styles) => ({ ...styles, backgroundColor: "white" }), @@ -59,17 +59,48 @@ const colourStyles = { }), }; -export default function MultiSelectPlatform({ onChange, value }) { +interface Props { + onChange: (platforms: string[]) => void; + value: string[]; + platformsConfig: PlatformConfig[]; +} + +export const MultiSelectPlatform: React.FC = ({ + onChange, + value, + platformsConfig, +}) => { + type Option = { + value: string; + label: string; + color: string; + }; + + const options: Option[] = platformsConfig.map((platform) => ({ + value: platform.name, + label: platform.label, + color: platform.color, + })); + + const id2option: { [k: string]: Option } = {}; + for (const option of options) id2option[option.value] = option; + + const selectValue = value.map((v) => id2option[v]).filter((v) => v); + + const onSelectChange = (newValue: Option[]) => { + onChange(newValue.map((o) => o.value)); + }; + return (
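Note on the extended Platform interface (src/backend/platforms/index.ts above): a minimal sketch of what a platform module looks like after this refactor. The platform id "exampleplatform", the endpoint, and the response fields are placeholders for illustration, not part of this patch.

import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Forecast, Platform } from "./";

const platformName = "exampleplatform"; // placeholder id

export const exampleplatform: Platform = {
  name: platformName, // short id; stored in the `platform` column and used as the id prefix
  label: "Example Platform", // display name for the frontend
  color: "#123456", // frontend accent color
  async fetcher(): Promise<Forecast[]> {
    const response = await axios.get("https://example.com/api/markets"); // placeholder endpoint
    return response.data.map((market: any) => ({
      id: `${platformName}-${market.id}`,
      title: market.title,
      url: market.url,
      platform: platformName, // the id, not the display label
      description: market.description ?? "",
      options: [],
      timestamp: new Date().toISOString(),
      qualityindicators: {
        stars: calculateStars(platformName, {}),
      },
    }));
  },
};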
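The rootclaim and smarkets changes above start typing results as Forecast. The fields those objects rely on look roughly as follows; this is inferred from usage in this patch only, and the real interface in src/backend/platforms/index.ts is authoritative and may declare more (or stricter) fields.

// Shape inferred from the forecast objects constructed throughout this patch.
interface ForecastShape {
  id: string; // `${platformName}-${questionOrMarketId}`
  title: string;
  url: string;
  platform: string; // Platform.name, e.g. "rootclaim"
  description: string;
  options: Array<{ name?: string; probability?: number; type?: string }>;
  timestamp: string; // ISO 8601
  qualityindicators: {
    stars: number;
    numforecasts?: number;
    [key: string]: unknown;
  };
  extra?: unknown;
}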
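The new xrisk ids and the dashboard endpoint both use hash from src/backend/utils/hash, whose implementation is not shown in this patch. Assuming it mirrors the inline SHA-256 helper removed from create-dashboard-from-ids.ts, it would be:

// src/backend/utils/hash.ts — assumed implementation, not shown in this patch.
import crypto from "crypto";

// Short stable id: first 10 hex characters of a SHA-256 digest.
export const hash = (s: string): string =>
  crypto.createHash("sha256").update(s).digest("hex").slice(0, 10);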
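calculateStars is now keyed on platform ids (Platform.name) rather than display labels. A small usage sketch; the argument values are illustrative:

import { calculateStars } from "../utils/stars"; // path as used by the platform modules

// New-style calls pass the short platform id, not the display label.
const gjOpenStars = calculateStars("goodjudgmentopen", { numforecasts: 150 });
const kalshiStars = calculateStars("kalshi", {
  shares_volume: 5000,
  interest: 1000,
});

// Platforms without a matching case fall back to the default of 2 stars.
const unknownStars = calculateStars("someplatform", {});

console.log({ gjOpenStars, kalshiStars, unknownStars });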
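The reworked MultiSelectPlatform takes a platformsConfig prop, presumably carrying the name/label/color triples defined on each backend platform. A sketch of how that config could be assembled and passed in; the helper name and exact import paths are illustrative:

import { platforms } from "../../backend/platforms";
import { PlatformConfig } from "../platforms";

// Illustrative helper: project the backend platform registry into the
// frontend config shape consumed by MultiSelectPlatform.
export const getPlatformsConfig = (): PlatformConfig[] =>
  platforms.map((platform) => ({
    name: platform.name,
    label: platform.label,
    color: platform.color,
  }));

// Illustrative usage:
// <MultiSelectPlatform
//   platformsConfig={getPlatformsConfig()}
//   value={["metaculus", "manifold"]}
//   onChange={(names) => console.log(names)}
// />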