Merge pull request #42 from QURIresearch/no-schemas
Should be fine. Tables in the `public` schema are ready. Fixed secretEmbed and dashboards, and fixed small bugs in the platforms code. (I'm starting to dislike that platform fetchers have to populate the `platform` field and set `id` to `${platform}-${shortId}`; it seems error-prone. Maybe platforms should return an object with `id: shortId` and without `platform`, and external code would then populate those as needed... but we'd need a separate type for the "fetcher output result", and I'm still hesitant; a hypothetical sketch follows the checklist below.)

Steps for the next half-hour:

- [ ] move tables to the new DB instance in the US (copied with pg_dump/pg_restore)
- [ ] merge code, deploy on Heroku/Netlify
- [ ] switch to the new DB
- [ ] update the Algolia index
- [ ] check that everything works
- [ ] drop the old schemas (from the new DB; not a problem even in case of significant issues, since we have backups and the old DB)
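For what it's worth, here is a minimal sketch of that idea. It is hypothetical and not part of this PR; the names `FetcherOutput` and `toForecast` are made up and don't exist in the codebase:

```typescript
// Hypothetical sketch only, not part of this PR. Fetchers would return
// platform-agnostic rows ("fetcher output results")...
interface FetcherOutput {
  id: string; // the platform-local shortId, without the `${platform}-` prefix
  title: string;
  url: string;
  options: { name: string; probability: number }[];
}

// ...while Forecast stays the stored shape, with `platform` and the
// prefixed id attached by shared code.
interface Forecast extends Omit<FetcherOutput, "id"> {
  id: string; // `${platform}-${shortId}`
  platform: string;
}

// The `${platform}-${shortId}` convention would then live in one place
// instead of being repeated (and occasionally typoed) in every fetcher.
const toForecast = (platformName: string, row: FetcherOutput): Forecast => ({
  ...row,
  id: `${platformName}-${row.id}`,
  platform: platformName,
});
```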
This commit is contained in:
commit 5ccbb7fabc
.gitignore (vendored, 9 lines changed)
@@ -26,12 +26,6 @@ npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
 
-# local env files
-.env.local
-.env.development.local
-.env.test.local
-.env.production.local
-
 # vercel
 .vercel
 
@@ -41,5 +35,4 @@ package-lock.json ## use yarn.lock instead
 # Local Netlify folder
 .netlify
 
-/.env
-/.env.production
+/.env*

@@ -1,55 +0,0 @@
-import { pgRead, pgReadWithReadCredentials, pgUpsert } from "./pg-wrapper";
-
-const dateUpToYear = () => new Date().toISOString().slice(0, 4);
-const dateUpToMonth = () =>
-  new Date().toISOString().slice(0, 7).replace("-", "_");
-
-export async function databaseUpsert({ contents, group }) {
-  // No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear.
-  // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){
-  switch (group) {
-    case "combined":
-      await pgUpsert({ contents, schema: "latest", tableName: "combined" });
-      break;
-    case "history":
-      await pgUpsert({
-        contents,
-        schema: "history",
-        tableName: `h${dateUpToYear()}`,
-      });
-      await pgUpsert({
-        contents,
-        schema: "history",
-        tableName: `h${dateUpToMonth()}`,
-      });
-      break;
-    default:
-      await pgUpsert({ contents, schema: "latest", tableName: group });
-  }
-}
-
-const readWithReader = async (
-  group: string,
-  reader: (opts: { schema: string; tableName: string }) => Promise<any>
-) => {
-  const schema = group === "history" ? "history" : "latest";
-  const tableName = group === "history" ? `h${dateUpToMonth()}` : group;
-  const response = await reader({
-    schema,
-    tableName,
-  });
-
-  console.log("Postgres: ");
-  console.log(response.slice(0, 2));
-  console.log("");
-
-  return response;
-};
-
-export async function databaseRead({ group }) {
-  return await readWithReader(group, pgRead);
-}
-
-export async function databaseReadWithReadCredentials({ group }) {
-  return await readWithReader(group, pgReadWithReadCredentials);
-}

@@ -1,37 +1,13 @@
 import { Pool, PoolClient } from "pg";
 
-import { Forecast, platforms } from "../platforms";
+import { Forecast } from "../platforms";
 import { hash } from "../utils/hash";
 import { measureTime } from "../utils/measureTime";
 import { roughSizeOfObject } from "../utils/roughSize";
 
 // Definitions
-const schemas = ["latest", "history"];
-const year = Number(new Date().toISOString().slice(0, 4));
-const allowed_years = [year, year + 1].map((year) => `h${year}`); // tables can't begin with number
-const allowed_months = [...Array(12).keys()]
-  .map((x) => x + 1)
-  .map((x) => (String(x).length == 1 ? `0${x}` : x));
-const allowed_year_month_histories = [].concat(
-  ...allowed_years.map((year) =>
-    allowed_months.map((month) => `${year}_${month}`)
-  )
-); // h2022_01
-const tableNamesWhitelistLatest = [
-  "combined",
-  ...platforms.map((platform) => platform.name),
-];
-const tableNamesWhiteListHistory = [
-  ...allowed_years,
-  ...allowed_year_month_histories,
-];
-const createFullName = (schemaName, namesArray) =>
-  namesArray.map((name) => `${schemaName}.${name}`);
-const tableWhiteList = [
-  ...createFullName("latest", tableNamesWhitelistLatest),
-  ...createFullName("history", tableNamesWhiteListHistory),
-  "latest.dashboards",
-];
+const forecastTableNames = ["questions", "history"];
 
+const allTableNames = [...forecastTableNames, "dashboards", "frontpage"];
 
 /* Postgres database connection code */
 const databaseURL = process.env.DIGITALOCEAN_POSTGRES;
@@ -80,12 +56,11 @@ export const runPgCommand = async ({
 };
 
 // Initialize
-let dropTable = (schema: string, table: string) =>
-  `DROP TABLE IF EXISTS ${schema}.${table}`;
-let createIndex = (schema: string, table: string) =>
-  `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`;
-let createUniqueIndex = (schema: string, table: string) =>
-  `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`;
+let dropTable = (table: string) => `DROP TABLE IF EXISTS ${table}`;
+let createIndex = (table: string) =>
+  `CREATE INDEX ${table}_id_index ON ${table} (id);`;
+let createUniqueIndex = (table: string) =>
+  `CREATE UNIQUE INDEX ${table}_id_index ON ${table} (id);`;
 
 async function pgInitializeScaffolding() {
   async function setPermissionsForPublicUser() {
@@ -97,42 +72,20 @@ async function pgInitializeScaffolding() {
     await runPgCommand({ command, pool: readWritePool });
   }
 
-    let buildGrantSelectForSchema = (schema: string) =>
-      `GRANT SELECT ON ALL TABLES IN SCHEMA ${schema} TO public_read_only_user`;
-    for (let schema of schemas) {
-      await runPgCommand({
-        command: buildGrantSelectForSchema(schema),
-        pool: readWritePool,
-      });
-    }
+    await runPgCommand({
+      command:
+        "GRANT SELECT ON ALL TABLES IN SCHEMA public TO public_read_only_user",
+      pool: readWritePool,
+    });
 
-    let alterDefaultPrivilegesForSchema = (schema: string) =>
-      `ALTER DEFAULT PRIVILEGES IN SCHEMA ${schema} GRANT SELECT ON TABLES TO public_read_only_user`;
-    for (let schema of schemas) {
-      await runPgCommand({
-        command: alterDefaultPrivilegesForSchema(schema),
-        pool: readWritePool,
-      });
-    }
+    await runPgCommand({
+      command:
+        "ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO public_read_only_user",
+      pool: readWritePool,
+    });
   }
   let YOLO = false;
   if (YOLO) {
-    console.log("Create schemas");
-    for (let schema of schemas) {
-      await runPgCommand({
-        command: `CREATE SCHEMA IF NOT EXISTS ${schema}`,
-        pool: readWritePool,
-      });
-    }
-    console.log("");
-
-    console.log("Set search path");
-    await runPgCommand({
-      command: `SET search_path TO ${schemas.join(",")},public;`,
-      pool: readWritePool,
-    });
-    console.log("");
-
     console.log("Set public user permissions");
     await setPermissionsForPublicUser();
     console.log("");
@@ -143,10 +96,7 @@ async function pgInitializeScaffolding() {
   }
 }
 
-let buildMetaforecastTable = (
-  schema: string,
-  table: string
-) => `CREATE TABLE ${schema}.${table} (
+let buildMetaforecastTable = (table: string) => `CREATE TABLE ${table} (
   id text,
   title text,
   url text,
@@ -159,45 +109,34 @@ let buildMetaforecastTable = (
   extra json
 );`;
 
-async function pgInitializeLatest() {
+async function pgInitializeQuestions() {
   let YOLO = false;
   if (YOLO) {
     console.log("Create tables & their indexes");
-    let schema = "latest";
-    for (let table of tableNamesWhitelistLatest) {
-      await runPgCommand({
-        command: dropTable(schema, table),
-        pool: readWritePool,
-      });
-      await runPgCommand({
-        command: buildMetaforecastTable(schema, table),
-        pool: readWritePool,
-      });
-      /*
-      if (schema == "history") {
-        await runPgCommand({
-          command: createIndex(schema, table),
-          pool: readWritePool,
-        });
-      } else {
-      */
-      await runPgCommand({
-        command: createUniqueIndex(schema, table),
-        pool: readWritePool,
-      });
-      //}
-    }
+    const table = "questions";
+    await runPgCommand({
+      command: dropTable(table),
+      pool: readWritePool,
+    });
+    await runPgCommand({
+      command: buildMetaforecastTable(table),
+      pool: readWritePool,
+    });
+    await runPgCommand({
+      command: createUniqueIndex(table),
+      pool: readWritePool,
+    });
     console.log("");
   } else {
     console.log(
-      "pgInitializeLatest: This command is dangerous, set YOLO to true in the code to invoke it"
+      "pgInitializeQuestions: This command is dangerous, set YOLO to true in the code to invoke it"
     );
   }
 }
 
 async function pgInitializeDashboards() {
   let buildDashboard = () =>
-    `CREATE TABLE latest.dashboards (
+    `CREATE TABLE dashboards (
       id text,
       title text,
       description text,
@@ -208,23 +147,10 @@ async function pgInitializeDashboards() {
   );`;
   let YOLO = false;
   if (YOLO) {
-    await runPgCommand({
-      command: `CREATE SCHEMA IF NOT EXISTS history;`,
-      pool: readWritePool,
-    });
-    console.log("");
-
-    console.log("Set search path");
-    await runPgCommand({
-      command: `SET search_path TO ${schemas.join(",")},public;`,
-      pool: readWritePool,
-    });
-    console.log("");
-
     console.log("Create dashboard table and its index");
 
     await runPgCommand({
-      command: dropTable("latest", "dashboards"),
+      command: dropTable("dashboards"),
       pool: readWritePool,
     });
 
@@ -234,7 +160,7 @@ async function pgInitializeDashboards() {
     });
 
     await runPgCommand({
-      command: createUniqueIndex("latest", "dashboards"),
+      command: createUniqueIndex("dashboards"),
       pool: readWritePool,
     });
     console.log("");
@@ -245,10 +171,7 @@ async function pgInitializeDashboards() {
   }
 }
 
-let buildHistoryTable = (
-  schema: string,
-  table: string
-) => `CREATE TABLE ${schema}.${table} (
+let buildHistoryTable = (table: string) => `CREATE TABLE ${table} (
   id text,
   title text,
   url text,
@@ -263,45 +186,19 @@ let buildHistoryTable = (
 export async function pgInitializeHistories() {
   let YOLO = false;
   if (YOLO) {
-    console.log("Drop all previous history tables (Danger!)");
+    console.log("Create history table & index");
     await runPgCommand({
-      command: `DROP SCHEMA history CASCADE;`,
+      command: dropTable("history"),
       pool: readWritePool,
     });
-    console.log("");
-
-    console.log("Create schemas");
-    for (let schema of schemas) {
-      await runPgCommand({
-        command: `CREATE SCHEMA IF NOT EXISTS ${schema}`,
-        pool: readWritePool,
-      });
-    }
-    console.log("");
-
-    console.log("Set search path");
     await runPgCommand({
-      command: `SET search_path TO ${schemas.join(",")},public;`,
+      command: buildHistoryTable("history"),
       pool: readWritePool,
     });
-    console.log("");
-
-    console.log("Create tables & their indexes");
-    let schema = "history";
-    for (let table of tableNamesWhiteListHistory) {
-      await runPgCommand({
-        command: dropTable(schema, table),
-        pool: readWritePool,
-      });
-      await runPgCommand({
-        command: buildHistoryTable(schema, table),
-        pool: readWritePool,
-      });
-      await runPgCommand({
-        command: createIndex(schema, table), // Not unique!!
-        pool: readWritePool,
-      });
-    }
+    await runPgCommand({
+      command: createIndex("history"), // Not unique!!
+      pool: readWritePool,
+    });
     console.log("");
   } else {
     console.log(
@@ -314,11 +211,11 @@ async function pgInitializeFrontpage() {
   let YOLO = false;
   if (YOLO) {
     await runPgCommand({
-      command: dropTable("latest", "frontpage"),
+      command: dropTable("frontpage"),
       pool: readWritePool,
     });
     await runPgCommand({
-      command: `CREATE TABLE latest.frontpage (
+      command: `CREATE TABLE frontpage (
         id serial primary key,
         frontpage_full jsonb,
         frontpage_sliced jsonb
@@ -334,7 +231,7 @@ async function pgInitializeFrontpage() {
 
 export async function pgInitialize() {
   await pgInitializeScaffolding();
-  await pgInitializeLatest();
+  await pgInitializeQuestions();
   await pgInitializeHistories();
   await pgInitializeDashboards();
   await pgInitializeFrontpage();
@@ -342,64 +239,50 @@ export async function pgInitialize() {
 
 // Read
 async function pgReadWithPool({
-  schema,
   tableName,
   pool,
 }: {
-  schema: string;
   tableName: string;
   pool: Pool;
 }) {
-  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
-    let command = `SELECT * from ${schema}.${tableName}`;
-    let response = await runPgCommand({ command, pool });
-    let results = response.results;
-    return results;
-  } else {
+  if (!allTableNames.includes(tableName)) {
     throw Error(
-      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
+      `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
     );
   }
+  let command = `SELECT * from ${tableName}`;
+  let response = await runPgCommand({ command, pool });
+  let results = response.results;
+  return results;
 }
 
-export async function pgRead({
-  schema,
-  tableName,
-}: {
-  schema: string;
-  tableName: string;
-}) {
-  return await pgReadWithPool({ schema, tableName, pool: readWritePool });
+export async function pgRead({ tableName }: { tableName: string }) {
+  return await pgReadWithPool({ tableName, pool: readWritePool });
 }
 
 export async function pgReadWithReadCredentials({
-  schema,
   tableName,
 }: {
-  schema: string;
   tableName: string;
 }) {
   // currently does not work.
   /* return await pgReadWithPool({
-    schema,
     tableName,
     pool: readOnlyPool,
   });
   */
-  return await pgReadWithPool({ schema, tableName, pool: readWritePool });
+  return await pgReadWithPool({ tableName, pool: readWritePool });
 }
 
 export async function pgGetByIds({
   ids,
-  schema,
   table,
 }: {
   ids: string[];
-  schema: string;
   table: string;
 }) {
   let idstring = `( ${ids.map((id: string) => `'${id}'`).join(", ")} )`; // (1, 2, 3)
-  let command = `SELECT * from ${schema}.${table} where id in ${idstring}`;
+  let command = `SELECT * from ${table} where id in ${idstring}`;
   // see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn
   let response = await runPgCommand({ command, pool: readWritePool });
   let results = response.results;
@@ -409,23 +292,21 @@ export async function pgGetByIds({
 
 export async function pgBulkInsert({
   data,
-  schema,
   tableName,
   client,
 }: {
   data: Forecast[];
-  schema: string;
   tableName: string;
   client: PoolClient;
 }) {
-  if (!tableWhiteList.includes(`${schema}.${tableName}`)) {
+  if (!forecastTableNames.includes(tableName)) {
     throw Error(
-      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
+      `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
     );
   }
 
   const generateQuery = (rows: number) => {
-    let text = `INSERT INTO ${schema}.${tableName} VALUES`;
+    let text = `INSERT INTO ${tableName} VALUES`;
     const cols = 10;
     const parts: string[] = [];
     for (let r = 0; r < rows; r++) {
@@ -478,36 +359,30 @@ export async function pgBulkInsert({
   }
 }
 
-export async function pgInsertIntoDashboard({ datum, schema, tableName }) {
-  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
-    let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`;
-    let timestamp = datum.timestamp || new Date().toISOString();
-    timestamp = timestamp.slice(0, 19).replace("T", " ");
-    let values = [
-      hash(JSON.stringify(datum.contents)),
-      datum.title || "",
-      datum.description || "",
-      JSON.stringify(datum.contents || []),
-      timestamp, // fixed
-      datum.creator || "",
-      JSON.stringify(datum.extra || []),
-    ];
-    const client = await readWritePool.connect();
-    let result;
-    try {
-      result = await client.query(text, values);
-    } catch (error) {
-      console.log(error);
-    } finally {
-      client.release();
-    }
-    // console.log(result)
-    return result;
-  } else {
-    throw Error(
-      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
-    );
-  }
+export async function pgInsertIntoDashboard({ datum }) {
+  let text = `INSERT INTO dashboards VALUES($1, $2, $3, $4, $5, $6, $7)`;
+  let timestamp = datum.timestamp || new Date().toISOString();
+  timestamp = timestamp.slice(0, 19).replace("T", " ");
+  let values = [
+    hash(JSON.stringify(datum.contents)),
+    datum.title || "",
+    datum.description || "",
+    JSON.stringify(datum.contents || []),
+    timestamp, // fixed
+    datum.creator || "",
+    JSON.stringify(datum.extra || []),
+  ];
+  const client = await readWritePool.connect();
+  let result;
+  try {
+    result = await client.query(text, values);
+  } catch (error) {
+    console.log(error);
+  } finally {
+    client.release();
+  }
+  // console.log(result)
+  return result;
 }
 /* For reference
   id text,
@@ -532,16 +407,21 @@ pgInsertIntoDashboard({
     ],
     creator: "Nuño Sempere",
   },
-  schema: "latest",
   tableName: "dashboards",
 });
 */
-export async function pgUpsert({ contents, schema, tableName }) {
-  if (!tableWhiteList.includes(`${schema}.${tableName}`)) {
-    console.log("tableWhiteList:");
-    console.log(tableWhiteList);
+export async function pgUpsert({
+  contents,
+  tableName,
+  replacePlatform,
+}: {
+  contents: Forecast[];
+  tableName: string;
+  replacePlatform?: string;
+}) {
+  if (!forecastTableNames.includes(tableName)) {
     throw Error(
-      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
+      `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
     );
   }
 
@@ -549,27 +429,22 @@ export async function pgUpsert({ contents, schema, tableName }) {
   const client = await readWritePool.connect();
   try {
     await client.query("BEGIN");
-    if (schema === "latest") {
-      client.query(`DELETE FROM latest.${tableName}`);
+    if (replacePlatform) {
+      await client.query(`DELETE FROM ${tableName} WHERE platform = $1`, [
+        replacePlatform,
+      ]);
     }
     console.log(
-      `Upserting ${contents.length} rows into postgres table ${schema}.${tableName}.`
-    );
-    console.log(
-      `Expected to take ${Number((contents.length * 831.183) / 4422).toFixed(
-        2
-      )} seconds or ${Number((contents.length * 13.85305) / 4422).toFixed(
-        2
-      )} minutes`
+      `Upserting ${contents.length} rows into postgres table ${tableName}.`
     );
 
-    await pgBulkInsert({ data: contents, schema, tableName, client });
+    await pgBulkInsert({ data: contents, tableName, client });
     console.log(
      `Inserted ${
         contents.length
       } rows with approximate cummulative size ${roughSizeOfObject(
         contents
-      )} MB into ${schema}.${tableName}.`
+      )} MB into ${tableName}.`
     );
 
     console.log("Sample: ");
@@ -1,12 +1,9 @@
-import {
-  databaseReadWithReadCredentials,
-  databaseUpsert,
-} from "../../database/database-wrapper";
+import { pgReadWithReadCredentials, pgUpsert } from "../../database/pg-wrapper";
 
 export async function updateHistory() {
-  let latest = await databaseReadWithReadCredentials({ group: "combined" });
-  await databaseUpsert({
+  let latest = await pgReadWithReadCredentials({ tableName: "questions" });
+  await pgUpsert({
     contents: latest,
-    group: "history",
+    tableName: "history",
   });
 }

@@ -1,7 +1,6 @@
 import { pgInitialize } from "../database/pg-wrapper";
 import { doEverything } from "../flow/doEverything";
 import { updateHistory } from "../flow/history/updateHistory";
-import { mergeEverything } from "../flow/mergeEverything";
 import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData";
 import { rebuildFrontpage } from "../frontpage";
 import { platforms, processPlatform } from "../platforms";

@@ -20,13 +19,6 @@ export const jobs: Job[] = [
     message: `Download predictions from ${platform.name}`,
     run: () => processPlatform(platform),
   })),
-  {
-    name: "merge",
-    message:
-      "Merge tables into one big table (and push the result to a pg database)",
-    run: mergeEverything,
-    separate: true,
-  },
   {
     name: "algolia",
     message: 'Rebuild algolia database ("index")',
@@ -1,28 +0,0 @@
-import { databaseRead, databaseUpsert } from "../database/database-wrapper";
-import { platforms } from "../platforms";
-
-/* Merge everything */
-
-export async function mergeEverythingInner() {
-  let merged = [];
-  for (let platform of platforms) {
-    const platformName = platform.name;
-    let json = await databaseRead({ group: platformName });
-    console.log(`${platformName} has ${json.length} questions\n`);
-    merged = merged.concat(json);
-  }
-  let mergedprocessed = merged.map((element) => ({
-    ...element,
-    optionsstringforsearch: element.options
-      .map((option) => option.name)
-      .join(", "),
-  }));
-  console.log(`In total, there are ${mergedprocessed.length} questions`);
-  return mergedprocessed;
-}
-
-export async function mergeEverything() {
-  let merged = await mergeEverythingInner();
-  await databaseUpsert({ contents: merged, group: "combined" });
-  console.log("Done");
-}

@@ -1,51 +1,35 @@
 import { pgRead, readWritePool } from "./database/pg-wrapper";
+import { Forecast } from "./platforms";
 
-export async function getFrontpageRaw() {
+export async function getFrontpage(): Promise<Forecast[]> {
   const client = await readWritePool.connect();
   const res = await client.query(
-    "SELECT frontpage_sliced FROM latest.frontpage ORDER BY id DESC LIMIT 1"
+    "SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1"
   );
   if (!res.rows.length) return [];
   console.log(res.rows[0].frontpage_sliced);
   return res.rows[0].frontpage_sliced;
 }
 
-export async function getFrontpageFullRaw() {
+export async function getFrontpageFull(): Promise<Forecast[]> {
   const client = await readWritePool.connect();
   const res = await client.query(
-    "SELECT frontpage_full FROM latest.frontpage ORDER BY id DESC LIMIT 1"
+    "SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1"
   );
   if (!res.rows.length) return [];
   console.log(res.rows[0]);
   return res.rows[0].frontpage_full;
 }
 
-export async function getFrontpage() {
-  let frontPageForecastsCompatibleWithFuse = [];
-  try {
-    let data = await getFrontpageRaw();
-    frontPageForecastsCompatibleWithFuse = data.map((result) => ({
-      item: result,
-      score: 0,
-    }));
-    return frontPageForecastsCompatibleWithFuse;
-  } catch (error) {
-    console.log(error);
-  } finally {
-    return frontPageForecastsCompatibleWithFuse;
-  }
-}
-
 export async function rebuildFrontpage() {
   const frontpageFull = await pgRead({
-    schema: "latest",
-    tableName: "combined",
+    tableName: "questions",
   });
 
   const client = await readWritePool.connect();
   const frontpageSliced = (
     await client.query(`
-      SELECT * FROM latest.combined
+      SELECT * FROM questions
       WHERE
         (qualityindicators->>'stars')::int >= 3
         AND description != ''

@@ -56,7 +40,7 @@ export async function rebuildFrontpage() {
 
   const start = Date.now();
   await client.query(
-    "INSERT INTO latest.frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)",
+    "INSERT INTO frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)",
     [JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)]
   );
 
@@ -2,10 +2,10 @@ import "dotenv/config";
 
 import fs from "fs";
 
-import { databaseReadWithReadCredentials } from "../database/database-wrapper";
+import { pgReadWithReadCredentials } from "../database/pg-wrapper";
 
 let main = async () => {
-  let json = await databaseReadWithReadCredentials({ group: "combined" });
+  let json = await pgReadWithReadCredentials({ tableName: "questions" });
   let string = JSON.stringify(json, null, 2);
   let filename = "metaforecasts.json";
   fs.writeFileSync(filename, string);

src/backend/manual/noSchemaMigrate.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
+import "dotenv/config";
+
+import { readWritePool } from "../database/pg-wrapper";
+
+const migrate = async () => {
+  const client = await readWritePool.connect();
+
+  const execQuery = async (q: string) => {
+    console.log(q);
+    await client.query(q);
+  };
+
+  const platformTitleToName = {
+    Betfair: "betfair",
+    FantasySCOTUS: "fantasyscotus",
+    Foretold: "foretold",
+    "GiveWell/OpenPhilanthropy": "givewellopenphil",
+    "Good Judgment": "goodjudgement",
+    "Good Judgment Open": "goodjudgmentopen",
+    Infer: "infer",
+    Kalshi: "kalshi",
+    "Manifold Markets": "manifold",
+    Metaculus: "metaculus",
+    "Peter Wildeford": "wildeford",
+    PolyMarket: "polymarket",
+    PredictIt: "predictit",
+    Rootclaim: "rootclaim",
+    Smarkets: "smarkets",
+    "X-risk estimates": "xrisk",
+  };
+
+  try {
+    await client.query("BEGIN");
+    const copyTable = async (from: string, to: string) => {
+      await execQuery(`DROP TABLE IF EXISTS ${to}`);
+      await execQuery(`CREATE TABLE ${to} (LIKE ${from} INCLUDING ALL)`);
+      await execQuery(`INSERT INTO ${to} SELECT * FROM ${from}`);
+    };
+
+    await copyTable("latest.dashboards", "dashboards");
+    await copyTable("latest.combined", "questions");
+    await copyTable("latest.frontpage", "frontpage");
+    await copyTable("history.h2022", "history");
+
+    for (const [title, name] of Object.entries(platformTitleToName)) {
+      console.log(`Updating ${title} -> ${name}`);
+      for (const table of ["questions", "history"]) {
+        await client.query(
+          `UPDATE ${table} SET platform=$1 WHERE platform=$2`,
+          [name, title]
+        );
+      }
+    }
+
+    console.log("Fixing GJOpen ids in questions and history");
+    for (const table of ["questions", "history"]) {
+      await client.query(
+        `UPDATE ${table} SET id=REPLACE(id, 'goodjudmentopen-', 'goodjudgmentopen-') WHERE id LIKE 'goodjudmentopen-%'`
+      );
+    }
+
+    const fixId = (id: string) =>
+      id.replace("goodjudmentopen-", "goodjudgmentopen-");
+
+    console.log(
+      "Please rebuild frontpage manually - current version includes invalid GJOpen and xrisk ids"
+    );
+
+    const updateDashboards = async () => {
+      const res = await client.query("SELECT id, contents FROM dashboards");
+      for (const row of res.rows) {
+        let { id, contents } = row;
+        contents = contents.map(fixId);
+        await client.query(
+          "UPDATE dashboards SET contents = $1 WHERE id = $2",
+          [JSON.stringify(contents), id]
+        );
+      }
+    };
+    console.log("Updating dashboards");
+    await updateDashboards();
+
+    await client.query("COMMIT");
+  } catch (e) {
+    await client.query("ROLLBACK");
+    throw e;
+  } finally {
+    client.release();
+  }
+};
+
+migrate();

@@ -1,11 +1,12 @@
 /* Imports */
 import axios from "axios";
 
-import { databaseUpsert } from "../database/database-wrapper";
 import { calculateStars } from "../utils/stars";
 import { Platform } from "./";
 
 /* Definitions */
-let endpoint = "https://example.com/";
+const platformName = "example";
+const endpoint = "https://example.com/";
 
 /* Support functions */
 

@@ -23,7 +24,7 @@ async function fetchData() {
 
 async function processPredictions(predictions) {
   let results = await predictions.map((prediction) => {
-    let id = `platform-${prediction.id}`;
+    let id = `${platformName}-${prediction.id}`;
     let probability = prediction.probability;
     let options = [
       {

@@ -40,12 +41,12 @@ async function processPredictions(predictions) {
     let result = {
       title: prediction.title,
       url: `https://example.com`,
-      platform: "Example",
+      platform: platformName,
       description: prediction.description,
       options: options,
       timestamp: new Date().toISOString(),
       qualityindicators: {
-        stars: calculateStars("Example", {
+        stars: calculateStars(platformName, {
           /* some: somex, factors: factors */
         }),
         other: prediction.otherx,

@@ -59,12 +60,13 @@ async function processPredictions(predictions) {
 
 /* Body */
 
-export async function example() {
-  let data = await fetchData();
-  let results = await processPredictions(data); // somehow needed
-  // console.log(results)
-  // let string = JSON.stringify(results, null, 2)
-  await databaseUpsert({ contents: results, group: "example" });
-  console.log("Done");
-}
-//example()
+export const example: Platform = {
+  name: platformName,
+  label: "Example platform",
+  color: "#ff0000",
+  async fetcher() {
+    let data = await fetchData();
+    let results = await processPredictions(data); // somehow needed
+    return results;
+  },
+};

@@ -5,6 +5,8 @@ import https from "https";
 import { calculateStars } from "../utils/stars";
 import { Forecast, Platform } from "./";
 
+const platformName = "betfair";
+
 /* Definitions */
 let endpoint = process.env.SECRET_BETFAIR_ENDPOINT;
 

@@ -82,7 +84,7 @@ async function processPredictions(data) {
     /* if(Math.floor(Math.random() * 10) % 20 ==0){
       console.log(JSON.stringify(prediction, null, 4))
     } */
-    let id = `betfair-${prediction.marketId}`;
+    let id = `${platformName}-${prediction.marketId}`;
     let normalizationFactor = prediction.options
       .filter((option) => option.status == "ACTIVE" && option.totalMatched > 0)
       .map((option) => option.lastPriceTraded)

@@ -121,12 +123,14 @@ async function processPredictions(data) {
       id: id,
       title: title,
       url: `https://www.betfair.com/exchange/plus/politics/market/${prediction.marketId}`,
-      platform: "Betfair",
+      platform: platformName,
       description: description,
       options: options,
       timestamp: new Date().toISOString(),
       qualityindicators: {
-        stars: calculateStars("Betfair", { volume: prediction.totalMatched }),
+        stars: calculateStars(platformName, {
+          volume: prediction.totalMatched,
+        }),
         volume: prediction.totalMatched,
       },
     };

@@ -136,7 +140,9 @@ async function processPredictions(data) {
 }
 
 export const betfair: Platform = {
-  name: "betfair",
+  name: platformName,
   label: "Betfair",
   color: "#3d674a",
   async fetcher() {
     const data = await fetchPredictions();
     const results = await processPredictions(data); // somehow needed
@@ -2,7 +2,9 @@
 import axios from "axios";
 
 import { calculateStars } from "../utils/stars";
-import { Platform } from "./";
+import { Forecast, Platform } from "./";
+
+const platformName = "fantasyscotus";
 
 /* Definitions */
 let unixtime = new Date().getTime();

@@ -65,19 +67,19 @@ async function processData(data) {
   let historicalPercentageCorrect = data.stats.pcnt_correct;
   let historicalProbabilityCorrect =
     Number(historicalPercentageCorrect.replace("%", "")) / 100;
-  let results = [];
+  let results: Forecast[] = [];
   for (let event of events) {
     if (event.accuracy == "") {
-      let id = `fantasyscotus-${event.id}`;
+      let id = `${platformName}-${event.id}`;
       // if the thing hasn't already resolved
       let predictionData = await getPredictionsData(event.docket_url);
       let pAffirm = predictionData.proportionAffirm;
       //let trackRecord = event.prediction.includes("Affirm") ? historicalProbabilityCorrect : 1-historicalProbabilityCorrect
-      let eventObject = {
+      let eventObject: Forecast = {
         id: id,
         title: `In ${event.short_name}, the SCOTUS will affirm the lower court's decision`,
         url: `https://fantasyscotus.net/user-predictions${event.docket_url}`,
-        platform: "FantasySCOTUS",
+        platform: platformName,
         description: `${(pAffirm * 100).toFixed(2)}% (${
           predictionData.numAffirm
         } out of ${

@@ -100,7 +102,7 @@ async function processData(data) {
         timestamp: new Date().toISOString(),
         qualityindicators: {
           numforecasts: Number(predictionData.numForecasts),
-          stars: calculateStars("FantasySCOTUS", {}),
+          stars: calculateStars(platformName, {}),
         },
       };
       results.push(eventObject);

@@ -112,7 +114,9 @@ async function processData(data) {
 
 /* Body */
 export const fantasyscotus: Platform = {
-  name: "fantasyscotus",
+  name: platformName,
   label: "FantasySCOTUS",
   color: "#231149",
   async fetcher() {
     let rawData = await fetchData();
     let results = await processData(rawData);
@@ -5,6 +5,9 @@ import { calculateStars } from "../utils/stars";
 import { Platform } from "./";
 
 /* Definitions */
+
+const platformName = "foretold";
+
 let graphQLendpoint = "https://api.foretold.io/graphql";
 let highQualityCommunities = [
   "0104d8e8-07e4-464b-8b32-74ef22b49f21",

@@ -54,7 +57,9 @@ async function fetchAllCommunityQuestions(communityId) {
 }
 
 export const foretold: Platform = {
-  name: "foretold",
+  name: platformName,
   label: "Foretold",
   color: "#62520b",
   async fetcher() {
     let results = [];
     for (let community of highQualityCommunities) {

@@ -62,7 +67,7 @@ export const foretold: Platform = {
       questions = questions.map((question) => question.node);
       questions = questions.filter((question) => question.previousAggregate); // Questions without any predictions
       questions.forEach((question) => {
-        let id = `foretold-${question.id}`;
+        let id = `${platformName}-${question.id}`;
         let options = [];
         if (question.valueType == "PERCENTAGE") {
           let probability = question.previousAggregate.value.percentage;

@@ -83,13 +88,13 @@ export const foretold: Platform = {
           id: id,
           title: question.name,
           url: `https://www.foretold.io/c/${community}/m/${question.id}`,
-          platform: "Foretold",
+          platform: platformName,
           description: "",
           options: options,
           timestamp: new Date().toISOString(),
           qualityindicators: {
             numforecasts: Math.floor(Number(question.measurementCount) / 2),
-            stars: calculateStars("Foretold", {}),
+            stars: calculateStars(platformName, {}),
           },
           /*liquidity: liquidity.toFixed(2),
           tradevolume: tradevolume.toFixed(2),
@@ -2,10 +2,11 @@
 import axios from "axios";
 import fs from "fs";
 
-import { databaseUpsert } from "../database/database-wrapper";
 import { calculateStars } from "../utils/stars";
 import { Platform } from "./";
 
+const platformName = "givewellopenphil";
+
 /* Support functions */
 async function fetchPage(url: string) {
   let response = await axios({

@@ -49,24 +50,26 @@ async function main1() {
     let result = {
       title: title,
       url: url,
-      platform: "GiveWell",
+      platform: platformName,
       description: description,
       timestamp: new Date().toISOString(),
       qualityindicators: {
-        stars: calculateStars("GiveWell/OpenPhilanthropy", {}),
+        stars: calculateStars(platformName, {}),
       },
     }; // Note: This requires some processing afterwards
     // console.log(result)
     results.push(result);
   }
-  await databaseUpsert({
-    contents: results,
-    group: "givewell-questions-unprocessed",
-  });
+  // await databaseUpsert({
+  //   contents: results,
+  //   group: "givewell-questions-unprocessed",
+  // });
 }
 
 export const givewellopenphil: Platform = {
-  name: "givewellopenphil",
+  name: platformName,
   label: "GiveWell/OpenPhilanthropy",
   color: "#32407e",
   async fetcher() {
     // main1()
     return; // not necessary to refill the DB every time

@@ -76,6 +79,7 @@ export const givewellopenphil: Platform = {
     const data = JSON.parse(rawdata);
     const dataWithDate = data.map((datum: any) => ({
       ...datum,
+      platform: platformName,
       timestamp: "2021-02-23",
     }));
     return dataWithDate;
@@ -8,6 +8,7 @@ import { calculateStars } from "../utils/stars";
 import { Platform } from "./";
 
 /* Definitions */
+const platformName = "goodjudgment";
 let endpoint = "https://goodjudgment.io/superforecasts/";
 String.prototype.replaceAll = function replaceAll(search, replace) {
   return this.split(search).join(replace);

@@ -15,7 +16,9 @@ String.prototype.replaceAll = function replaceAll(search, replace) {
 
 /* Body */
 export const goodjudgment: Platform = {
-  name: "goodjudgment",
+  name: platformName,
   label: "Good Judgment",
   color: "#7d4f1b",
   async fetcher() {
     // Proxy fuckery
     let proxy;

@@ -64,7 +67,7 @@ export const goodjudgment: Platform = {
       let title = table[0]["0"].split("\t\t\t").splice(3)[0];
       if (title != undefined) {
         title = title.replaceAll("</a>", "");
-        let id = `goodjudgment-${hash(title)}`;
+        let id = `${platformName}-${hash(title)}`;
         let description = table
           .filter((row) => row["0"].includes("BACKGROUND:"))
           .map((row) => row["0"])

@@ -101,12 +104,12 @@ export const goodjudgment: Platform = {
           id: id,
           title: title,
           url: endpoint,
-          platform: "Good Judgment",
+          platform: platformName,
           description: description,
           options: options,
           timestamp: new Date().toISOString(),
           qualityindicators: {
-            stars: calculateStars("Good Judgment", {}),
+            stars: calculateStars(platformName, {}),
           },
           extra: {
             superforecastercommentary: analysis || "",
@ -8,6 +8,8 @@ import toMarkdown from "../utils/toMarkdown";
|
|||
import { Platform } from "./";
|
||||
|
||||
/* Definitions */
|
||||
const platformName = "goodjudgmentopen";
|
||||
|
||||
let htmlEndPoint = "https://www.gjopen.com/questions?page=";
|
||||
let annoyingPromptUrls = [
|
||||
"https://www.gjopen.com/questions/1933-what-forecasting-questions-should-we-ask-what-questions-would-you-like-to-forecast-on-gjopen",
|
||||
|
@ -185,12 +187,12 @@ async function goodjudgmentopen_inner(cookie) {
|
|||
}
|
||||
let questionNumRegex = new RegExp("questions/([0-9]+)");
|
||||
let questionNum = url.match(questionNumRegex)[1]; //.split("questions/")[1].split("-")[0];
|
||||
let id = `goodjudmentopen-${questionNum}`;
|
||||
let id = `${platformName}-${questionNum}`;
|
||||
let question = {
|
||||
id: id,
|
||||
title: title,
|
||||
url: url,
|
||||
platform: "Good Judgment Open",
|
||||
platform: platformName,
|
||||
...moreinfo,
|
||||
};
|
||||
if (j % 30 == 0 || DEBUG_MODE == "on") {
|
||||
|
@ -236,8 +238,10 @@ async function goodjudgmentopen_inner(cookie) {
|
|||
return results;
|
||||
}
|
||||
|
||||
export const goodjudmentopen: Platform = {
|
||||
name: "goodjudmentopen", // note the typo! current table name is without `g`, `goodjudmentopen`
|
||||
export const goodjudgmentopen: Platform = {
|
||||
name: platformName,
|
||||
label: "Good Judgment Open",
|
||||
color: "#002455",
|
||||
async fetcher() {
|
||||
let cookie = process.env.GOODJUDGMENTOPENCOOKIE;
|
||||
return await applyIfSecretExists(cookie, goodjudgmentopen_inner);
|
|
@@ -1,13 +1,13 @@
-import { databaseUpsert } from "../database/database-wrapper";
+import { pgUpsert } from "../database/pg-wrapper";
 import { betfair } from "./betfair";
 import { fantasyscotus } from "./fantasyscotus";
 import { foretold } from "./foretold";
 import { givewellopenphil } from "./givewellopenphil";
 import { goodjudgment } from "./goodjudgment";
-import { goodjudmentopen } from "./goodjudmentopen";
+import { goodjudgmentopen } from "./goodjudgmentopen";
 import { infer } from "./infer";
 import { kalshi } from "./kalshi";
-import { manifoldmarkets } from "./manifoldmarkets";
+import { manifold } from "./manifold";
 import { metaculus } from "./metaculus";
 import { polymarket } from "./polymarket";
 import { predictit } from "./predictit";

@@ -67,7 +67,9 @@ export interface Forecast {
 export type PlatformFetcher = () => Promise<Forecast[] | null>;
 
 export interface Platform {
-  name: string;
+  name: string; // short name for ids and `platform` db column, e.g. "xrisk"
+  label: string; // longer name for displaying on frontend etc., e.g. "X-risk estimates"
+  color: string; // used on frontend
   fetcher?: PlatformFetcher;
 }
 

@@ -92,10 +94,10 @@ export const platforms: Platform[] = [
   foretold,
   givewellopenphil,
   goodjudgment,
-  goodjudmentopen,
+  goodjudgmentopen,
   infer,
   kalshi,
-  manifoldmarkets,
+  manifold,
   metaculus,
   polymarket,
   predictit,

@@ -112,7 +114,11 @@ export const processPlatform = async (platform: Platform) => {
   }
   let results = await platform.fetcher();
   if (results && results.length) {
-    await databaseUpsert({ contents: results, group: platform.name });
+    await pgUpsert({
+      contents: results,
+      tableName: "questions",
+      replacePlatform: platform.name,
+    });
     console.log("Done");
   } else {
     console.log(`Platform ${platform.name} didn't return any results`);
@@ -3,11 +3,13 @@ import axios from "axios";
 import { Tabletojson } from "tabletojson";
 
 import { applyIfSecretExists } from "../utils/getSecrets";
 import { measureTime } from "../utils/measureTime";
 import { calculateStars } from "../utils/stars";
 import toMarkdown from "../utils/toMarkdown";
 import { Forecast, Platform } from "./";
 
 /* Definitions */
+const platformName = "infer";
 let htmlEndPoint = "https://www.infer-pub.com/questions";
 String.prototype.replaceAll = function replaceAll(search, replace) {
   return this.split(search).join(replace);

@@ -145,7 +147,7 @@ async function fetchStats(questionUrl, cookie) {
     qualityindicators: {
       numforecasts: Number(numforecasts),
       numforecasters: Number(numforecasters),
-      stars: calculateStars("Infer", { numforecasts }),
+      stars: calculateStars(platformName, { numforecasts }),