diff --git a/src/database/database-wrapper.js b/src/database/database-wrapper.js
index 4d1ea72..879bde5 100644
--- a/src/database/database-wrapper.js
+++ b/src/database/database-wrapper.js
@@ -1,36 +1,107 @@
 import { mongoUpsert, mongoRead, mongoReadWithReadCredentials, mongoGetAllElements } from "./mongo-wrapper.js"
-import { pgUpsert } from "./pg-wrapper.js"
+import { pgUpsert, pgRead, pgReadWithReadCredentials } from "./pg-wrapper.js"

 export async function databaseUpsert({ contents, group }) {
+  // TODO: rationalize this interface, e.g. ({ contents, group, schema }); it is not yet clear whether schema selection should be managed by this layer.
   // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){
   let mongoDocName;
   switch (group) {
     case 'combined':
       mongoDocName = "metaforecasts"
       await mongoUpsert(contents, mongoDocName, "metaforecastCollection", "metaforecastDatabase")
-      await pgUpsert({contents, schema: "latest", tableName: "combined"})
+      await pgUpsert({ contents, schema: "latest", tableName: "combined" })
       break;
     case 'history':
       let currentDate = new Date()
       let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_")
       mongoDocName = `metaforecast_history_${dateUpToMonth}`
-      await mongoUpsert(data, mongoDocName, "metaforecastHistory", "metaforecastDatabase")
-      await pgUpsert({contents, schema: "history", tableName: "combined"})
+      await mongoUpsert(contents, mongoDocName, "metaforecastHistory", "metaforecastDatabase")
+      // await pgUpsert({ contents, schema: "history", tableName: "combined" })
       break;
     default:
       mongoDocName = `${group}-questions`
       await mongoUpsert(contents, mongoDocName, "metaforecastCollection", "metaforecastDatabase")
-      await pgUpsert({contents, schema: "latest", tableName: group})
+      await pgUpsert({ contents, schema: "latest", tableName: group })
   }
 }
 // databaseUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")

-export const databaseRead = mongoRead;
+export async function databaseRead({ group }) {
+  let response, mongoDocName, responseMongo, responsePg
+  let currentDate = new Date()
+  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_") // e.g., 2022_02
+
+  let displayPossibleResponses = (response1, response2) => {
+    console.log("Possible responses:")
+    console.log("Mongo: ")
+    console.log(response1.slice(0, 2))
+    console.log("Postgres: ")
+    console.log(response2.slice(0, 2))
+    console.log("")
+  }
+
+  switch (group) {
+    case 'combined':
+      mongoDocName = "metaforecasts"
+      responseMongo = await mongoRead(mongoDocName, "metaforecastCollection", "metaforecastDatabase")
+      responsePg = await pgRead({ schema: "latest", tableName: "combined" })
+      displayPossibleResponses(responseMongo, responsePg)
+      break;
+    case 'history':
+      mongoDocName = `metaforecast_history_${dateUpToMonth}`
+      responseMongo = await mongoRead(mongoDocName, "metaforecastHistory", "metaforecastDatabase")
+      // responsePg = await pgReadWithReadCredentials({ schema: "history", tableName: "combined" }) // fix, make dependent on month.
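+      // One possible month-dependent sketch (hypothetical: it assumes per-month history
+      // tables such as history.combined_2022_02 exist, and pg-wrapper's table whitelist
+      // would also have to include those names):
+      // responsePg = await pgRead({ schema: "history", tableName: `combined_${dateUpToMonth}` })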
+      break;
+    default:
+      mongoDocName = `${group}-questions`
+      responseMongo = await mongoRead(mongoDocName, "metaforecastCollection", "metaforecastDatabase")
+      responsePg = await pgRead({ schema: "latest", tableName: group })
+      displayPossibleResponses(responseMongo, responsePg)
+  }
+
+  response = responseMongo // responsePg
+  return response
+}
 // databaseRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")

-export const databaseReadWithReadCredentials = mongoReadWithReadCredentials;
-// databaseReadWithReadCredentials(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
+export async function databaseReadWithReadCredentials({ group }) {
+  let response, mongoDocName, responseMongo, responsePg
+  let currentDate = new Date()
+  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_") // e.g., 2022_02

-export const databaseGetAllElements = mongoGetAllElements;
-// databaseGetAllElements(databaseName = "metaforecastDatabase", collectionName = "metaforecastCollection")
\ No newline at end of file
+
+  let displayPossibleResponses = (response1, response2) => {
+    console.log("Possible responses:")
+    console.log("Mongo: ")
+    console.log(response1.slice(0, 2))
+    console.log("Postgres: ")
+    console.log(response2.slice(0, 2))
+    console.log("")
+  }
+
+  switch (group) {
+    case 'combined':
+      mongoDocName = "metaforecasts"
+      responseMongo = await mongoReadWithReadCredentials(mongoDocName, "metaforecastCollection", "metaforecastDatabase")
+      responsePg = await pgReadWithReadCredentials({ schema: "latest", tableName: "combined" })
+      displayPossibleResponses(responseMongo, responsePg)
+      break;
+    case 'history':
+      mongoDocName = `metaforecast_history_${dateUpToMonth}`
+      responseMongo = await mongoReadWithReadCredentials(mongoDocName, "metaforecastHistory", "metaforecastDatabase")
+      // responsePg = await pgReadWithReadCredentials({ schema: "history", tableName: "combined" }) // fix, make dependent on month.
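+      // (The same hypothetical month-dependent sketch noted in databaseRead above would apply here.)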
+      break;
+    default:
+      mongoDocName = `${group}-questions`
+      responseMongo = await mongoReadWithReadCredentials(mongoDocName, "metaforecastCollection", "metaforecastDatabase")
+      responsePg = await pgReadWithReadCredentials({ schema: "latest", tableName: group })
+      displayPossibleResponses(responseMongo, responsePg)
+  }
+
+  response = responseMongo // responsePg
+  return response
+}
+// databaseReadWithReadCredentials(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
\ No newline at end of file
diff --git a/src/database/pg-wrapper.js b/src/database/pg-wrapper.js
index 96a05b3..3519108 100644
--- a/src/database/pg-wrapper.js
+++ b/src/database/pg-wrapper.js
@@ -12,21 +12,35 @@ const tableWhiteList = [...createFullName("latest", tableNamesWhitelist), ...cre

 /* Postgres database connection code */
-const pool = new Pool({
-  connectionString: process.env.DATABASE_URL || getSecret("heroku-postgres"),
+const databaseURL = getSecret("digitalocean-postgres")
+// process.env.DATABASE_URL || getSecret("heroku-postgres")
+const readWritePool = new Pool({
+  connectionString: databaseURL,
   ssl: {
     rejectUnauthorized: false
   }
 });
+const readOnlyDatabaseURL = "postgresql://public_read_only_user:ffKhp52FJNNa8cKK@postgres-red-do-user-10290909-0.b.db.ondigitalocean.com:25060/metaforecastpg?sslmode=require"
+const readOnlyPool = new Pool({
+  connectionString: readOnlyDatabaseURL,
+  ssl: {
+    rejectUnauthorized: false
+  }
+});

 // Helpers
-const runPgCommand = async (query) => {
-  console.log(query)
-  const client = await pool.connect();
-  const result = await client.query(query);
-  const results = { 'results': (result) ? result.rows : null };
-  client.release();
+const runPgCommand = async ({ command, pool }) => {
+  console.log(command)
+  let client, results // declared outside try so that finally and the return can see them
+  try {
+    client = await pool.connect();
+    const result = await client.query(command);
+    results = { 'results': result ? result.rows : null };
+  } catch (error) {
+    console.log(error)
+  } finally {
+    if (client) client.release(); // client may be undefined if connect() threw
+  }
   // console.log(results)
   return results
 }
@@ -48,40 +62,76 @@ let buildMetaforecastTable = (schema, table) => `CREATE TABLE ${schema}.${table}
 let createIndex = (schema, table) => `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`
 let createUniqueIndex = (schema, table) => `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`

-export async function pgInitialize() {
+async function setPermissionsForPublicUser() {

-  for (let schema of schemas) {
-    runPgCommand(`CREATE SCHEMA IF NOT EXISTS ${schema}`)
+  let initCommands = ["REVOKE ALL ON DATABASE metaforecastpg FROM public_read_only_user;",
+    "GRANT CONNECT ON DATABASE metaforecastpg TO public_read_only_user;"]
+  for (let command of initCommands) {
+    await runPgCommand({ command, pool: readWritePool })
   }
-  runPgCommand(`SET search_path TO ${schemas.join(",")},public;`)

+  let buildGrantSelectForSchema = (schema) => `GRANT SELECT ON ALL TABLES IN SCHEMA ${schema} TO public_read_only_user`
+  for (let schema of schemas) {
+    await runPgCommand({ command: buildGrantSelectForSchema(schema), pool: readWritePool })
+  }
+
+  let alterDefaultPrivilegesForSchema = (schema) => `ALTER DEFAULT PRIVILEGES IN SCHEMA ${schema} GRANT SELECT ON TABLES TO public_read_only_user`
+  for (let schema of schemas) {
+    await runPgCommand({ command: alterDefaultPrivilegesForSchema(schema), pool: readWritePool })
+  }
+
+}
+export async function pgInitialize() {
+  console.log("Create schemas")
+  for (let schema of schemas) {
+    await runPgCommand({ command: `CREATE SCHEMA IF NOT EXISTS ${schema}`, pool: readWritePool })
+  }
+  console.log("")
+
+  console.log("Set search path")
+  await runPgCommand({ command: `SET search_path TO ${schemas.join(",")},public;`, pool: readWritePool })
+  console.log("")
+
+  console.log("Set public user permissions")
+  await setPermissionsForPublicUser()
+  console.log("")
+
+  console.log("Create tables & their indexes")
   for (let schema of schemas) {
     for (let table of tableNamesWhitelist) {
-      await runPgCommand(dropTable(schema, table))
-      await runPgCommand(buildMetaforecastTable(schema, table))
+      await runPgCommand({ command: dropTable(schema, table), pool: readWritePool })
+      await runPgCommand({ command: buildMetaforecastTable(schema, table), pool: readWritePool })
       if (schema == "history") {
-        await runPgCommand(createIndex(schema, table))
+        await runPgCommand({ command: createIndex(schema, table), pool: readWritePool })
       } else {
-        await runPgCommand(createUniqueIndex(schema, table))
+        await runPgCommand({ command: createUniqueIndex(schema, table), pool: readWritePool })
       }
     }
   }
-
+  console.log("")
 }
 // pgInitialize()

 // Read
-export async function pgRead({schema, tableName}) {
+async function pgReadWithPool({ schema, tableName, pool }) {
   if (tableWhiteList.includes(`${schema}.${tableName}`)) {
     let command = `SELECT * from ${schema}.${tableName}`
-    let response = await runPgCommand(command)
-    let results = response.results
+    let response = await runPgCommand({ command, pool })
+    let results = response.results
     return results
   } else {
     throw Error(`Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`)
   }
 }

+export async function pgRead({ schema, tableName }) {
+  return await pgReadWithPool({ schema, tableName, pool: readWritePool })
+}
+
+export async function pgReadWithReadCredentials({ schema, tableName }) {
+  return await pgReadWithPool({ schema, tableName, pool: readOnlyPool })
+}
+
 export async function pgInsert({ datum, schema, tableName }) {
   if (tableWhiteList.includes(`${schema}.${tableName}`)) {
     let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`
@@ -99,10 +149,14 @@ export async function pgInsert({ datum, schema, tableName }) {
       JSON.stringify(datum.qualityindicators || []),
       JSON.stringify(datum.extra || [])
     ]
-
-    const client = await pool.connect();
-    const result = await client.query(text, values);
-    client.release();
+    let client, result // declared outside try so that finally and the return can see them
+    try {
+      client = await readWritePool.connect();
+      result = await client.query(text, values);
+    } catch (error) {
+      console.log(error)
+    } finally {
+      if (client) client.release();
+    }
     // console.log(result)
     return result
   } else {
@@ -160,10 +214,10 @@ export async function pgUpsert({ contents, schema, tableName }) {
     }
   }
-  console.log(`Inserted rows with approximate cummulative size ${roughSizeOfObject(contents)} MB into ${schema}.${tableName}.`)
-  let check = await pgRead({schema, tableName})
-  console.log(`Received rows with approximate cummulative size ${roughSizeOfObject(check)} MB from ${schema}.${tableName}.`)
+  console.log(`Inserted rows with approximate cumulative size ${roughSizeOfObject(contents)} MB into ${schema}.${tableName}.`)
+  let check = await pgRead({ schema, tableName })
+  console.log(`Received rows with approximate cumulative size ${roughSizeOfObject(check)} MB from ${schema}.${tableName}.`)
   console.log("Sample: ")
-  console.log(JSON.stringify(check.slice(0,1), null, 4));
+  console.log(JSON.stringify(check.slice(0, 1), null, 4));

   //console.log(JSON.stringify(check.slice(0, 1), null, 4));

diff --git a/src/flow/history/addToHistory.js b/src/flow/history/addToHistory.js
index 2c5de16..7996021 100644
--- a/src/flow/history/addToHistory.js
+++ b/src/flow/history/addToHistory.js
@@ -1,65 +1,66 @@
 import { writeFileSync } from "fs"
 import { databaseReadWithReadCredentials, databaseUpsert } from "../../database/database-wrapper.js"
-let databaseRead = databaseReadWithReadCredentials
 let isEmptyArray = arr => arr.length == 0

-export async function addToHistory(){
+export async function addToHistory() {
   let currentDate = new Date()
-  let dateUpToMonth = currentDate.toISOString().slice(0,7).replace("-", "_")
+  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_")

-  let currentJSONwithMetaculus = await databaseRead("metaforecasts")
+  let currentJSONwithMetaculus = await databaseReadWithReadCredentials({ group: "combined" })
   let currentJSON = currentJSONwithMetaculus.filter(element => element.platform != "Metaculus" && element.platform != "Estimize") // without Metaculus
   // console.log(currentJSON.slice(0,20))
   // console.log(currentJSON)
-  let historyJSON = await databaseRead(`metaforecast_history_${dateUpToMonth}`,"metaforecastHistory")
+  let historyJSON = await databaseReadWithReadCredentials({ group: "history" })
   // console.log(historyJSON)

-  let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url )))
+  let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url)))
   // console.log(currentForecastsWithAHistory)

-  let currentForecastsWithoutAHistory = currentJSON.filter(element => isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url )))
+  let currentForecastsWithoutAHistory = currentJSON.filter(element => isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url)))
   // console.log(currentForecastsWithoutAHistory)
-
+
   // Add both types of forecast
   let newHistoryJSON = []
-  for(let historyElement of historyJSON){
-    let correspondingNewElementArray = currentForecastsWithAHistory.filter(element => historyElement.title == element.title && historyElement.url == element.url )
+  for (let historyElement of historyJSON) {
+    let correspondingNewElementArray = currentForecastsWithAHistory.filter(element => historyElement.title == element.title && historyElement.url == element.url)
     // console.log(correspondingNewElement)
-    if(!isEmptyArray(correspondingNewElementArray)){
+    if (!isEmptyArray(correspondingNewElementArray)) {
       let correspondingNewElement = correspondingNewElementArray[0]
       let timeStampOfNewElement = correspondingNewElement.timestamp
       let doesHistoryAlreadyContainElement = historyElement.history.map(element => element.timestamp).includes(timeStampOfNewElement)
-      if(!doesHistoryAlreadyContainElement){
-        let historyWithNewElement = historyElement["history"].concat({
-          "timestamp": correspondingNewElement.timestamp,
-          "options": correspondingNewElement.options,
-          "qualityindicators": correspondingNewElement.qualityindicators
-        })
-        let newHistoryElement = {...correspondingNewElement, "history": historyWithNewElement}
-        // If some element (like the description) changes, we keep the new one.
-        newHistoryJSON.push(newHistoryElement)
-      }else{
-        newHistoryJSON.push(historyElement)
+      if (!doesHistoryAlreadyContainElement) {
+        let historyWithNewElement = historyElement["history"].concat({
+          "timestamp": correspondingNewElement.timestamp,
+          "options": correspondingNewElement.options,
+          "qualityindicators": correspondingNewElement.qualityindicators
+        })
+        let newHistoryElement = { ...correspondingNewElement, "history": historyWithNewElement }
+        // If some element (like the description) changes, we keep the new one.
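+        // (The merged entry keeps the latest metadata but the full accumulated
+        // history array, so previously recorded snapshots are not lost.)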
+        newHistoryJSON.push(newHistoryElement)
+      } else {
+        newHistoryJSON.push(historyElement)
       }
-    }else{
+    } else {
       // console.log(historyElement)
       newHistoryJSON.push(historyElement)
     }
   }

-  for(let currentForecast of currentForecastsWithoutAHistory){
-    let newHistoryElement = ({...currentForecast, "history": [{
-      "timestamp": currentForecast.timestamp,
-      "options": currentForecast.options,
-      "qualityindicators": currentForecast.qualityindicators
-    }]})
+  for (let currentForecast of currentForecastsWithoutAHistory) {
+    let newHistoryElement = ({
+      ...currentForecast, "history": [{
+        "timestamp": currentForecast.timestamp,
+        "options": currentForecast.options,
+        "qualityindicators": currentForecast.qualityindicators
+      }]
+    })
     delete newHistoryElement.timestamp
     delete newHistoryElement.options
     delete newHistoryElement.qualityindicators
     newHistoryJSON.push(newHistoryElement)
   }

-  await databaseUpsert({contents: newHistoryJSON, group: "history"})
+  await databaseUpsert({ contents: newHistoryJSON, group: "history" })
   // console.log(newHistoryJSON.slice(0,5))
   // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))
diff --git a/src/flow/history/createHistoryForMonth.js b/src/flow/history/createHistoryForMonth.js
index 5f4457a..8cd708f 100644
--- a/src/flow/history/createHistoryForMonth.js
+++ b/src/flow/history/createHistoryForMonth.js
@@ -1,9 +1,9 @@
 import { databaseRead, databaseUpsert } from "../../database/database-wrapper.js"

-export async function createHistoryForMonth(){
+export async function createHistoryForMonth() {
   let currentDate = new Date()
-  let dateUpToMonth = currentDate.toISOString().slice(0,7).replace("-", "_")
-  let metaforecasts = await databaseRead("metaforecasts")
+  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_")
+  let metaforecasts = await databaseRead({ group: "combined" })
   let metaforecastsHistorySeed = metaforecasts.map(element => {
     // let moreoriginsdata = element.author ? ({author: element.author}) : ({})
     return ({
@@ -18,10 +18,10 @@ export async function createHistoryForMonth(){
       qualityindicators: element.qualityindicators
     }],
     extra: element.extra || {}
-  }) 
+  })
   }).filter(element => element.platform != "Metaculus" && element.platform != "Estimize")
   //console.log(metaforecastsHistorySeed)
-  await databaseUpsert({contents: metaforecastsHistorySeed, group: "history"})
+  await databaseUpsert({ contents: metaforecastsHistorySeed, group: "history" })
 }
 ////createInitialHistory()
\ No newline at end of file
diff --git a/src/flow/history/old/addToHistory_old.js b/src/flow/history/old/addToHistory_old.js
deleted file mode 100644
index 7ba4491..0000000
--- a/src/flow/history/old/addToHistory_old.js
+++ /dev/null
@@ -1,68 +0,0 @@
-import { writeFileSync } from "fs"
-import { databaseReadWithReadCredentials, databaseUpsert } from "../database-wrapper.js"
-let databaseRead = databaseReadWithReadCredentials
-let isEmptyArray = arr => arr.length == 0
-
-export async function addToHistory(){
-  // throw new Error("Not today")
-  let currentJSON = await databaseRead("metaforecasts")
-  // console.log(currentJSON)
-  let historyJSON = await databaseRead("metaforecast_history")
-  // console.log(historyJSON)
-
-  let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url )))
-  // console.log(currentForecastsWithAHistory)
-
-  let currentForecastsWithoutAHistory = currentJSON.filter(element => isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url )))
-  // console.log(currentForecastsWithoutAHistory)
-
-  // Add both types of forecast
-  let newHistoryJSON = []
-  for(let historyElement of historyJSON){
-    let correspondingNewElementArray = currentForecastsWithAHistory.filter(element => historyElement.title == element.title && historyElement.url == element.url )
-    // console.log(correspondingNewElement)
-    if(!isEmptyArray(correspondingNewElementArray)){
-      let correspondingNewElement = correspondingNewElementArray[0]
-      let timeStampOfNewElement = correspondingNewElement.timestamp
-      let doesHistoryAlreadyContainElement = historyElement.history.map(element => element.timestamp).includes(timeStampOfNewElement)
-      if(!doesHistoryAlreadyContainElement){
-        let historyWithNewElement = historyElement["history"].concat({
-          "timestamp": correspondingNewElement.timestamp,
-          "options": correspondingNewElement.options,
-          "qualityindicators": correspondingNewElement.qualityindicators
-        })
-        let newHistoryElement = {...correspondingNewElement, "history": historyWithNewElement}
-        // If some element (like the description) changes, we keep the new one.
-        newHistoryJSON.push(newHistoryElement)
-      }else{
-        newHistoryJSON.push(historyElement)
-      }
-    }else{
-      // console.log(historyElement)
-      newHistoryJSON.push(historyElement)
-    }
-  }
-
-  for(let currentForecast of currentForecastsWithoutAHistory){
-    let newHistoryElement = ({...currentForecast, "history": [{
-      "timestamp": currentForecast.timestamp,
-      "options": currentForecast.options,
-      "qualityindicators": currentForecast.qualityindicators
-    }]})
-    delete newHistoryElement.timestamp
-    delete newHistoryElement.options
-    delete newHistoryElement.qualityindicators
-    newHistoryJSON.push(newHistoryElement)
-  }
-
-  databaseUpsert(newHistoryJSON, "metaforecast_history")
-  // console.log(newHistoryJSON.slice(0,5))
-  // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))
-  // writefile(JSON.stringify(newHistoryJSON, null, 2), "metaforecasts_history", "", ".json")
-  //console.log(newHistoryJSON)
-  /*
-  let forecastsAlreadyInHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url )))
-  */
-  console.log(new Date().toISOString())
-}
-// addToHistory()
diff --git a/src/flow/history/old/createInitialHistoryWithMetaculus.js b/src/flow/history/old/createInitialHistoryWithMetaculus.js
index 01bb17e..14b8653 100644
--- a/src/flow/history/old/createInitialHistoryWithMetaculus.js
+++ b/src/flow/history/old/createInitialHistoryWithMetaculus.js
@@ -1,7 +1,7 @@
 import { databaseRead, databaseUpsert } from "../database-wrapper.js"

 let createInitialHistory = async () => {
-  let metaforecasts = await databaseRead("metaforecasts")
+  let metaforecasts = await databaseRead({ group: "combined" })
   let metaforecastsHistorySeed = metaforecasts.map(element => {
     // let moreoriginsdata = element.author ? ({author: element.author}) : ({})
     return ({
@@ -16,10 +16,10 @@ let createInitialHistory = async () => {
       qualityindicators: element.qualityindicators
     }],
     extra: element.extra || {}
-  }) 
+  })
   })
   console.log(metaforecastsHistorySeed)
-  await databaseUpsert(metaforecastsHistorySeed, "metaforecast_history")
+  await databaseUpsert({ contents: metaforecastsHistorySeed, group: "history" })
 }
 createInitialHistory()
\ No newline at end of file
diff --git a/src/flow/mergeEverything.js b/src/flow/mergeEverything.js
index 94e9ec0..87799ff 100644
--- a/src/flow/mergeEverything.js
+++ b/src/flow/mergeEverything.js
@@ -1,12 +1,11 @@
 import { databaseRead, databaseUpsert } from "../database/database-wrapper.js";
 import { platformNames } from "../platforms/all-platforms.js"

 /* Merge everything */
-let suffix = "-questions";

 export async function mergeEverythingInner() {
   let merged = [];
   for (let platformName of platformNames) {
-    let json = await databaseRead(platformName + suffix);
+    let json = await databaseRead({ group: platformName });
     console.log(`${platformName} has ${json.length} questions\n`);
     merged = merged.concat(json);
   }
diff --git a/src/manual/downloadFrontpage.js b/src/manual/downloadFrontpage.js
index 39beab2..9bdf2be 100644
--- a/src/manual/downloadFrontpage.js
+++ b/src/manual/downloadFrontpage.js
@@ -27,7 +27,7 @@ let shuffle = (array) => {
 let main = async () => {
   let init = Date.now();
-  let json = await databaseReadWithReadCredentials("metaforecasts");
+  let json = await databaseReadWithReadCredentials({ group: "combined" });
   json = json.filter(
     (forecast) =>
diff --git a/src/manual/manualDownload.js b/src/manual/manualDownload.js
index b0cb153..5528912 100644
--- a/src/manual/manualDownload.js
+++ b/src/manual/manualDownload.js
@@ -3,7 +3,7 @@ import fs from "fs"
 import { databaseReadWithReadCredentials } from "../database/database-wrapper.js"

 let main = async () => {
-  let json = await databaseReadWithReadCredentials("metaforecasts")
+  let json = await databaseReadWithReadCredentials({ group: "combined" })
   let string = JSON.stringify(json, null, 2)
   let filename = 'metaforecasts.json'
   fs.writeFileSync(filename, string);
diff --git a/src/manual/manualSendToMongo.js b/src/manual/manualSendToMongo.js
index 4bb23ab..0a4969d 100644
--- a/src/manual/manualSendToMongo.js
+++ b/src/manual/manualSendToMongo.js
@@ -1,5 +1,5 @@
 import fs from "fs";
-import { databaseRead, databaseUpsert } from "../database/database-wrapper.js";
+import { databaseUpsert } from "../database/database-wrapper.js";

 /* This is necessary for estimize, the database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once.
 */
diff --git a/src/utils/algolia.js b/src/utils/algolia.js
index 5b4a0f2..7b162ac 100644
--- a/src/utils/algolia.js
+++ b/src/utils/algolia.js
@@ -1,35 +1,35 @@
 import algoliasearch from 'algoliasearch';
 import fs from "fs"
-import {getSecret} from "./getSecrets.js"
+import { getSecret } from "./getSecrets.js"
 import { databaseReadWithReadCredentials } from "../database/database-wrapper.js"
 import { mergeEverythingInner } from '../flow/mergeEverything.js';

 let cookie = process.env.ALGOLIA_MASTER_API_KEY || getSecret("algolia")
-const client = algoliasearch('96UD3NTQ7L', cookie); 
+const client = algoliasearch('96UD3NTQ7L', cookie);
 const index = client.initIndex('metaforecast');

-export async function rebuildAlgoliaDatabaseTheHardWay(){
+export async function rebuildAlgoliaDatabaseTheHardWay() {
   console.log("Doing this the hard way")
   let records = await mergeEverythingInner()
-  records = records.map((record, index) => ({...record, has_numforecasts: record.numforecasts ? true : false, objectID: index}) )
+  records = records.map((record, index) => ({ ...record, has_numforecasts: record.numforecasts ? true : false, objectID: index }))
   // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/
-  
-  if(index.exists()){
+
+  if (await index.exists()) { // index.exists() returns a promise; without await the condition is always truthy
     console.log("Index exists")
-    index.replaceAllObjects(records, { safe:true }).catch(error => console.log(error))
+    index.replaceAllObjects(records, { safe: true }).catch(error => console.log(error))
     console.log(`Pushed ${records.length} records. Algolia will update asynchronously`)
   }
 }

-export async function rebuildAlgoliaDatabaseTheEasyWay(){
-  let records = await databaseReadWithReadCredentials("metaforecasts")
-  records = records.map((record, index) => ({...record, has_numforecasts: record.numforecasts ? true : false, objectID: index}) )
+export async function rebuildAlgoliaDatabaseTheEasyWay() {
+  let records = await databaseReadWithReadCredentials({ group: "combined" })
+  records = records.map((record, index) => ({ ...record, has_numforecasts: record.numforecasts ? true : false, objectID: index }))
   // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/
-  
-  if(index.exists()){
+
+  if (await index.exists()) { // same fix as above
     console.log("Index exists")
-    index.replaceAllObjects(records, { safe:true }).catch(error => console.log(error))
+    index.replaceAllObjects(records, { safe: true }).catch(error => console.log(error))
     console.log(`Pushed ${records.length} records. Algolia will update asynchronously`)
   }
 }
diff --git a/src/utils/evaluations/pullForecastsToCSVForRating.js b/src/utils/evaluations/pullForecastsToCSVForRating.js
index 0afb9a3..a1e7de1 100644
--- a/src/utils/evaluations/pullForecastsToCSVForRating.js
+++ b/src/utils/evaluations/pullForecastsToCSVForRating.js
@@ -13,13 +13,13 @@ let getQualityIndicators = forecast => Object.entries(forecast.qualityindicators
 let main = async () => {
   let highQualityPlatforms = ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
-  let json = await databaseReadWithReadCredentials("metaforecasts")
+  let json = await databaseReadWithReadCredentials({ group: "combined" })
   console.log(json.length)
   //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
   //console.log(uniquePlatforms)
-  
+
   let forecastsFromGoodPlatforms = json.filter(forecast => highQualityPlatforms.includes(forecast.platform))
-  let tsv = "index\ttitle\turl\tqualityindicators\n"+forecastsFromGoodPlatforms
+  let tsv = "index\ttitle\turl\tqualityindicators\n" + forecastsFromGoodPlatforms
     .map((forecast, index) => {
       let row = `${index}\t${forecast.title}\t${forecast.url}\t${getQualityIndicators(forecast)}`
       console.log(row)
diff --git a/src/utils/evaluations/pullMetaculusForecastsToCSVForRating.js b/src/utils/evaluations/pullMetaculusForecastsToCSVForRating.js
index 5c06c99..bfb5ec8 100644
--- a/src/utils/evaluations/pullMetaculusForecastsToCSVForRating.js
+++ b/src/utils/evaluations/pullMetaculusForecastsToCSVForRating.js
@@ -10,26 +10,26 @@ import { databaseReadWithReadCredentials } from "../database-wrapper.js"
 let getQualityIndicators = forecast => Object.entries(forecast.qualityindicators).map(entry => `${entry[0]}: ${entry[1]}`).join("; ")

 let shuffleArray = (array) => {
-  // See: https://stackoverflow.com/questions/2450954/how-to-randomize-shuffle-a-javascript-array
-  for (let i = array.length - 1; i > 0; i--) {
-    const j = Math.floor(Math.random() * (i + 1));
-    [array[i], array[j]] = [array[j], array[i]];
-  }
-  return array
+  // See: https://stackoverflow.com/questions/2450954/how-to-randomize-shuffle-a-javascript-array
+  for (let i = array.length - 1; i > 0; i--) {
+    const j = Math.floor(Math.random() * (i + 1));
+    [array[i], array[j]] = [array[j], array[i]];
+  }
+  return array
 }

 /* Body */

 let main = async () => {
-  let highQualityPlatforms = [ 'Metaculus' ] // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
-  let json = await databaseReadWithReadCredentials("metaforecasts")
+  let highQualityPlatforms = ['Metaculus'] // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
+  let json = await databaseReadWithReadCredentials({ group: "combined" })
   console.log(json.length)
   //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
   //console.log(uniquePlatforms)
-  
+
   let forecastsFromGoodPlatforms = json.filter(forecast => highQualityPlatforms.includes(forecast.platform))
-  let forecastsFromGoodPlatformsShuffled = shuffleArray(forecastsFromGoodPlatforms)
-  let tsv = "index\ttitle\turl\tqualityindicators\n"+forecastsFromGoodPlatforms
+  let forecastsFromGoodPlatformsShuffled = shuffleArray(forecastsFromGoodPlatforms)
+  let tsv = "index\ttitle\turl\tqualityindicators\n" + forecastsFromGoodPlatformsShuffled // reference the shuffled list, which was previously declared but unused
     .map((forecast, index) => {
       let row = `${index}\t${forecast.title}\t${forecast.url}\t${getQualityIndicators(forecast)}`
       console.log(row)
diff --git a/src/utils/misc/process-forecasts-into-elicit.js b/src/utils/misc/process-forecasts-into-elicit.js
index da8af0e..ec29843 100644
--- a/src/utils/misc/process-forecasts-into-elicit.js
+++ b/src/utils/misc/process-forecasts-into-elicit.js
@@ -8,7 +8,7 @@ let locationData = "./data/"

 /* Body */
 // let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
 async function main(){
-  let data = await databaseReadWithReadCredentials("metaforecasts") //JSON.parse(rawdata)
+  let data = await databaseReadWithReadCredentials({ group: "combined" }) //JSON.parse(rawdata)
   let processDescription = (description) => {
     if(description == null || description == undefined || description == ""){
       return ""