diff --git a/data/astralcodexten-questions.json b/data/old/astralcodexten-questions.json similarity index 100% rename from data/astralcodexten-questions.json rename to data/old/astralcodexten-questions.json diff --git a/data/betfair-questions.json b/data/old/betfair-questions.json similarity index 100% rename from data/betfair-questions.json rename to data/old/betfair-questions.json diff --git a/data/csetforetell-questions.json b/data/old/csetforetell-questions.json similarity index 100% rename from data/csetforetell-questions.json rename to data/old/csetforetell-questions.json diff --git a/data/elicit-output.txt b/data/old/elicit-output.txt similarity index 100% rename from data/elicit-output.txt rename to data/old/elicit-output.txt diff --git a/data/elicit-questions.csv b/data/old/elicit-questions.csv similarity index 100% rename from data/elicit-questions.csv rename to data/old/elicit-questions.csv diff --git a/data/elicit-questions.json b/data/old/elicit-questions.json similarity index 100% rename from data/elicit-questions.json rename to data/old/elicit-questions.json diff --git a/data/fantasyscotus-questions.json b/data/old/fantasyscotus-questions.json similarity index 100% rename from data/fantasyscotus-questions.json rename to data/old/fantasyscotus-questions.json diff --git a/data/foretold-questions.json b/data/old/foretold-questions.json similarity index 100% rename from data/foretold-questions.json rename to data/old/foretold-questions.json diff --git a/data/frontpage.json b/data/old/frontpage.json similarity index 100% rename from data/frontpage.json rename to data/old/frontpage.json diff --git a/data/goodjudgment-questions.json b/data/old/goodjudgment-questions.json similarity index 100% rename from data/goodjudgment-questions.json rename to data/old/goodjudgment-questions.json diff --git a/data/goodjudmentopen-questions.json b/data/old/goodjudmentopen-questions.json similarity index 100% rename from data/goodjudmentopen-questions.json rename to 
data/old/goodjudmentopen-questions.json diff --git a/data/hypermind-questions.json b/data/old/hypermind-questions.json similarity index 100% rename from data/hypermind-questions.json rename to data/old/hypermind-questions.json diff --git a/data/ladbrokes-questions.json b/data/old/ladbrokes-questions.json similarity index 100% rename from data/ladbrokes-questions.json rename to data/old/ladbrokes-questions.json diff --git a/data/metaculus-questions.json b/data/old/metaculus-questions.json similarity index 100% rename from data/metaculus-questions.json rename to data/old/metaculus-questions.json diff --git a/data/metaforecasts.json b/data/old/metaforecasts.json similarity index 100% rename from data/metaforecasts.json rename to data/old/metaforecasts.json diff --git a/data/metaforecasts.png b/data/old/metaforecasts.png similarity index 100% rename from data/metaforecasts.png rename to data/old/metaforecasts.png diff --git a/data/metaforecasts.tsv b/data/old/metaforecasts.tsv similarity index 100% rename from data/metaforecasts.tsv rename to data/old/metaforecasts.tsv diff --git a/data/metaforecasts_history.json b/data/old/metaforecasts_history.json similarity index 100% rename from data/metaforecasts_history.json rename to data/old/metaforecasts_history.json diff --git a/data/metaforecasts_history_temp.json b/data/old/metaforecasts_history_temp.json similarity index 100% rename from data/metaforecasts_history_temp.json rename to data/old/metaforecasts_history_temp.json diff --git a/data/omen-questions.json b/data/old/omen-questions.json similarity index 100% rename from data/omen-questions.json rename to data/old/omen-questions.json diff --git a/data/polymarket-questions.json b/data/old/polymarket-questions.json similarity index 100% rename from data/polymarket-questions.json rename to data/old/polymarket-questions.json diff --git a/data/predictit-questions.json b/data/old/predictit-questions.json similarity index 100% rename from data/predictit-questions.json rename 
to data/old/predictit-questions.json diff --git a/data/smarkets-questions.json b/data/old/smarkets-questions.json similarity index 100% rename from data/smarkets-questions.json rename to data/old/smarkets-questions.json diff --git a/data/stars.csv b/data/old/stars.csv similarity index 100% rename from data/stars.csv rename to data/old/stars.csv diff --git a/data/template-questions.json b/data/old/template-questions.json similarity index 100% rename from data/template-questions.json rename to data/old/template-questions.json diff --git a/data/williamhill-questions.json b/data/old/williamhill-questions.json similarity index 100% rename from data/williamhill-questions.json rename to data/old/williamhill-questions.json diff --git a/data/old/MichaelADatabaseOfXRiskEstimates.csv b/data/older/MichaelADatabaseOfXRiskEstimates.csv similarity index 100% rename from data/old/MichaelADatabaseOfXRiskEstimates.csv rename to data/older/MichaelADatabaseOfXRiskEstimates.csv diff --git a/data/old/givewellopenphil-questions-old-withouttimestampandqualityindicators.json b/data/older/givewellopenphil-questions-old-withouttimestampandqualityindicators.json similarity index 100% rename from data/old/givewellopenphil-questions-old-withouttimestampandqualityindicators.json rename to data/older/givewellopenphil-questions-old-withouttimestampandqualityindicators.json diff --git a/data/old/givewellopenphil-questions-processed-old-format.json b/data/older/givewellopenphil-questions-processed-old-format.json similarity index 100% rename from data/old/givewellopenphil-questions-processed-old-format.json rename to data/older/givewellopenphil-questions-processed-old-format.json diff --git a/data/old/givewellopenphil-questions-raw.json b/data/older/givewellopenphil-questions-raw.json similarity index 100% rename from data/old/givewellopenphil-questions-raw.json rename to data/older/givewellopenphil-questions-raw.json diff --git a/data/old/xrisk-questions-raw-indirect-voice.json 
b/data/older/xrisk-questions-raw-indirect-voice.json similarity index 100% rename from data/old/xrisk-questions-raw-indirect-voice.json rename to data/older/xrisk-questions-raw-indirect-voice.json diff --git a/data/old/xrisk-questions-raw.json b/data/older/xrisk-questions-raw.json similarity index 100% rename from data/old/xrisk-questions-raw.json rename to data/older/xrisk-questions-raw.json diff --git a/data/old/xrisk-questions.json b/data/older/xrisk-questions.json similarity index 100% rename from data/old/xrisk-questions.json rename to data/older/xrisk-questions.json diff --git a/src/database/database-wrapper.js b/src/database/database-wrapper.js new file mode 100644 index 0000000..c3ceafa --- /dev/null +++ b/src/database/database-wrapper.js @@ -0,0 +1,13 @@ +import {mongoUpsert, mongoRead, mongoReadWithReadCredentials, mongoGetAllElements} from "./mongo-wrapper.js" + +export const databaseUpsert = mongoUpsert; +// databaseUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") + +export const databaseRead = mongoRead; +// databaseRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") + +export const databaseReadWithReadCredentials = mongoReadWithReadCredentials; +// databaseReadWithReadCredentials(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") + +export const databaseGetAllElements = mongoGetAllElements; +// databaseGetAllElements(databaseName = "metaforecastDatabase", collectionName = "metaforecastCollection") \ No newline at end of file diff --git a/src/database/mongo-wrapper.js b/src/database/mongo-wrapper.js index f99da35..b219a02 100644 --- a/src/database/mongo-wrapper.js +++ b/src/database/mongo-wrapper.js @@ -35,7 +35,7 @@ function roughSizeOfObject(object) { return megaBytesRounded; } -export async function upsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = 
"metaforecastDatabase") { +export async function mongoUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") { const url = process.env.MONGODB_URL || getCookie("mongodb"); const client = new MongoClient(url); try { @@ -58,7 +58,7 @@ export async function upsert(contents, documentName, collectionName = "metaforec // Insert a single document, wait for promise so we can read it back // const p = await collection.insertOne(metaforecastDocument); - await collection.replaceOne(filter, document, { upsert: true }); + await collection.replaceOne(filter, document, { upsert: true }); console.log(`Pushed document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(document)} MB`) // Find one document diff --git a/src/database/pg-wrapper.js b/src/database/pg-wrapper.js new file mode 100644 index 0000000..c0238ae --- /dev/null +++ b/src/database/pg-wrapper.js @@ -0,0 +1,49 @@ +import pkg from 'pg'; +const { Pool } = pkg + +/* Postgres database connection code */ +const pool = new Pool({ + connectionString: process.env.DATABASE_URL, + ssl: { + rejectUnauthorized: false + } +}); + +const tableWhiteList = ["latest.combined"] + +export async function pgRead(tableName="latest.combined"){ + if(tableWhiteList.includes(tableName)){ + const client = await pool.connect(); + const result = await client.query(`SELECT * from ${tableName}`); + const results = { 'results': (result) ?
result.rows : null}; + // response.render('pages/db', results ); + client.release(); + return results + }else{ + throw Error("Table not in whitelist; stopping to avoid tricky sql injections") + } +} + +export async function pgInsert(data, tableName="latest.combined"){ + if(tableWhiteList.includes(tableName)){ + let text = `INSERT INTO ${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)` + let values = [ + data.id, + data.title, + data.url, + data.platform, + data.description || '', + data.options || [], + data.timestamp || Date.now(), // fix + data.stars || (data.qualityindicators ? data.qualityindicators.stars : 2), + data.qualityindicators || [], + data.extra || [] + ] + + const client = await pool.connect(); + const result = await client.query(text, values); + client.release(); + }else{ + throw Error("Table not in whitelist; stopping to avoid tricky sql injections") + } +} diff --git a/src/flow/history/addToHistory.js b/src/flow/history/addToHistory.js index f6587d7..190eb2c 100644 --- a/src/flow/history/addToHistory.js +++ b/src/flow/history/addToHistory.js @@ -1,17 +1,17 @@ import { writeFileSync } from "fs" -import { mongoReadWithReadCredentials, upsert } from "../../database/mongo-wrapper.js" -let mongoRead = mongoReadWithReadCredentials +import { databaseReadWithReadCredentials, databaseUpsert } from "../../database/database-wrapper.js" +let databaseRead = databaseReadWithReadCredentials let isEmptyArray = arr => arr.length == 0 export async function addToHistory(){ let currentDate = new Date() let dateUpToMonth = currentDate.toISOString().slice(0,7).replace("-", "_") - let currentJSONwithMetaculus = await mongoRead("metaforecasts") + let currentJSONwithMetaculus = await databaseRead("metaforecasts") let currentJSON = currentJSONwithMetaculus.filter(element => element.platform != "Metaculus" && element.platform != "Estimize") // without Metaculus // console.log(currentJSON.slice(0,20)) // console.log(currentJSON) - let historyJSON = await 
mongoRead(`metaforecast_history_${dateUpToMonth}`,"metaforecastHistory") + let historyJSON = await databaseRead(`metaforecast_history_${dateUpToMonth}`,"metaforecastHistory") // console.log(historyJSON) let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url ))) @@ -59,7 +59,7 @@ export async function addToHistory(){ newHistoryJSON.push(newHistoryElement) } - await upsert(newHistoryJSON, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory") + await databaseUpsert(newHistoryJSON, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory") // console.log(newHistoryJSON.slice(0,5)) // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2)) diff --git a/src/flow/history/createHistoryForMonth.js b/src/flow/history/createHistoryForMonth.js index 879ec8d..ff55a2c 100644 --- a/src/flow/history/createHistoryForMonth.js +++ b/src/flow/history/createHistoryForMonth.js @@ -1,9 +1,9 @@ -import { mongoRead, upsert } from "../../database/mongo-wrapper.js" +import { databaseRead, databaseUpsert } from "../../database/database-wrapper.js" export async function createHistoryForMonth(){ let currentDate = new Date() let dateUpToMonth = currentDate.toISOString().slice(0,7).replace("-", "_") - let metaforecasts = await mongoRead("metaforecasts") + let metaforecasts = await databaseRead("metaforecasts") let metaforecastsHistorySeed = metaforecasts.map(element => { // let moreoriginsdata = element.author ? 
({author: element.author}) : ({}) return ({ @@ -21,7 +21,7 @@ export async function createHistoryForMonth(){ }) }).filter(element => element.platform != "Metaculus" && element.platform != "Estimize") //console.log(metaforecastsHistorySeed) - await upsert(metaforecastsHistorySeed, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory") + await databaseUpsert(metaforecastsHistorySeed, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory") } ////createInitialHistory() \ No newline at end of file diff --git a/src/flow/history/old/addToHistory_old.js b/src/flow/history/old/addToHistory_old.js index 7789b41..7ba4491 100644 --- a/src/flow/history/old/addToHistory_old.js +++ b/src/flow/history/old/addToHistory_old.js @@ -1,13 +1,13 @@ import { writeFileSync } from "fs" -import { mongoReadWithReadCredentials, upsert } from "../mongo-wrapper.js" -let mongoRead = mongoReadWithReadCredentials +import { databaseReadWithReadCredentials, databaseUpsert } from "../database-wrapper.js" +let databaseRead = databaseReadWithReadCredentials let isEmptyArray = arr => arr.length == 0 export async function addToHistory(){ // throw new Error("Not today") - let currentJSON = await mongoRead("metaforecasts") + let currentJSON = await databaseRead("metaforecasts") // console.log(currentJSON) - let historyJSON = await mongoRead("metaforecast_history") + let historyJSON = await databaseRead("metaforecast_history") // console.log(historyJSON) let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url ))) @@ -55,7 +55,7 @@ export async function addToHistory(){ newHistoryJSON.push(newHistoryElement) } - upsert(newHistoryJSON, "metaforecast_history") + databaseUpsert(newHistoryJSON, "metaforecast_history") // console.log(newHistoryJSON.slice(0,5)) // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2)) // 
writefile(JSON.stringify(newHistoryJSON, null, 2), "metaforecasts_history", "", ".json") diff --git a/src/flow/history/old/createInitialHistoryWithMetaculus.js b/src/flow/history/old/createInitialHistoryWithMetaculus.js index e1cff01..01bb17e 100644 --- a/src/flow/history/old/createInitialHistoryWithMetaculus.js +++ b/src/flow/history/old/createInitialHistoryWithMetaculus.js @@ -1,7 +1,7 @@ -import { mongoRead, upsert } from "../mongo-wrapper.js" +import { databaseRead, databaseUpsert } from "../database-wrapper.js" let createInitialHistory = async () => { - let metaforecasts = await mongoRead("metaforecasts") + let metaforecasts = await databaseRead("metaforecasts") let metaforecastsHistorySeed = metaforecasts.map(element => { // let moreoriginsdata = element.author ? ({author: element.author}) : ({}) return ({ @@ -19,7 +19,7 @@ let createInitialHistory = async () => { }) }) console.log(metaforecastsHistorySeed) - await upsert(metaforecastsHistorySeed, "metaforecast_history") + await databaseUpsert(metaforecastsHistorySeed, "metaforecast_history") } createInitialHistory() \ No newline at end of file diff --git a/src/flow/mergeEverything.js b/src/flow/mergeEverything.js index 77b4910..8959cba 100644 --- a/src/flow/mergeEverything.js +++ b/src/flow/mergeEverything.js @@ -1,4 +1,4 @@ -import { mongoRead, upsert } from "../database/mongo-wrapper.js"; +import { databaseRead, databaseUpsert } from "../database/database-wrapper.js"; import { platformNames } from "../platforms/all-platforms.js" /* Merge everything */ let suffix = "-questions"; @@ -6,7 +6,7 @@ let suffix = "-questions"; export async function mergeEverythingInner() { let merged = []; for (let platformName of platformNames) { - let json = await mongoRead(platformName + suffix); + let json = await databaseRead(platformName + suffix); console.log(`${platformName} has ${json.length} questions\n`); merged = merged.concat(json); } @@ -22,6 +22,6 @@ export async function mergeEverythingInner() { export async 
function mergeEverything() { let merged = await mergeEverythingInner(); - await upsert(merged, "metaforecasts"); + await databaseUpsert(merged, "metaforecasts"); console.log("Done"); } diff --git a/src/manual/downloadFrontpage.js b/src/manual/downloadFrontpage.js index 111d248..39beab2 100644 --- a/src/manual/downloadFrontpage.js +++ b/src/manual/downloadFrontpage.js @@ -1,5 +1,5 @@ import fs from "fs"; -import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js"; +import { databaseReadWithReadCredentials } from "../database/database-wrapper.js"; let filename = "/home/loki/Documents/core/software/fresh/js/metaforecast/metaforecast-backend/data/frontpage.json"; @@ -27,7 +27,7 @@ let shuffle = (array) => { let main = async () => { let init = Date.now(); - let json = await mongoReadWithReadCredentials("metaforecasts"); + let json = await databaseReadWithReadCredentials("metaforecasts"); json = json.filter( (forecast) => diff --git a/src/manual/manualDownload.js b/src/manual/manualDownload.js index e6df9d7..b0cb153 100644 --- a/src/manual/manualDownload.js +++ b/src/manual/manualDownload.js @@ -1,9 +1,9 @@ import fs from "fs" -import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js" +import { databaseReadWithReadCredentials } from "../database/database-wrapper.js" let main = async () => { - let json = await mongoReadWithReadCredentials("metaforecasts") + let json = await databaseReadWithReadCredentials("metaforecasts") let string = JSON.stringify(json, null, 2) let filename = 'metaforecasts.json' fs.writeFileSync(filename, string); diff --git a/src/manual/manualSendToMongo.js b/src/manual/manualSendToMongo.js index 941f36c..f6f7e8f 100644 --- a/src/manual/manualSendToMongo.js +++ b/src/manual/manualSendToMongo.js @@ -1,5 +1,5 @@ import fs from "fs"; -import { mongoRead, upsert } from "../database/mongo-wrapper.js"; +import { databaseRead, databaseUpsert } from "../database/database-wrapper.js"; /* This is necessary for estimize, the 
database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once. */ @@ -12,7 +12,7 @@ let main = async () => { let fileRaw = fs.readFileSync(`./src/input/${file + suffixFiles}`); let fileContents = JSON.parse(fileRaw); console.log(fileContents); - await upsert(fileContents, file + suffixMongo); + await databaseUpsert(fileContents, file + suffixMongo); } }; main(); diff --git a/src/manual/pullSuperforecastsManually.sh b/src/manual/pullSuperforecastsManually.sh index 331ad7e..885d2a0 100755 --- a/src/manual/pullSuperforecastsManually.sh +++ b/src/manual/pullSuperforecastsManually.sh @@ -1,4 +1,4 @@ #!/bin/bash cd /home/loki/Documents/core/software/fresh/js/metaforecast/metaforecast-backend -date > done.txt -/home/loki/.nvm/versions/node/v16.8.0/bin/node ./src/manual/pullSuperforecastsManually.js >> done.txt +date > ./notes/last-superforecast-pull.txt +/home/loki/.nvm/versions/node/v16.8.0/bin/node ./src/manual/pullSuperforecastsManually.js >> ./notes/last-superforecast-pull.txt \ No newline at end of file diff --git a/src/platforms/betfair-fetch.js b/src/platforms/betfair-fetch.js index 9cd9589..79de674 100644 --- a/src/platforms/betfair-fetch.js +++ b/src/platforms/betfair-fetch.js @@ -3,7 +3,7 @@ import fs from "fs"; import axios from "axios"; import https from "https"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let endpoint = process.env.SECRET_BETFAIR_ENDPOINT; @@ -143,7 +143,7 @@ export async function betfair() { // console.log(results.map(result => ({title: result.title, description: result.description}))) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('polyprediction-questions.json', string); - await upsert(results, "betfair-questions"); + await databaseUpsert(results, "betfair-questions"); console.log("Done"); } // 
betfair() diff --git a/src/platforms/deprecated/astralcodexten-fetch.js b/src/platforms/deprecated/astralcodexten-fetch.js index 4cf27f3..394a2dc 100644 --- a/src/platforms/deprecated/astralcodexten-fetch.js +++ b/src/platforms/deprecated/astralcodexten-fetch.js @@ -2,7 +2,7 @@ import fs from 'fs' import axios from "axios" import { calculateStars } from "../utils/stars.js" -import { upsert } from "../utils/mongo-wrapper.js" +import { databaseUpsert } from "../utils/database-wrapper.js" /* Definitions */ let graphQLendpoint = "https://api.foretold.io/graphql" @@ -96,7 +96,7 @@ export async function astralcodexten(){ console.log(JSON.stringify(results, null, 2)) fs.writeFileSync('./data/astralcodexten-questions.json', string); */ - await upsert(results, "astralcodexten-questions") + await databaseUpsert(results, "astralcodexten-questions") // console.log(results) console.log("Done") } diff --git a/src/platforms/deprecated/coupcast-fetch.js b/src/platforms/deprecated/coupcast-fetch.js index 8cdf5a2..6ae9d7e 100644 --- a/src/platforms/deprecated/coupcast-fetch.js +++ b/src/platforms/deprecated/coupcast-fetch.js @@ -5,7 +5,7 @@ import Papa from "papaparse" import open from "open" import readline from "readline" import {calculateStars} from "../utils/stars.js" -import {upsert} from "../utils/mongo-wrapper.js" +import {databaseUpsert} from "../utils/database-wrapper.js" /* Definitions */ let coupCastEndpoint = "https://www.oneearthfuture.org/sites/all/themes/stability/stability_sub/data/dashboard_2021_code_06.csv" @@ -142,7 +142,7 @@ async function processArray(countryArray) { } // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/elicit-questions.json', string); - await upsert(results, "coupcast-questions") + await databaseUpsert(results, "coupcast-questions") // console.log(results) console.log("Done") } diff --git a/src/platforms/deprecated/csetforetell-fetch.js b/src/platforms/deprecated/csetforetell-fetch.js index 15c9bcd..53484f0 100644 --- 
a/src/platforms/deprecated/csetforetell-fetch.js +++ b/src/platforms/deprecated/csetforetell-fetch.js @@ -4,7 +4,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js" import { Tabletojson } from "tabletojson" import toMarkdown from "../utils/toMarkdown.js" import { calculateStars } from "../utils/stars.js" -import { upsert } from "../utils/mongo-wrapper.js" +import { databaseUpsert } from "../utils/database-wrapper.js" /* Definitions */ let htmlEndPoint = 'https://www.cset-foretell.com/questions?page=' @@ -237,7 +237,7 @@ async function csetforetell_inner(cookie) { // fs.writeFileSync('./data/csetforetell-questions.json', string); // console.log(results) if (results.length > 0) { - await upsert(results, "csetforetell-questions") + await databaseUpsert(results, "csetforetell-questions") } else { console.log("Not updating results, as process was not signed in") } diff --git a/src/platforms/deprecated/elicit-fetch.js b/src/platforms/deprecated/elicit-fetch.js index 59cb4bf..7f0f91d 100644 --- a/src/platforms/deprecated/elicit-fetch.js +++ b/src/platforms/deprecated/elicit-fetch.js @@ -5,7 +5,7 @@ import Papa from "papaparse" import open from "open" import readline from "readline" import {calculateStars} from "../utils/stars.js" -import {upsert} from "../utils/mongo-wrapper.js" +import {databaseUpsert} from "../utils/database-wrapper.js" /* Definitions */ let elicitEndpoint = "https://elicit.org/api/v1/binary-questions/csv?binaryQuestions.resolved=false&binaryQuestions.search=&binaryQuestions.sortBy=popularity&predictors=community" @@ -84,7 +84,7 @@ async function processArray(arrayQuestions) { } // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/elicit-questions.json', string); - await upsert(results, "elicit-questions") + await databaseUpsert(results, "elicit-questions") console.log("Done") } diff --git a/src/platforms/deprecated/estimize-fetch.js b/src/platforms/deprecated/estimize-fetch.js index 0c7535c..cb21a8b 
100644 --- a/src/platforms/deprecated/estimize-fetch.js +++ b/src/platforms/deprecated/estimize-fetch.js @@ -1,6 +1,6 @@ import fs from "fs" import {calculateStars} from "../utils/stars.js" -import {upsert} from "../utils/mongo-wrapper.js" +import {databaseUpsert} from "../utils/database-wrapper.js" export async function estimize(){ @@ -31,6 +31,6 @@ export async function estimize(){ // console.log(results) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/estimize-questions.json', string); - await upsert(results, "estimize-questions") + await databaseUpsert(results, "estimize-questions") } //estimize() diff --git a/src/platforms/deprecated/hypermind-fetch.js b/src/platforms/deprecated/hypermind-fetch.js index 57875c9..3cd606a 100644 --- a/src/platforms/deprecated/hypermind-fetch.js +++ b/src/platforms/deprecated/hypermind-fetch.js @@ -6,7 +6,7 @@ import fetch from "isomorphic-fetch" import {getCookie, applyIfCookieExists} from "../utils/getCookies.js" import toMarkdown from "../utils/toMarkdown.js" import { calculateStars } from "../utils/stars.js" -import { upsert } from "../utils/mongo-wrapper.js" +import { databaseUpsert } from "../utils/database-wrapper.js" /* Definitions */ let hypermindEnpoint1 = 'https://predict.hypermind.com/dash/jsx.json' @@ -172,7 +172,7 @@ async function hypermind_inner(cookie) { console.log(resultsTotalUnique.length, "results") // let string = JSON.stringify(resultsTotalUnique, null, 2) // fs.writeFileSync('./data/hypermind-questions.json', string); - await upsert(resultsTotalUnique, "hypermind-questions") + await databaseUpsert(resultsTotalUnique, "hypermind-questions") } //hypermind() diff --git a/src/platforms/deprecated/ladbrokes-fetch.js b/src/platforms/deprecated/ladbrokes-fetch.js index da091f5..172dbf4 100644 --- a/src/platforms/deprecated/ladbrokes-fetch.js +++ b/src/platforms/deprecated/ladbrokes-fetch.js @@ -2,7 +2,7 @@ import axios from "axios"; import fs from "fs"; import { calculateStars } from 
"../utils/stars.js"; -import { upsert } from "../utils/mongo-wrapper.js"; +import { databaseUpsert } from "../utils/database-wrapper.js"; /* Definitions */ let endpointPolitics = `https://ss-aka-ori.ladbrokes.com/openbet-ssviewer/Drilldown/2.31/EventToOutcomeForClass/302,301,300?simpleFilter=event.siteChannels:contains:M&simpleFilter=event.eventSortCode:intersects:TNMT,TR01,TR02,TR03,TR04,TR05,TR06,TR07,TR08,TR09,TR10,TR11,TR12,TR13,TR14,TR15,TR16,TR17,TR18,TR19,TR20&simpleFilter=event.suspendAtTime:greaterThan:${new Date().toISOString()}.000Z&limitRecords=outcome:1&limitRecords=market:1&translationLang=en&responseFormat=json&prune=event&prune=market`; @@ -140,7 +140,7 @@ export async function ladbrokes() { // console.log(results) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/ladbrokes-questions.json', string); - await upsert(results, "ladbrokes-questions"); + await databaseUpsert(results, "ladbrokes-questions"); console.log("Done"); } //ladbrokes() diff --git a/src/platforms/deprecated/omen-fetch.js b/src/platforms/deprecated/omen-fetch.js index 93afc30..3b00c5c 100644 --- a/src/platforms/deprecated/omen-fetch.js +++ b/src/platforms/deprecated/omen-fetch.js @@ -2,7 +2,7 @@ import fs from 'fs' import axios from "axios" import { calculateStars } from "../../utils/stars.js" -import {upsert} from "../../utils/mongo-wrapper.js" +import {databaseUpsert} from "../../utils/database-wrapper.js" /* Definitions */ let graphQLendpoint = "https://api.thegraph.com/subgraphs/name/protofire/omen" @@ -93,7 +93,7 @@ export async function omen() { // console.log(result) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/omen-questions.json', string); - await upsert(results, "omen-questions") + await databaseUpsert(results, "omen-questions") console.log("Done") } //omen() diff --git a/src/platforms/deprecated/williamhill-fetch.js b/src/platforms/deprecated/williamhill-fetch.js index 82a44d6..eaa5b21 100644 --- 
a/src/platforms/deprecated/williamhill-fetch.js +++ b/src/platforms/deprecated/williamhill-fetch.js @@ -3,7 +3,7 @@ import axios from "axios" import fs from "fs" import toMarkdown from "../utils/toMarkdown.js" import {calculateStars} from "../utils/stars.js" -import { upsert } from "../utils/mongo-wrapper.js" +import { databaseUpsert } from "../utils/database-wrapper.js" /* Definitions */ let endpoint = "https://sports.williamhill.com/betting/en-gb/politics" @@ -129,7 +129,7 @@ export async function williamhill() { let results = processResults(response) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/williamhill-questions.json', string); - await upsert(results, "williamhill-questions") + await databaseUpsert(results, "williamhill-questions") console.log(results.sort((a,b) => (a.title > b.title))) console.log("Done") } diff --git a/src/platforms/example-fetch.js b/src/platforms/example-fetch.js index 6cb46c2..561dddf 100644 --- a/src/platforms/example-fetch.js +++ b/src/platforms/example-fetch.js @@ -3,7 +3,7 @@ import fs from "fs"; import axios from "axios"; import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let endpoint = "https://example.com/"; @@ -64,7 +64,7 @@ export async function example() { // console.log(results) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('polyprediction-questions.json', string); - await upsert(results, "example-questions"); + await databaseUpsert(results, "example-questions"); console.log("Done"); } //example() diff --git a/src/platforms/fantasyscotus-fetch.js b/src/platforms/fantasyscotus-fetch.js index 956cb22..0ba7854 100644 --- a/src/platforms/fantasyscotus-fetch.js +++ b/src/platforms/fantasyscotus-fetch.js @@ -2,7 +2,7 @@ import fs from "fs"; import axios from "axios"; import { calculateStars 
} from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let unixtime = new Date().getTime(); @@ -118,7 +118,7 @@ export async function fantasyscotus() { //console.log(results) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/fantasyscotus-questions.json', string); - await upsert(results, "fantasyscotus-questions"); + await databaseUpsert(results, "fantasyscotus-questions"); console.log("Done"); } //fantasyscotus() diff --git a/src/platforms/foretold-fetch.js b/src/platforms/foretold-fetch.js index 4b9d812..3deaf69 100644 --- a/src/platforms/foretold-fetch.js +++ b/src/platforms/foretold-fetch.js @@ -2,7 +2,7 @@ import fs from "fs"; import axios from "axios"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let graphQLendpoint = "https://api.foretold.io/graphql"; @@ -101,7 +101,7 @@ export async function foretold() { } // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/foretold-questions.json', string); - await upsert(results, "foretold-questions"); + await databaseUpsert(results, "foretold-questions"); console.log("Done"); } // foretold() diff --git a/src/platforms/givewellopenphil-fetch.js b/src/platforms/givewellopenphil-fetch.js index 3015b67..eb1e5b8 100644 --- a/src/platforms/givewellopenphil-fetch.js +++ b/src/platforms/givewellopenphil-fetch.js @@ -3,7 +3,7 @@ import fs from "fs"; import axios from "axios"; import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let locationData = "./data/"; @@ -64,6 +64,6 @@ async function main() { } // let string = 
JSON.stringify(results, null, 2) // fs.writeFileSync('./data/givewell-questions-unprocessed.json', string); - await upsert(results, "givewell-questions-unprocessed"); + await databaseUpsert(results, "givewell-questions-unprocessed"); } main(); diff --git a/src/platforms/goodjudgment-fetch.js b/src/platforms/goodjudgment-fetch.js index a707140..0e65ade 100644 --- a/src/platforms/goodjudgment-fetch.js +++ b/src/platforms/goodjudgment-fetch.js @@ -6,7 +6,7 @@ import { Tabletojson } from "tabletojson"; import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; import { hash } from "../utils/hash.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let endpoint = "https://goodjudgment.io/superforecasts/"; @@ -122,7 +122,7 @@ export async function goodjudgment() { // fs.writeFileSync('./data/goodjudgment-questions.json', string); // fs.writeFileSync('./goodjudgment-questions-test.json', string); console.log(results); - await upsert(results, "goodjudgment-questions"); + await databaseUpsert(results, "goodjudgment-questions"); console.log( "Failing is not unexpected; see utils/pullSuperforecastsManually.sh/js" ); diff --git a/src/platforms/goodjudmentopen-fetch.js b/src/platforms/goodjudmentopen-fetch.js index 2680235..537470b 100644 --- a/src/platforms/goodjudmentopen-fetch.js +++ b/src/platforms/goodjudmentopen-fetch.js @@ -5,7 +5,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"; import { Tabletojson } from "tabletojson"; import { calculateStars } from "../utils/stars.js"; import toMarkdown from "../utils/toMarkdown.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let htmlEndPoint = "https://www.gjopen.com/questions?page="; @@ -223,7 +223,7 @@ async function goodjudgmentopen_inner(cookie) { // 
fs.writeFileSync('./data/goodjudmentopen-questions.json', string); console.log(results); if (results.length > 0) { - await upsert(results, "goodjudmentopen-questions"); + await databaseUpsert(results, "goodjudmentopen-questions"); } else { console.log("Not updating results, as process was not signed in"); } diff --git a/src/platforms/infer-fetch.js b/src/platforms/infer-fetch.js index 41baf9d..3a7d80d 100644 --- a/src/platforms/infer-fetch.js +++ b/src/platforms/infer-fetch.js @@ -4,7 +4,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"; import { Tabletojson } from "tabletojson"; import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let htmlEndPoint = "https://www.infer-pub.com/questions"; @@ -269,7 +269,7 @@ async function infer_inner(cookie) { // fs.writeFileSync('./data/infer-questions.json', string); // console.log(results) if (results.length > 0) { - await upsert(results, "infer-questions"); + await databaseUpsert(results, "infer-questions"); } else { console.log("Not updating results, as process was not signed in"); } diff --git a/src/platforms/kalshi-fetch.js b/src/platforms/kalshi-fetch.js index b450d92..0016ce5 100644 --- a/src/platforms/kalshi-fetch.js +++ b/src/platforms/kalshi-fetch.js @@ -2,7 +2,7 @@ import fs from "fs"; import axios from "axios"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3' @@ -83,7 
+83,7 @@ export async function kalshi() { // console.log(results) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('polymarket-questions.json', string); - await upsert(results, "kalshi-questions"); + await databaseUpsert(results, "kalshi-questions"); console.log("Done"); } // kalshi() diff --git a/src/platforms/manifoldmarkets-fetch.js b/src/platforms/manifoldmarkets-fetch.js index c9c754f..e18473f 100644 --- a/src/platforms/manifoldmarkets-fetch.js +++ b/src/platforms/manifoldmarkets-fetch.js @@ -2,7 +2,7 @@ import fs from "fs"; import axios from "axios"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let endpoint = "https://us-central1-mantic-markets.cloudfunctions.net/markets"; @@ -96,7 +96,7 @@ export async function manifoldmarkets() { // console.log(results) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('polyprediction-questions.json', string); - await upsert(results, "manifoldmarkets-questions"); + await databaseUpsert(results, "manifoldmarkets-questions"); console.log("Done"); } // manifoldmarkets() diff --git a/src/platforms/metaculus-fetch.js b/src/platforms/metaculus-fetch.js index 597d39e..198541f 100644 --- a/src/platforms/metaculus-fetch.js +++ b/src/platforms/metaculus-fetch.js @@ -3,7 +3,7 @@ import axios from "axios"; import fs from "fs"; import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page="; @@ -154,7 +154,7 @@ export async function metaculus() { // let string = JSON.stringify(all_questions, null, 2) // fs.writeFileSync('./metaculus-questions.json', string); - await upsert(all_questions, "metaculus-questions"); 
+ await databaseUpsert(all_questions, "metaculus-questions"); console.log("Done"); } diff --git a/src/platforms/polymarket-fetch.js b/src/platforms/polymarket-fetch.js index daa27a4..d86d610 100644 --- a/src/platforms/polymarket-fetch.js +++ b/src/platforms/polymarket-fetch.js @@ -2,7 +2,7 @@ import fs from "fs"; import axios from "axios"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let graphQLendpoint = @@ -152,7 +152,7 @@ export async function polymarket() { // console.log(results) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('polymarket-questions.json', string); - await upsert(results, "polymarket-questions"); + await databaseUpsert(results, "polymarket-questions"); console.log("Done"); } // polymarket() diff --git a/src/platforms/predictit-fetch.js b/src/platforms/predictit-fetch.js index 3d520f1..5d6ae8e 100644 --- a/src/platforms/predictit-fetch.js +++ b/src/platforms/predictit-fetch.js @@ -3,7 +3,7 @@ import fs from "fs"; import axios from "axios"; import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Support functions */ async function fetchmarkets() { @@ -110,7 +110,7 @@ export async function predictit() { //console.log(results) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/predictit-questions.json', string); - await upsert(results, "predictit-questions"); + await databaseUpsert(results, "predictit-questions"); console.log("Done"); } diff --git a/src/platforms/rootclaim-fetch.js b/src/platforms/rootclaim-fetch.js index 1a97668..b191165 100644 --- a/src/platforms/rootclaim-fetch.js +++ b/src/platforms/rootclaim-fetch.js @@ -3,7 +3,7 @@ import fs from "fs"; import axios from "axios"; 
import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let jsonEndpoint = @@ -69,7 +69,7 @@ export async function rootclaim() { //console.log(JSON.stringify(results, null, 4)) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('rootclaim-questions.json', string); - await upsert(results, "rootclaim-questions"); + await databaseUpsert(results, "rootclaim-questions"); console.log("Done"); } //rootclaim() diff --git a/src/platforms/smarkets-fetch.js b/src/platforms/smarkets-fetch.js index 2ba6539..8e67954 100644 --- a/src/platforms/smarkets-fetch.js +++ b/src/platforms/smarkets-fetch.js @@ -3,7 +3,7 @@ import fs from "fs"; import axios from "axios"; import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* Definitions */ let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/"; @@ -153,6 +153,6 @@ export async function smarkets() { // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('./data/smarkets-questions.json', string); - await upsert(results, "smarkets-questions"); + await databaseUpsert(results, "smarkets-questions"); } //smarkets() diff --git a/src/platforms/wildeford-fetch.js b/src/platforms/wildeford-fetch.js index 364d5c7..6a6342b 100644 --- a/src/platforms/wildeford-fetch.js +++ b/src/platforms/wildeford-fetch.js @@ -6,7 +6,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"; import toMarkdown from "../utils/toMarkdown.js"; import { calculateStars } from "../utils/stars.js"; import { hash } from "../utils/hash.js"; -import { upsert } from "../database/mongo-wrapper.js"; +import { databaseUpsert } from "../database/database-wrapper.js"; /* 
Definitions */ const SHEET_ID = "1xcgYF7Q0D95TPHLLSgwhWBHFrWZUGJn7yTyAhDR4vi0"; // spreadsheet key is the long id in the sheets URL @@ -125,7 +125,7 @@ export async function wildeford_inner(google_api_key) { // console.log(results.sort((a,b) => (a.title > b.title))) // let string = JSON.stringify(results, null, 2) // fs.writeFileSync('polyprediction-questions.json', string); - await upsert(results, "wildeford-questions"); + await databaseUpsert(results, "wildeford-questions"); console.log("Done"); } //example() diff --git a/src/utils/algolia.js b/src/utils/algolia.js index 20f23ca..688659d 100644 --- a/src/utils/algolia.js +++ b/src/utils/algolia.js @@ -1,7 +1,7 @@ import algoliasearch from 'algoliasearch'; import fs from "fs" import {getCookie} from "./getCookies.js" -import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js" +import { databaseReadWithReadCredentials } from "../database/database-wrapper.js" import { mergeEverythingInner } from '../flow/mergeEverything.js'; let cookie = process.env.ALGOLIA_MASTER_API_KEY || getCookie("algolia") @@ -23,7 +23,7 @@ export async function rebuildAlgoliaDatabaseTheHardWay(){ } export async function rebuildAlgoliaDatabaseTheEasyWay(){ - let records = await mongoReadWithReadCredentials("metaforecasts") + let records = await databaseReadWithReadCredentials("metaforecasts") records = records.map((record, index) => ({...record, has_numforecasts: record.numforecasts ? 
true : false, objectID: index}) ) // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/ diff --git a/src/utils/evaluations/pullForecastsToCSVForRating.js b/src/utils/evaluations/pullForecastsToCSVForRating.js index 2e2e18e..0afb9a3 100644 --- a/src/utils/evaluations/pullForecastsToCSVForRating.js +++ b/src/utils/evaluations/pullForecastsToCSVForRating.js @@ -1,6 +1,6 @@ /* Imports */ import fs from "fs" -import { mongoReadWithReadCredentials } from "../mongo-wrapper.js" +import { databaseReadWithReadCredentials } from "../database-wrapper.js" /* Definitions */ @@ -13,7 +13,7 @@ let getQualityIndicators = forecast => Object.entries(forecast.qualityindicators let main = async () => { let highQualityPlatforms = ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim'] - let json = await mongoReadWithReadCredentials("metaforecasts") + let json = await databaseReadWithReadCredentials("metaforecasts") console.log(json.length) //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //console.log(uniquePlatforms) diff --git a/src/utils/evaluations/pullMetaculusForecastsToCSVForRating.js b/src/utils/evaluations/pullMetaculusForecastsToCSVForRating.js index dde6cec..5c06c99 100644 --- a/src/utils/evaluations/pullMetaculusForecastsToCSVForRating.js +++ b/src/utils/evaluations/pullMetaculusForecastsToCSVForRating.js @@ -1,6 +1,6 @@ /* Imports */ import fs from "fs" -import { mongoReadWithReadCredentials } from "../mongo-wrapper.js" +import { databaseReadWithReadCredentials } from "../database-wrapper.js" /* Definitions */ @@ -22,7 +22,7 @@ let shuffleArray = (array) => { let main = async () => { let highQualityPlatforms = [ 'Metaculus' ] // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim'] - let json = await mongoReadWithReadCredentials("metaforecasts") + let json 
= await databaseReadWithReadCredentials("metaforecasts") console.log(json.length) //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //console.log(uniquePlatforms) diff --git a/src/utils/misc/process-forecasts-into-elicit.js b/src/utils/misc/process-forecasts-into-elicit.js index 281312d..da8af0e 100644 --- a/src/utils/misc/process-forecasts-into-elicit.js +++ b/src/utils/misc/process-forecasts-into-elicit.js @@ -1,6 +1,6 @@ /* Imports */ import fs from "fs" -import { mongoReadWithReadCredentials } from "../mongo-wrapper.js" +import { databaseReadWithReadCredentials } from "../database-wrapper.js" /* Definitions */ let locationData = "./data/" @@ -8,7 +8,7 @@ let locationData = "./data/" /* Body */ // let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src async function main(){ - let data = await mongoReadWithReadCredentials("metaforecasts") //JSON.parse(rawdata) + let data = await databaseReadWithReadCredentials("metaforecasts") //JSON.parse(rawdata) let processDescription = (description) => { if(description == null || description == undefined || description == ""){ return ""