feat: Read pg database

NunoSempere 2022-02-12 16:04:31 -05:00
parent 6bb3da0e32
commit 6a257c2152
14 changed files with 231 additions and 174 deletions

View File

@@ -1,36 +1,107 @@
import { mongoUpsert, mongoRead, mongoReadWithReadCredentials, mongoGetAllElements } from "./mongo-wrapper.js"
import { pgUpsert, pgRead, pgReadWithReadCredentials } from "./pg-wrapper.js"
export async function databaseUpsert({ contents, group }) {
  // No, this should be more rational, ({ contents, group, schema })? Or should this be managed by this layer? Unclear.
  // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){
  let mongoDocName;
  switch (group) {
    case 'combined':
      mongoDocName = "metaforecasts"
      await mongoUpsert(contents, mongoDocName, "metaforecastCollection", "metaforecastDatabase")
      await pgUpsert({ contents, schema: "latest", tableName: "combined" })
      break;
    case 'history':
      let currentDate = new Date()
      let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_")
      mongoDocName = `metaforecast_history_${dateUpToMonth}`
      await mongoUpsert(contents, mongoDocName, "metaforecastHistory", "metaforecastDatabase")
      // await pgUpsert({ contents, schema: "history", tableName: "combined" })
      break;
    default:
      mongoDocName = `${group}-questions`
      await mongoUpsert(contents, mongoDocName, "metaforecastCollection", "metaforecastDatabase")
      await pgUpsert({ contents, schema: "latest", tableName: group })
  }
}
// databaseUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
export async function databaseRead({ group }) {
  let response, mongoDocName, responseMongo, responsePg
  let currentDate = new Date()
  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_") // e.g., 2022_02

  let displayPossibleResponses = (response1, response2) => {
    console.log("Possible responses:")
    console.log("Mongo: ")
    console.log(response1.slice(0, 2))
    console.log("Postgres: ")
    console.log(response2.slice(0, 2))
    console.log("")
  }

  switch (group) {
    case 'combined':
      mongoDocName = "metaforecasts"
      responseMongo = await mongoRead(mongoDocName, "metaforecastCollection", "metaforecastDatabase")
      responsePg = await pgRead({ schema: "latest", tableName: "combined" })
      displayPossibleResponses(responseMongo, responsePg)
      break;
    case 'history':
      mongoDocName = `metaforecast_history_${dateUpToMonth}`
      responseMongo = await mongoRead(mongoDocName, "metaforecastHistory", "metaforecastDatabase")
      // responsePg = await pgReadWithReadCredentials({ schema: "history", tableName: "combined" }) // fix: make dependent on month.
      break;
    default:
      mongoDocName = `${group}-questions`
      responseMongo = await mongoRead(mongoDocName, "metaforecastCollection", "metaforecastDatabase")
      responsePg = await pgRead({ schema: "latest", tableName: group })
  }
  response = responseMongo // responsePg
  return response
}
// databaseRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
export async function databaseReadWithReadCredentials({ group }) {
  let response, mongoDocName, responseMongo, responsePg
  let currentDate = new Date()
  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_") // e.g., 2022_02

  let displayPossibleResponses = (response1, response2) => {
    console.log("Possible responses:")
    console.log("Mongo: ")
    console.log(response1.slice(0, 2))
    console.log("Postgres: ")
    console.log(response2.slice(0, 2))
    console.log("")
  }

  switch (group) {
    case 'combined':
      mongoDocName = "metaforecasts"
      responseMongo = await mongoReadWithReadCredentials(mongoDocName, "metaforecastCollection", "metaforecastDatabase")
      responsePg = await pgReadWithReadCredentials({ schema: "latest", tableName: "combined" })
      displayPossibleResponses(responseMongo, responsePg)
      break;
    case 'history':
      mongoDocName = `metaforecast_history_${dateUpToMonth}`
      responseMongo = await mongoReadWithReadCredentials(mongoDocName, "metaforecastHistory", "metaforecastDatabase")
      // responsePg = await pgReadWithReadCredentials({ schema: "history", tableName: "combined" }) // fix: make dependent on month.
      break;
    default:
      mongoDocName = `${group}-questions`
      responseMongo = await mongoReadWithReadCredentials(mongoDocName, "metaforecastCollection", "metaforecastDatabase")
      responsePg = await pgReadWithReadCredentials({ schema: "latest", tableName: group })
      displayPossibleResponses(responseMongo, responsePg)
  }
  response = responseMongo // responsePg
  return response
}
// databaseReadWithReadCredentials(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")

export const databaseGetAllElements = mongoGetAllElements;
// databaseGetAllElements(databaseName = "metaforecastDatabase", collectionName = "metaforecastCollection")
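For reference, a minimal usage sketch of the new wrapper API. This is hypothetical calling code, not part of the commit; it assumes an ES module with top-level await, and uses the group names handled by the switches above:

import { databaseRead, databaseReadWithReadCredentials } from "./database-wrapper.js"

// Latest combined forecasts; reads still come from Mongo, with the Postgres
// response logged alongside for comparison
let combined = await databaseRead({ group: "combined" })

// The same read, but over the public read-only credentials
let publicCombined = await databaseReadWithReadCredentials({ group: "combined" })
console.log(publicCombined.slice(0, 2))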

View File

@@ -12,21 +12,35 @@ const tableWhiteList = [...createFullName("latest", tableNamesWhitelist), ...cre
/* Postgres database connection code */
const databaseURL = getSecret("digitalocean-postgres") // process.env.DATABASE_URL || getSecret("heroku-postgres")
const readWritePool = new Pool({
  connectionString: databaseURL,
  ssl: {
    rejectUnauthorized: false
  }
});

const readOnlyDatabaseURL = "postgresql://public_read_only_user:ffKhp52FJNNa8cKK@postgres-red-do-user-10290909-0.b.db.ondigitalocean.com:25060/metaforecastpg?sslmode=require"
const readOnlyPool = new Pool({
  connectionString: readOnlyDatabaseURL,
  ssl: {
    rejectUnauthorized: false
  }
});
// Helpers
const runPgCommand = async ({ command, pool }) => {
  console.log(command)
  let client, results
  try {
    client = await pool.connect();
    const result = await client.query(command);
    results = { 'results': (result) ? result.rows : null };
  } catch (error) {
    console.log(error)
  } finally {
    if (client) client.release();
  }
  // console.log(results)
  return results
}
@@ -48,40 +62,76 @@ let buildMetaforecastTable = (schema, table) => `CREATE TABLE ${schema}.${table}
let createIndex = (schema, table) => `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`
let createUniqueIndex = (schema, table) => `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`
async function setPermissionsForPublicUser() {
  let initCommands = [
    "REVOKE ALL ON DATABASE metaforecastpg FROM public_read_only_user;",
    "GRANT CONNECT ON DATABASE metaforecastpg TO public_read_only_user;"
  ]
  for (let command of initCommands) {
    await runPgCommand({ command, pool: readWritePool })
  }

  let buildGrantSelectForSchema = (schema) => `GRANT SELECT ON ALL TABLES IN SCHEMA ${schema} TO public_read_only_user`
  for (let schema of schemas) {
    await runPgCommand({ command: buildGrantSelectForSchema(schema), pool: readWritePool })
  }

  let alterDefaultPrivilegesForSchema = (schema) => `ALTER DEFAULT PRIVILEGES IN SCHEMA ${schema} GRANT SELECT ON TABLES TO public_read_only_user`
  for (let schema of schemas) {
    await runPgCommand({ command: alterDefaultPrivilegesForSchema(schema), pool: readWritePool })
  }
}
export async function pgInitialize() {
  console.log("Create schemas")
  for (let schema of schemas) {
    await runPgCommand({ command: `CREATE SCHEMA IF NOT EXISTS ${schema}`, pool: readWritePool })
  }
  console.log("")

  console.log("Set search path")
  await runPgCommand({ command: `SET search_path TO ${schemas.join(",")},public;`, pool: readWritePool })
  console.log("")

  console.log("Set public user permissions")
  await setPermissionsForPublicUser()
  console.log("")

  console.log("Create tables & their indexes")
  for (let schema of schemas) {
    for (let table of tableNamesWhitelist) {
      await runPgCommand({ command: dropTable(schema, table), pool: readWritePool })
      await runPgCommand({ command: buildMetaforecastTable(schema, table), pool: readWritePool })
      if (schema == "history") {
        await runPgCommand({ command: createIndex(schema, table), pool: readWritePool })
      } else {
        await runPgCommand({ command: createUniqueIndex(schema, table), pool: readWritePool })
      }
    }
  }
  console.log("")
}
// pgInitialize()
// Read
async function pgReadWithPool({ schema, tableName, pool }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let command = `SELECT * from ${schema}.${tableName}`
    let response = await runPgCommand({ command, pool })
    let results = response.results
    return results
  } else {
    throw Error(`Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`)
  }
}

export async function pgRead({ schema, tableName }) {
  return await pgReadWithPool({ schema, tableName, pool: readWritePool })
}

export async function pgReadWithReadCredentials({ schema, tableName }) {
  return await pgReadWithPool({ schema, tableName, pool: readOnlyPool })
}
export async function pgInsert({ datum, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`

@@ -99,10 +149,14 @@ export async function pgInsert({ datum, schema, tableName }) {
      JSON.stringify(datum.qualityindicators || []),
      JSON.stringify(datum.extra || [])
    ]
    let client, result
    try {
      client = await readWritePool.connect();
      result = await client.query(text, values);
    } catch (error) {
      console.log(error)
    } finally {
      if (client) client.release();
    }
    // console.log(result)
    return result
  } else {
@@ -160,10 +214,10 @@ export async function pgUpsert({ contents, schema, tableName }) {
    }
  }
  console.log(`Inserted rows with approximate cumulative size ${roughSizeOfObject(contents)} MB into ${schema}.${tableName}.`)
  let check = await pgRead({ schema, tableName })
  console.log(`Received rows with approximate cumulative size ${roughSizeOfObject(check)} MB from ${schema}.${tableName}.`)
  console.log("Sample: ")
  console.log(JSON.stringify(check.slice(0, 1), null, 4));
  //console.log(JSON.stringify(check.slice(0, 1), null, 4));
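A sketch of how the pg-wrapper pieces compose. Hypothetical calling code, assuming top-level await and a schema/table pair on the whitelist above; `contents` is a stand-in for an array of forecast objects:

import { pgInitialize, pgUpsert, pgRead, pgReadWithReadCredentials } from "./pg-wrapper.js"

// One-time setup: schemas, public-user permissions, tables, indexes
await pgInitialize()

// Writes go through the read-write pool; reads are whitelist-checked first
let contents = [ /* array of forecast objects */ ]
await pgUpsert({ contents, schema: "latest", tableName: "combined" })
let rows = await pgRead({ schema: "latest", tableName: "combined" })

// The same query, but through the public read-only pool
let publicRows = await pgReadWithReadCredentials({ schema: "latest", tableName: "combined" })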

View File

@@ -1,65 +1,66 @@
import { writeFileSync } from "fs"
import { databaseReadWithReadCredentials, databaseUpsert } from "../../database/database-wrapper.js"

let isEmptyArray = arr => arr.length == 0

export async function addToHistory() {
  let currentDate = new Date()
  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_")

  let currentJSONwithMetaculus = await databaseReadWithReadCredentials({ group: "combined" })
  let currentJSON = currentJSONwithMetaculus.filter(element => element.platform != "Metaculus" && element.platform != "Estimize") // without Metaculus
  // console.log(currentJSON.slice(0,20))
  // console.log(currentJSON)
  let historyJSON = await databaseReadWithReadCredentials({ group: "history" })
  // console.log(historyJSON)

  let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url)))
  // console.log(currentForecastsWithAHistory)

  let currentForecastsWithoutAHistory = currentJSON.filter(element => isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url)))
  // console.log(currentForecastsWithoutAHistory)

  // Add both types of forecast
  let newHistoryJSON = []
  for (let historyElement of historyJSON) {
    let correspondingNewElementArray = currentForecastsWithAHistory.filter(element => historyElement.title == element.title && historyElement.url == element.url)
    // console.log(correspondingNewElement)
    if (!isEmptyArray(correspondingNewElementArray)) {
      let correspondingNewElement = correspondingNewElementArray[0]
      let timeStampOfNewElement = correspondingNewElement.timestamp
      let doesHistoryAlreadyContainElement = historyElement.history.map(element => element.timestamp).includes(timeStampOfNewElement)
      if (!doesHistoryAlreadyContainElement) {
        let historyWithNewElement = historyElement["history"].concat({
          "timestamp": correspondingNewElement.timestamp,
          "options": correspondingNewElement.options,
          "qualityindicators": correspondingNewElement.qualityindicators
        })
        let newHistoryElement = { ...correspondingNewElement, "history": historyWithNewElement }
        // If some element (like the description) changes, we keep the new one.
        newHistoryJSON.push(newHistoryElement)
      } else {
        newHistoryJSON.push(historyElement)
      }
    } else {
      // console.log(historyElement)
      newHistoryJSON.push(historyElement)
    }
  }

  for (let currentForecast of currentForecastsWithoutAHistory) {
    let newHistoryElement = ({
      ...currentForecast, "history": [{
        "timestamp": currentForecast.timestamp,
        "options": currentForecast.options,
        "qualityindicators": currentForecast.qualityindicators
      }]
    })
    delete newHistoryElement.timestamp
    delete newHistoryElement.options
    delete newHistoryElement.qualityindicators
    newHistoryJSON.push(newHistoryElement)
  }

  await databaseUpsert({ contents: newHistoryJSON, group: "history" })
  // console.log(newHistoryJSON.slice(0,5))
  // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))
}

View File

@@ -1,9 +1,9 @@
import { databaseRead, databaseUpsert } from "../../database/database-wrapper.js"

export async function createHistoryForMonth() {
  let currentDate = new Date()
  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_")
  let metaforecasts = await databaseRead({ group: "combined" })
  let metaforecastsHistorySeed = metaforecasts.map(element => {
    // let moreoriginsdata = element.author ? ({author: element.author}) : ({})
    return ({
@@ -18,10 +18,10 @@ export async function createHistoryForMonth(){
        qualityindicators: element.qualityindicators
      }],
      extra: element.extra || {}
    })
  }).filter(element => element.platform != "Metaculus" && element.platform != "Estimize")
  //console.log(metaforecastsHistorySeed)
  await databaseUpsert({ contents: metaforecastsHistorySeed, group: "history" })
}
////createInitialHistory()

View File

@@ -1,68 +0,0 @@
import { writeFileSync } from "fs"
import { databaseReadWithReadCredentials, databaseUpsert } from "../database-wrapper.js"

let databaseRead = databaseReadWithReadCredentials
let isEmptyArray = arr => arr.length == 0

export async function addToHistory(){
  // throw new Error("Not today")
  let currentJSON = await databaseRead("metaforecasts")
  // console.log(currentJSON)
  let historyJSON = await databaseRead("metaforecast_history")
  // console.log(historyJSON)

  let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url)))
  // console.log(currentForecastsWithAHistory)

  let currentForecastsWithoutAHistory = currentJSON.filter(element => isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url)))
  // console.log(currentForecastsWithoutAHistory)

  // Add both types of forecast
  let newHistoryJSON = []
  for(let historyElement of historyJSON){
    let correspondingNewElementArray = currentForecastsWithAHistory.filter(element => historyElement.title == element.title && historyElement.url == element.url)
    // console.log(correspondingNewElement)
    if(!isEmptyArray(correspondingNewElementArray)){
      let correspondingNewElement = correspondingNewElementArray[0]
      let timeStampOfNewElement = correspondingNewElement.timestamp
      let doesHistoryAlreadyContainElement = historyElement.history.map(element => element.timestamp).includes(timeStampOfNewElement)
      if(!doesHistoryAlreadyContainElement){
        let historyWithNewElement = historyElement["history"].concat({
          "timestamp": correspondingNewElement.timestamp,
          "options": correspondingNewElement.options,
          "qualityindicators": correspondingNewElement.qualityindicators
        })
        let newHistoryElement = {...correspondingNewElement, "history": historyWithNewElement}
        // If some element (like the description) changes, we keep the new one.
        newHistoryJSON.push(newHistoryElement)
      }else{
        newHistoryJSON.push(historyElement)
      }
    }else{
      // console.log(historyElement)
      newHistoryJSON.push(historyElement)
    }
  }

  for(let currentForecast of currentForecastsWithoutAHistory){
    let newHistoryElement = ({...currentForecast, "history": [{
      "timestamp": currentForecast.timestamp,
      "options": currentForecast.options,
      "qualityindicators": currentForecast.qualityindicators
    }]})
    delete newHistoryElement.timestamp
    delete newHistoryElement.options
    delete newHistoryElement.qualityindicators
    newHistoryJSON.push(newHistoryElement)
  }

  databaseUpsert(newHistoryJSON, "metaforecast_history")
  // console.log(newHistoryJSON.slice(0,5))
  // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))
  // writefile(JSON.stringify(newHistoryJSON, null, 2), "metaforecasts_history", "", ".json")
  //console.log(newHistoryJSON)
  /*
  let forecastsAlreadyInHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url)))
  */
  console.log(new Date().toISOString())
}
// addToHistory()

View File

@@ -1,7 +1,7 @@
import { databaseRead, databaseUpsert } from "../database-wrapper.js"

let createInitialHistory = async () => {
  let metaforecasts = await databaseRead({ group: "combined" })
  let metaforecastsHistorySeed = metaforecasts.map(element => {
    // let moreoriginsdata = element.author ? ({author: element.author}) : ({})
    return ({
@@ -16,10 +16,10 @@ let createInitialHistory = async () => {
        qualityindicators: element.qualityindicators
      }],
      extra: element.extra || {}
    })
  })
  console.log(metaforecastsHistorySeed)
  await databaseUpsert({ contents: metaforecastsHistorySeed, group: "history" })
}
createInitialHistory()

View File

@@ -1,12 +1,11 @@
import { databaseRead, databaseUpsert } from "../database/database-wrapper.js";
import { platformNames } from "../platforms/all-platforms.js"

/* Merge everything */

export async function mergeEverythingInner() {
  let merged = [];
  for (let platformName of platformNames) {
    let json = await databaseRead({ group: platformName });
    console.log(`${platformName} has ${json.length} questions\n`);
    merged = merged.concat(json);
  }

View File

@@ -27,7 +27,7 @@ let shuffle = (array) => {
let main = async () => {
  let init = Date.now();

  let json = await databaseReadWithReadCredentials({ group: "combined" });
  json = json.filter(
    (forecast) =>

View File

@@ -3,7 +3,7 @@ import fs from "fs"
import { databaseReadWithReadCredentials } from "../database/database-wrapper.js"

let main = async () => {
  let json = await databaseReadWithReadCredentials({ group: "combined" })
  let string = JSON.stringify(json, null, 2)
  let filename = 'metaforecasts.json'
  fs.writeFileSync(filename, string);

View File

@@ -1,5 +1,5 @@
import fs from "fs";
import { databaseUpsert } from "../database/database-wrapper.js";

/* This is necessary for estimize, the database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once. */

View File

@@ -1,35 +1,35 @@
import algoliasearch from 'algoliasearch';
import fs from "fs"
import { getSecret } from "./getSecrets.js"
import { databaseReadWithReadCredentials } from "../database/database-wrapper.js"
import { mergeEverythingInner } from '../flow/mergeEverything.js';

let cookie = process.env.ALGOLIA_MASTER_API_KEY || getSecret("algolia")
const client = algoliasearch('96UD3NTQ7L', cookie);
const index = client.initIndex('metaforecast');

export async function rebuildAlgoliaDatabaseTheHardWay() {
  console.log("Doing this the hard way")
  let records = await mergeEverythingInner()
  records = records.map((record, index) => ({ ...record, has_numforecasts: record.numforecasts ? true : false, objectID: index }))
  // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/

  if (await index.exists()) { // exists() returns a promise in algoliasearch v4
    console.log("Index exists")
    index.replaceAllObjects(records, { safe: true }).catch(error => console.log(error))
    console.log(`Pushed ${records.length} records. Algolia will update asynchronously`)
  }
}

export async function rebuildAlgoliaDatabaseTheEasyWay() {
  let records = await databaseReadWithReadCredentials({ group: "combined" })
  records = records.map((record, index) => ({ ...record, has_numforecasts: record.numforecasts ? true : false, objectID: index }))
  // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/

  if (await index.exists()) {
    console.log("Index exists")
    index.replaceAllObjects(records, { safe: true }).catch(error => console.log(error))
    console.log(`Pushed ${records.length} records. Algolia will update asynchronously`)
  }
}

View File

@@ -13,13 +13,13 @@ let getQualityIndicators = forecast => Object.entries(forecast.qualityindicators
let main = async () => {
  let highQualityPlatforms = ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
  let json = await databaseReadWithReadCredentials({ group: "combined" })
  console.log(json.length)
  //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
  //console.log(uniquePlatforms)

  let forecastsFromGoodPlatforms = json.filter(forecast => highQualityPlatforms.includes(forecast.platform))
  let tsv = "index\ttitle\turl\tqualityindicators\n" + forecastsFromGoodPlatforms
    .map((forecast, index) => {
      let row = `${index}\t${forecast.title}\t${forecast.url}\t${getQualityIndicators(forecast)}`
      console.log(row)

View File

@@ -10,26 +10,26 @@ import { databaseReadWithReadCredentials } from "../database-wrapper.js"
let getQualityIndicators = forecast => Object.entries(forecast.qualityindicators).map(entry => `${entry[0]}: ${entry[1]}`).join("; ")

let shuffleArray = (array) => {
  // See: https://stackoverflow.com/questions/2450954/how-to-randomize-shuffle-a-javascript-array
  for (let i = array.length - 1; i > 0; i--) {
    const j = Math.floor(Math.random() * (i + 1));
    [array[i], array[j]] = [array[j], array[i]];
  }
  return array
}

/* Body */
let main = async () => {
  let highQualityPlatforms = ['Metaculus'] // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
  let json = await databaseReadWithReadCredentials({ group: "combined" })
  console.log(json.length)
  //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
  //console.log(uniquePlatforms)

  let forecastsFromGoodPlatforms = json.filter(forecast => highQualityPlatforms.includes(forecast.platform))
  let forecastsFromGoodPlatformsShuffled = shuffleArray(forecastsFromGoodPlatforms)
  let tsv = "index\ttitle\turl\tqualityindicators\n" + forecastsFromGoodPlatforms
    .map((forecast, index) => {
      let row = `${index}\t${forecast.title}\t${forecast.url}\t${getQualityIndicators(forecast)}`
      console.log(row)

View File

@@ -8,7 +8,7 @@ let locationData = "./data/"
/* Body */
// let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src

async function main() {
  let data = await databaseReadWithReadCredentials({ group: "combined" }) //JSON.parse(rawdata)
  let processDescription = (description) => {
    if (description == null || description == undefined || description == "") {
      return ""