feat: Make code somewhat database independent, and reorganize old
data.
This commit is contained in:
		
							parent
							
								
									7174188b57
								
							
						
					
					
						commit
						65fbddf0cf
					
				| Before Width: | Height: | Size: 277 KiB After Width: | Height: | Size: 277 KiB | 
| Can't render this file because it is too large. | 
							
								
								
									
										13
									
								
								src/database/database-wrapper.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								src/database/database-wrapper.js
									
									
									
									
									
										Normal file
									
								
							|  | @ -0,0 +1,13 @@ | |||
| import {mongoUpsert, mongoRead, mongoReadWithReadCredentials, mongoGetAllElements} from "./mongo-wrapper.js" | ||||
| 
 | ||||
| export const databaseUpsert = mongoUpsert;  | ||||
| // databaseUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
 | ||||
| 
 | ||||
| export const databaseRead = mongoRead; | ||||
| // databaseRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
 | ||||
| 
 | ||||
| export const databaseReadWithReadCredentials = mongoReadWithReadCredentials; | ||||
| // databaseReadWithReadCredentials(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
 | ||||
| 
 | ||||
| export const databaseGetAllElements = mongoGetAllElements; | ||||
| // databaseGetAllElements(databaseName = "metaforecastDatabase", collectionName = "metaforecastCollection")
 | ||||
|  | @ -35,7 +35,7 @@ function roughSizeOfObject(object) { | |||
|   return megaBytesRounded; | ||||
| } | ||||
| 
 | ||||
| export async function upsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") { | ||||
| export async function mongoUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") { | ||||
|   const url = process.env.MONGODB_URL || getCookie("mongodb"); | ||||
|   const client = new MongoClient(url); | ||||
|   try { | ||||
|  | @ -58,7 +58,7 @@ export async function upsert(contents, documentName, collectionName = "metaforec | |||
| 
 | ||||
|     // Insert a single document, wait for promise so we can read it back
 | ||||
|     // const p = await collection.insertOne(metaforecastDocument);
 | ||||
|     await collection.replaceOne(filter, document, { upsert: true }); | ||||
|     await collection.replaceOne(filter, document, { upsert: true }); | ||||
|     console.log(`Pushed document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(document)} MB`) | ||||
| 
 | ||||
|     // Find one document
 | ||||
|  |  | |||
							
								
								
									
										49
									
								
								src/database/pg-wrapper.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										49
									
								
								src/database/pg-wrapper.js
									
									
									
									
									
										Normal file
									
								
							|  | @ -0,0 +1,49 @@ | |||
| import pkg from 'pg'; | ||||
| const { Pool } = pkg | ||||
| 
 | ||||
| /* Postgres database connection code */ | ||||
| const pool = new Pool({ | ||||
|   connectionString: process.env.DATABASE_URL, | ||||
|   ssl: { | ||||
|     rejectUnauthorized: false | ||||
|   } | ||||
| }); | ||||
| 
 | ||||
| const tableWhiteList = ["latest.combined"] | ||||
| 
 | ||||
| export async function pgRead(tableName="latest.combined"){ | ||||
|   if(tableWhiteList.includes(tableName)){ | ||||
|     const client = await pool.connect(); | ||||
|     const result = await client.query(`SELECT * from ${tableName}`); | ||||
|     const results = { 'results': (result) ? result.rows : null}; | ||||
|     // response.render('pages/db', results );
 | ||||
|     client.release(); | ||||
|     return results | ||||
|   }else{ | ||||
|     throw Error("Table not in whitelist; stopping to avoid tricky sql injections") | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| export async function pgInsert(data, tableName="latest.combined"){ | ||||
|   if(tableWhiteList.includes(tableName)){ | ||||
| 		let text = `INSERT INTO ${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)` | ||||
| 		let values = [ | ||||
| 			data.id, | ||||
| 			data.title, | ||||
| 			data.url, | ||||
| 			data.platform, | ||||
| 			data.description || '', | ||||
| 			data.options || [], | ||||
| 			data.timestamp || Date.now(), // fix
 | ||||
| 			data.stars || (data.qualityindicators ? data.qualityindicators.stars : 2), | ||||
| 			data.qualityindicators || [], | ||||
| 			data.extra || [] | ||||
| 		] | ||||
| 
 | ||||
| 		const client = await pool.connect(); | ||||
| 		const result = await client.query(text, values); | ||||
| 		client.release(); | ||||
| 	}else{ | ||||
|     throw Error("Table not in whitelist; stopping to avoid tricky sql injections") | ||||
| 	} | ||||
| } | ||||
|  | @ -1,17 +1,17 @@ | |||
| import { writeFileSync } from "fs" | ||||
| import { mongoReadWithReadCredentials, upsert } from "../../database/mongo-wrapper.js" | ||||
| let mongoRead = mongoReadWithReadCredentials | ||||
| import { databaseReadWithReadCredentials, databaseUpsert } from "../../database/database-wrapper.js" | ||||
| let databaseRead = databaseReadWithReadCredentials | ||||
| let isEmptyArray = arr => arr.length == 0 | ||||
| 
 | ||||
| export async function addToHistory(){ | ||||
|   let currentDate = new Date() | ||||
|   let dateUpToMonth = currentDate.toISOString().slice(0,7).replace("-", "_") | ||||
| 
 | ||||
|   let currentJSONwithMetaculus = await mongoRead("metaforecasts") | ||||
|   let currentJSONwithMetaculus = await databaseRead("metaforecasts") | ||||
|   let currentJSON = currentJSONwithMetaculus.filter(element => element.platform != "Metaculus" && element.platform != "Estimize") // without Metaculus
 | ||||
|   // console.log(currentJSON.slice(0,20))
 | ||||
|   // console.log(currentJSON)
 | ||||
|   let historyJSON = await mongoRead(`metaforecast_history_${dateUpToMonth}`,"metaforecastHistory") | ||||
|   let historyJSON = await databaseRead(`metaforecast_history_${dateUpToMonth}`,"metaforecastHistory") | ||||
|   // console.log(historyJSON)
 | ||||
| 
 | ||||
|   let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url ))) | ||||
|  | @ -59,7 +59,7 @@ export async function addToHistory(){ | |||
|     newHistoryJSON.push(newHistoryElement) | ||||
|   } | ||||
| 
 | ||||
|   await upsert(newHistoryJSON, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory") | ||||
|   await databaseUpsert(newHistoryJSON, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory") | ||||
| 
 | ||||
|   // console.log(newHistoryJSON.slice(0,5))
 | ||||
|   // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))
 | ||||
|  |  | |||
|  | @ -1,9 +1,9 @@ | |||
| import { mongoRead, upsert } from "../../database/mongo-wrapper.js" | ||||
| import { databaseRead, databaseUpsert } from "../../database/database-wrapper.js" | ||||
| 
 | ||||
| export async function createHistoryForMonth(){ | ||||
|     let currentDate = new Date() | ||||
|     let dateUpToMonth = currentDate.toISOString().slice(0,7).replace("-", "_") | ||||
|     let metaforecasts = await mongoRead("metaforecasts") | ||||
|     let metaforecasts = await databaseRead("metaforecasts") | ||||
|     let metaforecastsHistorySeed = metaforecasts.map(element => { | ||||
|         // let moreoriginsdata = element.author ? ({author: element.author}) : ({})
 | ||||
|         return ({ | ||||
|  | @ -21,7 +21,7 @@ export async function createHistoryForMonth(){ | |||
|          }) | ||||
|     }).filter(element => element.platform != "Metaculus" && element.platform != "Estimize") | ||||
|     //console.log(metaforecastsHistorySeed)
 | ||||
|     await upsert(metaforecastsHistorySeed, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory") | ||||
|     await databaseUpsert(metaforecastsHistorySeed, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory") | ||||
| 
 | ||||
| } | ||||
| ////createInitialHistory()
 | ||||
|  | @ -1,13 +1,13 @@ | |||
| import { writeFileSync } from "fs" | ||||
| import { mongoReadWithReadCredentials, upsert } from "../mongo-wrapper.js" | ||||
| let mongoRead = mongoReadWithReadCredentials | ||||
| import { databaseReadWithReadCredentials, databaseUpsert } from "../database-wrapper.js" | ||||
| let databaseRead = databaseReadWithReadCredentials | ||||
| let isEmptyArray = arr => arr.length == 0 | ||||
| 
 | ||||
| export async function addToHistory(){ | ||||
|   // throw new Error("Not today")
 | ||||
|   let currentJSON = await mongoRead("metaforecasts") | ||||
|   let currentJSON = await databaseRead("metaforecasts") | ||||
|   // console.log(currentJSON)
 | ||||
|   let historyJSON = await mongoRead("metaforecast_history") | ||||
|   let historyJSON = await databaseRead("metaforecast_history") | ||||
|   // console.log(historyJSON)
 | ||||
| 
 | ||||
|   let currentForecastsWithAHistory = currentJSON.filter(element => !isEmptyArray(historyJSON.filter(historyElement => historyElement.title == element.title && historyElement.url == element.url ))) | ||||
|  | @ -55,7 +55,7 @@ export async function addToHistory(){ | |||
|     newHistoryJSON.push(newHistoryElement) | ||||
|   } | ||||
| 
 | ||||
|   upsert(newHistoryJSON, "metaforecast_history") | ||||
|   databaseUpsert(newHistoryJSON, "metaforecast_history") | ||||
|   // console.log(newHistoryJSON.slice(0,5))
 | ||||
|   // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))
 | ||||
|   // writefile(JSON.stringify(newHistoryJSON, null, 2), "metaforecasts_history", "", ".json")
 | ||||
|  |  | |||
|  | @ -1,7 +1,7 @@ | |||
| import { mongoRead, upsert } from "../mongo-wrapper.js" | ||||
| import { databaseRead, databaseUpsert } from "../database-wrapper.js" | ||||
| 
 | ||||
| let createInitialHistory = async () => { | ||||
|     let metaforecasts = await mongoRead("metaforecasts") | ||||
|     let metaforecasts = await databaseRead("metaforecasts") | ||||
|     let metaforecastsHistorySeed = metaforecasts.map(element => { | ||||
|         // let moreoriginsdata = element.author ? ({author: element.author}) : ({})
 | ||||
|         return ({ | ||||
|  | @ -19,7 +19,7 @@ let createInitialHistory = async () => { | |||
|          }) | ||||
|     }) | ||||
|     console.log(metaforecastsHistorySeed) | ||||
|     await upsert(metaforecastsHistorySeed, "metaforecast_history") | ||||
|     await databaseUpsert(metaforecastsHistorySeed, "metaforecast_history") | ||||
| 
 | ||||
| } | ||||
| createInitialHistory() | ||||
|  | @ -1,4 +1,4 @@ | |||
| import { mongoRead, upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseRead, databaseUpsert } from "../database/database-wrapper.js"; | ||||
| import { platformNames } from "../platforms/all-platforms.js" | ||||
| /* Merge everything */ | ||||
| let suffix = "-questions"; | ||||
|  | @ -6,7 +6,7 @@ let suffix = "-questions"; | |||
| export async function mergeEverythingInner() { | ||||
|   let merged = []; | ||||
|   for (let platformName of platformNames) { | ||||
|     let json = await mongoRead(platformName + suffix); | ||||
|     let json = await databaseRead(platformName + suffix); | ||||
|     console.log(`${platformName} has ${json.length} questions\n`); | ||||
|     merged = merged.concat(json); | ||||
|   } | ||||
|  | @ -22,6 +22,6 @@ export async function mergeEverythingInner() { | |||
| 
 | ||||
| export async function mergeEverything() { | ||||
|   let merged = await mergeEverythingInner(); | ||||
|   await upsert(merged, "metaforecasts"); | ||||
|   await databaseUpsert(merged, "metaforecasts"); | ||||
|   console.log("Done"); | ||||
| } | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| import fs from "fs"; | ||||
| import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js"; | ||||
| import { databaseReadWithReadCredentials } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| let filename = | ||||
|   "/home/loki/Documents/core/software/fresh/js/metaforecast/metaforecast-backend/data/frontpage.json"; | ||||
|  | @ -27,7 +27,7 @@ let shuffle = (array) => { | |||
| let main = async () => { | ||||
|   let init = Date.now(); | ||||
| 
 | ||||
|   let json = await mongoReadWithReadCredentials("metaforecasts"); | ||||
|   let json = await databaseReadWithReadCredentials("metaforecasts"); | ||||
| 
 | ||||
|   json = json.filter( | ||||
|     (forecast) => | ||||
|  |  | |||
|  | @ -1,9 +1,9 @@ | |||
| import fs from "fs" | ||||
| 
 | ||||
| import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js" | ||||
| import { databaseReadWithReadCredentials } from "../database/database-wrapper.js" | ||||
| 
 | ||||
| let main = async () => { | ||||
|   let json = await mongoReadWithReadCredentials("metaforecasts") | ||||
|   let json = await databaseReadWithReadCredentials("metaforecasts") | ||||
|   let string = JSON.stringify(json, null, 2) | ||||
|   let filename = 'metaforecasts.json' | ||||
|   fs.writeFileSync(filename, string); | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| import fs from "fs"; | ||||
| import { mongoRead, upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseRead, databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* This is necessary for estimize, the database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once. */ | ||||
| 
 | ||||
|  | @ -12,7 +12,7 @@ let main = async () => { | |||
|     let fileRaw = fs.readFileSync(`./src/input/${file + suffixFiles}`); | ||||
|     let fileContents = JSON.parse(fileRaw); | ||||
|     console.log(fileContents); | ||||
|     await upsert(fileContents, file + suffixMongo); | ||||
|     await databaseUpsert(fileContents, file + suffixMongo); | ||||
|   } | ||||
| }; | ||||
| main(); | ||||
|  |  | |||
|  | @ -1,4 +1,4 @@ | |||
| #!/bin/bash | ||||
| cd /home/loki/Documents/core/software/fresh/js/metaforecast/metaforecast-backend | ||||
| date > done.txt | ||||
| /home/loki/.nvm/versions/node/v16.8.0/bin/node ./src/manual/pullSuperforecastsManually.js >> done.txt | ||||
| date > ./notes/last-superforecast-pull.txt | ||||
| /home/loki/.nvm/versions/node/v16.8.0/bin/node ./src/manual/pullSuperforecastsManually.js >> ./notes/last-superforecast-pull.txt | ||||
|  | @ -3,7 +3,7 @@ import fs from "fs"; | |||
| import axios from "axios"; | ||||
| import https from "https"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let endpoint = process.env.SECRET_BETFAIR_ENDPOINT; | ||||
|  | @ -143,7 +143,7 @@ export async function betfair() { | |||
|   // console.log(results.map(result => ({title: result.title, description: result.description})))
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('polyprediction-questions.json', string);
 | ||||
|   await upsert(results, "betfair-questions"); | ||||
|   await databaseUpsert(results, "betfair-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| // betfair()
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| import fs from 'fs' | ||||
| import axios from "axios" | ||||
| import { calculateStars } from "../utils/stars.js" | ||||
| import { upsert } from "../utils/mongo-wrapper.js" | ||||
| import { databaseUpsert } from "../utils/database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let graphQLendpoint = "https://api.foretold.io/graphql" | ||||
|  | @ -96,7 +96,7 @@ export async function astralcodexten(){ | |||
|   console.log(JSON.stringify(results, null, 2)) | ||||
|   fs.writeFileSync('./data/astralcodexten-questions.json', string); | ||||
|   */ | ||||
|   await upsert(results, "astralcodexten-questions") | ||||
|   await databaseUpsert(results, "astralcodexten-questions") | ||||
|   // console.log(results)
 | ||||
|   console.log("Done") | ||||
| } | ||||
|  |  | |||
|  | @ -5,7 +5,7 @@ import Papa from "papaparse" | |||
| import open from "open" | ||||
| import readline from "readline" | ||||
| import {calculateStars} from "../utils/stars.js" | ||||
| import {upsert} from "../utils/mongo-wrapper.js" | ||||
| import {databaseUpsert} from "../utils/database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let coupCastEndpoint = "https://www.oneearthfuture.org/sites/all/themes/stability/stability_sub/data/dashboard_2021_code_06.csv" | ||||
|  | @ -142,7 +142,7 @@ async function processArray(countryArray) { | |||
|   } | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/elicit-questions.json', string);
 | ||||
|   await upsert(results, "coupcast-questions") | ||||
|   await databaseUpsert(results, "coupcast-questions") | ||||
|   // console.log(results)
 | ||||
|   console.log("Done") | ||||
| } | ||||
|  |  | |||
|  | @ -4,7 +4,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js" | |||
| import { Tabletojson } from "tabletojson" | ||||
| import toMarkdown from "../utils/toMarkdown.js" | ||||
| import { calculateStars } from "../utils/stars.js" | ||||
| import { upsert } from "../utils/mongo-wrapper.js" | ||||
| import { databaseUpsert } from "../utils/database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let htmlEndPoint = 'https://www.cset-foretell.com/questions?page=' | ||||
|  | @ -237,7 +237,7 @@ async function csetforetell_inner(cookie) { | |||
|   // fs.writeFileSync('./data/csetforetell-questions.json', string);
 | ||||
|   // console.log(results)
 | ||||
|   if (results.length > 0) { | ||||
|     await upsert(results, "csetforetell-questions") | ||||
|     await databaseUpsert(results, "csetforetell-questions") | ||||
|   } else { | ||||
|     console.log("Not updating results, as process was not signed in") | ||||
|   } | ||||
|  |  | |||
|  | @ -5,7 +5,7 @@ import Papa from "papaparse" | |||
| import open from "open" | ||||
| import readline from "readline" | ||||
| import {calculateStars} from "../utils/stars.js" | ||||
| import {upsert} from "../utils/mongo-wrapper.js" | ||||
| import {databaseUpsert} from "../utils/database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let elicitEndpoint = "https://elicit.org/api/v1/binary-questions/csv?binaryQuestions.resolved=false&binaryQuestions.search=&binaryQuestions.sortBy=popularity&predictors=community" | ||||
|  | @ -84,7 +84,7 @@ async function processArray(arrayQuestions) { | |||
|   } | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/elicit-questions.json', string);
 | ||||
|   await upsert(results, "elicit-questions") | ||||
|   await databaseUpsert(results, "elicit-questions") | ||||
| 
 | ||||
|   console.log("Done") | ||||
| } | ||||
|  |  | |||
|  | @ -1,6 +1,6 @@ | |||
| import fs from "fs" | ||||
| import {calculateStars} from "../utils/stars.js" | ||||
| import {upsert} from "../utils/mongo-wrapper.js" | ||||
| import {databaseUpsert} from "../utils/database-wrapper.js" | ||||
| 
 | ||||
| 
 | ||||
| export async function estimize(){ | ||||
|  | @ -31,6 +31,6 @@ export async function estimize(){ | |||
|   // console.log(results)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/estimize-questions.json', string);
 | ||||
|   await upsert(results, "estimize-questions") | ||||
|   await databaseUpsert(results, "estimize-questions") | ||||
| } | ||||
| //estimize()
 | ||||
|  |  | |||
|  | @ -6,7 +6,7 @@ import fetch from "isomorphic-fetch" | |||
| import {getCookie, applyIfCookieExists} from "../utils/getCookies.js" | ||||
| import toMarkdown from "../utils/toMarkdown.js" | ||||
| import { calculateStars } from "../utils/stars.js" | ||||
| import { upsert } from "../utils/mongo-wrapper.js" | ||||
| import { databaseUpsert } from "../utils/database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let hypermindEnpoint1 = 'https://predict.hypermind.com/dash/jsx.json' | ||||
|  | @ -172,7 +172,7 @@ async function hypermind_inner(cookie) { | |||
|   console.log(resultsTotalUnique.length, "results") | ||||
|   // let string = JSON.stringify(resultsTotalUnique, null, 2)
 | ||||
|   // fs.writeFileSync('./data/hypermind-questions.json', string);
 | ||||
|   await upsert(resultsTotalUnique, "hypermind-questions") | ||||
|   await databaseUpsert(resultsTotalUnique, "hypermind-questions") | ||||
| 
 | ||||
| } | ||||
| //hypermind()
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| import axios from "axios"; | ||||
| import fs from "fs"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../utils/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../utils/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let endpointPolitics = `https://ss-aka-ori.ladbrokes.com/openbet-ssviewer/Drilldown/2.31/EventToOutcomeForClass/302,301,300?simpleFilter=event.siteChannels:contains:M&simpleFilter=event.eventSortCode:intersects:TNMT,TR01,TR02,TR03,TR04,TR05,TR06,TR07,TR08,TR09,TR10,TR11,TR12,TR13,TR14,TR15,TR16,TR17,TR18,TR19,TR20&simpleFilter=event.suspendAtTime:greaterThan:${new Date().toISOString()}.000Z&limitRecords=outcome:1&limitRecords=market:1&translationLang=en&responseFormat=json&prune=event&prune=market`; | ||||
|  | @ -140,7 +140,7 @@ export async function ladbrokes() { | |||
|   // console.log(results)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/ladbrokes-questions.json', string);
 | ||||
|   await upsert(results, "ladbrokes-questions"); | ||||
|   await databaseUpsert(results, "ladbrokes-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| //ladbrokes()
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| import fs from 'fs' | ||||
| import axios from "axios" | ||||
| import { calculateStars } from "../../utils/stars.js" | ||||
| import {upsert} from "../../utils/mongo-wrapper.js" | ||||
| import {databaseUpsert} from "../../utils/database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let graphQLendpoint = "https://api.thegraph.com/subgraphs/name/protofire/omen" | ||||
|  | @ -93,7 +93,7 @@ export async function omen() { | |||
|   // console.log(result)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/omen-questions.json', string);
 | ||||
|   await upsert(results, "omen-questions") | ||||
|   await databaseUpsert(results, "omen-questions") | ||||
|   console.log("Done")   | ||||
| } | ||||
| //omen()
 | ||||
|  |  | |||
|  | @ -3,7 +3,7 @@ import axios from "axios" | |||
| import fs from "fs" | ||||
| import toMarkdown from "../utils/toMarkdown.js" | ||||
| import {calculateStars} from "../utils/stars.js" | ||||
| import { upsert } from "../utils/mongo-wrapper.js" | ||||
| import { databaseUpsert } from "../utils/database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let endpoint = "https://sports.williamhill.com/betting/en-gb/politics" | ||||
|  | @ -129,7 +129,7 @@ export async function williamhill() { | |||
|   let results = processResults(response) | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/williamhill-questions.json', string);
 | ||||
|   await upsert(results, "williamhill-questions") | ||||
|   await databaseUpsert(results, "williamhill-questions") | ||||
|   console.log(results.sort((a,b) => (a.title > b.title))) | ||||
|   console.log("Done") | ||||
| } | ||||
|  |  | |||
|  | @ -3,7 +3,7 @@ import fs from "fs"; | |||
| import axios from "axios"; | ||||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let endpoint = "https://example.com/"; | ||||
|  | @ -64,7 +64,7 @@ export async function example() { | |||
|   // console.log(results)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('polyprediction-questions.json', string);
 | ||||
|   await upsert(results, "example-questions"); | ||||
|   await databaseUpsert(results, "example-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| //example()
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| import fs from "fs"; | ||||
| import axios from "axios"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let unixtime = new Date().getTime(); | ||||
|  | @ -118,7 +118,7 @@ export async function fantasyscotus() { | |||
|   //console.log(results)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/fantasyscotus-questions.json', string);
 | ||||
|   await upsert(results, "fantasyscotus-questions"); | ||||
|   await databaseUpsert(results, "fantasyscotus-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| //fantasyscotus()
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| import fs from "fs"; | ||||
| import axios from "axios"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let graphQLendpoint = "https://api.foretold.io/graphql"; | ||||
|  | @ -101,7 +101,7 @@ export async function foretold() { | |||
|   } | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/foretold-questions.json', string);
 | ||||
|   await upsert(results, "foretold-questions"); | ||||
|   await databaseUpsert(results, "foretold-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| // foretold()
 | ||||
|  |  | |||
|  | @ -3,7 +3,7 @@ import fs from "fs"; | |||
| import axios from "axios"; | ||||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let locationData = "./data/"; | ||||
|  | @ -64,6 +64,6 @@ async function main() { | |||
|   } | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/givewell-questions-unprocessed.json', string);
 | ||||
|   await upsert(results, "givewell-questions-unprocessed"); | ||||
|   await databaseUpsert(results, "givewell-questions-unprocessed"); | ||||
| } | ||||
| main(); | ||||
|  |  | |||
|  | @ -6,7 +6,7 @@ import { Tabletojson } from "tabletojson"; | |||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { hash } from "../utils/hash.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let endpoint = "https://goodjudgment.io/superforecasts/"; | ||||
|  | @ -122,7 +122,7 @@ export async function goodjudgment() { | |||
|   // fs.writeFileSync('./data/goodjudgment-questions.json', string);
 | ||||
|   // fs.writeFileSync('./goodjudgment-questions-test.json', string);
 | ||||
|   console.log(results); | ||||
|   await upsert(results, "goodjudgment-questions"); | ||||
|   await databaseUpsert(results, "goodjudgment-questions"); | ||||
|   console.log( | ||||
|     "Failing is not unexpected; see utils/pullSuperforecastsManually.sh/js" | ||||
|   ); | ||||
|  |  | |||
|  | @ -5,7 +5,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"; | |||
| import { Tabletojson } from "tabletojson"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let htmlEndPoint = "https://www.gjopen.com/questions?page="; | ||||
|  | @ -223,7 +223,7 @@ async function goodjudgmentopen_inner(cookie) { | |||
|   // fs.writeFileSync('./data/goodjudmentopen-questions.json', string);
 | ||||
|   console.log(results); | ||||
|   if (results.length > 0) { | ||||
|     await upsert(results, "goodjudmentopen-questions"); | ||||
|     await databaseUpsert(results, "goodjudmentopen-questions"); | ||||
|   } else { | ||||
|     console.log("Not updating results, as process was not signed in"); | ||||
|   } | ||||
|  |  | |||
|  | @ -4,7 +4,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"; | |||
| import { Tabletojson } from "tabletojson"; | ||||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let htmlEndPoint = "https://www.infer-pub.com/questions"; | ||||
|  | @ -269,7 +269,7 @@ async function infer_inner(cookie) { | |||
|   // fs.writeFileSync('./data/infer-questions.json', string);
 | ||||
|   // console.log(results)
 | ||||
|   if (results.length > 0) { | ||||
|     await upsert(results, "infer-questions"); | ||||
|     await databaseUpsert(results, "infer-questions"); | ||||
|   } else { | ||||
|     console.log("Not updating results, as process was not signed in"); | ||||
|   } | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| import fs from "fs"; | ||||
| import axios from "axios"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let jsonEndpoint = "https://trading-api.kalshi.com/v1/cached/markets/"; //"https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket"//"https://subgraph-backup.poly.market/subgraphs/name/TokenUnion/polymarket"//'https://subgraph-matic.poly.market/subgraphs/name/TokenUnion/polymarket3'
 | ||||
|  | @ -83,7 +83,7 @@ export async function kalshi() { | |||
|   // console.log(results)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('polymarket-questions.json', string);
 | ||||
|   await upsert(results, "kalshi-questions"); | ||||
|   await databaseUpsert(results, "kalshi-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| // kalshi()
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| import fs from "fs"; | ||||
| import axios from "axios"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let endpoint = "https://us-central1-mantic-markets.cloudfunctions.net/markets"; | ||||
|  | @ -96,7 +96,7 @@ export async function manifoldmarkets() { | |||
|   // console.log(results)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('polyprediction-questions.json', string);
 | ||||
|   await upsert(results, "manifoldmarkets-questions"); | ||||
|   await databaseUpsert(results, "manifoldmarkets-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| // manifoldmarkets()
 | ||||
|  |  | |||
|  | @ -3,7 +3,7 @@ import axios from "axios"; | |||
| import fs from "fs"; | ||||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let jsonEndPoint = "https://www.metaculus.com/api2/questions/?page="; | ||||
|  | @ -154,7 +154,7 @@ export async function metaculus() { | |||
| 
 | ||||
|   // let string = JSON.stringify(all_questions, null, 2)
 | ||||
|   // fs.writeFileSync('./metaculus-questions.json', string);
 | ||||
|   await upsert(all_questions, "metaculus-questions"); | ||||
|   await databaseUpsert(all_questions, "metaculus-questions"); | ||||
| 
 | ||||
|   console.log("Done"); | ||||
| } | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| import fs from "fs"; | ||||
| import axios from "axios"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let graphQLendpoint = | ||||
|  | @ -152,7 +152,7 @@ export async function polymarket() { | |||
|   // console.log(results)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('polymarket-questions.json', string);
 | ||||
|   await upsert(results, "polymarket-questions"); | ||||
|   await databaseUpsert(results, "polymarket-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| // polymarket()
 | ||||
|  |  | |||
|  | @ -3,7 +3,7 @@ import fs from "fs"; | |||
| import axios from "axios"; | ||||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Support functions */ | ||||
| async function fetchmarkets() { | ||||
|  | @ -110,7 +110,7 @@ export async function predictit() { | |||
|   //console.log(results)
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/predictit-questions.json', string);
 | ||||
|   await upsert(results, "predictit-questions"); | ||||
|   await databaseUpsert(results, "predictit-questions"); | ||||
| 
 | ||||
|   console.log("Done"); | ||||
| } | ||||
|  |  | |||
|  | @ -3,7 +3,7 @@ import fs from "fs"; | |||
| import axios from "axios"; | ||||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let jsonEndpoint = | ||||
|  | @ -69,7 +69,7 @@ export async function rootclaim() { | |||
|   //console.log(JSON.stringify(results, null, 4))
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('rootclaim-questions.json', string);
 | ||||
|   await upsert(results, "rootclaim-questions"); | ||||
|   await databaseUpsert(results, "rootclaim-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| //rootclaim()
 | ||||
|  |  | |||
|  | @ -3,7 +3,7 @@ import fs from "fs"; | |||
| import axios from "axios"; | ||||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let htmlEndPointEntrance = "https://api.smarkets.com/v3/events/"; | ||||
|  | @ -153,6 +153,6 @@ export async function smarkets() { | |||
| 
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('./data/smarkets-questions.json', string);
 | ||||
|   await upsert(results, "smarkets-questions"); | ||||
|   await databaseUpsert(results, "smarkets-questions"); | ||||
| } | ||||
| //smarkets()
 | ||||
|  |  | |||
|  | @ -6,7 +6,7 @@ import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"; | |||
| import toMarkdown from "../utils/toMarkdown.js"; | ||||
| import { calculateStars } from "../utils/stars.js"; | ||||
| import { hash } from "../utils/hash.js"; | ||||
| import { upsert } from "../database/mongo-wrapper.js"; | ||||
| import { databaseUpsert } from "../database/database-wrapper.js"; | ||||
| 
 | ||||
| /* Definitions */ | ||||
| const SHEET_ID = "1xcgYF7Q0D95TPHLLSgwhWBHFrWZUGJn7yTyAhDR4vi0"; // spreadsheet key is the long id in the sheets URL
 | ||||
|  | @ -125,7 +125,7 @@ export async function wildeford_inner(google_api_key) { | |||
|   // console.log(results.sort((a,b) => (a.title > b.title)))
 | ||||
|   // let string = JSON.stringify(results, null, 2)
 | ||||
|   // fs.writeFileSync('polyprediction-questions.json', string);
 | ||||
|   await upsert(results, "wildeford-questions"); | ||||
|   await databaseUpsert(results, "wildeford-questions"); | ||||
|   console.log("Done"); | ||||
| } | ||||
| //example()
 | ||||
|  |  | |||
|  | @ -1,7 +1,7 @@ | |||
| import algoliasearch from 'algoliasearch'; | ||||
| import fs from "fs" | ||||
| import {getCookie} from "./getCookies.js" | ||||
| import { mongoReadWithReadCredentials } from "../database/mongo-wrapper.js" | ||||
| import { databaseReadWithReadCredentials } from "../database/database-wrapper.js" | ||||
| import { mergeEverythingInner } from '../flow/mergeEverything.js'; | ||||
| 
 | ||||
| let cookie = process.env.ALGOLIA_MASTER_API_KEY || getCookie("algolia") | ||||
|  | @ -23,7 +23,7 @@ export async function rebuildAlgoliaDatabaseTheHardWay(){ | |||
| } | ||||
| 
 | ||||
| export async function rebuildAlgoliaDatabaseTheEasyWay(){ | ||||
|   let records = await mongoReadWithReadCredentials("metaforecasts") | ||||
|   let records = await databaseReadWithReadCredentials("metaforecasts") | ||||
|   records = records.map((record, index) => ({...record, has_numforecasts: record.numforecasts ? true : false, objectID: index}) ) | ||||
|   // this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/  
 | ||||
|    | ||||
|  |  | |||
|  | @ -1,6 +1,6 @@ | |||
| /* Imports */ | ||||
| import fs from "fs" | ||||
| import { mongoReadWithReadCredentials } from "../mongo-wrapper.js" | ||||
| import { databaseReadWithReadCredentials } from "../database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| 
 | ||||
|  | @ -13,7 +13,7 @@ let getQualityIndicators = forecast => Object.entries(forecast.qualityindicators | |||
| 
 | ||||
| let main = async () => { | ||||
|   let highQualityPlatforms = ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim'] | ||||
|   let json = await mongoReadWithReadCredentials("metaforecasts") | ||||
|   let json = await databaseReadWithReadCredentials("metaforecasts") | ||||
|   console.log(json.length) | ||||
|   //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
 | ||||
|   //console.log(uniquePlatforms)
 | ||||
|  |  | |||
|  | @ -1,6 +1,6 @@ | |||
| /* Imports */ | ||||
| import fs from "fs" | ||||
| import { mongoReadWithReadCredentials } from "../mongo-wrapper.js" | ||||
| import { databaseReadWithReadCredentials } from "../database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| 
 | ||||
|  | @ -22,7 +22,7 @@ let shuffleArray = (array) => { | |||
| 
 | ||||
| let main = async () => { | ||||
|   let highQualityPlatforms = [ 'Metaculus' ] // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
 | ||||
|   let json = await mongoReadWithReadCredentials("metaforecasts") | ||||
|   let json = await databaseReadWithReadCredentials("metaforecasts") | ||||
|   console.log(json.length) | ||||
|   //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
 | ||||
|   //console.log(uniquePlatforms)
 | ||||
|  |  | |||
|  | @ -1,6 +1,6 @@ | |||
| /* Imports */ | ||||
| import fs from "fs" | ||||
| import { mongoReadWithReadCredentials } from "../mongo-wrapper.js" | ||||
| import { databaseReadWithReadCredentials } from "../database-wrapper.js" | ||||
| 
 | ||||
| /* Definitions */ | ||||
| let locationData = "./data/" | ||||
|  | @ -8,7 +8,7 @@ let locationData = "./data/" | |||
| /* Body */ | ||||
| // let rawdata =  fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
 | ||||
| async function main(){ | ||||
|   let data = await mongoReadWithReadCredentials("metaforecasts") //JSON.parse(rawdata)
 | ||||
|   let data = await databaseReadWithReadCredentials("metaforecasts") //JSON.parse(rawdata)
 | ||||
|   let processDescription = (description) => { | ||||
|     if(description == null || description == undefined || description == ""){ | ||||
|       return "" | ||||
|  |  | |||
		Loading…
	
		Reference in New Issue
	
	Block a user