fix: Added fix to a nasty bug that was inflating the number of coup cast predictions

Also made algolia code slightly more robust
This commit is contained in:
NunoSempere 2021-08-11 21:00:13 +02:00
parent 6144675a41
commit 8e47215e6e
5 changed files with 36 additions and 33 deletions

View File

@@ -135,9 +135,10 @@ async function processArray(countryArray) {
}
})
results.push(processedPrediction1)
// results.push(processedPrediction1)
// Not pushing monthly
results.push(processedPrediction2)
}
}
}
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/elicit-questions.json', string);

View File

@@ -158,6 +158,7 @@ async function hypermind_inner(cookie) {
}
// console.log(resultsTotal)
console.log(resultsTotalUnique)
console.log(resultsTotalUnique.length, "results")
// let string = JSON.stringify(resultsTotalUnique, null, 2)
// fs.writeFileSync('./data/hypermind-questions.json', string);
await upsert(resultsTotalUnique, "hypermind-questions")

View File

@@ -3,42 +3,34 @@ import {getCookie} from "./getCookies.js"
import fs from "fs"
import { mongoReadWithReadCredentials } from "./mongo-wrapper.js"
import { mergeEverythingInner } from './mergeEverything.js';
let cookie = process.env.ALGOLIA_MASTER_API_KEY || getCookie("algolia")
const client = algoliasearch('96UD3NTQ7L', cookie); // delete this when committing
const index = client.initIndex('metaforecast');
export async function rebuildAlgoliaDatabase(){
let records = await mongoReadWithReadCredentials("metaforecasts")
// let string = JSON.stringify(json, null, 2)
// fs.writeFileSync('metaforecasts.json', string);
console.log("Doing this the hard way")
let records = await mergeEverythingInner()
records = records.map((record, index) => ({...record, has_numforecasts: record.numforecasts ? true : false, objectID: index}) )
// this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/
if(index.exists()){
console.log("Index exists")
index.replaceAllObjects(records, { safe:true }).catch(error => console.log(error))
console.log(`Pushed ${records.length} records. Algolia will update asynchronously`)
}
}
export async function rebuildAlgoliaDatabaseTheEasyWay(){
let records = await mongoReadWithReadCredentials("metaforecasts")
records = records.map((record, index) => ({...record, has_numforecasts: record.numforecasts ? true : false, objectID: index}) )
// this is necessary to filter by missing attributes https://www.algolia.com/doc/guides/managing-results/refine-results/filtering/how-to/filter-by-null-or-missing-attributes/
/*
index.clearObjects().wait().then(response => {
console.log(response)
});
*/
if(index.exists()){
console.log("Index exists")
index.replaceAllObjects(records, { safe:true }).catch(error => console.log(error))
console.log(`Pushed ${records.length} records. Algolia will update asynchronously`)
}
/*await index.clearObjects()
console.log("Past data")
setTimeout(function(){
index.saveObjects(records, { autoGenerateObjectIDIfNotExist: true }).then(() =>
console.log("algolia search: done")
).catch(error => {
console.log("algolia search: error", error)
})
alert('hello');
}, 60*1000); // 1 minute
*/
}
//rebuildAlgoliaDatabase()

View File

@@ -1,16 +1,23 @@
import { mongoRead, upsert } from "./mongo-wrapper.js"
/* Merge everything */
let sets = ["astralcodexten", "betfair", "coupcast", "csetforetell", "elicit", "estimize", "fantasyscotus", "foretold", "givewellopenphil", "goodjudgment","goodjudmentopen", "hypermind", "kalshi", "ladbrokes", "metaculus", "omen", "polymarket", "predictit", "rootclaim", "smarkets", "williamhill", "xrisk"]
let sets = ["astralcodexten", "betfair", "coupcast", "csetforetell", "elicit", /* "estimize" ,*/ "fantasyscotus", "foretold", "givewellopenphil", "goodjudgment","goodjudmentopen", "hypermind", "kalshi", "ladbrokes", "metaculus", "omen", "polymarket", "predictit", "rootclaim", "smarkets", "williamhill", "xrisk"]
let suffix = "-questions"
export async function mergeEverything(){
export async function mergeEverythingInner(){
let merged = []
for(let set of sets){
let json = await mongoRead(set+suffix)
console.log(`${set} has ${json.length} questions`)
merged = merged.concat(json)
}
let mergedprocessed = merged.map(element => ({...element, optionsstringforsearch: element.options.map(option => option.name).join(", ")}))
await upsert( mergedprocessed,"metaforecasts")
console.log(`In total, there are ${mergedprocessed.length} questions`)
return mergedprocessed
}
export async function mergeEverything(){
let merged = await mergeEverythingInner()
await upsert( merged,"metaforecasts")
console.log("Done")
}

View File

@@ -53,17 +53,19 @@ export async function upsert (contents, documentName, collectionName="metaforeca
"timestamp": new Date().toISOString(),
"contentsArray": contents
})
// Create a filter
const filter = { "name": documentName };
// Insert a single document, wait for promise so we can read it back
// const p = await collection.insertOne(metaforecastDocument);
await collection.replaceOne(filter, document, { upsert: true });
console.log(`Pushed document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(document)} MB`)
// Find one document
const myDocument = await collection.findOne(filter);
// Print to the console
console.log(`Updating document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(contents)} MB`)
console.log(`Received document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(contents)} MB`)
console.log("Sample: ")
console.log(JSON.stringify(myDocument.contentsArray.slice(0,1), null, 4));
} catch (err) {