feat: Add a postgres database in parallel to the mongo one

This commit is contained in:
NunoSempere 2022-02-12 01:27:56 -05:00
parent 65fbddf0cf
commit 6bb3da0e32
37 changed files with 583 additions and 184 deletions

2
.gitignore vendored
View File

@ -4,7 +4,7 @@ node_modules/*
## Security
**/betfaircertificates/
**/privatekeys.json
**/secrets.json
## Personal notes
notes/*

237
package-lock.json generated
View File

@ -19,6 +19,7 @@
"mongodb": "^3.6.6",
"open": "^7.3.1",
"papaparse": "^5.3.0",
"pg": "^8.7.3",
"tabletojson": "^2.0.4",
"textversionjs": "^1.1.3",
"tunnel": "^0.0.6"
@ -308,6 +309,14 @@
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
"integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk="
},
"node_modules/buffer-writer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
"integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==",
"engines": {
"node": ">=4"
}
},
"node_modules/cacheable-lookup": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-2.0.1.tgz",
@ -1058,6 +1067,11 @@
"node": ">=8"
}
},
"node_modules/packet-reader": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz",
"integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ=="
},
"node_modules/papaparse": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/papaparse/-/papaparse-5.3.0.tgz",
@ -1076,6 +1090,115 @@
"parse5": "^6.0.1"
}
},
"node_modules/pg": {
"version": "8.7.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.7.3.tgz",
"integrity": "sha512-HPmH4GH4H3AOprDJOazoIcpI49XFsHCe8xlrjHkWiapdbHK+HLtbm/GQzXYAZwmPju/kzKhjaSfMACG+8cgJcw==",
"dependencies": {
"buffer-writer": "2.0.0",
"packet-reader": "1.0.0",
"pg-connection-string": "^2.5.0",
"pg-pool": "^3.5.1",
"pg-protocol": "^1.5.0",
"pg-types": "^2.1.0",
"pgpass": "1.x"
},
"engines": {
"node": ">= 8.0.0"
},
"peerDependencies": {
"pg-native": ">=2.0.0"
},
"peerDependenciesMeta": {
"pg-native": {
"optional": true
}
}
},
"node_modules/pg-connection-string": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz",
"integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ=="
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/pg-pool": {
"version": "3.5.1",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.1.tgz",
"integrity": "sha512-6iCR0wVrro6OOHFsyavV+i6KYL4lVNyYAB9RD18w66xSzN+d8b66HiwuP30Gp1SH5O9T82fckkzsRjlrhD0ioQ==",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz",
"integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ=="
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"dependencies": {
"split2": "^4.1.0"
}
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
"engines": {
"node": ">=4"
}
},
"node_modules/postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"dependencies": {
"xtend": "^4.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
@ -1157,6 +1280,14 @@
"memory-pager": "^1.0.2"
}
},
"node_modules/split2": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz",
"integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==",
"engines": {
"node": ">= 10.x"
}
},
"node_modules/string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
@ -1232,6 +1363,14 @@
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
},
"node_modules/xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
"engines": {
"node": ">=0.4"
}
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
@ -1488,6 +1627,11 @@
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
"integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk="
},
"buffer-writer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
"integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw=="
},
"cacheable-lookup": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-2.0.1.tgz",
@ -2029,6 +2173,11 @@
"p-finally": "^1.0.0"
}
},
"packet-reader": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz",
"integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ=="
},
"papaparse": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/papaparse/-/papaparse-5.3.0.tgz",
@ -2047,6 +2196,84 @@
"parse5": "^6.0.1"
}
},
"pg": {
"version": "8.7.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.7.3.tgz",
"integrity": "sha512-HPmH4GH4H3AOprDJOazoIcpI49XFsHCe8xlrjHkWiapdbHK+HLtbm/GQzXYAZwmPju/kzKhjaSfMACG+8cgJcw==",
"requires": {
"buffer-writer": "2.0.0",
"packet-reader": "1.0.0",
"pg-connection-string": "^2.5.0",
"pg-pool": "^3.5.1",
"pg-protocol": "^1.5.0",
"pg-types": "^2.1.0",
"pgpass": "1.x"
}
},
"pg-connection-string": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz",
"integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ=="
},
"pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="
},
"pg-pool": {
"version": "3.5.1",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.1.tgz",
"integrity": "sha512-6iCR0wVrro6OOHFsyavV+i6KYL4lVNyYAB9RD18w66xSzN+d8b66HiwuP30Gp1SH5O9T82fckkzsRjlrhD0ioQ==",
"requires": {}
},
"pg-protocol": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz",
"integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ=="
},
"pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"requires": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
}
},
"pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"requires": {
"split2": "^4.1.0"
}
},
"postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="
},
"postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU="
},
"postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="
},
"postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"requires": {
"xtend": "^4.0.0"
}
},
"process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
@ -2113,6 +2340,11 @@
"memory-pager": "^1.0.2"
}
},
"split2": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz",
"integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ=="
},
"string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
@ -2178,6 +2410,11 @@
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
},
"xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
},
"yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",

View File

@ -35,6 +35,7 @@
"mongodb": "^3.6.6",
"open": "^7.3.1",
"papaparse": "^5.3.0",
"pg": "^8.7.3",
"tabletojson": "^2.0.4",
"textversionjs": "^1.1.3",
"tunnel": "^0.0.6"

View File

@ -1,6 +1,29 @@
import { mongoUpsert, mongoRead, mongoReadWithReadCredentials, mongoGetAllElements } from "./mongo-wrapper.js"
import { pgUpsert } from "./pg-wrapper.js"
export const databaseUpsert = mongoUpsert;
export async function databaseUpsert({ contents, group }) {
// (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){
let mongoDocName;
switch (group) {
case 'combined':
mongoDocName = "metaforecasts"
await mongoUpsert(contents, mongoDocName, "metaforecastCollection", "metaforecastDatabase")
await pgUpsert({contents, schema: "latest", tableName: "combined"})
break;
case 'history':
let currentDate = new Date()
let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_")
mongoDocName = `metaforecast_history_${dateUpToMonth}`
await mongoUpsert(contents, mongoDocName, "metaforecastHistory", "metaforecastDatabase")
await pgUpsert({contents, schema: "history", tableName: "combined"})
break;
default:
mongoDocName = `${group}-questions`
await mongoUpsert(contents, mongoDocName, "metaforecastCollection", "metaforecastDatabase")
await pgUpsert({contents, schema: "latest", tableName: group})
}
}
// databaseUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
export const databaseRead = mongoRead;
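For orientation, here is a minimal usage sketch of the new wrapper signature (not part of the commit); the group values mirror the platform call sites updated later in this diff, and `results` is a hypothetical array of question objects:

import { databaseUpsert } from "./database/database-wrapper.js"

let results = [ /* question objects: id, title, url, platform, options, ... */ ]

// "betfair" maps to the mongo document "betfair-questions" and to the postgres table latest.betfair
await databaseUpsert({ contents: results, group: "betfair" })

// "history" maps to the monthly mongo history document and to history.combined in postgres
await databaseUpsert({ contents: results, group: "history" })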

View File

@ -1,42 +1,10 @@
import pkg from 'mongodb';
const { MongoClient } = pkg;
import { getCookie } from "../utils/getCookies.js"
function roughSizeOfObject(object) {
var objectList = [];
var stack = [object];
var bytes = 0;
while (stack.length) {
var value = stack.pop();
if (typeof value === 'boolean') {
bytes += 4;
}
else if (typeof value === 'string') {
bytes += value.length * 2;
}
else if (typeof value === 'number') {
bytes += 8;
}
else if
(
typeof value === 'object'
&& objectList.indexOf(value) === -1
) {
objectList.push(value);
for (var i in value) {
stack.push(value[i]);
}
}
}
let megaBytes = bytes / (1024) ** 2
let megaBytesRounded = Math.round(megaBytes * 10) / 10
return megaBytesRounded;
}
import { getSecret } from "../utils/getSecrets.js"
import { roughSizeOfObject } from "../utils/roughSize.js"
export async function mongoUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") {
const url = process.env.MONGODB_URL || getCookie("mongodb");
const url = process.env.MONGODB_URL || getSecret("mongodb");
const client = new MongoClient(url);
try {
await client.connect();
@ -58,7 +26,7 @@ export async function mongoUpsert(contents, documentName, collectionName = "meta
// Insert a single document, wait for promise so we can read it back
// const p = await collection.insertOne(metaforecastDocument);
await collection.replaceOne(filter, document, { databaseUpsert: true });
await collection.replaceOne(filter, document, { upsert: true });
console.log(`Pushed document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(document)} MB`)
// Find one document
@ -76,7 +44,7 @@ export async function mongoUpsert(contents, documentName, collectionName = "meta
}
export async function mongoRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") {
const url = process.env.MONGODB_URL || getCookie("mongodb");
const url = process.env.MONGODB_URL || getSecret("mongodb");
const client = new MongoClient(url, {
useNewUrlParser: true,
@ -152,7 +120,7 @@ export async function mongoReadWithReadCredentials(documentName, collectionName
}
export async function mongoGetAllElements(databaseName = "metaforecastDatabase", collectionName = "metaforecastCollection") {
const url = process.env.MONGODB_URL || getCookie("mongodb");
const url = process.env.MONGODB_URL || getSecret("mongodb");
const client = new MongoClient(url, {
useNewUrlParser: true,
useUnifiedTopology: true,

View File

@ -1,49 +1,173 @@
import pkg from 'pg';
const { Pool } = pkg
const { Pool } = pkg;
import { platformNames } from "../platforms/all/platformNames.js"
import { getSecret } from '../utils/getSecrets.js';
import { roughSizeOfObject } from "../utils/roughSize.js"
// Definitions
const schemas = ["latest", "history"]
const tableNamesWhitelist = ["combined", ...platformNames]
const createFullName = (schemaName, namesArray) => namesArray.map(name => `${schemaName}.${name}`)
const tableWhiteList = [...createFullName("latest", tableNamesWhitelist), ...createFullName("history", tableNamesWhitelist)]
/* Postgres database connection code */
const pool = new Pool({
connectionString: process.env.DATABASE_URL,
connectionString: process.env.DATABASE_URL || getSecret("heroku-postgres"),
ssl: {
rejectUnauthorized: false
}
});
const tableWhiteList = ["latest.combined"]
export async function pgRead(tableName="latest.combined"){
if(tableWhiteList.includes(tableName)){
// Helpers
const runPgCommand = async (query) => {
console.log(query)
const client = await pool.connect();
const result = await client.query(`SELECT * from ${tableName}`);
const result = await client.query(query);
const results = { 'results': (result) ? result.rows : null };
// response.render('pages/db', results );
client.release();
// console.log(results)
return results
}
// Initialize
let dropTable = (schema, table) => `DROP TABLE IF EXISTS ${schema}.${table}`
let buildMetaforecastTable = (schema, table) => `CREATE TABLE ${schema}.${table} (
id text,
title text,
url text,
platform text,
description text,
options json,
timestamp timestamp,
stars int,
qualityindicators json,
extra json
);`
let createIndex = (schema, table) => `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`
let createUniqueIndex = (schema, table) => `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`
export async function pgInitialize() {
for (let schema of schemas) {
await runPgCommand(`CREATE SCHEMA IF NOT EXISTS ${schema}`)
}
await runPgCommand(`SET search_path TO ${schemas.join(",")},public;`)
for (let schema of schemas) {
for (let table of tableNamesWhitelist) {
await runPgCommand(dropTable(schema, table))
await runPgCommand(buildMetaforecastTable(schema, table))
if (schema == "history") {
await runPgCommand(createIndex(schema, table))
} else {
await runPgCommand(createUniqueIndex(schema, table))
}
}
}
}
// pgInitialize()
// Read
export async function pgRead({schema, tableName}) {
if (tableWhiteList.includes(`${schema}.${tableName}`)) {
let command = `SELECT * from ${schema}.${tableName}`
let response = await runPgCommand(command)
let results = response.results
return results
} else {
throw Error("Table not in whitelist; stopping to avoid tricky sql injections")
throw Error(`Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`)
}
}
export async function pgInsert(data, tableName="latest.combined"){
if(tableWhiteList.includes(tableName)){
let text = `INSERT INTO ${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`
export async function pgInsert({ datum, schema, tableName }) {
if (tableWhiteList.includes(`${schema}.${tableName}`)) {
let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`
let timestamp = datum.timestamp || new Date().toISOString()
timestamp = timestamp.slice(0, 19).replace("T", " ")
let values = [
data.id,
data.title,
data.url,
data.platform,
data.description || '',
data.options || [],
data.timestamp || Date.now(), // fix
data.stars || (data.qualityindicators ? data.qualityindicators.stars : 2),
data.qualityindicators || [],
data.extra || []
datum.id,
datum.title,
datum.url,
datum.platform,
datum.description || '',
JSON.stringify(datum.options || []),
timestamp, // fix
datum.stars || (datum.qualityindicators ? datum.qualityindicators.stars : 2),
JSON.stringify(datum.qualityindicators || []),
JSON.stringify(datum.extra || [])
]
const client = await pool.connect();
const result = await client.query(text, values);
client.release();
// console.log(result)
return result
} else {
throw Error("Table not in whitelist; stopping to avoid tricky sql injections")
throw Error(`Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`)
}
}
/*
pgInsert({
"id": "fantasyscotus-580",
"title": "In Wooden v. U.S., the SCOTUS will affirm the lower court's decision",
"url": "https://fantasyscotus.net/user-predictions/case/wooden-v-us/",
"platform": "FantasySCOTUS",
"description": "62.50% (75 out of 120) of FantasySCOTUS players predict that the lower court's decision will be affirmed. FantasySCOTUS overall predicts an outcome of Affirm 6-3. Historically, FantasySCOTUS has chosen the correct side 50.00% of the time.",
"options": [
{
"name": "Yes",
"probability": 0.625,
"type": "PROBABILITY"
},
{
"name": "No",
"probability": 0.375,
"type": "PROBABILITY"
}
],
"timestamp": "2022-02-11T21:42:19.291Z",
"qualityindicators": {
"numforecasts": 120,
"stars": 2
}
}
)
*/
export async function pgUpsert({ contents, schema, tableName }) {
if (tableWhiteList.includes(`${schema}.${tableName}`)) {
if (schema == "latest") {
await runPgCommand(dropTable(schema, tableName));
await runPgCommand(buildMetaforecastTable(schema, tableName));
await runPgCommand(createUniqueIndex(schema, tableName))
}
console.log(`Inserting into postgres table ${schema}.${tableName}`)
let i = 0
for (let datum of contents) {
await pgInsert({ datum, schema, tableName })
if (i < 10) {
console.log(`Inserted ${datum.id}`)
i++
} else if (i == 10) {
console.log("...")
i++
}
}
console.log(`Inserted rows with approximate cumulative size ${roughSizeOfObject(contents)} MB into ${schema}.${tableName}.`)
let check = await pgRead({schema, tableName})
console.log(`Received rows with approximate cumulative size ${roughSizeOfObject(check)} MB from ${schema}.${tableName}.`)
console.log("Sample: ")
console.log(JSON.stringify(check.slice(0,1), null, 4));
//console.log(JSON.stringify(check.slice(0, 1), null, 4));
} else {
throw Error(`Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`)
}
}
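As a sketch of how the new helpers are meant to be used end to end (assuming DATABASE_URL or the "heroku-postgres" secret is configured as above; not part of the commit):

import { pgInitialize, pgRead } from "./database/pg-wrapper.js"

// One-off setup: creates the latest/history schemas and one table per whitelisted name
await pgInitialize()

// Read back everything currently in latest.combined; pgRead returns an array of rows
let rows = await pgRead({ schema: "latest", tableName: "combined" })
console.log(`latest.combined currently holds ${rows.length} rows`)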

View File

@ -4,7 +4,6 @@ import { updateHistory } from "./history/updateHistory.js"
import { rebuildAlgoliaDatabase } from "../utils/algolia.js"
import { rebuildNetlifySiteWithNewData } from "./rebuildNetliftySiteWithNewData.js"
import { platformFetchers } from "../platforms/all-platforms.js"
/* Do everything */
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));

View File

@ -59,7 +59,7 @@ export async function addToHistory(){
newHistoryJSON.push(newHistoryElement)
}
await databaseUpsert(newHistoryJSON, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory")
await databaseUpsert({contents: newHistoryJSON, group: "history"})
// console.log(newHistoryJSON.slice(0,5))
// writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))

View File

@ -21,7 +21,7 @@ export async function createHistoryForMonth(){
})
}).filter(element => element.platform != "Metaculus" && element.platform != "Estimize")
//console.log(metaforecastsHistorySeed)
await databaseUpsert(metaforecastsHistorySeed, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory")
await databaseUpsert({contents: metaforecastsHistorySeed, group: "history"})
}
////createInitialHistory()

View File

@ -22,6 +22,6 @@ export async function mergeEverythingInner() {
export async function mergeEverything() {
let merged = await mergeEverythingInner();
await databaseUpsert(merged, "metaforecasts");
await databaseUpsert({ contents: merged, group: "combined" });
console.log("Done");
}

View File

@ -1,5 +1,5 @@
import axios from "axios"
import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"
import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js"
export async function rebuildNetlifySiteWithNewData_inner(cookie) {
let payload = ({});
@ -9,6 +9,6 @@ export async function rebuildNetlifySiteWithNewData_inner(cookie) {
}
export async function rebuildNetlifySiteWithNewData() {
let cookie = process.env.REBUIDNETLIFYHOOKURL || getCookie("netlify");
await applyIfCookieExists(cookie, rebuildNetlifySiteWithNewData_inner)
let cookie = process.env.REBUIDNETLIFYHOOKURL || getSecret("netlify");
await applyIfSecretExists(cookie, rebuildNetlifySiteWithNewData_inner)
}

View File

@ -7,19 +7,21 @@ import { mergeEverything } from "./flow/mergeEverything.js"
import { updateHistory } from "./flow/history/updateHistory.js"
import { rebuildAlgoliaDatabase } from "./utils/algolia.js"
import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData.js"
import { pgInitialize } from "./database/pg-wrapper.js"
import { doEverything, tryCatchTryAgain } from "./flow/doEverything.js"
/* Support functions */
let functions = [...platformFetchers, mergeEverything, updateHistory, rebuildAlgoliaDatabase, rebuildNetlifySiteWithNewData, doEverything]
let functions = [...platformFetchers, mergeEverything, updateHistory, rebuildAlgoliaDatabase, pgInitialize, rebuildNetlifySiteWithNewData, doEverything]
let functionNames = functions.map(fun => fun.name)
let whattodoMessage = functionNames
.slice(0, platformFetchers.length)
.map((functionName, i) => `[${i}]: Download predictions from ${functionName}`)
.join('\n') +
`\n[${functionNames.length - 5}]: Merge jsons into one big json (and push it to the mongodb database)` +
`\n[${functionNames.length - 4}]: Update history` +
`\n[${functionNames.length - 3}]: Rebuild algolia database ("index")` +
`\n[${functionNames.length - 6}]: Merge jsons into one big json (and push it to the mongodb database)` +
`\n[${functionNames.length - 5}]: Update history` +
`\n[${functionNames.length - 4}]: Rebuild algolia database ("index")` +
`\n[${functionNames.length - 3}]: Rebuild postgres database` +
`\n[${functionNames.length - 2}]: Rebuild netlify site with new data` +
// `\n[${functionNames.length-1}]: Add to history` +
`\n[${functionNames.length - 1}]: All of the above` +

View File

@ -12,7 +12,7 @@ let main = async () => {
let fileRaw = fs.readFileSync(`./src/input/${file + suffixFiles}`);
let fileContents = JSON.parse(fileRaw);
console.log(fileContents);
await databaseUpsert(fileContents, file + suffixMongo);
await databaseUpsert({contents: fileContents, group: file });
}
};
main();

View File

@ -1,61 +1,2 @@
import { betfair } from "./betfair-fetch.js";
import { fantasyscotus } from "./fantasyscotus-fetch.js";
import { foretold } from "./foretold-fetch.js";
import { goodjudgment } from "./goodjudgment-fetch.js";
import { goodjudgmentopen } from "./goodjudmentopen-fetch.js";
import { infer } from "./infer-fetch.js";
import { kalshi } from "./kalshi-fetch.js";
import { manifoldmarkets } from "./manifoldmarkets-fetch.js";
import { metaculus } from "./metaculus-fetch.js";
import { polymarket } from "./polymarket-fetch.js";
import { predictit } from "./predictit-fetch.js";
import { rootclaim } from "./rootclaim-fetch.js";
import { smarkets } from "./smarkets-fetch.js";
import { wildeford } from "./wildeford-fetch.js";
/* Deprecated
import { astralcodexten } from "../platforms/astralcodexten-fetch.js"
import { coupcast } from "../platforms/coupcast-fetch.js"
import { csetforetell } from "../platforms/csetforetell-fetch.js"
import { elicit } from "../platforms/elicit-fetch.js"
import { estimize } from "../platforms/estimize-fetch.js"
import { hypermind } from "../platforms/hypermind-fetch.js"
import { ladbrokes } from "../platforms/ladbrokes-fetch.js";
import { williamhill } from "../platforms/williamhill-fetch.js";
*/
export const platformFetchers = [
betfair,
fantasyscotus,
foretold,
goodjudgment,
goodjudgmentopen,
infer,
kalshi,
manifoldmarkets,
metaculus,
polymarket,
predictit,
rootclaim,
smarkets,
wildeford,
];
export const platformNames = [
"betfair",
"fantasyscotus",
"foretold",
"givewellopenphil",
"goodjudgment",
"goodjudmentopen",
"infer",
"kalshi",
"manifoldmarkets",
"metaculus",
"polymarket",
"predictit",
"rootclaim",
"smarkets",
"wildeford",
"xrisk",
];
// deprecated: "astralcodexten", "csetforetell", "coupcast", "elicit", "estimize", "hypermind", "ladbrokes", "omen", "williamhill", etc
export { platformFetchers } from "./all/platformFetchers.js";
export { platformNames } from "./all/platformNames.js";

View File

@ -0,0 +1,42 @@
import { betfair } from "../betfair-fetch.js";
import { fantasyscotus } from "../fantasyscotus-fetch.js";
import { foretold } from "../foretold-fetch.js";
import { goodjudgment } from "../goodjudgment-fetch.js";
import { goodjudgmentopen } from "../goodjudmentopen-fetch.js";
import { infer } from "../infer-fetch.js";
import { kalshi } from "../kalshi-fetch.js";
import { manifoldmarkets } from "../manifoldmarkets-fetch.js";
import { metaculus } from "../metaculus-fetch.js";
import { polymarket } from "../polymarket-fetch.js";
import { predictit } from "../predictit-fetch.js";
import { rootclaim } from "../rootclaim-fetch.js";
import { smarkets } from "../smarkets-fetch.js";
import { wildeford } from "../wildeford-fetch.js";
/* Deprecated
import { astralcodexten } from "../platforms/astralcodexten-fetch.js"
import { coupcast } from "../platforms/coupcast-fetch.js"
import { csetforetell } from "../platforms/csetforetell-fetch.js"
import { elicit } from "../platforms/elicit-fetch.js"
import { estimize } from "../platforms/estimize-fetch.js"
import { hypermind } from "../platforms/hypermind-fetch.js"
import { ladbrokes } from "../platforms/ladbrokes-fetch.js";
import { williamhill } from "../platforms/williamhill-fetch.js";
*/
export const platformFetchers = [
betfair,
fantasyscotus,
foretold,
goodjudgment,
goodjudgmentopen,
infer,
kalshi,
manifoldmarkets,
metaculus,
polymarket,
predictit,
rootclaim,
smarkets,
wildeford,
];

View File

@ -0,0 +1,20 @@
// This needs to be its own file to avoid cyclical dependencies.
export const platformNames = [
"betfair",
"fantasyscotus",
"foretold",
"givewellopenphil",
"goodjudgment",
"goodjudmentopen",
"infer",
"kalshi",
"manifoldmarkets",
"metaculus",
"polymarket",
"predictit",
"rootclaim",
"smarkets",
"wildeford",
"xrisk",
];
// deprecated: "astralcodexten", "csetforetell", "coupcast", "elicit", "estimize", "hypermind", "ladbrokes", "omen", "williamhill", etc

View File

@ -143,7 +143,7 @@ export async function betfair() {
// console.log(results.map(result => ({title: result.title, description: result.description})))
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('polyprediction-questions.json', string);
await databaseUpsert(results, "betfair-questions");
await databaseUpsert({ contents: results, group: "betfair" });
console.log("Done");
}
// betfair()

View File

@ -1,6 +1,6 @@
/* Imports */
import axios from "axios"
import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"
import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js"
import { Tabletojson } from "tabletojson"
import toMarkdown from "../utils/toMarkdown.js"
import { calculateStars } from "../utils/stars.js"
@ -249,6 +249,6 @@ async function csetforetell_inner(cookie) {
export async function csetforetell() {
let cookie = process.env.CSETFORETELL_COOKIE || getCookie("csetforetell")
await applyIfCookieExists(cookie, csetforetell_inner)
let cookie = process.env.CSETFORETELL_COOKIE || getSecret("csetforetell")
await applyIfSecretExists(cookie, csetforetell_inner)
}

View File

@ -3,7 +3,7 @@ import fs from 'fs'
import axios from "axios"
import https from "https"
import fetch from "isomorphic-fetch"
import {getCookie, applyIfCookieExists} from "../utils/getCookies.js"
import {getSecret, applyIfSecretExists} from "../utils/getSecrets.js"
import toMarkdown from "../utils/toMarkdown.js"
import { calculateStars } from "../utils/stars.js"
import { databaseUpsert } from "../utils/database-wrapper.js"
@ -178,6 +178,6 @@ async function hypermind_inner(cookie) {
//hypermind()
export async function hypermind() {
let cookie = process.env.HYPERMINDCOOKIE || getCookie("hypermind")
await applyIfCookieExists(cookie, hypermind_inner)
let cookie = process.env.HYPERMINDCOOKIE || getSecret("hypermind")
await applyIfSecretExists(cookie, hypermind_inner)
}

View File

@ -64,7 +64,7 @@ export async function example() {
// console.log(results)
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('polyprediction-questions.json', string);
await databaseUpsert(results, "example-questions");
await databaseUpsert({ contents: results, group: "example" });
console.log("Done");
}
//example()

View File

@ -118,7 +118,7 @@ export async function fantasyscotus() {
//console.log(results)
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/fantasyscotus-questions.json', string);
await databaseUpsert(results, "fantasyscotus-questions");
await databaseUpsert({ contents: results, group: "fantasyscotus" });
console.log("Done");
}
//fantasyscotus()

View File

@ -101,7 +101,8 @@ export async function foretold() {
}
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/foretold-questions.json', string);
await databaseUpsert(results, "foretold-questions");
await databaseUpsert({ contents: results, group: "foretold" });
console.log("Done");
}
// foretold()

View File

@ -64,6 +64,7 @@ async function main() {
}
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/givewell-questions-unprocessed.json', string);
await databaseUpsert(results, "givewell-questions-unprocessed");
await databaseUpsert({ contents: results, group: "givewell-questions-unprocessed" });
}
main();

View File

@ -122,7 +122,8 @@ export async function goodjudgment() {
// fs.writeFileSync('./data/goodjudgment-questions.json', string);
// fs.writeFileSync('./goodjudgment-questions-test.json', string);
console.log(results);
await databaseUpsert(results, "goodjudgment-questions");
await databaseUpsert({ contents: results, group: "goodjudgment" });
console.log(
"Failing is not unexpected; see utils/pullSuperforecastsManually.sh/js"
);

View File

@ -1,7 +1,7 @@
/* Imports */
import fs from "fs";
import axios from "axios";
import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
import { Tabletojson } from "tabletojson";
import { calculateStars } from "../utils/stars.js";
import toMarkdown from "../utils/toMarkdown.js";
@ -223,7 +223,8 @@ async function goodjudgmentopen_inner(cookie) {
// fs.writeFileSync('./data/goodjudmentopen-questions.json', string);
console.log(results);
if (results.length > 0) {
await databaseUpsert(results, "goodjudmentopen-questions");
await databaseUpsert({ contents: results, group: "goodjudmentopen" });
} else {
console.log("Not updating results, as process was not signed in");
}
@ -237,6 +238,6 @@ async function goodjudgmentopen_inner(cookie) {
export async function goodjudgmentopen() {
let cookie =
process.env.GOODJUDGMENTOPENCOOKIE || getCookie("goodjudmentopen");
await applyIfCookieExists(cookie, goodjudgmentopen_inner);
process.env.GOODJUDGMENTOPENCOOKIE || getSecret("goodjudmentopen");
await applyIfSecretExists(cookie, goodjudgmentopen_inner);
}

View File

@ -1,6 +1,6 @@
/* Imports */
import axios from "axios";
import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
import { Tabletojson } from "tabletojson";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
@ -269,7 +269,7 @@ async function infer_inner(cookie) {
// fs.writeFileSync('./data/infer-questions.json', string);
// console.log(results)
if (results.length > 0) {
await databaseUpsert(results, "infer-questions");
await databaseUpsert({ contents: results, group: "infer" });
} else {
console.log("Not updating results, as process was not signed in");
}
@ -282,6 +282,6 @@ async function infer_inner(cookie) {
}
export async function infer() {
let cookie = process.env.INFER_COOKIE || getCookie("infer");
await applyIfCookieExists(cookie, infer_inner);
let cookie = process.env.INFER_COOKIE || getSecret("infer");
await applyIfSecretExists(cookie, infer_inner);
}

View File

@ -83,7 +83,8 @@ export async function kalshi() {
// console.log(results)
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('polymarket-questions.json', string);
await databaseUpsert(results, "kalshi-questions");
await databaseUpsert({ contents: results, group: "kalshi" });
console.log("Done");
}
// kalshi()

View File

@ -96,7 +96,8 @@ export async function manifoldmarkets() {
// console.log(results)
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('polyprediction-questions.json', string);
await databaseUpsert(results, "manifoldmarkets-questions");
await databaseUpsert({ contents: results, group: "manifoldmarkets" });
console.log("Done");
}
// manifoldmarkets()

View File

@ -154,7 +154,7 @@ export async function metaculus() {
// let string = JSON.stringify(all_questions, null, 2)
// fs.writeFileSync('./metaculus-questions.json', string);
await databaseUpsert(all_questions, "metaculus-questions");
await databaseUpsert({ contents: all_questions, group: "metaculus" });
console.log("Done");
}

View File

@ -152,7 +152,8 @@ export async function polymarket() {
// console.log(results)
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('polymarket-questions.json', string);
await databaseUpsert(results, "polymarket-questions");
await databaseUpsert({ contents: results, group: "polymarket" });
console.log("Done");
}
// polymarket()

View File

@ -110,7 +110,7 @@ export async function predictit() {
//console.log(results)
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/predictit-questions.json', string);
await databaseUpsert(results, "predictit-questions");
await databaseUpsert({ contents: results, group: "predictit" });
console.log("Done");
}

View File

@ -69,7 +69,8 @@ export async function rootclaim() {
//console.log(JSON.stringify(results, null, 4))
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('rootclaim-questions.json', string);
await databaseUpsert(results, "rootclaim-questions");
await databaseUpsert({ contents: results, group: "rootclaim" });
console.log("Done");
}
//rootclaim()

View File

@ -153,6 +153,7 @@ export async function smarkets() {
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('./data/smarkets-questions.json', string);
await databaseUpsert(results, "smarkets-questions");
await databaseUpsert({ contents: results, group: "smarkets" });
}
//smarkets()

View File

@ -2,7 +2,7 @@
import fs from "fs";
// import axios from "axios"
import { GoogleSpreadsheet } from "google-spreadsheet";
import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
import toMarkdown from "../utils/toMarkdown.js";
import { calculateStars } from "../utils/stars.js";
import { hash } from "../utils/hash.js";
@ -125,12 +125,13 @@ export async function wildeford_inner(google_api_key) {
// console.log(results.sort((a,b) => (a.title > b.title)))
// let string = JSON.stringify(results, null, 2)
// fs.writeFileSync('polyprediction-questions.json', string);
await databaseUpsert(results, "wildeford-questions");
await databaseUpsert({ contents: results, group: "wildeford" });
console.log("Done");
}
//example()
export async function wildeford() {
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || getCookie("google-api"); // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey
await applyIfCookieExists(GOOGLE_API_KEY, wildeford_inner);
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || getSecret("google-api"); // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey
await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner);
}

View File

@ -1,10 +1,10 @@
import algoliasearch from 'algoliasearch';
import fs from "fs"
import {getCookie} from "./getCookies.js"
import {getSecret} from "./getSecrets.js"
import { databaseReadWithReadCredentials } from "../database/database-wrapper.js"
import { mergeEverythingInner } from '../flow/mergeEverything.js';
let cookie = process.env.ALGOLIA_MASTER_API_KEY || getCookie("algolia")
let cookie = process.env.ALGOLIA_MASTER_API_KEY || getSecret("algolia")
const client = algoliasearch('96UD3NTQ7L', cookie);
const index = client.initIndex('metaforecast');

View File

@ -1,9 +1,9 @@
import fs from 'fs'
export function getCookie(property){
export function getSecret(property){
let answer = 0
try {
let rawcookie = fs.readFileSync("./src/input/privatekeys.json")
let rawcookie = fs.readFileSync("./src/input/secrets.json")
let cookie = JSON.parse(rawcookie)
if (cookie[property]){
answer = cookie[property]
@ -15,7 +15,7 @@ export function getCookie(property){
return answer
}
export async function applyIfCookieExists(cookie, fun){
export async function applyIfSecretExists(cookie, fun){
if(cookie){
await fun(cookie)
}else if(!cookie){
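For reference, a minimal sketch of the shape that ./src/input/secrets.json is expected to have, inferred from the getSecret(property) calls in this commit; the keys shown are examples and all values are placeholders:

{
  "mongodb": "mongodb+srv://user:password@cluster/...",
  "heroku-postgres": "postgres://user:password@host:5432/dbname",
  "algolia": "algolia-master-api-key-placeholder",
  "netlify": "https://api.netlify.com/build_hooks/placeholder",
  "google-api": "google-api-key-placeholder"
}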

32
src/utils/roughSize.js Normal file
View File

@ -0,0 +1,32 @@
export function roughSizeOfObject(object) {
var objectList = [];
var stack = [object];
var bytes = 0;
while (stack.length) {
var value = stack.pop();
if (typeof value === 'boolean') {
bytes += 4;
}
else if (typeof value === 'string') {
bytes += value.length * 2;
}
else if (typeof value === 'number') {
bytes += 8;
}
else if
(
typeof value === 'object'
&& objectList.indexOf(value) === -1
) {
objectList.push(value);
for (var i in value) {
stack.push(value[i]);
}
}
}
let megaBytes = bytes / (1024) ** 2
let megaBytesRounded = Math.round(megaBytes * 10) / 10
return megaBytesRounded;
}
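A quick hypothetical usage of the extracted helper (not part of the commit): it walks the object graph, counting 2 bytes per string character, 8 per number and 4 per boolean, and returns an approximate size in megabytes rounded to one decimal:

import { roughSizeOfObject } from "./utils/roughSize.js"

let sample = { id: "example-1", title: "x".repeat(1024 * 1024) } // ~2 MB of string data
console.log(`approximate size: ${roughSizeOfObject(sample)} MB`) // -> approximate size: 2 MB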