feat: save changes to show Cole &co

NunoSempere 2022-02-14 13:58:36 -05:00
parent e280b36e20
commit 927114a8cc
4 changed files with 55326 additions and 293 deletions

data/frontpage.json (new file, 54791 additions)

File diff suppressed because one or more lines are too long

View File

@@ -1,107 +1,157 @@
import {
  mongoUpsert,
  mongoRead,
  mongoReadWithReadCredentials,
  mongoGetAllElements,
} from "./mongo-wrapper.js";
import { pgUpsert, pgRead, pgReadWithReadCredentials } from "./pg-wrapper.js";

export async function databaseUpsert({ contents, group }) {
  // No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear.
  // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){
  let mongoDocName;
  switch (group) {
    case "combined":
      mongoDocName = "metaforecasts";
      await mongoUpsert(
        contents,
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      await pgUpsert({ contents, schema: "latest", tableName: "combined" });
      break;
    case "history":
      let currentDate = new Date();
      let dateUpToMonth = currentDate
        .toISOString()
        .slice(0, 7)
        .replace("-", "_");
      mongoDocName = `metaforecast_history_${dateUpToMonth}`;
      await mongoUpsert(
        contents,
        mongoDocName,
        "metaforecastHistory",
        "metaforecastDatabase"
      );
      // await pgUpsert({ contents, schema: "history", tableName: "combined" })
      break;
    default:
      mongoDocName = `${group}-questions`;
      await mongoUpsert(
        contents,
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      await pgUpsert({ contents, schema: "latest", tableName: group });
  }
}
// databaseUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")

export async function databaseRead({ group }) {
  let response, mongoDocName, responseMongo, responsePg;
  let currentDate = new Date();
  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_"); // e.g., 2022_02
  let displayPossibleResponses = (response1, response2) => {
    console.log("Possible responses:");
    console.log("Mongo: ");
    console.log(response1.slice(0, 2));
    console.log("Postgres: ");
    console.log(response2.slice(0, 2));
    console.log("");
  };
  switch (group) {
    case "combined":
      mongoDocName = "metaforecasts";
      responseMongo = await mongoRead(
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      responsePg = await pgRead({ schema: "latest", tableName: "combined" });
      displayPossibleResponses(responseMongo, responsePg);
      break;
    case "history":
      mongoDocName = `metaforecast_history_${dateUpToMonth}`;
      responseMongo = await mongoRead(
        mongoDocName,
        "metaforecastHistory",
        "metaforecastDatabase"
      );
      // responsePg = await pgReadWithReadCredentials({ schema: "history", tableName: "combined" }) // fix, make dependent on month.
      break;
    default:
      mongoDocName = `${group}-questions`;
      responseMongo = await mongoRead(
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      responsePg = await pgRead({ schema: "latest", tableName: group });
  }
  response = responseMongo; // responsePg
  return response;
}
// databaseRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")

export async function databaseReadWithReadCredentials({ group }) {
  let response, mongoDocName, responseMongo, responsePg;
  let currentDate = new Date();
  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_"); // e.g., 2022_02
  let displayPossibleResponses = (response1, response2) => {
    console.log("Possible responses:");
    console.log("Mongo: ");
    console.log(response1.slice(0, 2));
    console.log("Postgres: ");
    console.log(response2.slice(0, 2));
    console.log("");
  };
  switch (group) {
    case "combined":
      mongoDocName = "metaforecasts";
      responseMongo = await mongoReadWithReadCredentials(
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      responsePg = await pgReadWithReadCredentials({
        schema: "latest",
        tableName: "combined",
      });
      displayPossibleResponses(responseMongo, responsePg);
      break;
    case "history":
      mongoDocName = `metaforecast_history_${dateUpToMonth}`;
      responseMongo = await mongoReadWithReadCredentials(
        mongoDocName,
        "metaforecastHistory",
        "metaforecastDatabase"
      );
      // responsePg = await pgReadWithReadCredentials({ schema: "history", tableName: "combined" }) // fix, make dependent on month.
      break;
    default:
      mongoDocName = `${group}-questions`;
      responseMongo = await mongoReadWithReadCredentials(
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      responsePg = await pgReadWithReadCredentials({
        schema: "latest",
        tableName: group,
      });
      displayPossibleResponses(responseMongo, responsePg);
  }
  response = responseMongo; // responsePg
  return response;
}
// databaseReadWithReadCredentials(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
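
A minimal usage sketch of these wrappers (hypothetical, not part of this commit; the import path and the idea of re-upserting a filtered subset are assumptions):

// Hypothetical sketch, not part of this commit.
import { databaseRead, databaseUpsert } from "./database-wrapper.js"; // assumed path

async function copyPlatformQuestions(platform) {
  const combined = await databaseRead({ group: "combined" });
  // Questions carry a `platform` field (see pgInsert in pg-wrapper.js).
  const subset = combined.filter((q) => q.platform === platform);
  // With a platform name as `group`, this hits the default branch above.
  await databaseUpsert({ contents: subset, group: platform });
}

copyPlatformQuestions("metaculus"); // "metaculus" assumed to be a valid platform/group name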

View File

@@ -1,54 +1,66 @@
import pkg from "pg";
const { Pool } = pkg;
import { platformNames } from "../platforms/all/platformNames.js";
import { getSecret } from "../utils/getSecrets.js";
import { roughSizeOfObject } from "../utils/roughSize.js";
import { hash } from "../utils/hash.js";

// Definitions
const schemas = ["latest", "history"];
const tableNamesWhitelist = ["combined", ...platformNames];
const createFullName = (schemaName, namesArray) =>
  namesArray.map((name) => `${schemaName}.${name}`);
const tableWhiteList = [
  ...createFullName("latest", tableNamesWhitelist),
  ...createFullName("history", tableNamesWhitelist),
  "latest.dashboards",
];

/* Postgres database connection code */
const databaseURL =
  process.env.DIGITALOCEAN_POSTGRES || getSecret("digitalocean-postgres");
// process.env.DATABASE_URL || getSecret("heroku-postgres")
const readWritePool = new Pool({
  connectionString: databaseURL,
  ssl: {
    rejectUnauthorized: false,
  },
});

const readOnlyDatabaseURL =
  getSecret("digitalocean-postgres-public") ||
  "postgresql://public_read_only_user:gOcihnLhqRIQUQYt@postgres-red-do-user-10290909-0.b.db.ondigitalocean.com:25060/metaforecastpg?sslmode=require";
const readOnlyPool = new Pool({
  connectionString: readOnlyDatabaseURL,
  ssl: {
    rejectUnauthorized: false,
  },
});

// Helpers
const runPgCommand = async ({ command, pool }) => {
  console.log(command);
  const client = await pool.connect();
  let result;
  try {
    let response = await client.query(command);
    console.log(response);
    result = { results: response ? response.rows : null };
  } catch (error) {
    console.log(error);
  } finally {
    client.release();
  }
  // console.log(results)
  return result;
};

// Initialize
let dropTable = (schema, table) => `DROP TABLE IF EXISTS ${schema}.${table}`;
let buildMetaforecastTable = (
  schema,
  table
) => `CREATE TABLE ${schema}.${table} (
  id text,
  title text,
  url text,
@@ -59,114 +71,193 @@ let buildMetaforecastTable = (schema, table) => `CREATE TABLE ${schema}.${table}
  stars int,
  qualityindicators json,
  extra json
);`;

let buildDashboard = () =>
  `CREATE TABLE latest.dashboards (
  id text,
  title text,
  description text,
  contents json,
  timestamp timestamp,
  creator text,
  extra json
);`;

let createIndex = (schema, table) =>
  `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`;
let createUniqueIndex = (schema, table) =>
  `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`;

export async function setPermissionsForPublicUser() {
  let initCommands = [
    "REVOKE ALL ON DATABASE metaforecastpg FROM public_read_only_user;",
    "GRANT CONNECT ON DATABASE metaforecastpg TO public_read_only_user;",
  ];
  for (let command of initCommands) {
    await runPgCommand({ command, pool: readWritePool });
  }

  let buildGrantSelectForSchema = (schema) =>
    `GRANT SELECT ON ALL TABLES IN SCHEMA ${schema} TO public_read_only_user`;
  for (let schema of schemas) {
    await runPgCommand({
      command: buildGrantSelectForSchema(schema),
      pool: readWritePool,
    });
  }

  let alterDefaultPrivilegesForSchema = (schema) =>
    `ALTER DEFAULT PRIVILEGES IN SCHEMA ${schema} GRANT SELECT ON TABLES TO public_read_only_user`;
  for (let schema of schemas) {
    await runPgCommand({
      command: alterDefaultPrivilegesForSchema(schema),
      pool: readWritePool,
    });
  }
}

export async function pgInitialize() {
  let YOLO = false;
  if (YOLO) {
    console.log("Create schemas");
    for (let schema of schemas) {
      await runPgCommand({
        command: `CREATE SCHEMA IF NOT EXISTS ${schema}`,
        pool: readWritePool,
      });
    }
    console.log("");

    console.log("Set search path");
    await runPgCommand({
      command: `SET search_path TO ${schemas.join(",")},public;`,
      pool: readWritePool,
    });
    console.log("");

    console.log("Set public user permissions");
    await setPermissionsForPublicUser();
    console.log("");

    console.log("Create tables & their indexes");
    for (let schema of schemas) {
      for (let table of tableNamesWhitelist) {
        await runPgCommand({
          command: dropTable(schema, table),
          pool: readWritePool,
        });
        await runPgCommand({
          command: buildMetaforecastTable(schema, table),
          pool: readWritePool,
        });
        if (schema == "history") {
          await runPgCommand({
            command: createIndex(schema, table),
            pool: readWritePool,
          });
        } else {
          await runPgCommand({
            command: createUniqueIndex(schema, table),
            pool: readWritePool,
          });
        }
      }
    }
  } else {
    console.log(
      "This command is dangerous, set YOLO to true in the code to invoke it"
    );
    console.log("Create dashboard table and its index");
    await runPgCommand({
      command: buildDashboard(),
      pool: readWritePool,
    });
    await runPgCommand({
      command: createUniqueIndex("latest", "dashboards"),
      pool: readWritePool,
    });
    console.log("");
  }
}
// pgInitialize()

// Read
async function pgReadWithPool({ schema, tableName, pool }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let command = `SELECT * from ${schema}.${tableName}`;
    let response = await runPgCommand({ command, pool });
    let results = response.results;
    return results;
  } else {
    throw Error(
      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
}

export async function pgRead({ schema, tableName }) {
  return await pgReadWithPool({ schema, tableName, pool: readWritePool });
}

export async function pgReadWithReadCredentials({ schema, tableName }) {
  // currently does not work.
  /* return await pgReadWithPool({
    schema,
    tableName,
    pool: readOnlyPool,
  });
  */
  return await pgReadWithPool({ schema, tableName, pool: readWritePool });
}

export async function pgGetByIds({ ids, schema, table }) {
  let idstring = `( ${ids.map((id) => `'${id}'`).join(", ")} )`; // (1, 2, 3)
  let command = `SELECT * from ${schema}.${table} where id in ${idstring}`;
  // see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn
  let response = await runPgCommand({ command, pool: readWritePool });
  let results = response.results;
  console.log(results);
  return results;
}

export async function pgInsert({ datum, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`;
    let timestamp = datum.timestamp || new Date().toISOString();
    timestamp = timestamp.slice(0, 19).replace("T", " ");
    let values = [
      datum.id,
      datum.title,
      datum.url,
      datum.platform,
      datum.description || "",
      JSON.stringify(datum.options || []),
      timestamp, // fix
      datum.stars ||
        (datum.qualityindicators ? datum.qualityindicators.stars : 2),
      JSON.stringify(datum.qualityindicators || []),
      JSON.stringify(datum.extra || []),
    ];
    const client = await readWritePool.connect();
    let result;
    try {
      result = await client.query(text, values);
    } catch (error) {
      console.log(error);
    } finally {
      client.release();
    }
    // console.log(result)
    return result;
  } else {
    throw Error(
      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
}

/* For reference:
pgInsert({
  "id": "fantasyscotus-580",
  "title": "In Wooden v. U.S., the SCOTUS will affirm the lower court's decision",
@@ -193,35 +284,110 @@ pgInsert({
  }
)
*/

export async function pgInsertIntoDashboard({ datum, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`;
    let timestamp = datum.timestamp || new Date().toISOString();
    timestamp = timestamp.slice(0, 19).replace("T", " ");
    let values = [
      hash(JSON.stringify(datum.contents)),
      datum.title || "",
      datum.description || "",
      JSON.stringify(datum.contents || []),
      timestamp, // fixed
      datum.creator || "",
      JSON.stringify(datum.extra || []),
    ];
    const client = await readWritePool.connect();
    let result;
    try {
      result = await client.query(text, values);
    } catch (error) {
      console.log(error);
    } finally {
      client.release();
    }
    // console.log(result)
    return result;
  } else {
    throw Error(
      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
}
/* For reference
  id text,
  title text,
  description text,
  contents json,
  timestamp timestamp,
  creator text,
  extra json
*/
pgInsertIntoDashboard({
  datum: {
    title: "Test dashboard",
    description: "A test dashboard",
    contents: [
      "rootclaim-did-former-new-england-patriots-tight-end-aaron-hernandez-commit-suicide-19060",
      "metaculus-3912",
      "givewellopenphil-2021-133",
      "rootclaim-what-happened-to-barry-and-honey-sherman-19972",
      "rootclaim-what-caused-the-disappearance-of-malaysia-airlines-flight-370",
    ],
    creator: "Nuño Sempere",
  },
  schema: "latest",
  tableName: "dashboards",
});

export async function pgUpsert({ contents, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    if (schema == "latest") {
      await runPgCommand({
        command: dropTable(schema, tableName),
        pool: readWritePool,
      });
      await runPgCommand({
        command: buildMetaforecastTable(schema, tableName),
        pool: readWritePool,
      });
      await runPgCommand({
        command: createUniqueIndex(schema, tableName),
        pool: readWritePool,
      });
    }
    console.log(`Upserting into postgres table ${schema}.${tableName}`);
    let i = 0;
    for (let datum of contents) {
      await pgInsert({ datum, schema, tableName });
      if (i < 10) {
        console.log(`Inserted ${datum.id}`);
        i++;
      } else if (i == 10) {
        console.log("...");
        i++;
      }
    }
    console.log(
      `Inserted rows with approximate cumulative size ${roughSizeOfObject(
        contents
      )} MB into ${schema}.${tableName}.`
    );
    let check = await pgRead({ schema, tableName });
    console.log(
      `Received rows with approximate cumulative size ${roughSizeOfObject(
        check
      )} MB from ${schema}.${tableName}.`
    );
    console.log("Sample: ");
    console.log(JSON.stringify(check.slice(0, 1), null, 4));
    //console.log(JSON.stringify(check.slice(0, 1), null, 4));
  } else {
    throw Error(
      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
}
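
A hedged sketch of reading a dashboard row back by its content hash, mirroring the id scheme pgInsertIntoDashboard uses; the import paths are assumptions, and note that pgGetByIds does not check its table argument against the whitelist:

// Hypothetical sketch, not part of this commit.
import { pgGetByIds } from "./pg-wrapper.js"; // assumed path
import { hash } from "../utils/hash.js"; // same util pg-wrapper.js imports

async function readDashboard(contents) {
  const id = hash(JSON.stringify(contents)); // same id scheme as pgInsertIntoDashboard
  const rows = await pgGetByIds({ ids: [id], schema: "latest", table: "dashboards" });
  return rows ? rows[0] : null;
}

readDashboard(["metaculus-3912", "givewellopenphil-2021-133"]).then(console.log);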

View File

@@ -1,71 +1,97 @@
/* Imports */
import fs from "fs";
import readline from "readline";
import { platformFetchers } from "./platforms/all-platforms.js";
import { mergeEverything } from "./flow/mergeEverything.js";
import { updateHistory } from "./flow/history/updateHistory.js";
import { rebuildAlgoliaDatabase } from "./utils/algolia.js";
import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData.js";
import {
  pgInitialize,
  setPermissionsForPublicUser,
} from "./database/pg-wrapper.js";
import { doEverything, tryCatchTryAgain } from "./flow/doEverything.js";

/* Support functions */
let functions = [
  ...platformFetchers,
  mergeEverything,
  updateHistory,
  rebuildAlgoliaDatabase,
  rebuildNetlifySiteWithNewData,
  doEverything,
  setPermissionsForPublicUser,
  pgInitialize,
];
let functionNames = functions.map((fun) => fun.name);

let generateWhatToDoMessage = () => {
  let l = platformFetchers.length;
  let messagesForFetchers = platformFetchers.map(
    (fun, i) => `[${i}]: Download predictions from ${fun.name}`
  );
  let otherMessages = [
    "Merge jsons into one big json (and push it to mongodb database)",
    `Update history`,
    `Rebuild algolia database ("index")`,
    `Rebuild netlify site with new data`,
    // `\n[${functionNames.length-1}]: Add to history` +
    `All of the above`,
    `Initialize permissions for postgres public user`,
    `Rebuild postgres database`,
  ];
  let otherMessagesWithNums = otherMessages.map(
    (message, i) => `[${i + l}]: ${message}`
  );
  let completeMessages = [
    ...messagesForFetchers,
    ...otherMessagesWithNums,
    `\nChoose one option, wisely: #`,
  ].join("\n");
  return completeMessages;
};
let whattodoMessage = generateWhatToDoMessage();

/* BODY */
let commandLineUtility = async () => {
  let whattodo = async (message, callback) => {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });
    rl.question(message, async (answer) => {
      rl.close();
      await callback(answer);
    });
  };

  let executeoption = async (option) => {
    option = Number(option);
    //console.log(functionNames[option])
    if (option < 0) {
      console.log(`Error, ${option} < 0`);
    } else if (option < functions.length) {
      console.log(`Running: ${functions[option].name}\n`);
      await tryCatchTryAgain(functions[option]);
    }
  };

  if (process.argv.length == 3) {
    const option = process.argv[2]; // e.g., npm start 15 <-
    const optionNum = Number(option);
    if (!isNaN(optionNum)) {
      await executeoption(optionNum);
    } else if (option == "all") {
      await executeoption(functions.length - 1); // 15 = execute all fetchers
    } else {
      await whattodo(whattodoMessage, executeoption);
    }
  } else await whattodo(whattodoMessage, executeoption);
};

// console.log("1")
// console.log(process.argv)
commandLineUtility();
// doEverything()
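
Because the CLI maps numeric options directly onto the functions array, the two new entries (setPermissionsForPublicUser, pgInitialize) take the last two indices, right after doEverything. A small sketch that prints the non-fetcher option numbers, assuming platformFetchers is importable from the same path index.js uses:

// Hypothetical sketch, not part of this commit: list the indices of the
// non-fetcher options, mirroring the order of `functions` in index.js.
import { platformFetchers } from "./platforms/all-platforms.js"; // path as in index.js

const l = platformFetchers.length;
[
  "mergeEverything",
  "updateHistory",
  "rebuildAlgoliaDatabase",
  "rebuildNetlifySiteWithNewData",
  "doEverything",
  "setPermissionsForPublicUser",
  "pgInitialize",
].forEach((name, i) => console.log(`[${i + l}]: ${name}`));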