feat: save changes to show Cole &co
This commit is contained in:
parent e280b36e20
commit 927114a8cc

54791 data/frontpage.json Normal file
File diff suppressed because one or more lines are too long

@@ -1,107 +1,157 @@
import {
  mongoUpsert,
  mongoRead,
  mongoReadWithReadCredentials,
  mongoGetAllElements,
} from "./mongo-wrapper.js";
import { pgUpsert, pgRead, pgReadWithReadCredentials } from "./pg-wrapper.js";

export async function databaseUpsert({ contents, group }) {
  // No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear.
  // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){
  let mongoDocName;
  switch (group) {
    case "combined":
      mongoDocName = "metaforecasts";
      await mongoUpsert(
        contents,
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      await pgUpsert({ contents, schema: "latest", tableName: "combined" });
      break;
    case "history":
      let currentDate = new Date();
      let dateUpToMonth = currentDate
        .toISOString()
        .slice(0, 7)
        .replace("-", "_");
      mongoDocName = `metaforecast_history_${dateUpToMonth}`;
      await mongoUpsert(
        contents,
        mongoDocName,
        "metaforecastHistory",
        "metaforecastDatabase"
      );
      // await pgUpsert({ contents, schema: "history", tableName: "combined" })
      break;
    default:
      mongoDocName = `${group}-questions`;
      await mongoUpsert(
        contents,
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      await pgUpsert({ contents, schema: "latest", tableName: group });
  }
}
// databaseUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
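// For illustration, a hedged sketch of a call site (hypothetical data; a platform
// fetcher would pass its parsed questions here):
//   let questions = [{ id: "example-1", title: "Will X happen?", url: "https://example.com" }];
//   await databaseUpsert({ contents: questions, group: "combined" });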

export async function databaseRead({ group }) {
  let response, mongoDocName, responseMongo, responsePg;
  let currentDate = new Date();
  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_"); // e.g., 2022_02

  let displayPossibleResponses = (response1, response2) => {
    console.log("Possible responses:");
    console.log("Mongo: ");
    console.log(response1.slice(0, 2));
    console.log("Postgres: ");
    console.log(response2.slice(0, 2));
    console.log("");
  };

  switch (group) {
    case "combined":
      mongoDocName = "metaforecasts";
      responseMongo = await mongoRead(
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      responsePg = await pgRead({ schema: "latest", tableName: "combined" });
      displayPossibleResponses(responseMongo, responsePg);
      break;
    case "history":
      mongoDocName = `metaforecast_history_${dateUpToMonth}`;
      responseMongo = await mongoRead(
        mongoDocName,
        "metaforecastHistory",
        "metaforecastDatabase"
      );
      // responsePg = await pgReadWithReadCredentials({ schema: "history", tableName: "combined" }) // fix, make dependent on month.
      break;
    default:
      mongoDocName = `${group}-questions`;
      responseMongo = await mongoRead(
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      responsePg = await pgRead({ schema: "latest", tableName: group });
  }

  response = responseMongo; // responsePg
  return response;
}
// databaseRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
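// For illustration, a hedged usage sketch:
//   let combined = await databaseRead({ group: "combined" }); // all questions
//   let history = await databaseRead({ group: "history" }); // this month's history document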

export async function databaseReadWithReadCredentials({ group }) {
  let response, mongoDocName, responseMongo, responsePg;
  let currentDate = new Date();
  let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_"); // e.g., 2022_02

  let displayPossibleResponses = (response1, response2) => {
    console.log("Possible responses:");
    console.log("Mongo: ");
    console.log(response1.slice(0, 2));
    console.log("Postgres: ");
    console.log(response2.slice(0, 2));
    console.log("");
  };

  switch (group) {
    case "combined":
      mongoDocName = "metaforecasts";
      responseMongo = await mongoReadWithReadCredentials(
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      responsePg = await pgReadWithReadCredentials({
        schema: "latest",
        tableName: "combined",
      });
      displayPossibleResponses(responseMongo, responsePg);
      break;
    case "history":
      mongoDocName = `metaforecast_history_${dateUpToMonth}`;
      responseMongo = await mongoReadWithReadCredentials(
        mongoDocName,
        "metaforecastHistory",
        "metaforecastDatabase"
      );
      // responsePg = await pgReadWithReadCredentials({ schema: "history", tableName: "combined" }) // fix, make dependent on month.
      break;
    default:
      mongoDocName = `${group}-questions`;
      responseMongo = await mongoReadWithReadCredentials(
        mongoDocName,
        "metaforecastCollection",
        "metaforecastDatabase"
      );
      responsePg = await pgReadWithReadCredentials({
        schema: "latest",
        tableName: group,
      });
      displayPossibleResponses(responseMongo, responsePg);
  }

  response = responseMongo; // responsePg
  return response;
}
// databaseReadWithReadCredentials(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
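// For illustration: same call shape as databaseRead, but meant for consumers holding
// only the public read-only credentials, e.g.:
//   let questions = await databaseReadWithReadCredentials({ group: "combined" });
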
@@ -1,54 +1,66 @@
import pkg from "pg";
const { Pool } = pkg;
import { platformNames } from "../platforms/all/platformNames.js";
import { getSecret } from "../utils/getSecrets.js";
import { roughSizeOfObject } from "../utils/roughSize.js";
import { hash } from "../utils/hash.js";

// Definitions
const schemas = ["latest", "history"];
const tableNamesWhitelist = ["combined", ...platformNames];
const createFullName = (schemaName, namesArray) =>
  namesArray.map((name) => `${schemaName}.${name}`);
const tableWhiteList = [
  ...createFullName("latest", tableNamesWhitelist),
  ...createFullName("history", tableNamesWhitelist),
  "latest.dashboards",
];
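// For illustration (hypothetical platform names; the real ones come from platformNames.js):
// if platformNames were ["metaculus", "goodjudgment"], tableWhiteList would expand to
//   ["latest.combined", "latest.metaculus", "latest.goodjudgment",
//    "history.combined", "history.metaculus", "history.goodjudgment",
//    "latest.dashboards"]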

/* Postgres database connection code */
const databaseURL =
  process.env.DIGITALOCEAN_POSTGRES || getSecret("digitalocean-postgres");
// process.env.DATABASE_URL || getSecret("heroku-postgres")
const readWritePool = new Pool({
  connectionString: databaseURL,
  ssl: {
    rejectUnauthorized: false,
  },
});

const readOnlyDatabaseURL =
  getSecret("digitalocean-postgres-public") ||
  "postgresql://public_read_only_user:gOcihnLhqRIQUQYt@postgres-red-do-user-10290909-0.b.db.ondigitalocean.com:25060/metaforecastpg?sslmode=require";
const readOnlyPool = new Pool({
  connectionString: readOnlyDatabaseURL,
  ssl: {
    rejectUnauthorized: false,
  },
});

// Helpers
const runPgCommand = async ({ command, pool }) => {
  console.log(command);
  const client = await pool.connect();
  let result;
  try {
    let response = await client.query(command);
    console.log(response);
    result = { results: response ? response.rows : null };
  } catch (error) {
    console.log(error);
  } finally {
    client.release();
  }
  // console.log(results)
  return result;
};
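// For illustration, a hedged usage sketch (hypothetical query):
//   let res = await runPgCommand({
//     command: "SELECT * FROM latest.combined LIMIT 1",
//     pool: readWritePool,
//   });
//   console.log(res.results);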

// Initialize
let dropTable = (schema, table) => `DROP TABLE IF EXISTS ${schema}.${table}`;
let buildMetaforecastTable = (
  schema,
  table
) => `CREATE TABLE ${schema}.${table} (
  id text,
  title text,
  url text,

@@ -59,114 +71,193 @@ let buildMetaforecastTable = (schema, table) => `CREATE TABLE ${schema}.${table}
  stars int,
  qualityindicators json,
  extra json
);`;

let buildDashboard = () =>
  `CREATE TABLE latest.dashboards (
  id text,
  title text,
  description text,
  contents json,
  timestamp timestamp,
  creator text,
  extra json
);`;

let createIndex = (schema, table) =>
  `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`;
let createUniqueIndex = (schema, table) =>
  `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`;

export async function setPermissionsForPublicUser() {
  let initCommands = [
    "REVOKE ALL ON DATABASE metaforecastpg FROM public_read_only_user;",
    "GRANT CONNECT ON DATABASE metaforecastpg TO public_read_only_user;",
  ];
  for (let command of initCommands) {
    await runPgCommand({ command, pool: readWritePool });
  }

  let buildGrantSelectForSchema = (schema) =>
    `GRANT SELECT ON ALL TABLES IN SCHEMA ${schema} TO public_read_only_user`;
  for (let schema of schemas) {
    await runPgCommand({
      command: buildGrantSelectForSchema(schema),
      pool: readWritePool,
    });
  }

  let alterDefaultPrivilegesForSchema = (schema) =>
    `ALTER DEFAULT PRIVILEGES IN SCHEMA ${schema} GRANT SELECT ON TABLES TO public_read_only_user`;
  for (let schema of schemas) {
    await runPgCommand({
      command: alterDefaultPrivilegesForSchema(schema),
      pool: readWritePool,
    });
  }
}
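// For illustration, the commands this issues for schemas = ["latest", "history"] include:
//   GRANT SELECT ON ALL TABLES IN SCHEMA latest TO public_read_only_user
//   ALTER DEFAULT PRIVILEGES IN SCHEMA history GRANT SELECT ON TABLES TO public_read_only_user
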
export async function pgInitialize() {
  let YOLO = false;
  if (YOLO) {
    console.log("Create schemas");
    for (let schema of schemas) {
      await runPgCommand({
        command: `CREATE SCHEMA IF NOT EXISTS ${schema}`,
        pool: readWritePool,
      });
    }
    console.log("");

    console.log("Set search path");
    await runPgCommand({
      command: `SET search_path TO ${schemas.join(",")},public;`,
      pool: readWritePool,
    });
    console.log("");

    console.log("Set public user permissions");
    await setPermissionsForPublicUser();
    console.log("");

    console.log("Create tables & their indexes");
    for (let schema of schemas) {
      for (let table of tableNamesWhitelist) {
        await runPgCommand({
          command: dropTable(schema, table),
          pool: readWritePool,
        });
        await runPgCommand({
          command: buildMetaforecastTable(schema, table),
          pool: readWritePool,
        });
        if (schema == "history") {
          await runPgCommand({
            command: createIndex(schema, table),
            pool: readWritePool,
          });
        } else {
          await runPgCommand({
            command: createUniqueIndex(schema, table),
            pool: readWritePool,
          });
        }
      }
    }
  } else {
    console.log(
      "This command is dangerous, set YOLO to true in the code to invoke it"
    );
    console.log("Create dashboard table and its index");

    await runPgCommand({
      command: buildDashboard(),
      pool: readWritePool,
    });

    await runPgCommand({
      command: createUniqueIndex("latest", "dashboards"),
      pool: readWritePool,
    });
    console.log("");
  }
}
// pgInitialize()

// Read
async function pgReadWithPool({ schema, tableName, pool }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let command = `SELECT * from ${schema}.${tableName}`;
    let response = await runPgCommand({ command, pool });
    let results = response.results;
    return results;
  } else {
    throw Error(
      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
}

export async function pgRead({ schema, tableName }) {
  return await pgReadWithPool({ schema, tableName, pool: readWritePool });
}

export async function pgReadWithReadCredentials({ schema, tableName }) {
  // currently does not work.
  /* return await pgReadWithPool({
    schema,
    tableName,
    pool: readOnlyPool,
  });
  */
  return await pgReadWithPool({ schema, tableName, pool: readWritePool });
}

export async function pgGetByIds({ ids, schema, table }) {
  let idstring = `( ${ids.map((id) => `'${id}'`).join(", ")} )`; // e.g., ( 'id1', 'id2', 'id3' )
  let command = `SELECT * from ${schema}.${table} where id in ${idstring}`;
  // see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn
  let response = await runPgCommand({ command, pool: readWritePool });
  let results = response.results;
  console.log(results);
  return results;
}
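// For illustration, a hedged usage sketch (hypothetical ids):
//   let rows = await pgGetByIds({
//     ids: ["metaculus-3912", "fantasyscotus-580"],
//     schema: "latest",
//     table: "combined",
//   });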

export async function pgInsert({ datum, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`;
    let timestamp = datum.timestamp || new Date().toISOString();
    timestamp = timestamp.slice(0, 19).replace("T", " ");
    let values = [
      datum.id,
      datum.title,
      datum.url,
      datum.platform,
      datum.description || "",
      JSON.stringify(datum.options || []),
      timestamp, // fix
      datum.stars ||
        (datum.qualityindicators ? datum.qualityindicators.stars : 2),
      JSON.stringify(datum.qualityindicators || []),
      JSON.stringify(datum.extra || []),
    ];
    const client = await readWritePool.connect();
    let result;
    try {
      result = await client.query(text, values);
    } catch (error) {
      console.log(error);
    } finally {
      client.release();
    }
    // console.log(result)
    return result;
  } else {
    throw Error(
      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
}

/* For reference:
pgInsert({
  "id": "fantasyscotus-580",
  "title": "In Wooden v. U.S., the SCOTUS will affirm the lower court's decision",

@@ -193,35 +284,110 @@ pgInsert({
}
)
*/
export async function pgInsertIntoDashboard({ datum, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`;
    let timestamp = datum.timestamp || new Date().toISOString();
    timestamp = timestamp.slice(0, 19).replace("T", " ");
    let values = [
      hash(JSON.stringify(datum.contents)),
      datum.title || "",
      datum.description || "",
      JSON.stringify(datum.contents || []),
      timestamp, // fixed
      datum.creator || "",
      JSON.stringify(datum.extra || []),
    ];
    const client = await readWritePool.connect();
    let result;
    try {
      result = await client.query(text, values);
    } catch (error) {
      console.log(error);
    } finally {
      client.release();
    }
    // console.log(result)
    return result;
  } else {
    throw Error(
      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
}
/* For reference
  id text,
  title text,
  description text,
  contents json,
  timestamp timestamp,
  creator text,
  extra json
*/

pgInsertIntoDashboard({
  datum: {
    title: "Test dashboard",
    description: "A test dashboard",
    contents: [
      "rootclaim-did-former-new-england-patriots-tight-end-aaron-hernandez-commit-suicide-19060",
      "metaculus-3912",
      "givewellopenphil-2021-133",
      "rootclaim-what-happened-to-barry-and-honey-sherman-19972",
      "rootclaim-what-caused-the-disappearance-of-malaysia-airlines-flight-370",
    ],
    creator: "Nuño Sempere",
  },
  schema: "latest",
  tableName: "dashboards",
});

export async function pgUpsert({ contents, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    if (schema == "latest") {
      await runPgCommand({
        command: dropTable(schema, tableName),
        pool: readWritePool,
      });
      await runPgCommand({
        command: buildMetaforecastTable(schema, tableName),
        pool: readWritePool,
      });
      await runPgCommand({
        command: createUniqueIndex(schema, tableName),
        pool: readWritePool,
      });
    }
    console.log(`Upserting into postgres table ${schema}.${tableName}`);
    let i = 0;
    for (let datum of contents) {
      await pgInsert({ datum, schema, tableName });
      if (i < 10) {
        console.log(`Inserted ${datum.id}`);
        i++;
      } else if (i == 10) {
        console.log("...");
        i++;
      }
    }
    console.log(
      `Inserted rows with approximate cumulative size ${roughSizeOfObject(
        contents
      )} MB into ${schema}.${tableName}.`
    );
    let check = await pgRead({ schema, tableName });
    console.log(
      `Received rows with approximate cumulative size ${roughSizeOfObject(
        check
      )} MB from ${schema}.${tableName}.`
    );
    console.log("Sample: ");
    console.log(JSON.stringify(check.slice(0, 1), null, 4));

    //console.log(JSON.stringify(check.slice(0, 1), null, 4));
  } else {
    throw Error(
      `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
}
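// For illustration, a hedged usage sketch (hypothetical datum; real contents come
// from the platform fetchers):
//   await pgUpsert({
//     contents: [{ id: "example-1", title: "Will X happen?", url: "https://example.com", platform: "Example" }],
//     schema: "latest",
//     tableName: "combined",
//   });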

140 src/index.js

@@ -1,71 +1,97 @@
/* Imports */
import fs from "fs";
import readline from "readline";

import { platformFetchers } from "./platforms/all-platforms.js";
import { mergeEverything } from "./flow/mergeEverything.js";
import { updateHistory } from "./flow/history/updateHistory.js";
import { rebuildAlgoliaDatabase } from "./utils/algolia.js";
import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData.js";
import {
  pgInitialize,
  setPermissionsForPublicUser,
} from "./database/pg-wrapper.js";
import { doEverything, tryCatchTryAgain } from "./flow/doEverything.js";

/* Support functions */
let functions = [
  ...platformFetchers,
  mergeEverything,
  updateHistory,
  rebuildAlgoliaDatabase,
  rebuildNetlifySiteWithNewData,
  doEverything,
  setPermissionsForPublicUser,
  pgInitialize,
];
let functionNames = functions.map((fun) => fun.name);

let generateWhatToDoMessage = () => {
  let l = platformFetchers.length;
  let messagesForFetchers = platformFetchers.map(
    (fun, i) => `[${i}]: Download predictions from ${fun.name}`
  );
  let otherMessages = [
    "Merge jsons into one big json (and push it to the mongodb database)",
    `Update history`,
    `Rebuild algolia database ("index")`,
    `Rebuild netlify site with new data`,
    // `\n[${functionNames.length-1}]: Add to history` +
    `All of the above`,
    `Initialize permissions for postgres public user`,
    `Rebuild postgres database`,
  ];
  let otherMessagesWithNums = otherMessages.map(
    (message, i) => `[${i + l}]: ${message}`
  );
  let completeMessages = [
    ...messagesForFetchers,
    ...otherMessagesWithNums,
    `\nChoose one option, wisely: #`,
  ].join("\n");
  return completeMessages;
};

let whattodoMessage = generateWhatToDoMessage();
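// For illustration, hypothetical output (indices depend on how many fetchers are loaded):
//   [0]: Download predictions from metaculus
//   ...
//   [14]: All of the above
//   [15]: Initialize permissions for postgres public user
//   [16]: Rebuild postgres database
//
//   Choose one option, wisely: #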

/* BODY */
let commandLineUtility = async () => {
  let whattodo = async (message, callback) => {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });
    rl.question(message, async (answer) => {
      rl.close();
      await callback(answer);
    });
  };

  let executeoption = async (option) => {
    option = Number(option);
    //console.log(functionNames[option])
    if (option < 0) {
      console.log(`Error: option ${option} < 0`);
    } else if (option < functions.length) {
      console.log(`Running: ${functions[option].name}\n`);
      await tryCatchTryAgain(functions[option]);
    }
  };

  if (process.argv.length == 3) {
    const option = process.argv[2]; // e.g., npm start 15 <-
    const optionNum = Number(option);
    if (!isNaN(optionNum)) {
      await executeoption(optionNum);
    } else if (option == "all") {
      await executeoption(functions.length - 1); // 15 = execute all fetchers
    } else {
      await whattodo(whattodoMessage, executeoption);
    }
  } else await whattodo(whattodoMessage, executeoption);
};

// console.log("1")
// console.log(process.argv)
commandLineUtility();
// doEverything()