cleanup: unnecessary db-related code

This commit is contained in:
Vyacheslav Matyukhin 2022-04-07 03:53:08 +03:00
parent 6a6597c657
commit 5e34ebf35e
No known key found for this signature in database
GPG Key ID: 3D2A774C5489F96C
12 changed files with 28 additions and 281 deletions

View File

@@ -11,7 +11,7 @@ const allTableNames = [...forecastTableNames, "dashboards", "frontpage"];
/* Postgres database connection code */ /* Postgres database connection code */
const databaseURL = process.env.DIGITALOCEAN_POSTGRES; const databaseURL = process.env.DIGITALOCEAN_POSTGRES;
export const readWritePool = new Pool({ export const pool = new Pool({
connectionString: databaseURL, connectionString: databaseURL,
ssl: process.env.POSTGRES_NO_SSL ssl: process.env.POSTGRES_NO_SSL
? false ? false
@@ -20,258 +20,15 @@ export const readWritePool = new Pool({
}, },
}); });
const readOnlyDatabaseURL =
"postgresql://public_read_only_user:gOcihnLhqRIQUQYt@postgres-red-do-user-10290909-0.b.db.ondigitalocean.com:25060/metaforecastpg?sslmode=require" ||
process.env.DIGITALOCEAN_POSTGRES_PUBLIC;
const readOnlyPool = new Pool({
// never used
connectionString: readOnlyDatabaseURL,
ssl: process.env.POSTGRES_NO_SSL
? false
: {
rejectUnauthorized: false,
},
});
// Helpers
export const runPgCommand = async ({
command,
pool,
}: {
command: string;
pool: Pool;
}) => {
console.log(command);
const client = await pool.connect();
let result;
try {
let response = await client.query(command);
result = { results: response ? response.rows : null };
} catch (error) {
console.log(error);
} finally {
client.release();
}
return result;
};
// Initialize
let dropTable = (table: string) => `DROP TABLE IF EXISTS ${table}`;
let createIndex = (table: string) =>
`CREATE INDEX ${table}_id_index ON ${table} (id);`;
let createUniqueIndex = (table: string) =>
`CREATE UNIQUE INDEX ${table}_id_index ON ${table} (id);`;
async function pgInitializeScaffolding() {
async function setPermissionsForPublicUser() {
let initCommands = [
"REVOKE ALL ON DATABASE metaforecastpg FROM public_read_only_user;",
"GRANT CONNECT ON DATABASE metaforecastpg TO public_read_only_user;",
];
for (let command of initCommands) {
await runPgCommand({ command, pool: readWritePool });
}
await runPgCommand({
command:
"GRANT SELECT ON ALL TABLES IN SCHEMA public TO public_read_only_user",
pool: readWritePool,
});
await runPgCommand({
command:
"ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO public_read_only_user",
pool: readWritePool,
});
}
let YOLO = false;
if (YOLO) {
console.log("Set public user permissions");
await setPermissionsForPublicUser();
console.log("");
} else {
console.log(
"pgInitializeScaffolding: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
let buildMetaforecastTable = (table: string) => `CREATE TABLE ${table} (
id text,
title text,
url text,
platform text,
description text,
options json,
timestamp timestamp,
stars int,
qualityindicators json,
extra json
);`;
async function pgInitializeQuestions() {
let YOLO = false;
if (YOLO) {
console.log("Create tables & their indexes");
const table = "questions";
await runPgCommand({
command: dropTable(table),
pool: readWritePool,
});
await runPgCommand({
command: buildMetaforecastTable(table),
pool: readWritePool,
});
await runPgCommand({
command: createUniqueIndex(table),
pool: readWritePool,
});
console.log("");
} else {
console.log(
"pgInitializeQuestions: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
async function pgInitializeDashboards() {
let buildDashboard = () =>
`CREATE TABLE dashboards (
id text,
title text,
description text,
contents json,
timestamp timestamp,
creator text,
extra json
);`;
let YOLO = false;
if (YOLO) {
console.log("Create dashboard table and its index");
await runPgCommand({
command: dropTable("dashboards"),
pool: readWritePool,
});
await runPgCommand({
command: buildDashboard(),
pool: readWritePool,
});
await runPgCommand({
command: createUniqueIndex("dashboards"),
pool: readWritePool,
});
console.log("");
} else {
console.log(
"pgInitializeDashboard: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
let buildHistoryTable = (table: string) => `CREATE TABLE ${table} (
id text,
title text,
url text,
platform text,
description text,
options json,
timestamp timestamp,
stars int,
qualityindicators json,
extra json
);`;
export async function pgInitializeHistories() {
let YOLO = false;
if (YOLO) {
console.log("Create history table & index");
await runPgCommand({
command: dropTable("history"),
pool: readWritePool,
});
await runPgCommand({
command: buildHistoryTable("history"),
pool: readWritePool,
});
await runPgCommand({
command: createIndex("history"), // Not unique!!
pool: readWritePool,
});
console.log("");
} else {
console.log(
"pgInitializeHistories: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
async function pgInitializeFrontpage() {
let YOLO = false;
if (YOLO) {
await runPgCommand({
command: dropTable("frontpage"),
pool: readWritePool,
});
await runPgCommand({
command: `CREATE TABLE frontpage (
id serial primary key,
frontpage_full jsonb,
frontpage_sliced jsonb
);`,
pool: readWritePool,
});
} else {
console.log(
"pgInitializeFrontpage: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
export async function pgInitialize() {
await pgInitializeScaffolding();
await pgInitializeQuestions();
await pgInitializeHistories();
await pgInitializeDashboards();
await pgInitializeFrontpage();
}
// Read // Read
async function pgReadWithPool({ export async function pgRead({ tableName }: { tableName: string }) {
tableName,
pool,
}: {
tableName: string;
pool: Pool;
}) {
if (!allTableNames.includes(tableName)) { if (!allTableNames.includes(tableName)) {
throw Error( throw Error(
`Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
); );
} }
let command = `SELECT * from ${tableName}`; let command = `SELECT * from ${tableName}`;
let response = await runPgCommand({ command, pool }); return (await pool.query(command)).rows;
let results = response.results;
return results;
}
export async function pgRead({ tableName }: { tableName: string }) {
return await pgReadWithPool({ tableName, pool: readWritePool });
}
export async function pgReadWithReadCredentials({
tableName,
}: {
tableName: string;
}) {
// currently does not work.
/* return await pgReadWithPool({
tableName,
pool: readOnlyPool,
});
*/
return await pgReadWithPool({ tableName, pool: readWritePool });
} }
export async function pgGetByIds({ export async function pgGetByIds({
@@ -284,8 +41,7 @@ export async function pgGetByIds({
let idstring = `( ${ids.map((id: string) => `'${id}'`).join(", ")} )`; // (1, 2, 3) let idstring = `( ${ids.map((id: string) => `'${id}'`).join(", ")} )`; // (1, 2, 3)
let command = `SELECT * from ${table} where id in ${idstring}`; let command = `SELECT * from ${table} where id in ${idstring}`;
// see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn // see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn
let response = await runPgCommand({ command, pool: readWritePool }); let results = (await pool.query(command)).rows;
let results = response.results;
console.log(results); console.log(results);
return results; return results;
} }
@@ -372,7 +128,7 @@ export async function pgInsertIntoDashboard({ datum }) {
datum.creator || "", datum.creator || "",
JSON.stringify(datum.extra || []), JSON.stringify(datum.extra || []),
]; ];
const client = await readWritePool.connect(); const client = await pool.connect();
let result; let result;
try { try {
result = await client.query(text, values); result = await client.query(text, values);
@@ -426,7 +182,7 @@ export async function pgUpsert({
} }
await measureTime(async () => { await measureTime(async () => {
const client = await readWritePool.connect(); const client = await pool.connect();
try { try {
await client.query("BEGIN"); await client.query("BEGIN");
if (replacePlatform) { if (replacePlatform) {

View File

@@ -1,7 +1,7 @@
import { pgReadWithReadCredentials, pgUpsert } from "../../database/pg-wrapper"; import { pgRead, pgUpsert } from "../../database/pg-wrapper";
export async function updateHistory() { export async function updateHistory() {
let latest = await pgReadWithReadCredentials({ tableName: "questions" }); let latest = await pgRead({ tableName: "questions" });
await pgUpsert({ await pgUpsert({
contents: latest, contents: latest,
tableName: "history", tableName: "history",

View File

@@ -1,4 +1,3 @@
import { pgInitialize } from "../database/pg-wrapper";
import { doEverything } from "../flow/doEverything"; import { doEverything } from "../flow/doEverything";
import { updateHistory } from "../flow/history/updateHistory"; import { updateHistory } from "../flow/history/updateHistory";
import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData"; import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData";
@@ -45,11 +44,6 @@ export const jobs: Job[] = [
run: doEverything, run: doEverything,
separate: true, separate: true,
}, },
{
name: "migrate",
message: "Initialize postgres database",
run: pgInitialize,
},
]; ];
function sleep(ms: number) { function sleep(ms: number) {

View File

@@ -1,8 +1,8 @@
import { pgRead, readWritePool } from "./database/pg-wrapper"; import { pgRead, pool } from "./database/pg-wrapper";
import { Forecast } from "./platforms"; import { Forecast } from "./platforms";
export async function getFrontpage(): Promise<Forecast[]> { export async function getFrontpage(): Promise<Forecast[]> {
const res = await readWritePool.query( const res = await pool.query(
"SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1" "SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1"
); );
if (!res.rows.length) return []; if (!res.rows.length) return [];
@@ -10,7 +10,7 @@ export async function getFrontpage(): Promise<Forecast[]> {
} }
export async function getFrontpageFull(): Promise<Forecast[]> { export async function getFrontpageFull(): Promise<Forecast[]> {
const res = await readWritePool.query( const res = await pool.query(
"SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1" "SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1"
); );
if (!res.rows.length) return []; if (!res.rows.length) return [];
@@ -23,18 +23,18 @@ export async function rebuildFrontpage() {
}); });
const frontpageSliced = ( const frontpageSliced = (
await readWritePool.query(` await pool.query(`
SELECT * FROM questions SELECT * FROM questions
WHERE WHERE
(qualityindicators->>'stars')::int >= 3 (qualityindicators->>'stars')::int >= 3
AND description != '' AND description != ''
AND JSON_ARRAY_LENGTH(options) > 0 AND JSONB_ARRAY_LENGTH(options) > 0
ORDER BY RANDOM() LIMIT 50 ORDER BY RANDOM() LIMIT 50
`) `)
).rows; ).rows;
const start = Date.now(); const start = Date.now();
await readWritePool.query( await pool.query(
"INSERT INTO frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)", "INSERT INTO frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)",
[JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)] [JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)]
); );

View File

@@ -2,10 +2,10 @@ import "dotenv/config";
import fs from "fs"; import fs from "fs";
import { pgReadWithReadCredentials } from "../database/pg-wrapper"; import { pgRead } from "../database/pg-wrapper";
let main = async () => { let main = async () => {
let json = await pgReadWithReadCredentials({ tableName: "questions" }); let json = await pgRead({ tableName: "questions" });
let string = JSON.stringify(json, null, 2); let string = JSON.stringify(json, null, 2);
let filename = "metaforecasts.json"; let filename = "metaforecasts.json";
fs.writeFileSync(filename, string); fs.writeFileSync(filename, string);

View File

@@ -1,3 +0,0 @@
import { pgInitialize } from "../database/pg-wrapper";
pgInitialize();

View File

@@ -1,9 +1,9 @@
import "dotenv/config"; import "dotenv/config";
import { readWritePool } from "../database/pg-wrapper"; import { pool } from "../database/pg-wrapper";
const migrate = async () => { const migrate = async () => {
const client = await readWritePool.connect(); const client = await pool.connect();
const execQuery = async (q: string) => { const execQuery = async (q: string) => {
console.log(q); console.log(q);

View File

@@ -1,9 +1,9 @@
import "dotenv/config"; import "dotenv/config";
import { readWritePool } from "../database/pg-wrapper"; import { pool } from "../database/pg-wrapper";
const migrate = async () => { const migrate = async () => {
const client = await readWritePool.connect(); const client = await pool.connect();
const execQuery = async (q: string) => { const execQuery = async (q: string) => {
console.log(q); console.log(q);

View File

@@ -1,6 +1,6 @@
import algoliasearch from "algoliasearch"; import algoliasearch from "algoliasearch";
import { pgReadWithReadCredentials } from "../database/pg-wrapper"; import { pgRead } from "../database/pg-wrapper";
import { platforms } from "../platforms"; import { platforms } from "../platforms";
let cookie = process.env.ALGOLIA_MASTER_API_KEY; let cookie = process.env.ALGOLIA_MASTER_API_KEY;
@@ -20,7 +20,7 @@ let getoptionsstringforsearch = (record: any) => {
}; };
export async function rebuildAlgoliaDatabaseTheEasyWay() { export async function rebuildAlgoliaDatabaseTheEasyWay() {
let records: any[] = await pgReadWithReadCredentials({ let records: any[] = await pgRead({
tableName: "questions", tableName: "questions",
}); });

View File

@@ -1,7 +1,7 @@
/* Imports */ /* Imports */
import fs from "fs"; import fs from "fs";
import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; import { pgRead } from "../../database/pg-wrapper";
/* Definitions */ /* Definitions */
@@ -24,7 +24,7 @@ let main = async () => {
"PredictIt", "PredictIt",
"Rootclaim", "Rootclaim",
]; ];
let json = await pgReadWithReadCredentials({ tableName: "questions" }); let json = await pgRead({ tableName: "questions" });
console.log(json.length); console.log(json.length);
//let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
//console.log(uniquePlatforms) //console.log(uniquePlatforms)

View File

@@ -1,7 +1,7 @@
/* Imports */ /* Imports */
import fs from "fs"; import fs from "fs";
import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; import { pgRead } from "../../database/pg-wrapper";
/* Definitions */ /* Definitions */
@@ -26,7 +26,7 @@ let shuffleArray = (array) => {
let main = async () => { let main = async () => {
let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim'] let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
let json = await pgReadWithReadCredentials({ tableName: "questions" }); let json = await pgRead({ tableName: "questions" });
console.log(json.length); console.log(json.length);
//let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
//console.log(uniquePlatforms) //console.log(uniquePlatforms)

View File

@@ -1,7 +1,7 @@
/* Imports */ /* Imports */
import fs from "fs"; import fs from "fs";
import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; import { pgRead } from "../../database/pg-wrapper";
/* Definitions */ /* Definitions */
let locationData = "./data/"; let locationData = "./data/";
@@ -9,7 +9,7 @@ let locationData = "./data/";
/* Body */ /* Body */
// let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src // let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
async function main() { async function main() {
let data = await pgReadWithReadCredentials({ tableName: "questions" }); //JSON.parse(rawdata) let data = await pgRead({ tableName: "questions" }); //JSON.parse(rawdata)
let processDescription = (description) => { let processDescription = (description) => {
if (description == null || description == undefined || description == "") { if (description == null || description == undefined || description == "") {
return ""; return "";