From 4eeab9c8614b6c8d63344678f5f404fe02d2e6ef Mon Sep 17 00:00:00 2001 From: Vyacheslav Matyukhin Date: Wed, 30 Mar 2022 13:53:22 +0300 Subject: [PATCH] WIP: no schemas --- src/backend/database/database-wrapper.ts | 35 +--- src/backend/database/pg-wrapper.ts | 189 +++++++-------------- src/backend/flow/history/updateHistory.ts | 11 +- src/backend/flow/mergeEverything.ts | 6 +- src/backend/frontpage.ts | 9 +- src/pages/api/create-dashboard-from-ids.ts | 1 - src/pages/api/dashboard-by-id.ts | 2 - src/pages/api/questions.ts | 2 +- 8 files changed, 74 insertions(+), 181 deletions(-) diff --git a/src/backend/database/database-wrapper.ts b/src/backend/database/database-wrapper.ts index c96088c..7ca2679 100644 --- a/src/backend/database/database-wrapper.ts +++ b/src/backend/database/database-wrapper.ts @@ -1,43 +1,18 @@ import { pgRead, pgReadWithReadCredentials, pgUpsert } from "./pg-wrapper"; -const dateUpToYear = () => new Date().toISOString().slice(0, 4); -const dateUpToMonth = () => - new Date().toISOString().slice(0, 7).replace("-", "_"); - export async function databaseUpsert({ contents, group }) { // No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear. // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){ - switch (group) { - case "combined": - await pgUpsert({ contents, schema: "latest", tableName: "combined" }); - break; - case "history": - await pgUpsert({ - contents, - schema: "history", - tableName: `h${dateUpToYear()}`, - }); - await pgUpsert({ - contents, - schema: "history", - tableName: `h${dateUpToMonth()}`, - }); - break; - default: - await pgUpsert({ contents, schema: "latest", tableName: group }); - } + const tableName = group === "history" ? 
"h2022" : group; + await pgUpsert({ contents, tableName }); } const readWithReader = async ( group: string, - reader: (opts: { schema: string; tableName: string }) => Promise + reader: (opts: { tableName: string }) => Promise ) => { - const schema = group === "history" ? "history" : "latest"; - const tableName = group === "history" ? `h${dateUpToMonth()}` : group; - const response = await reader({ - schema, - tableName, - }); + const tableName = group === "history" ? "h2022" : group; + const response = await reader({ tableName }); console.log("Postgres: "); console.log(response.slice(0, 2)); diff --git a/src/backend/database/pg-wrapper.ts b/src/backend/database/pg-wrapper.ts index 0b21c9b..90b129a 100644 --- a/src/backend/database/pg-wrapper.ts +++ b/src/backend/database/pg-wrapper.ts @@ -6,7 +6,6 @@ import { measureTime } from "../utils/measureTime"; import { roughSizeOfObject } from "../utils/roughSize"; // Definitions -const schemas = ["latest", "history"]; const year = Number(new Date().toISOString().slice(0, 4)); const allowed_years = [year, year + 1].map((year) => `h${year}`); // tables can't begin with number const allowed_months = [...Array(12).keys()] @@ -25,12 +24,10 @@ const tableNamesWhiteListHistory = [ ...allowed_years, ...allowed_year_month_histories, ]; -const createFullName = (schemaName, namesArray) => - namesArray.map((name) => `${schemaName}.${name}`); const tableWhiteList = [ - ...createFullName("latest", tableNamesWhitelistLatest), - ...createFullName("history", tableNamesWhiteListHistory), - "latest.dashboards", + ...tableNamesWhitelistLatest, + ...tableNamesWhiteListHistory, + "dashboards", ]; /* Postgres database connection code */ @@ -80,12 +77,11 @@ export const runPgCommand = async ({ }; // Initialize -let dropTable = (schema: string, table: string) => - `DROP TABLE IF EXISTS ${schema}.${table}`; -let createIndex = (schema: string, table: string) => - `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`; -let 
createUniqueIndex = (schema: string, table: string) => - `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`; +let dropTable = (table: string) => `DROP TABLE IF EXISTS ${table}`; +let createIndex = (table: string) => + `CREATE INDEX ${table}_id_index ON ${table} (id);`; +let createUniqueIndex = (table: string) => + `CREATE UNIQUE INDEX ${table}_id_index ON ${table} (id);`; async function pgInitializeScaffolding() { async function setPermissionsForPublicUser() { @@ -97,38 +93,30 @@ async function pgInitializeScaffolding() { await runPgCommand({ command, pool: readWritePool }); } - let buildGrantSelectForSchema = (schema: string) => - `GRANT SELECT ON ALL TABLES IN SCHEMA ${schema} TO public_read_only_user`; - for (let schema of schemas) { - await runPgCommand({ - command: buildGrantSelectForSchema(schema), - pool: readWritePool, - }); - } + await runPgCommand({ + command: + "GRANT SELECT ON ALL TABLES IN SCHEMA public TO public_read_only_user", + pool: readWritePool, + }); - let alterDefaultPrivilegesForSchema = (schema: string) => - `ALTER DEFAULT PRIVILEGES IN SCHEMA ${schema} GRANT SELECT ON TABLES TO public_read_only_user`; - for (let schema of schemas) { - await runPgCommand({ - command: alterDefaultPrivilegesForSchema(schema), - pool: readWritePool, - }); - } + await runPgCommand({ + command: + "ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO public_read_only_user", + pool: readWritePool, + }); } let YOLO = false; if (YOLO) { console.log("Create schemas"); - for (let schema of schemas) { - await runPgCommand({ - command: `CREATE SCHEMA IF NOT EXISTS ${schema}`, - pool: readWritePool, - }); - } + await runPgCommand({ + command: `CREATE SCHEMA IF NOT EXISTS public`, + pool: readWritePool, + }); console.log(""); console.log("Set search path"); await runPgCommand({ - command: `SET search_path TO ${schemas.join(",")},public;`, + command: `SET search_path TO public;`, pool: readWritePool, }); console.log(""); @@ -143,10 
+131,7 @@ async function pgInitializeScaffolding() { } } -let buildMetaforecastTable = ( - schema: string, - table: string -) => `CREATE TABLE ${schema}.${table} ( +let buildMetaforecastTable = (table: string) => `CREATE TABLE ${table} ( id text, title text, url text, @@ -166,23 +151,15 @@ async function pgInitializeLatest() { let schema = "latest"; for (let table of tableNamesWhitelistLatest) { await runPgCommand({ - command: dropTable(schema, table), + command: dropTable(table), pool: readWritePool, }); await runPgCommand({ - command: buildMetaforecastTable(schema, table), + command: buildMetaforecastTable(table), pool: readWritePool, }); - /* - if (schema == "history") { - await runPgCommand({ - command: createIndex(schema, table), - pool: readWritePool, - }); - } else { - */ await runPgCommand({ - command: createUniqueIndex(schema, table), + command: createUniqueIndex(table), pool: readWritePool, }); //} @@ -197,7 +174,7 @@ async function pgInitializeLatest() { async function pgInitializeDashboards() { let buildDashboard = () => - `CREATE TABLE latest.dashboards ( + `CREATE TABLE dashboards ( id text, title text, description text, @@ -208,23 +185,10 @@ async function pgInitializeDashboards() { );`; let YOLO = false; if (YOLO) { - await runPgCommand({ - command: `CREATE SCHEMA IF NOT EXISTS history;`, - pool: readWritePool, - }); - console.log(""); - - console.log("Set search path"); - await runPgCommand({ - command: `SET search_path TO ${schemas.join(",")},public;`, - pool: readWritePool, - }); - console.log(""); - console.log("Create dashboard table and its index"); await runPgCommand({ - command: dropTable("latest", "dashboards"), + command: dropTable("dashboards"), pool: readWritePool, }); await runPgCommand({ @@ -234,7 +198,7 @@ async function pgInitializeDashboards() { - command: createUniqueIndex("latest", "dashboards"), + command: createUniqueIndex("dashboards"), pool: readWritePool, }); console.log(""); @@ -245,10 +209,7 @@ async function 
pgInitializeDashboards() { } } -let buildHistoryTable = ( - schema: string, - table: string -) => `CREATE TABLE ${schema}.${table} ( +let buildHistoryTable = (table: string) => `CREATE TABLE ${table} ( id text, title text, url text, @@ -264,41 +225,21 @@ export async function pgInitializeHistories() { let YOLO = false; if (YOLO) { console.log("Drop all previous history tables (Danger!)"); - await runPgCommand({ - command: `DROP SCHEMA history CASCADE;`, - pool: readWritePool, - }); - console.log(""); - - console.log("Create schemas"); - for (let schema of schemas) { - await runPgCommand({ - command: `CREATE SCHEMA IF NOT EXISTS ${schema}`, - pool: readWritePool, - }); - } - console.log(""); - - console.log("Set search path"); - await runPgCommand({ - command: `SET search_path TO ${schemas.join(",")},public;`, - pool: readWritePool, - }); + console.log("TODO - drop history tables"); // hope we won't need it until we get proper migrations console.log(""); console.log("Create tables & their indexes"); - let schema = "history"; for (let table of tableNamesWhiteListHistory) { await runPgCommand({ - command: dropTable(schema, table), + command: dropTable(table), pool: readWritePool, }); await runPgCommand({ - command: buildHistoryTable(schema, table), + command: buildHistoryTable(table), pool: readWritePool, }); await runPgCommand({ - command: createIndex(schema, table), // Not unique!! + command: createIndex(table), // Not unique!! 
pool: readWritePool, }); } @@ -314,11 +255,11 @@ async function pgInitializeFrontpage() { let YOLO = false; if (YOLO) { await runPgCommand({ - command: dropTable("latest", "frontpage"), + command: dropTable("frontpage"), pool: readWritePool, }); await runPgCommand({ - command: `CREATE TABLE latest.frontpage ( + command: `CREATE TABLE frontpage ( id serial primary key, frontpage_full jsonb, frontpage_sliced jsonb @@ -342,64 +283,51 @@ export async function pgInitialize() { // Read async function pgReadWithPool({ - schema, tableName, pool, }: { - schema: string; tableName: string; pool: Pool; }) { - if (tableWhiteList.includes(`${schema}.${tableName}`)) { - let command = `SELECT * from ${schema}.${tableName}`; + if (tableWhiteList.includes(tableName)) { + let command = `SELECT * from ${tableName}`; let response = await runPgCommand({ command, pool }); let results = response.results; return results; } else { throw Error( - `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections` + `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } } -export async function pgRead({ - schema, - tableName, -}: { - schema: string; - tableName: string; -}) { - return await pgReadWithPool({ schema, tableName, pool: readWritePool }); +export async function pgRead({ tableName }: { tableName: string }) { + return await pgReadWithPool({ tableName, pool: readWritePool }); } export async function pgReadWithReadCredentials({ - schema, tableName, }: { - schema: string; tableName: string; }) { // currently does not work. 
/* return await pgReadWithPool({ - schema, tableName, pool: readOnlyPool, }); */ - return await pgReadWithPool({ schema, tableName, pool: readWritePool }); + return await pgReadWithPool({ tableName, pool: readWritePool }); } export async function pgGetByIds({ ids, - schema, table, }: { ids: string[]; - schema: string; table: string; }) { let idstring = `( ${ids.map((id: string) => `'${id}'`).join(", ")} )`; // (1, 2, 3) - let command = `SELECT * from ${schema}.${table} where id in ${idstring}`; + let command = `SELECT * from ${table} where id in ${idstring}`; // see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn let response = await runPgCommand({ command, pool: readWritePool }); let results = response.results; @@ -409,23 +337,21 @@ export async function pgGetByIds({ export async function pgBulkInsert({ data, - schema, tableName, client, }: { data: Forecast[]; - schema: string; tableName: string; client: PoolClient; }) { - if (!tableWhiteList.includes(`${schema}.${tableName}`)) { + if (!tableWhiteList.includes(tableName)) { throw Error( - `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections` + `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } const generateQuery = (rows: number) => { - let text = `INSERT INTO ${schema}.${tableName} VALUES`; + let text = `INSERT INTO ${tableName} VALUES`; const cols = 10; const parts: string[] = []; for (let r = 0; r < rows; r++) { @@ -478,9 +404,9 @@ export async function pgBulkInsert({ } } -export async function pgInsertIntoDashboard({ datum, schema, tableName }) { - if (tableWhiteList.includes(`${schema}.${tableName}`)) { - let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`; +export async function pgInsertIntoDashboard({ datum, tableName }) { + if (tableWhiteList.includes(tableName)) { + let text = `INSERT INTO ${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`; let timestamp = datum.timestamp || new 
Date().toISOString(); timestamp = timestamp.slice(0, 19).replace("T", " "); let values = [ @@ -505,7 +431,7 @@ export async function pgInsertIntoDashboard({ datum, schema, tableName }) { return result; } else { throw Error( - `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections` + `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } } @@ -532,16 +458,15 @@ pgInsertIntoDashboard({ ], creator: "Nuño Sempere", }, - schema: "latest", tableName: "dashboards", }); */ -export async function pgUpsert({ contents, schema, tableName }) { - if (!tableWhiteList.includes(`${schema}.${tableName}`)) { +export async function pgUpsert({ contents, tableName }) { + if (!tableWhiteList.includes(tableName)) { console.log("tableWhiteList:"); console.log(tableWhiteList); throw Error( - `Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections` + `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` ); } @@ -553,7 +478,7 @@ export async function pgUpsert({ contents, schema, tableName }) { - client.query(`DELETE FROM latest.${tableName}`); + client.query(`DELETE FROM ${tableName}`); } console.log( - `Upserting ${contents.length} rows into postgres table ${schema}.${tableName}.` + `Upserting ${contents.length} rows into postgres table ${tableName}.` ); console.log( `Expected to take ${Number((contents.length * 831.183) / 4422).toFixed( )} minutes` ); - await pgBulkInsert({ data: contents, schema, tableName, client }); + await pgBulkInsert({ data: contents, tableName, client }); console.log( `Inserted ${ contents.length } rows with approximate cummulative size ${roughSizeOfObject( contents - )} MB into ${schema}.${tableName}.` + )} MB into ${tableName}.` ); console.log("Sample: "); diff --git a/src/backend/flow/history/updateHistory.ts b/src/backend/flow/history/updateHistory.ts index b4733dc..6c09bcf 100644 --- 
a/src/backend/flow/history/updateHistory.ts +++ b/src/backend/flow/history/updateHistory.ts @@ -1,12 +1,9 @@ -import { - databaseReadWithReadCredentials, - databaseUpsert, -} from "../../database/database-wrapper"; +import { pgReadWithReadCredentials, pgUpsert } from "../../database/pg-wrapper"; export async function updateHistory() { - let latest = await databaseReadWithReadCredentials({ group: "combined" }); - await databaseUpsert({ + let latest = await pgReadWithReadCredentials({ tableName: "combined" }); + await pgUpsert({ contents: latest, - group: "history", + tableName: "h2022", }); } diff --git a/src/backend/flow/mergeEverything.ts b/src/backend/flow/mergeEverything.ts index ba50b88..20c8dad 100644 --- a/src/backend/flow/mergeEverything.ts +++ b/src/backend/flow/mergeEverything.ts @@ -1,4 +1,4 @@ -import { databaseRead, databaseUpsert } from "../database/database-wrapper"; +import { pgRead, pgUpsert } from "../database/pg-wrapper"; import { platforms } from "../platforms"; /* Merge everything */ @@ -7,7 +7,7 @@ export async function mergeEverythingInner() { let merged = []; for (let platform of platforms) { const platformName = platform.name; - let json = await databaseRead({ group: platformName }); + let json = await pgRead({ tableName: platformName }); console.log(`${platformName} has ${json.length} questions\n`); merged = merged.concat(json); } @@ -23,6 +23,6 @@ export async function mergeEverythingInner() { export async function mergeEverything() { let merged = await mergeEverythingInner(); - await databaseUpsert({ contents: merged, group: "combined" }); + await pgUpsert({ contents: merged, tableName: "combined" }); console.log("Done"); } diff --git a/src/backend/frontpage.ts b/src/backend/frontpage.ts index 2ba4318..aed0fbc 100644 --- a/src/backend/frontpage.ts +++ b/src/backend/frontpage.ts @@ -3,7 +3,7 @@ import { pgRead, readWritePool } from "./database/pg-wrapper"; export async function getFrontpageRaw() { const client = await 
readWritePool.connect(); const res = await client.query( - "SELECT frontpage_sliced FROM latest.frontpage ORDER BY id DESC LIMIT 1" + "SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1" ); if (!res.rows.length) return []; console.log(res.rows[0].frontpage_sliced); @@ -13,7 +13,7 @@ export async function getFrontpageRaw() { export async function getFrontpageFullRaw() { const client = await readWritePool.connect(); const res = await client.query( - "SELECT frontpage_full FROM latest.frontpage ORDER BY id DESC LIMIT 1" + "SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1" ); if (!res.rows.length) return []; console.log(res.rows[0]); @@ -38,14 +38,13 @@ export async function getFrontpage() { export async function rebuildFrontpage() { const frontpageFull = await pgRead({ - schema: "latest", tableName: "combined", }); const client = await readWritePool.connect(); const frontpageSliced = ( await client.query(` - SELECT * FROM latest.combined + SELECT * FROM combined WHERE (qualityindicators->>'stars')::int >= 3 AND description != '' @@ -56,7 +55,7 @@ export async function rebuildFrontpage() { const start = Date.now(); await client.query( - "INSERT INTO latest.frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)", + "INSERT INTO frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)", [JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)] ); diff --git a/src/pages/api/create-dashboard-from-ids.ts b/src/pages/api/create-dashboard-from-ids.ts index 3b06f27..a782be2 100644 --- a/src/pages/api/create-dashboard-from-ids.ts +++ b/src/pages/api/create-dashboard-from-ids.ts @@ -27,7 +27,6 @@ export default async function handler( creator: body.creator || "", extra: [], }, - schema: "latest", tableName: "dashboards", }); res.status(200).send({ diff --git a/src/pages/api/dashboard-by-id.ts b/src/pages/api/dashboard-by-id.ts index e89d5c3..366c234 100644 --- a/src/pages/api/dashboard-by-id.ts +++ b/src/pages/api/dashboard-by-id.ts @@ 
-16,7 +16,6 @@ export default async function handler( console.log(id); let dashboardItemArray = await pgGetByIds({ ids: [id], - schema: "latest", table: "dashboards", }); if (!!dashboardItemArray && dashboardItemArray.length > 0) { @@ -24,7 +23,6 @@ export default async function handler( console.log(dashboardItem); let dashboardContents = await pgGetByIds({ ids: dashboardItem.contents, - schema: "latest", table: "combined", }); res.status(200).send({ diff --git a/src/pages/api/questions.ts b/src/pages/api/questions.ts index 4d925ad..f06b6cf 100644 --- a/src/pages/api/questions.ts +++ b/src/pages/api/questions.ts @@ -6,7 +6,7 @@ export default async function handler( req: NextApiRequest, res: NextApiResponse ) { - let allQuestions = await pgRead({ schema: "latest", tableName: "combined" }); + let allQuestions = await pgRead({ tableName: "combined" }); console.log(allQuestions.map((element) => element.title).slice(0, 5)); console.log("..."); res.status(200).json(allQuestions);