cleanup: unnecessary db-related code
parent 6a6597c657
commit 5e34ebf35e
@@ -11,7 +11,7 @@ const allTableNames = [...forecastTableNames, "dashboards", "frontpage"];
 
 /* Postgres database connection code */
 const databaseURL = process.env.DIGITALOCEAN_POSTGRES;
-export const readWritePool = new Pool({
+export const pool = new Pool({
   connectionString: databaseURL,
   ssl: process.env.POSTGRES_NO_SSL
     ? false
@@ -20,258 +20,15 @@ export const readWritePool = new Pool({
       },
 });
 
-const readOnlyDatabaseURL =
-  "postgresql://public_read_only_user:gOcihnLhqRIQUQYt@postgres-red-do-user-10290909-0.b.db.ondigitalocean.com:25060/metaforecastpg?sslmode=require" ||
-  process.env.DIGITALOCEAN_POSTGRES_PUBLIC;
-const readOnlyPool = new Pool({
-  // never used
-  connectionString: readOnlyDatabaseURL,
-  ssl: process.env.POSTGRES_NO_SSL
-    ? false
-    : {
-        rejectUnauthorized: false,
-      },
-});
-
-// Helpers
-export const runPgCommand = async ({
-  command,
-  pool,
-}: {
-  command: string;
-  pool: Pool;
-}) => {
-  console.log(command);
-  const client = await pool.connect();
-  let result;
-  try {
-    let response = await client.query(command);
-    result = { results: response ? response.rows : null };
-  } catch (error) {
-    console.log(error);
-  } finally {
-    client.release();
-  }
-  return result;
-};
-
-// Initialize
-let dropTable = (table: string) => `DROP TABLE IF EXISTS ${table}`;
-let createIndex = (table: string) =>
-  `CREATE INDEX ${table}_id_index ON ${table} (id);`;
-let createUniqueIndex = (table: string) =>
-  `CREATE UNIQUE INDEX ${table}_id_index ON ${table} (id);`;
-
-async function pgInitializeScaffolding() {
-  async function setPermissionsForPublicUser() {
-    let initCommands = [
-      "REVOKE ALL ON DATABASE metaforecastpg FROM public_read_only_user;",
-      "GRANT CONNECT ON DATABASE metaforecastpg TO public_read_only_user;",
-    ];
-    for (let command of initCommands) {
-      await runPgCommand({ command, pool: readWritePool });
-    }
-
-    await runPgCommand({
-      command:
-        "GRANT SELECT ON ALL TABLES IN SCHEMA public TO public_read_only_user",
-      pool: readWritePool,
-    });
-
-    await runPgCommand({
-      command:
-        "ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO public_read_only_user",
-      pool: readWritePool,
-    });
-  }
-  let YOLO = false;
-  if (YOLO) {
-    console.log("Set public user permissions");
-    await setPermissionsForPublicUser();
-    console.log("");
-  } else {
-    console.log(
-      "pgInitializeScaffolding: This command is dangerous, set YOLO to true in the code to invoke it"
-    );
-  }
-}
-
-let buildMetaforecastTable = (table: string) => `CREATE TABLE ${table} (
-  id text,
-  title text,
-  url text,
-  platform text,
-  description text,
-  options json,
-  timestamp timestamp,
-  stars int,
-  qualityindicators json,
-  extra json
-);`;
-
-async function pgInitializeQuestions() {
-  let YOLO = false;
-  if (YOLO) {
-    console.log("Create tables & their indexes");
-    const table = "questions";
-    await runPgCommand({
-      command: dropTable(table),
-      pool: readWritePool,
-    });
-    await runPgCommand({
-      command: buildMetaforecastTable(table),
-      pool: readWritePool,
-    });
-    await runPgCommand({
-      command: createUniqueIndex(table),
-      pool: readWritePool,
-    });
-    console.log("");
-  } else {
-    console.log(
-      "pgInitializeQuestions: This command is dangerous, set YOLO to true in the code to invoke it"
-    );
-  }
-}
-
-async function pgInitializeDashboards() {
-  let buildDashboard = () =>
-    `CREATE TABLE dashboards (
-  id text,
-  title text,
-  description text,
-  contents json,
-  timestamp timestamp,
-  creator text,
-  extra json
-);`;
-  let YOLO = false;
-  if (YOLO) {
-    console.log("Create dashboard table and its index");
-
-    await runPgCommand({
-      command: dropTable("dashboards"),
-      pool: readWritePool,
-    });
-
-    await runPgCommand({
-      command: buildDashboard(),
-      pool: readWritePool,
-    });
-
-    await runPgCommand({
-      command: createUniqueIndex("dashboards"),
-      pool: readWritePool,
-    });
-    console.log("");
-  } else {
-    console.log(
-      "pgInitializeDashboard: This command is dangerous, set YOLO to true in the code to invoke it"
-    );
-  }
-}
-
-let buildHistoryTable = (table: string) => `CREATE TABLE ${table} (
-  id text,
-  title text,
-  url text,
-  platform text,
-  description text,
-  options json,
-  timestamp timestamp,
-  stars int,
-  qualityindicators json,
-  extra json
-);`;
-export async function pgInitializeHistories() {
-  let YOLO = false;
-  if (YOLO) {
-    console.log("Create history table & index");
-    await runPgCommand({
-      command: dropTable("history"),
-      pool: readWritePool,
-    });
-    await runPgCommand({
-      command: buildHistoryTable("history"),
-      pool: readWritePool,
-    });
-    await runPgCommand({
-      command: createIndex("history"), // Not unique!!
-      pool: readWritePool,
-    });
-    console.log("");
-  } else {
-    console.log(
-      "pgInitializeHistories: This command is dangerous, set YOLO to true in the code to invoke it"
-    );
-  }
-}
-
-async function pgInitializeFrontpage() {
-  let YOLO = false;
-  if (YOLO) {
-    await runPgCommand({
-      command: dropTable("frontpage"),
-      pool: readWritePool,
-    });
-    await runPgCommand({
-      command: `CREATE TABLE frontpage (
-        id serial primary key,
-        frontpage_full jsonb,
-        frontpage_sliced jsonb
-      );`,
-      pool: readWritePool,
-    });
-  } else {
-    console.log(
-      "pgInitializeFrontpage: This command is dangerous, set YOLO to true in the code to invoke it"
-    );
-  }
-}
-
-export async function pgInitialize() {
-  await pgInitializeScaffolding();
-  await pgInitializeQuestions();
-  await pgInitializeHistories();
-  await pgInitializeDashboards();
-  await pgInitializeFrontpage();
-}
-
 // Read
-async function pgReadWithPool({
-  tableName,
-  pool,
-}: {
-  tableName: string;
-  pool: Pool;
-}) {
+export async function pgRead({ tableName }: { tableName: string }) {
   if (!allTableNames.includes(tableName)) {
     throw Error(
       `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
     );
   }
   let command = `SELECT * from ${tableName}`;
-  let response = await runPgCommand({ command, pool });
-  let results = response.results;
-  return results;
-}
-
-export async function pgRead({ tableName }: { tableName: string }) {
-  return await pgReadWithPool({ tableName, pool: readWritePool });
-}
-
-export async function pgReadWithReadCredentials({
-  tableName,
-}: {
-  tableName: string;
-}) {
-  // currently does not work.
-  /* return await pgReadWithPool({
-    tableName,
-    pool: readOnlyPool,
-  });
-  */
-  return await pgReadWithPool({ tableName, pool: readWritePool });
+  return (await pool.query(command)).rows;
 }
 
 export async function pgGetByIds({
@@ -284,8 +41,7 @@ export async function pgGetByIds({
   let idstring = `( ${ids.map((id: string) => `'${id}'`).join(", ")} )`; // (1, 2, 3)
   let command = `SELECT * from ${table} where id in ${idstring}`;
   // see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn
-  let response = await runPgCommand({ command, pool: readWritePool });
-  let results = response.results;
+  let results = (await pool.query(command)).rows;
   console.log(results);
   return results;
 }
@@ -372,7 +128,7 @@ export async function pgInsertIntoDashboard({ datum }) {
     datum.creator || "",
     JSON.stringify(datum.extra || []),
   ];
-  const client = await readWritePool.connect();
+  const client = await pool.connect();
   let result;
   try {
     result = await client.query(text, values);
@@ -426,7 +182,7 @@ export async function pgUpsert({
   }
 
   await measureTime(async () => {
-    const client = await readWritePool.connect();
+    const client = await pool.connect();
     try {
       await client.query("BEGIN");
       if (replacePlatform) {
@@ -1,7 +1,7 @@
-import { pgReadWithReadCredentials, pgUpsert } from "../../database/pg-wrapper";
+import { pgRead, pgUpsert } from "../../database/pg-wrapper";
 
 export async function updateHistory() {
-  let latest = await pgReadWithReadCredentials({ tableName: "questions" });
+  let latest = await pgRead({ tableName: "questions" });
   await pgUpsert({
     contents: latest,
     tableName: "history",
@@ -1,4 +1,3 @@
-import { pgInitialize } from "../database/pg-wrapper";
 import { doEverything } from "../flow/doEverything";
 import { updateHistory } from "../flow/history/updateHistory";
 import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData";
@@ -45,11 +44,6 @@ export const jobs: Job[] = [
     run: doEverything,
     separate: true,
   },
-  {
-    name: "migrate",
-    message: "Initialize postgres database",
-    run: pgInitialize,
-  },
 ];
 
 function sleep(ms: number) {
@@ -1,8 +1,8 @@
-import { pgRead, readWritePool } from "./database/pg-wrapper";
+import { pgRead, pool } from "./database/pg-wrapper";
 import { Forecast } from "./platforms";
 
 export async function getFrontpage(): Promise<Forecast[]> {
-  const res = await readWritePool.query(
+  const res = await pool.query(
     "SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1"
   );
   if (!res.rows.length) return [];
@@ -10,7 +10,7 @@ export async function getFrontpage(): Promise<Forecast[]> {
 }
 
 export async function getFrontpageFull(): Promise<Forecast[]> {
-  const res = await readWritePool.query(
+  const res = await pool.query(
     "SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1"
   );
   if (!res.rows.length) return [];
@@ -23,18 +23,18 @@ export async function rebuildFrontpage() {
   });
 
   const frontpageSliced = (
-    await readWritePool.query(`
+    await pool.query(`
       SELECT * FROM questions
       WHERE
         (qualityindicators->>'stars')::int >= 3
         AND description != ''
-        AND JSON_ARRAY_LENGTH(options) > 0
+        AND JSONB_ARRAY_LENGTH(options) > 0
       ORDER BY RANDOM() LIMIT 50
     `)
   ).rows;
 
   const start = Date.now();
-  await readWritePool.query(
+  await pool.query(
     "INSERT INTO frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)",
     [JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)]
   );
@@ -2,10 +2,10 @@ import "dotenv/config";
 
 import fs from "fs";
 
-import { pgReadWithReadCredentials } from "../database/pg-wrapper";
+import { pgRead } from "../database/pg-wrapper";
 
 let main = async () => {
-  let json = await pgReadWithReadCredentials({ tableName: "questions" });
+  let json = await pgRead({ tableName: "questions" });
   let string = JSON.stringify(json, null, 2);
   let filename = "metaforecasts.json";
   fs.writeFileSync(filename, string);
@@ -1,3 +0,0 @@
-import { pgInitialize } from "../database/pg-wrapper";
-
-pgInitialize();
@@ -1,9 +1,9 @@
 import "dotenv/config";
 
-import { readWritePool } from "../database/pg-wrapper";
+import { pool } from "../database/pg-wrapper";
 
 const migrate = async () => {
-  const client = await readWritePool.connect();
+  const client = await pool.connect();
 
   const execQuery = async (q: string) => {
     console.log(q);
@@ -1,9 +1,9 @@
 import "dotenv/config";
 
-import { readWritePool } from "../database/pg-wrapper";
+import { pool } from "../database/pg-wrapper";
 
 const migrate = async () => {
-  const client = await readWritePool.connect();
+  const client = await pool.connect();
 
   const execQuery = async (q: string) => {
     console.log(q);
@@ -1,6 +1,6 @@
 import algoliasearch from "algoliasearch";
 
-import { pgReadWithReadCredentials } from "../database/pg-wrapper";
+import { pgRead } from "../database/pg-wrapper";
 import { platforms } from "../platforms";
 
 let cookie = process.env.ALGOLIA_MASTER_API_KEY;
@@ -20,7 +20,7 @@ let getoptionsstringforsearch = (record: any) => {
 };
 
 export async function rebuildAlgoliaDatabaseTheEasyWay() {
-  let records: any[] = await pgReadWithReadCredentials({
+  let records: any[] = await pgRead({
     tableName: "questions",
   });
 
@@ -1,7 +1,7 @@
 /* Imports */
 import fs from "fs";
 
-import { pgReadWithReadCredentials } from "../../database/pg-wrapper";
+import { pgRead } from "../../database/pg-wrapper";
 
 /* Definitions */
 
@@ -24,7 +24,7 @@ let main = async () => {
     "PredictIt",
     "Rootclaim",
   ];
-  let json = await pgReadWithReadCredentials({ tableName: "questions" });
+  let json = await pgRead({ tableName: "questions" });
   console.log(json.length);
   //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
   //console.log(uniquePlatforms)
@@ -1,7 +1,7 @@
 /* Imports */
 import fs from "fs";
 
-import { pgReadWithReadCredentials } from "../../database/pg-wrapper";
+import { pgRead } from "../../database/pg-wrapper";
 
 /* Definitions */
 
@@ -26,7 +26,7 @@ let shuffleArray = (array) => {
 
 let main = async () => {
   let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
-  let json = await pgReadWithReadCredentials({ tableName: "questions" });
+  let json = await pgRead({ tableName: "questions" });
   console.log(json.length);
   //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
   //console.log(uniquePlatforms)
@@ -1,7 +1,7 @@
 /* Imports */
 import fs from "fs";
 
-import { pgReadWithReadCredentials } from "../../database/pg-wrapper";
+import { pgRead } from "../../database/pg-wrapper";
 
 /* Definitions */
 let locationData = "./data/";
@@ -9,7 +9,7 @@ let locationData = "./data/";
 /* Body */
 // let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
 async function main() {
-  let data = await pgReadWithReadCredentials({ tableName: "questions" }); //JSON.parse(rawdata)
+  let data = await pgRead({ tableName: "questions" }); //JSON.parse(rawdata)
   let processDescription = (description) => {
     if (description == null || description == undefined || description == "") {
       return "";
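For reference, a minimal usage sketch of the wrapper surface left after this cleanup. This is not part of the commit: the script name and main() scaffolding are illustrative, and it assumes the standard node-postgres Pool API and the pool/pgRead exports shown in the diff above.

// usage-sketch.ts (hypothetical file): exercises the simplified pg-wrapper API.
import { pool, pgRead } from "./database/pg-wrapper";

async function main() {
  // pgRead only accepts whitelisted table names and resolves to plain rows.
  const questions = await pgRead({ tableName: "questions" });
  console.log(`fetched ${questions.length} questions`);

  // Anything more specific goes through the single shared pool.
  const res = await pool.query(
    "SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1"
  );
  console.log(res.rows.length ? "frontpage cached" : "frontpage empty");

  await pool.end(); // close connections so the script can exit
}

main();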