store frontpage code in db

Vyacheslav Matyukhin 2022-03-22 00:53:16 +03:00
parent 6ffa3c0cfc
commit a5563f9d2e
No known key found for this signature in database
GPG Key ID: 3D2A774C5489F96C
2 changed files with 60 additions and 39 deletions

View File

@@ -38,7 +38,7 @@ const tableWhiteList = [
 const databaseURL =
   process.env.DIGITALOCEAN_POSTGRES || getSecret("digitalocean-postgres");
 // process.env.DATABASE_URL || getSecret("heroku-postgres")
-const readWritePool = new Pool({
+export const readWritePool = new Pool({
   connectionString: databaseURL,
   ssl: process.env.POSTGRES_NO_SSL
     ? false
@@ -61,7 +61,7 @@ const readOnlyPool = new Pool({
 });
 // Helpers
-const runPgCommand = async ({ command, pool }) => {
+export const runPgCommand = async ({ command, pool }) => {
   console.log(command);
   const client = await pool.connect();
   let result;
@@ -113,7 +113,7 @@ async function pgInitializeScaffolding() {
       });
     }
   }
-  let YOLO = true;
+  let YOLO = false;
  if (YOLO) {
     console.log("Create schemas");
     for (let schema of schemas) {
@@ -158,7 +158,7 @@ let buildMetaforecastTable = (
 );`;
 async function pgInitializeLatest() {
-  let YOLO = true;
+  let YOLO = false;
   if (YOLO) {
     console.log("Create tables & their indexes");
     let schema = "latest";
@@ -204,7 +204,7 @@ async function pgInitializeDashboards() {
     creator text,
     extra json
   );`;
-  let YOLO = true;
+  let YOLO = false;
   if (YOLO) {
     await runPgCommand({
       command: `CREATE SCHEMA IF NOT EXISTS history;`,
@@ -256,7 +256,7 @@ let buildHistoryTable = (schema, table) => `CREATE TABLE ${schema}.${table} (
   extra json
 );`;
 export async function pgInitializeHistories() {
-  let YOLO = true;
+  let YOLO = false;
   if (YOLO) {
     console.log("Drop all previous history tables (Danger!)");
     await runPgCommand({
@@ -305,11 +305,41 @@ export async function pgInitializeHistories() {
   }
 }
+async function pgInitializeFrontpage() {
+  let buildFrontpage = () =>
+    `CREATE TABLE latest.frontpage (
+      id serial primary key,
+      frontpage_sliced jsonb,
+      frontpage_full jsonb
+    );`;
+  let YOLO = false;
+  if (YOLO) {
+    console.log("Create frontpage table and its index");
+    await runPgCommand({
+      command: dropTable("latest", "frontpage"),
+      pool: readWritePool,
+    });
+    await runPgCommand({
+      command: buildFrontpage(),
+      pool: readWritePool,
+    });
+    console.log("");
+  } else {
+    console.log(
+      "pgInitializeFrontpage: This command is dangerous, set YOLO to true in the code to invoke it"
+    );
+  }
+}
 export async function pgInitialize() {
   await pgInitializeScaffolding();
   await pgInitializeLatest();
   await pgInitializeHistories();
   await pgInitializeDashboards();
+  await pgInitializeFrontpage();
 }
 // Read
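
For reference, the new initializer reuses helpers that already exist in pg-wrapper but are not shown in this hunk. A minimal sketch, assuming `dropTable` simply emits a `DROP TABLE` statement for the given schema and table (its real definition lives elsewhere in this file and may differ):

// Sketch only: assumed shape of the dropTable helper used above; the actual
// helper is defined elsewhere in pg-wrapper.
const dropTable = (schema: string, table: string) =>
  `DROP TABLE IF EXISTS ${schema}.${table}`;

// With YOLO flipped to true inside pgInitializeFrontpage, running the combined
// initializer would (destructively) recreate latest.frontpage:
// await pgInitialize();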

View File

@@ -1,22 +1,24 @@
 import fs from 'fs';
-import { pgRead } from './database/pg-wrapper';
+import { pgRead, readWritePool } from './database/pg-wrapper';
 // TODO - move to global `constants.ts` config
 const location = "/Users/berekuk/coding/quri/metaforecast-backend/data";
 export async function getFrontpageRaw() {
-  let frontpageSlicedLocation = `${location}/frontpage_sliced.json`;
-  return JSON.parse(
-    fs.readFileSync(frontpageSlicedLocation, { encoding: "utf-8" })
-  ); // TODO - async, no reason to lock
+  const client = await readWritePool.connect();
+  const res = await client.query(
+    "SELECT frontpage_sliced FROM latest.frontpage ORDER BY id DESC LIMIT 1"
+  );
+  if (!res.rows.length) return [];
+  return res.rows[0].frontpage_sliced;
 }
 export async function getFrontpageFullRaw() {
-  let frontpageSlicedLocation = `${location}/frontpage_full.json`;
-  return JSON.parse(
-    fs.readFileSync(frontpageSlicedLocation, { encoding: "utf-8" })
-  ); // TODO - async, no reason to lock
+  const client = await readWritePool.connect();
+  const res = await client.query(
+    "SELECT frontpage_full FROM latest.frontpage ORDER BY id DESC LIMIT 1"
+  );
+  if (!res.rows.length) return [];
+  return res.rows[0].frontpage_full;
 }
 export async function getFrontpage() {
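
One thing to note about the rewritten readers: they check a client out of `readWritePool` but never call `client.release()`, so each call can hold a pooled connection open. A minimal sketch of an equivalent read that sidesteps this by using node-postgres's `pool.query`, which acquires and releases a client internally; the function name here is illustrative, not part of the commit:

// Illustrative alternative (not in this commit): pool.query() handles
// client checkout and release around a single query.
export async function getFrontpageRawPooled() {
  const res = await readWritePool.query(
    "SELECT frontpage_sliced FROM latest.frontpage ORDER BY id DESC LIMIT 1"
  );
  return res.rows.length ? res.rows[0].frontpage_sliced : [];
}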
@@ -61,14 +63,12 @@ let shuffle = (array) => {
 export async function downloadFrontpage() {
   let init = Date.now();
-  let response = await pgRead({ schema: "latest", tableName: "combined" });
-  fs.writeFileSync(
-    `${location}/frontpage_full.json`,
-    JSON.stringify(response, null, 4)
-  );
-  console.log(`frontpage_full.json written to ${location}`);
+  const frontpageFull = await pgRead({
+    schema: "latest",
+    tableName: "combined",
+  });
-  let responseFiltered = response.filter(
+  let frontpageFiltered = frontpageFull.filter(
     (forecast) =>
       forecast.qualityindicators &&
       forecast.qualityindicators.stars >= 3 &&
@@ -76,26 +76,17 @@ export async function downloadFrontpage() {
       forecast.options.length > 0 &&
       forecast.description != ""
   );
-  let responseFilteredAndRandomized = shuffle(responseFiltered).slice(0, 50);
-  fs.writeFileSync(
-    `${location}/frontpage_sliced.json`,
-    JSON.stringify(responseFilteredAndRandomized, null, 4)
+  let frontpageSliced = shuffle(frontpageFiltered).slice(0, 50);
+  const client = await readWritePool.connect();
+  await client.query(
+    "INSERT INTO latest.frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)",
+    [JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)]
   );
-  console.log(`frontpage_sliced.json written to ${location}`);
   let end = Date.now();
   let difference = end - init;
   console.log(
     `Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
   );
-  /*
-  # (run code)
-  sleep 10
-  cp /home/azrael/server/data/frontpage_freshly_sliced.json /home/azrael/server/data/frontpage_sliced.json
-  date > /home/azrael/server/data/frontpage_slicetime.txt
-  cat /home/azrael/server/data/frontpage_freshly_sliced.json >> /home/azrael/server/data/frontpage_slicetime.txt
-  */
 }
+// TODO: call /api/cron/update-frontpage from github actions every 6 hours
+// TODO: store frontpage_sliced copy somewhere
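
The first TODO suggests exposing this job as an HTTP endpoint that GitHub Actions can call on a schedule. A hypothetical sketch of such a route, assuming the app is a Next.js project and a `pages/api/cron/update-frontpage.ts` file; `downloadFrontpage` is exported above, but the route itself does not exist in this commit:

// Hypothetical pages/api/cron/update-frontpage.ts (assumed Next.js API route);
// it only triggers downloadFrontpage and reports success.
import { NextApiRequest, NextApiResponse } from "next";
import { downloadFrontpage } from "../../../frontpage"; // import path is an assumption

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  await downloadFrontpage();
  res.status(200).json({ status: "ok" });
}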