store frontpage code in db

commit a5563f9d2e (parent 6ffa3c0cfc)
Changes to the database wrapper module (imported below as './database/pg-wrapper'):

@@ -38,7 +38,7 @@ const tableWhiteList = [
 const databaseURL =
   process.env.DIGITALOCEAN_POSTGRES || getSecret("digitalocean-postgres");
 // process.env.DATABASE_URL || getSecret("heroku-postgres")
-const readWritePool = new Pool({
+export const readWritePool = new Pool({
   connectionString: databaseURL,
   ssl: process.env.POSTGRES_NO_SSL
     ? false
@@ -61,7 +61,7 @@ const readOnlyPool = new Pool({
 });
 
 // Helpers
-const runPgCommand = async ({ command, pool }) => {
+export const runPgCommand = async ({ command, pool }) => {
   console.log(command);
   const client = await pool.connect();
   let result;
@@ -113,7 +113,7 @@ async function pgInitializeScaffolding() {
       });
     }
   }
-  let YOLO = true;
+  let YOLO = false;
   if (YOLO) {
     console.log("Create schemas");
     for (let schema of schemas) {
@@ -158,7 +158,7 @@ let buildMetaforecastTable = (
 );`;
 
 async function pgInitializeLatest() {
-  let YOLO = true;
+  let YOLO = false;
   if (YOLO) {
     console.log("Create tables & their indexes");
     let schema = "latest";
@@ -204,7 +204,7 @@ async function pgInitializeDashboards() {
     creator text,
     extra json
   );`;
-  let YOLO = true;
+  let YOLO = false;
   if (YOLO) {
     await runPgCommand({
       command: `CREATE SCHEMA IF NOT EXISTS history;`,
@@ -256,7 +256,7 @@ let buildHistoryTable = (schema, table) => `CREATE TABLE ${schema}.${table} (
   extra json
 );`;
 export async function pgInitializeHistories() {
-  let YOLO = true;
+  let YOLO = false;
   if (YOLO) {
     console.log("Drop all previous history tables (Danger!)");
     await runPgCommand({
@@ -305,11 +305,41 @@ export async function pgInitializeHistories() {
   }
 }
 
+async function pgInitializeFrontpage() {
+  let buildFrontpage = () =>
+    `CREATE TABLE latest.frontpage (
+      id serial primary key,
+      frontpage_sliced jsonb,
+      frontpage_full jsonb
+    );`;
+  let YOLO = false;
+  if (YOLO) {
+    console.log("Create frontpage table and its index");
+
+    await runPgCommand({
+      command: dropTable("latest", "frontpage"),
+      pool: readWritePool,
+    });
+
+    await runPgCommand({
+      command: buildFrontpage(),
+      pool: readWritePool,
+    });
+
+    console.log("");
+  } else {
+    console.log(
+      "pgInitializeFrontpage: This command is dangerous, set YOLO to true in the code to invoke it"
+    );
+  }
+}
+
 export async function pgInitialize() {
   await pgInitializeScaffolding();
   await pgInitializeLatest();
   await pgInitializeHistories();
   await pgInitializeDashboards();
+  await pgInitializeFrontpage();
 }
 
 // Read
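Since readWritePool and runPgCommand are now exported, other modules can reuse the same connection pool. A minimal illustrative sketch (not part of this commit; it only uses the standard node-postgres client API, and the module path is taken from the import in the frontpage diff below) of querying the new latest.frontpage table:

// sketch.ts - illustrative only, not part of this commit
import { readWritePool } from "./database/pg-wrapper";

async function countFrontpageRows() {
  const client = await readWritePool.connect();
  try {
    // latest.frontpage is the table created by pgInitializeFrontpage() above
    const res = await client.query("SELECT count(*) AS n FROM latest.frontpage");
    console.log(`latest.frontpage rows: ${res.rows[0].n}`);
  } finally {
    client.release(); // hand the connection back to the shared pool
  }
}

countFrontpageRows();

Releasing the client in a finally block keeps the pool from leaking connections when queries are run outside the wrapper's own helpers.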
Changes to the frontpage module:

@@ -1,22 +1,24 @@
-import fs from 'fs';
-
-import { pgRead } from './database/pg-wrapper';
+import { pgRead, readWritePool } from './database/pg-wrapper';
 
 // TODO - move to global `constants.ts` config
 const location = "/Users/berekuk/coding/quri/metaforecast-backend/data";
 
 export async function getFrontpageRaw() {
-  let frontpageSlicedLocation = `${location}/frontpage_sliced.json`;
-  return JSON.parse(
-    fs.readFileSync(frontpageSlicedLocation, { encoding: "utf-8" })
-  ); // TODO - async, no reason to lock
+  const client = await readWritePool.connect();
+  const res = await client.query(
+    "SELECT frontpage_sliced FROM latest.frontpage ORDER BY id DESC LIMIT 1"
+  );
+  if (!res.rows.length) return [];
+  return res.rows[0].frontpage_sliced;
 }
 
 export async function getFrontpageFullRaw() {
-  let frontpageSlicedLocation = `${location}/frontpage_full.json`;
-  return JSON.parse(
-    fs.readFileSync(frontpageSlicedLocation, { encoding: "utf-8" })
-  ); // TODO - async, no reason to lock
+  const client = await readWritePool.connect();
+  const res = await client.query(
+    "SELECT frontpage_full FROM latest.frontpage ORDER BY id DESC LIMIT 1"
+  );
+  if (!res.rows.length) return [];
+  return res.rows[0].frontpage_full;
 }
 
 export async function getFrontpage() {
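With the rewrite above, getFrontpageRaw() and getFrontpageFullRaw() return the most recently inserted snapshot from latest.frontpage, or [] while the table is still empty. A small usage sketch (illustrative only; the relative import path is an assumption):

// usage-sketch.ts - illustrative only
import { getFrontpageRaw, getFrontpageFullRaw } from "./frontpage"; // path is an assumption

async function main() {
  // Up to 50 shuffled forecasts selected by downloadFrontpage() below
  const sliced = await getFrontpageRaw();
  console.log(`sliced frontpage: ${sliced.length} forecasts`);

  // The full, unfiltered snapshot stored in the same row
  const full = await getFrontpageFullRaw();
  console.log(`full frontpage: ${full.length} forecasts`);
}

main();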
@@ -61,14 +63,12 @@ let shuffle = (array) => {
 export async function downloadFrontpage() {
   let init = Date.now();
 
-  let response = await pgRead({ schema: "latest", tableName: "combined" });
-  fs.writeFileSync(
-    `${location}/frontpage_full.json`,
-    JSON.stringify(response, null, 4)
-  );
-  console.log(`frontpage_full.json written to ${location}`);
+  const frontpageFull = await pgRead({
+    schema: "latest",
+    tableName: "combined",
+  });
 
-  let responseFiltered = response.filter(
+  let frontpageFiltered = frontpageFull.filter(
     (forecast) =>
       forecast.qualityindicators &&
       forecast.qualityindicators.stars >= 3 &&
@@ -76,26 +76,17 @@ export async function downloadFrontpage() {
       forecast.options.length > 0 &&
       forecast.description != ""
   );
-  let responseFilteredAndRandomized = shuffle(responseFiltered).slice(0, 50);
-  fs.writeFileSync(
-    `${location}/frontpage_sliced.json`,
-    JSON.stringify(responseFilteredAndRandomized, null, 4)
+  let frontpageSliced = shuffle(frontpageFiltered).slice(0, 50);
+
+  const client = await readWritePool.connect();
+  await client.query(
+    "INSERT INTO latest.frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)",
+    [JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)]
   );
-  console.log(`frontpage_sliced.json written to ${location}`);
 
   let end = Date.now();
   let difference = end - init;
   console.log(
     `Took ${difference / 1000} seconds, or ${difference / (1000 * 60)} minutes.`
   );
-
-  /*
-  # (run code)
-  sleep 10
-  cp /home/azrael/server/data/frontpage_freshly_sliced.json /home/azrael/server/data/frontpage_sliced.json
-  date > /home/azrael/server/data/frontpage_slicetime.txt
-  cat /home/azrael/server/data/frontpage_freshly_sliced.json >> /home/azrael/server/data/frontpage_slicetime.txt
-  */
 }
-// TODO: call /api/cron/update-frontpage from github actions every 6 hours
-// TODO: store frontpage_sliced copy somewhere
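The removed TODO about calling /api/cron/update-frontpage from GitHub Actions every 6 hours suggests the refresh is meant to move behind an HTTP endpoint. That endpoint is not part of this commit; a hypothetical sketch, assuming a Next.js-style API route and an assumed import path:

// pages/api/cron/update-frontpage.ts - hypothetical, not included in this commit
import { NextApiRequest, NextApiResponse } from "next";

import { downloadFrontpage } from "../../../backend/frontpage"; // path is an assumption

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  // Rebuilds the frontpage snapshot: reads latest.combined, filters and shuffles,
  // then inserts a new row into latest.frontpage.
  await downloadFrontpage();
  res.status(200).json({ status: "ok" });
}

A GitHub Actions workflow with a schedule of cron: "0 */6 * * *" could then hit this route every 6 hours, per the removed TODO.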