Merge pull request #48 from QURIresearch/prisma-migrate

Prisma migrate
This commit is contained in:
Vyacheslav Matyukhin 2022-04-08 00:18:25 +03:00 committed by GitHub
commit a65f157be8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 308 additions and 279 deletions

75
package-lock.json generated
View File

@ -9,6 +9,7 @@
"version": "2.0.0", "version": "2.0.0",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@prisma/client": "^3.11.1",
"@tailwindcss/forms": "^0.4.0", "@tailwindcss/forms": "^0.4.0",
"@tailwindcss/typography": "^0.5.1", "@tailwindcss/typography": "^0.5.1",
"@types/jsdom": "^16.2.14", "@types/jsdom": "^16.2.14",
@ -43,6 +44,7 @@
"postcss": "^8.2.1", "postcss": "^8.2.1",
"postcss-flexbugs-fixes": "^5.0.2", "postcss-flexbugs-fixes": "^5.0.2",
"postcss-preset-env": "^7.3.2", "postcss-preset-env": "^7.3.2",
"prisma": "^3.11.1",
"query-string": "^7.1.1", "query-string": "^7.1.1",
"react": "^17.0.2", "react": "^17.0.2",
"react-component-export-image": "^1.0.6", "react-component-export-image": "^1.0.6",
@ -1193,6 +1195,37 @@
"node": ">= 8" "node": ">= 8"
} }
}, },
"node_modules/@prisma/client": {
"version": "3.11.1",
"resolved": "https://registry.npmjs.org/@prisma/client/-/client-3.11.1.tgz",
"integrity": "sha512-B3C7zQG4HbjJzUr2Zg9UVkBJutbqq9/uqkl1S138+keZCubJrwizx3RuIvGwI+s+pm3qbsyNqXiZgL3Ir0fSng==",
"hasInstallScript": true,
"dependencies": {
"@prisma/engines-version": "3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9"
},
"engines": {
"node": ">=12.6"
},
"peerDependencies": {
"prisma": "*"
},
"peerDependenciesMeta": {
"prisma": {
"optional": true
}
}
},
"node_modules/@prisma/engines": {
"version": "3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9",
"resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9.tgz",
"integrity": "sha512-MILbsGnvmnhCbFGa2/iSnsyGyazU3afzD7ldjCIeLIGKkNBMSZgA2IvpYsAXl+6qFHKGrS3B2otKfV31dwMSQw==",
"hasInstallScript": true
},
"node_modules/@prisma/engines-version": {
"version": "3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9",
"resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9.tgz",
"integrity": "sha512-HkcsDniA4iNb/gi0iuyOJNAM7nD/LwQ0uJm15v360O5dee3TM4lWdSQiTYBMK6FF68ACUItmzSur7oYuUZ2zkQ=="
},
"node_modules/@rescript/react": { "node_modules/@rescript/react": {
"version": "0.10.3", "version": "0.10.3",
"resolved": "https://registry.npmjs.org/@rescript%2freact/-/react-0.10.3.tgz", "resolved": "https://registry.npmjs.org/@rescript%2freact/-/react-0.10.3.tgz",
@ -33505,6 +33538,22 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/prisma": {
"version": "3.11.1",
"resolved": "https://registry.npmjs.org/prisma/-/prisma-3.11.1.tgz",
"integrity": "sha512-aYn8bQwt1xwR2oSsVNHT4PXU7EhsThIwmpNB/MNUaaMx5OPLTro6VdNJe/sJssXFLxhamfWeMjwmpXjljo6xkg==",
"hasInstallScript": true,
"dependencies": {
"@prisma/engines": "3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9"
},
"bin": {
"prisma": "build/index.js",
"prisma2": "build/index.js"
},
"engines": {
"node": ">=12.6"
}
},
"node_modules/process-nextick-args": { "node_modules/process-nextick-args": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
@ -36219,6 +36268,24 @@
"fastq": "^1.6.0" "fastq": "^1.6.0"
} }
}, },
"@prisma/client": {
"version": "3.11.1",
"resolved": "https://registry.npmjs.org/@prisma/client/-/client-3.11.1.tgz",
"integrity": "sha512-B3C7zQG4HbjJzUr2Zg9UVkBJutbqq9/uqkl1S138+keZCubJrwizx3RuIvGwI+s+pm3qbsyNqXiZgL3Ir0fSng==",
"requires": {
"@prisma/engines-version": "3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9"
}
},
"@prisma/engines": {
"version": "3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9",
"resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9.tgz",
"integrity": "sha512-MILbsGnvmnhCbFGa2/iSnsyGyazU3afzD7ldjCIeLIGKkNBMSZgA2IvpYsAXl+6qFHKGrS3B2otKfV31dwMSQw=="
},
"@prisma/engines-version": {
"version": "3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9",
"resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9.tgz",
"integrity": "sha512-HkcsDniA4iNb/gi0iuyOJNAM7nD/LwQ0uJm15v360O5dee3TM4lWdSQiTYBMK6FF68ACUItmzSur7oYuUZ2zkQ=="
},
"@rescript/react": { "@rescript/react": {
"version": "0.10.3", "version": "0.10.3",
"resolved": "https://registry.npmjs.org/@rescript%2freact/-/react-0.10.3.tgz", "resolved": "https://registry.npmjs.org/@rescript%2freact/-/react-0.10.3.tgz",
@ -60202,6 +60269,14 @@
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz",
"integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==" "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg=="
}, },
"prisma": {
"version": "3.11.1",
"resolved": "https://registry.npmjs.org/prisma/-/prisma-3.11.1.tgz",
"integrity": "sha512-aYn8bQwt1xwR2oSsVNHT4PXU7EhsThIwmpNB/MNUaaMx5OPLTro6VdNJe/sJssXFLxhamfWeMjwmpXjljo6xkg==",
"requires": {
"@prisma/engines": "3.11.1-1.1a2506facaf1a4727b7c26850735e88ec779dee9"
}
},
"process-nextick-args": { "process-nextick-args": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",

View File

@ -27,6 +27,7 @@
"dbshell": ". .env && psql $DIGITALOCEAN_POSTGRES" "dbshell": ". .env && psql $DIGITALOCEAN_POSTGRES"
}, },
"dependencies": { "dependencies": {
"@prisma/client": "^3.11.1",
"@tailwindcss/forms": "^0.4.0", "@tailwindcss/forms": "^0.4.0",
"@tailwindcss/typography": "^0.5.1", "@tailwindcss/typography": "^0.5.1",
"@types/jsdom": "^16.2.14", "@types/jsdom": "^16.2.14",
@ -61,6 +62,7 @@
"postcss": "^8.2.1", "postcss": "^8.2.1",
"postcss-flexbugs-fixes": "^5.0.2", "postcss-flexbugs-fixes": "^5.0.2",
"postcss-preset-env": "^7.3.2", "postcss-preset-env": "^7.3.2",
"prisma": "^3.11.1",
"query-string": "^7.1.1", "query-string": "^7.1.1",
"react": "^17.0.2", "react": "^17.0.2",
"react-component-export-image": "^1.0.6", "react-component-export-image": "^1.0.6",

View File

@ -0,0 +1,57 @@
-- Initial schema migration generated by Prisma Migrate.
-- NOTE(review): Prisma records a checksum for applied migrations — confirm
-- this migration has not yet been applied before editing it further.
-- CreateTable
-- dashboards: keyed by a text id; contents/extra are stored as jsonb.
CREATE TABLE "dashboards" (
"id" TEXT NOT NULL,
"title" TEXT NOT NULL,
"description" TEXT NOT NULL,
"contents" JSONB NOT NULL,
"timestamp" TIMESTAMP(6) NOT NULL,
"creator" TEXT NOT NULL,
"extra" JSONB NOT NULL,
CONSTRAINT "dashboards_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- frontpage: cached frontpage payloads; readers take the highest serial id
-- as the most recent snapshot.
CREATE TABLE "frontpage" (
"id" SERIAL NOT NULL,
"frontpage_full" JSONB NOT NULL,
"frontpage_sliced" JSONB NOT NULL,
CONSTRAINT "frontpage_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- history: same column shape as questions plus a surrogate serial "pk".
-- "id" is NOT unique here (see the non-unique index below) — multiple rows
-- may share a question id.
CREATE TABLE "history" (
"id" TEXT NOT NULL,
"title" TEXT NOT NULL,
"url" TEXT NOT NULL,
"platform" TEXT NOT NULL,
"description" TEXT NOT NULL,
"options" JSONB NOT NULL,
"timestamp" TIMESTAMP(6) NOT NULL,
"stars" INTEGER NOT NULL,
"qualityindicators" JSONB NOT NULL,
"extra" JSONB NOT NULL,
"pk" SERIAL NOT NULL,
CONSTRAINT "history_pkey" PRIMARY KEY ("pk")
);
-- CreateTable
-- questions: one row per question, keyed by its text "id".
CREATE TABLE "questions" (
"id" TEXT NOT NULL,
"title" TEXT NOT NULL,
"url" TEXT NOT NULL,
"platform" TEXT NOT NULL,
"description" TEXT NOT NULL,
"options" JSONB NOT NULL,
"timestamp" TIMESTAMP(6) NOT NULL,
"stars" INTEGER NOT NULL,
"qualityindicators" JSONB NOT NULL,
"extra" JSONB NOT NULL,
CONSTRAINT "questions_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
-- Non-unique lookup index: history is queried by question id.
CREATE INDEX "history_id_idx" ON "history"("id");

View File

@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"

53
prisma/schema.prisma Normal file
View File

@ -0,0 +1,53 @@
// Generates the JavaScript/TypeScript client (@prisma/client).
generator client {
provider = "prisma-client-js"
}
// Postgres datasource; connection string comes from the
// DIGITALOCEAN_POSTGRES environment variable.
datasource db {
provider = "postgresql"
url = env("DIGITALOCEAN_POSTGRES")
}
// Saved dashboards, keyed by a text id; contents/extra are free-form JSON.
model dashboards {
id String @id
title String
description String
contents Json
timestamp DateTime @db.Timestamp(6)
creator String
extra Json
}
// Cached frontpage payloads; the row with the highest id is the most recent.
model frontpage {
id Int @id @default(autoincrement())
frontpage_full Json
frontpage_sliced Json
}
// Historical snapshots of questions. "id" is NOT unique here — presumably
// several snapshots share one question id (it only has a non-unique index),
// so a surrogate autoincrement "pk" is the primary key.
model history {
id String
title String
url String
platform String
description String
options Json
timestamp DateTime @db.Timestamp(6)
stars Int
qualityindicators Json
extra Json
pk Int @id @default(autoincrement())
@@index([id])
}
// Current questions, one row per question keyed by its text id.
model questions {
id String @id
title String
url String
platform String
description String
options Json
timestamp DateTime @db.Timestamp(6)
stars Int
qualityindicators Json
extra Json
}

View File

@ -11,7 +11,7 @@ const allTableNames = [...forecastTableNames, "dashboards", "frontpage"];
/* Postgres database connection code */ /* Postgres database connection code */
const databaseURL = process.env.DIGITALOCEAN_POSTGRES; const databaseURL = process.env.DIGITALOCEAN_POSTGRES;
export const readWritePool = new Pool({ export const pool = new Pool({
connectionString: databaseURL, connectionString: databaseURL,
ssl: process.env.POSTGRES_NO_SSL ssl: process.env.POSTGRES_NO_SSL
? false ? false
@ -20,258 +20,15 @@ export const readWritePool = new Pool({
}, },
}); });
const readOnlyDatabaseURL =
"postgresql://public_read_only_user:gOcihnLhqRIQUQYt@postgres-red-do-user-10290909-0.b.db.ondigitalocean.com:25060/metaforecastpg?sslmode=require" ||
process.env.DIGITALOCEAN_POSTGRES_PUBLIC;
const readOnlyPool = new Pool({
// never used
connectionString: readOnlyDatabaseURL,
ssl: process.env.POSTGRES_NO_SSL
? false
: {
rejectUnauthorized: false,
},
});
// Helpers
export const runPgCommand = async ({
command,
pool,
}: {
command: string;
pool: Pool;
}) => {
console.log(command);
const client = await pool.connect();
let result;
try {
let response = await client.query(command);
result = { results: response ? response.rows : null };
} catch (error) {
console.log(error);
} finally {
client.release();
}
return result;
};
// Initialize
let dropTable = (table: string) => `DROP TABLE IF EXISTS ${table}`;
let createIndex = (table: string) =>
`CREATE INDEX ${table}_id_index ON ${table} (id);`;
let createUniqueIndex = (table: string) =>
`CREATE UNIQUE INDEX ${table}_id_index ON ${table} (id);`;
async function pgInitializeScaffolding() {
async function setPermissionsForPublicUser() {
let initCommands = [
"REVOKE ALL ON DATABASE metaforecastpg FROM public_read_only_user;",
"GRANT CONNECT ON DATABASE metaforecastpg TO public_read_only_user;",
];
for (let command of initCommands) {
await runPgCommand({ command, pool: readWritePool });
}
await runPgCommand({
command:
"GRANT SELECT ON ALL TABLES IN SCHEMA public TO public_read_only_user",
pool: readWritePool,
});
await runPgCommand({
command:
"ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO public_read_only_user",
pool: readWritePool,
});
}
let YOLO = false;
if (YOLO) {
console.log("Set public user permissions");
await setPermissionsForPublicUser();
console.log("");
} else {
console.log(
"pgInitializeScaffolding: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
let buildMetaforecastTable = (table: string) => `CREATE TABLE ${table} (
id text,
title text,
url text,
platform text,
description text,
options json,
timestamp timestamp,
stars int,
qualityindicators json,
extra json
);`;
async function pgInitializeQuestions() {
let YOLO = false;
if (YOLO) {
console.log("Create tables & their indexes");
const table = "questions";
await runPgCommand({
command: dropTable(table),
pool: readWritePool,
});
await runPgCommand({
command: buildMetaforecastTable(table),
pool: readWritePool,
});
await runPgCommand({
command: createUniqueIndex(table),
pool: readWritePool,
});
console.log("");
} else {
console.log(
"pgInitializeQuestions: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
async function pgInitializeDashboards() {
let buildDashboard = () =>
`CREATE TABLE dashboards (
id text,
title text,
description text,
contents json,
timestamp timestamp,
creator text,
extra json
);`;
let YOLO = false;
if (YOLO) {
console.log("Create dashboard table and its index");
await runPgCommand({
command: dropTable("dashboards"),
pool: readWritePool,
});
await runPgCommand({
command: buildDashboard(),
pool: readWritePool,
});
await runPgCommand({
command: createUniqueIndex("dashboards"),
pool: readWritePool,
});
console.log("");
} else {
console.log(
"pgInitializeDashboard: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
let buildHistoryTable = (table: string) => `CREATE TABLE ${table} (
id text,
title text,
url text,
platform text,
description text,
options json,
timestamp timestamp,
stars int,
qualityindicators json,
extra json
);`;
export async function pgInitializeHistories() {
let YOLO = false;
if (YOLO) {
console.log("Create history table & index");
await runPgCommand({
command: dropTable("history"),
pool: readWritePool,
});
await runPgCommand({
command: buildHistoryTable("history"),
pool: readWritePool,
});
await runPgCommand({
command: createIndex("history"), // Not unique!!
pool: readWritePool,
});
console.log("");
} else {
console.log(
"pgInitializeHistories: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
async function pgInitializeFrontpage() {
let YOLO = false;
if (YOLO) {
await runPgCommand({
command: dropTable("frontpage"),
pool: readWritePool,
});
await runPgCommand({
command: `CREATE TABLE frontpage (
id serial primary key,
frontpage_full jsonb,
frontpage_sliced jsonb
);`,
pool: readWritePool,
});
} else {
console.log(
"pgInitializeFrontpage: This command is dangerous, set YOLO to true in the code to invoke it"
);
}
}
export async function pgInitialize() {
await pgInitializeScaffolding();
await pgInitializeQuestions();
await pgInitializeHistories();
await pgInitializeDashboards();
await pgInitializeFrontpage();
}
// Read // Read
async function pgReadWithPool({ export async function pgRead({ tableName }: { tableName: string }) {
tableName,
pool,
}: {
tableName: string;
pool: Pool;
}) {
if (!allTableNames.includes(tableName)) { if (!allTableNames.includes(tableName)) {
throw Error( throw Error(
`Table ${tableName} not in whitelist; stopping to avoid tricky sql injections` `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
); );
} }
let command = `SELECT * from ${tableName}`; let command = `SELECT * from ${tableName}`;
let response = await runPgCommand({ command, pool }); return (await pool.query(command)).rows;
let results = response.results;
return results;
}
export async function pgRead({ tableName }: { tableName: string }) {
return await pgReadWithPool({ tableName, pool: readWritePool });
}
export async function pgReadWithReadCredentials({
tableName,
}: {
tableName: string;
}) {
// currently does not work.
/* return await pgReadWithPool({
tableName,
pool: readOnlyPool,
});
*/
return await pgReadWithPool({ tableName, pool: readWritePool });
} }
export async function pgGetByIds({ export async function pgGetByIds({
@ -284,8 +41,7 @@ export async function pgGetByIds({
let idstring = `( ${ids.map((id: string) => `'${id}'`).join(", ")} )`; // (1, 2, 3) let idstring = `( ${ids.map((id: string) => `'${id}'`).join(", ")} )`; // (1, 2, 3)
let command = `SELECT * from ${table} where id in ${idstring}`; let command = `SELECT * from ${table} where id in ${idstring}`;
// see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn // see: https://stackoverflow.com/questions/5803472/sql-where-id-in-id1-id2-idn
let response = await runPgCommand({ command, pool: readWritePool }); let results = (await pool.query(command)).rows;
let results = response.results;
console.log(results); console.log(results);
return results; return results;
} }
@ -372,7 +128,7 @@ export async function pgInsertIntoDashboard({ datum }) {
datum.creator || "", datum.creator || "",
JSON.stringify(datum.extra || []), JSON.stringify(datum.extra || []),
]; ];
const client = await readWritePool.connect(); const client = await pool.connect();
let result; let result;
try { try {
result = await client.query(text, values); result = await client.query(text, values);
@ -426,7 +182,7 @@ export async function pgUpsert({
} }
await measureTime(async () => { await measureTime(async () => {
const client = await readWritePool.connect(); const client = await pool.connect();
try { try {
await client.query("BEGIN"); await client.query("BEGIN");
if (replacePlatform) { if (replacePlatform) {

View File

@ -1,7 +1,7 @@
import { pgReadWithReadCredentials, pgUpsert } from "../../database/pg-wrapper"; import { pgRead, pgUpsert } from "../../database/pg-wrapper";
export async function updateHistory() { export async function updateHistory() {
let latest = await pgReadWithReadCredentials({ tableName: "questions" }); let latest = await pgRead({ tableName: "questions" });
await pgUpsert({ await pgUpsert({
contents: latest, contents: latest,
tableName: "history", tableName: "history",

View File

@ -1,4 +1,3 @@
import { pgInitialize } from "../database/pg-wrapper";
import { doEverything } from "../flow/doEverything"; import { doEverything } from "../flow/doEverything";
import { updateHistory } from "../flow/history/updateHistory"; import { updateHistory } from "../flow/history/updateHistory";
import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData"; import { rebuildNetlifySiteWithNewData } from "../flow/rebuildNetliftySiteWithNewData";
@ -45,11 +44,6 @@ export const jobs: Job[] = [
run: doEverything, run: doEverything,
separate: true, separate: true,
}, },
{
name: "migrate",
message: "Initialize postgres database",
run: pgInitialize,
},
]; ];
function sleep(ms: number) { function sleep(ms: number) {

View File

@ -1,8 +1,8 @@
import { pgRead, readWritePool } from "./database/pg-wrapper"; import { pgRead, pool } from "./database/pg-wrapper";
import { Forecast } from "./platforms"; import { Forecast } from "./platforms";
export async function getFrontpage(): Promise<Forecast[]> { export async function getFrontpage(): Promise<Forecast[]> {
const res = await readWritePool.query( const res = await pool.query(
"SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1" "SELECT frontpage_sliced FROM frontpage ORDER BY id DESC LIMIT 1"
); );
if (!res.rows.length) return []; if (!res.rows.length) return [];
@ -10,7 +10,7 @@ export async function getFrontpage(): Promise<Forecast[]> {
} }
export async function getFrontpageFull(): Promise<Forecast[]> { export async function getFrontpageFull(): Promise<Forecast[]> {
const res = await readWritePool.query( const res = await pool.query(
"SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1" "SELECT frontpage_full FROM frontpage ORDER BY id DESC LIMIT 1"
); );
if (!res.rows.length) return []; if (!res.rows.length) return [];
@ -23,18 +23,18 @@ export async function rebuildFrontpage() {
}); });
const frontpageSliced = ( const frontpageSliced = (
await readWritePool.query(` await pool.query(`
SELECT * FROM questions SELECT * FROM questions
WHERE WHERE
(qualityindicators->>'stars')::int >= 3 (qualityindicators->>'stars')::int >= 3
AND description != '' AND description != ''
AND JSON_ARRAY_LENGTH(options) > 0 AND JSONB_ARRAY_LENGTH(options) > 0
ORDER BY RANDOM() LIMIT 50 ORDER BY RANDOM() LIMIT 50
`) `)
).rows; ).rows;
const start = Date.now(); const start = Date.now();
await readWritePool.query( await pool.query(
"INSERT INTO frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)", "INSERT INTO frontpage(frontpage_full, frontpage_sliced) VALUES($1, $2)",
[JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)] [JSON.stringify(frontpageFull), JSON.stringify(frontpageSliced)]
); );

View File

@ -2,10 +2,10 @@ import "dotenv/config";
import fs from "fs"; import fs from "fs";
import { pgReadWithReadCredentials } from "../database/pg-wrapper"; import { pgRead } from "../database/pg-wrapper";
let main = async () => { let main = async () => {
let json = await pgReadWithReadCredentials({ tableName: "questions" }); let json = await pgRead({ tableName: "questions" });
let string = JSON.stringify(json, null, 2); let string = JSON.stringify(json, null, 2);
let filename = "metaforecasts.json"; let filename = "metaforecasts.json";
fs.writeFileSync(filename, string); fs.writeFileSync(filename, string);

View File

@ -1,3 +0,0 @@
import { pgInitialize } from "../database/pg-wrapper";
pgInitialize();

View File

@ -1,9 +1,9 @@
import "dotenv/config"; import "dotenv/config";
import { readWritePool } from "../database/pg-wrapper"; import { pool } from "../database/pg-wrapper";
const migrate = async () => { const migrate = async () => {
const client = await readWritePool.connect(); const client = await pool.connect();
const execQuery = async (q: string) => { const execQuery = async (q: string) => {
console.log(q); console.log(q);

View File

@ -0,0 +1,92 @@
import "dotenv/config";
import { pool } from "../database/pg-wrapper";
// One-off data migration aligning the hand-built Postgres schema with
// prisma/schema.prisma: marks legacy columns NOT NULL, adds primary keys,
// and converts json columns to jsonb. All DDL runs inside one transaction
// so a failure leaves the schema untouched.
const migrate = async () => {
const client = await pool.connect();
// Log every statement before running it so progress is visible.
const execQuery = async (q: string) => {
console.log(q);
await client.query(q);
};
try {
await client.query("BEGIN");
// Mark an existing column NOT NULL (required to match the Prisma schema).
const notNullColumn = async (table: string, column: string) => {
await execQuery(
`ALTER TABLE ${table} ALTER COLUMN ${column} SET NOT NULL`
);
};
// Convert a legacy json column to jsonb in place.
const jsonbColumn = async (table: string, column: string) => {
await execQuery(
`ALTER TABLE ${table} ALTER COLUMN ${column} SET DATA TYPE jsonb USING ${column}::jsonb`
);
};
// Table -> columns that must become NOT NULL.
const t2c = {
dashboards: [
"id",
"title",
"description",
"contents",
"timestamp",
"creator",
"extra",
],
frontpage: ["frontpage_sliced", "frontpage_full"],
history: [
"id",
"title",
"url",
"platform",
"description",
"options",
"timestamp",
"stars",
"qualityindicators",
"extra",
],
questions: [
"id",
"title",
"url",
"platform",
"description",
"options",
"timestamp",
"stars",
"qualityindicators",
"extra",
],
};
for (const [table, columns] of Object.entries(t2c)) {
for (const column of columns) {
await notNullColumn(table, column);
}
}
// history gets a surrogate serial key (its "id" is not unique);
// dashboards/questions are keyed directly by their text id.
await execQuery("ALTER TABLE history ADD COLUMN pk SERIAL PRIMARY KEY");
await execQuery("ALTER TABLE dashboards ADD PRIMARY KEY (id)");
await execQuery("ALTER TABLE questions ADD PRIMARY KEY (id)");
await jsonbColumn("dashboards", "contents");
await jsonbColumn("dashboards", "extra");
for (const table of ["history", "questions"]) {
await jsonbColumn(table, "options");
await jsonbColumn(table, "qualityindicators");
await jsonbColumn(table, "extra");
}
await client.query("COMMIT");
} catch (e) {
await client.query("ROLLBACK");
throw e;
} finally {
client.release();
}
};
// Surface failure explicitly: a bare `migrate();` leaves the rejection
// unhandled, and older Node versions only warn and exit 0 in that case,
// silently masking a failed migration.
migrate().catch((err) => {
console.error(err);
process.exit(1);
});

View File

@ -1,6 +1,6 @@
import algoliasearch from "algoliasearch"; import algoliasearch from "algoliasearch";
import { pgReadWithReadCredentials } from "../database/pg-wrapper"; import { pgRead } from "../database/pg-wrapper";
import { platforms } from "../platforms"; import { platforms } from "../platforms";
let cookie = process.env.ALGOLIA_MASTER_API_KEY; let cookie = process.env.ALGOLIA_MASTER_API_KEY;
@ -20,7 +20,7 @@ let getoptionsstringforsearch = (record: any) => {
}; };
export async function rebuildAlgoliaDatabaseTheEasyWay() { export async function rebuildAlgoliaDatabaseTheEasyWay() {
let records: any[] = await pgReadWithReadCredentials({ let records: any[] = await pgRead({
tableName: "questions", tableName: "questions",
}); });

View File

@ -1,7 +1,7 @@
/* Imports */ /* Imports */
import fs from "fs"; import fs from "fs";
import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; import { pgRead } from "../../database/pg-wrapper";
/* Definitions */ /* Definitions */
@ -24,7 +24,7 @@ let main = async () => {
"PredictIt", "PredictIt",
"Rootclaim", "Rootclaim",
]; ];
let json = await pgReadWithReadCredentials({ tableName: "questions" }); let json = await pgRead({ tableName: "questions" });
console.log(json.length); console.log(json.length);
//let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
//console.log(uniquePlatforms) //console.log(uniquePlatforms)

View File

@ -1,7 +1,7 @@
/* Imports */ /* Imports */
import fs from "fs"; import fs from "fs";
import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; import { pgRead } from "../../database/pg-wrapper";
/* Definitions */ /* Definitions */
@ -26,7 +26,7 @@ let shuffleArray = (array) => {
let main = async () => { let main = async () => {
let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim'] let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
let json = await pgReadWithReadCredentials({ tableName: "questions" }); let json = await pgRead({ tableName: "questions" });
console.log(json.length); console.log(json.length);
//let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))] //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
//console.log(uniquePlatforms) //console.log(uniquePlatforms)

View File

@ -1,7 +1,7 @@
/* Imports */ /* Imports */
import fs from "fs"; import fs from "fs";
import { pgReadWithReadCredentials } from "../../database/pg-wrapper"; import { pgRead } from "../../database/pg-wrapper";
/* Definitions */ /* Definitions */
let locationData = "./data/"; let locationData = "./data/";
@ -9,7 +9,7 @@ let locationData = "./data/";
/* Body */ /* Body */
// let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src // let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
async function main() { async function main() {
let data = await pgReadWithReadCredentials({ tableName: "questions" }); //JSON.parse(rawdata) let data = await pgRead({ tableName: "questions" }); //JSON.parse(rawdata)
let processDescription = (description) => { let processDescription = (description) => {
if (description == null || description == undefined || description == "") { if (description == null || description == undefined || description == "") {
return ""; return "";