feat: data in public schema; merge history tables
parent 4eeab9c861
commit a1ba23e340
@@ -1,30 +0,0 @@
-import { pgRead, pgReadWithReadCredentials, pgUpsert } from "./pg-wrapper";
-
-export async function databaseUpsert({ contents, group }) {
-  // No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear.
-  // (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase"){
-  const tableName = group === "history" ? "h2022" : group;
-  await pgUpsert({ contents, tableName });
-}
-
-const readWithReader = async (
-  group: string,
-  reader: (opts: { tableName: string }) => Promise<any>
-) => {
-  const tableName = group === "history" ? "h2022" : group;
-  const response = await reader({ tableName });
-
-  console.log("Postgres: ");
-  console.log(response.slice(0, 2));
-  console.log("");
-
-  return response;
-};
-
-export async function databaseRead({ group }) {
-  return await readWithReader(group, pgRead);
-}
-
-export async function databaseReadWithReadCredentials({ group }) {
-  return await readWithReader(group, pgReadWithReadCredentials);
-}
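Note: with the database-wrapper gone, callers talk to pg-wrapper directly and the group-to-table mapping disappears. A minimal sketch of how old call sites translate, with illustrative call sites rather than code from this commit:

    import { pgRead, pgUpsert } from "./pg-wrapper";

    // Before: databaseRead({ group: "history" }) silently resolved to the
    // year-specific "h2022" table. After: tables are addressed by name.
    async function translatedCallSites(contents: any[]) {
      const history = await pgRead({ tableName: "history" });
      // Before: databaseUpsert({ contents, group: "example" });
      await pgUpsert({ contents, tableName: "example", replace: true });
      return history;
    }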
@@ -5,30 +5,11 @@ import { hash } from "../utils/hash";
 import { measureTime } from "../utils/measureTime";
 import { roughSizeOfObject } from "../utils/roughSize";
 
-// Definitions
-const year = Number(new Date().toISOString().slice(0, 4));
-const allowed_years = [year, year + 1].map((year) => `h${year}`); // tables can't begin with number
-const allowed_months = [...Array(12).keys()]
-  .map((x) => x + 1)
-  .map((x) => (String(x).length == 1 ? `0${x}` : x));
-const allowed_year_month_histories = [].concat(
-  ...allowed_years.map((year) =>
-    allowed_months.map((month) => `${year}_${month}`)
-  )
-); // h2022_01
-const tableNamesWhitelistLatest = [
-  "combined",
-  ...platforms.map((platform) => platform.name),
-];
-const tableNamesWhiteListHistory = [
-  ...allowed_years,
-  ...allowed_year_month_histories,
-];
-const tableWhiteList = [
-  ...tableNamesWhitelistLatest,
-  ...tableNamesWhiteListHistory,
-  "dashboards",
-];
+const platformTableNames = platforms.map((platform) => platform.name);
+const forecastTableNames = [...platformTableNames, "combined", "history"];
+const allTableNames = [...forecastTableNames, "dashboards", "frontpage"];
 
 /* Postgres database connection code */
 const databaseURL = process.env.DIGITALOCEAN_POSTGRES;
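Note: the combinatorial whitelist (h2022, h2022_01, ...) is replaced by three static lists derived from the platform registry. A sketch of the new derivation, with illustrative platform names:

    // Assuming platforms is an array of { name: string }, as used above.
    const platforms = [{ name: "metaculus" }, { name: "predictit" }]; // illustrative
    const platformTableNames = platforms.map((platform) => platform.name);
    const forecastTableNames = [...platformTableNames, "combined", "history"];
    const allTableNames = [...forecastTableNames, "dashboards", "frontpage"];
    // => ["metaculus", "predictit", "combined", "history", "dashboards", "frontpage"]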
@@ -107,20 +88,6 @@ async function pgInitializeScaffolding() {
 }
 let YOLO = false;
 if (YOLO) {
-  console.log("Create schemas");
-  await runPgCommand({
-    command: `CREATE SCHEMA IF NOT EXISTS public`,
-    pool: readWritePool,
-  });
-  console.log("");
-
-  console.log("Set search path");
-  await runPgCommand({
-    command: `SET search_path TO public;`,
-    pool: readWritePool,
-  });
-  console.log("");
-
   console.log("Set public user permissions");
   await setPermissionsForPublicUser();
   console.log("");
@@ -148,21 +115,19 @@ async function pgInitializeLatest() {
   let YOLO = false;
   if (YOLO) {
     console.log("Create tables & their indexes");
-    let schema = "latest";
-    for (let table of tableNamesWhitelistLatest) {
+    for (const table of platformTableNames) {
       await runPgCommand({
-        command: dropTable(schema),
+        command: dropTable(table),
         pool: readWritePool,
       });
       await runPgCommand({
-        command: buildMetaforecastTable(schema),
+        command: buildMetaforecastTable(table),
         pool: readWritePool,
       });
       await runPgCommand({
-        command: createUniqueIndex(schema),
+        command: createUniqueIndex(table),
         pool: readWritePool,
       });
-      //}
     }
     console.log("");
   } else {
@@ -224,25 +189,19 @@ let buildHistoryTable = (table: string) => `CREATE TABLE ${table} (
 export async function pgInitializeHistories() {
   let YOLO = false;
   if (YOLO) {
-    console.log("Drop all previous history tables (Danger!)");
-    console.log("TODO - drop history tables"); // hope we won't need it until we get proper migrations
-    console.log("");
-
-    console.log("Create tables & their indexes");
-    for (let table of tableNamesWhiteListHistory) {
-      await runPgCommand({
-        command: dropTable(table),
-        pool: readWritePool,
-      });
-      await runPgCommand({
-        command: buildHistoryTable(table),
-        pool: readWritePool,
-      });
-      await runPgCommand({
-        command: createIndex(table), // Not unique!!
-        pool: readWritePool,
-      });
-    }
+    console.log("Create history table & index");
+    await runPgCommand({
+      command: dropTable("history"),
+      pool: readWritePool,
+    });
+    await runPgCommand({
+      command: buildHistoryTable("history"),
+      pool: readWritePool,
+    });
+    await runPgCommand({
+      command: createIndex("history"), // Not unique!!
+      pool: readWritePool,
+    });
     console.log("");
   } else {
     console.log(
@@ -289,16 +248,15 @@ async function pgReadWithPool({
   tableName: string;
   pool: Pool;
 }) {
-  if (tableWhiteList.includes(tableName)) {
-    let command = `SELECT * from ${tableName}`;
-    let response = await runPgCommand({ command, pool });
-    let results = response.results;
-    return results;
-  } else {
+  if (!allTableNames.includes(tableName)) {
     throw Error(
       `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
     );
   }
+  let command = `SELECT * from ${tableName}`;
+  let response = await runPgCommand({ command, pool });
+  let results = response.results;
+  return results;
 }
 
 export async function pgRead({ tableName }: { tableName: string }) {
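Note: the nested if/else becomes an early-throw guard, so the table name is validated against allTableNames before it is interpolated into SQL and the happy path reads straight down. A usage sketch with illustrative table names:

    import { pgRead } from "./pg-wrapper";

    async function demoRead() {
      const rows = await pgRead({ tableName: "combined" }); // ok: in allTableNames
      // pgRead({ tableName: "users; DROP TABLE x" }) would throw before touching Postgres.
      return rows.length;
    }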
@@ -344,7 +302,7 @@ export async function pgBulkInsert({
   tableName: string;
   client: PoolClient;
 }) {
-  if (!tableWhiteList.includes(tableName)) {
+  if (!forecastTableNames.includes(tableName)) {
     throw Error(
       `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
     );
@@ -404,36 +362,30 @@ export async function pgBulkInsert({
   }
 }
 
-export async function pgInsertIntoDashboard({ datum, tableName }) {
-  if (tableWhiteList.includes(tableName)) {
-    let text = `INSERT INTO ${tableName} VALUES($1, $2, $3, $4, $5, $6, $7)`;
-    let timestamp = datum.timestamp || new Date().toISOString();
-    timestamp = timestamp.slice(0, 19).replace("T", " ");
-    let values = [
-      hash(JSON.stringify(datum.contents)),
-      datum.title || "",
-      datum.description || "",
-      JSON.stringify(datum.contents || []),
-      timestamp, // fixed
-      datum.creator || "",
-      JSON.stringify(datum.extra || []),
-    ];
-    const client = await readWritePool.connect();
-    let result;
-    try {
-      result = await client.query(text, values);
-    } catch (error) {
-      console.log(error);
-    } finally {
-      client.release();
-    }
-    // console.log(result)
-    return result;
-  } else {
-    throw Error(
-      `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
-    );
-  }
+export async function pgInsertIntoDashboard({ datum }) {
+  let text = `INSERT INTO dashboards VALUES($1, $2, $3, $4, $5, $6, $7)`;
+  let timestamp = datum.timestamp || new Date().toISOString();
+  timestamp = timestamp.slice(0, 19).replace("T", " ");
+  let values = [
+    hash(JSON.stringify(datum.contents)),
+    datum.title || "",
+    datum.description || "",
+    JSON.stringify(datum.contents || []),
+    timestamp, // fixed
+    datum.creator || "",
+    JSON.stringify(datum.extra || []),
+  ];
+  const client = await readWritePool.connect();
+  let result;
+  try {
+    result = await client.query(text, values);
+  } catch (error) {
+    console.log(error);
+  } finally {
+    client.release();
+  }
+  // console.log(result)
+  return result;
 }
 /* For reference
   id text,
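Note: pgInsertIntoDashboard is now hard-wired to the dashboards table, so the whitelist branch goes away. A usage sketch; the datum fields mirror the VALUES list above, but the values themselves are illustrative:

    import { pgInsertIntoDashboard } from "./pg-wrapper";

    async function demoDashboardInsert() {
      await pgInsertIntoDashboard({
        datum: {
          title: "My dashboard", // illustrative values throughout
          description: "",
          contents: [],          // hashed to build the row id ($1)
          creator: "",
          extra: [],
          timestamp: new Date().toISOString(),
        },
      });
    }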
@@ -461,10 +413,16 @@ pgInsertIntoDashboard({
   tableName: "dashboards",
 });
 */
-export async function pgUpsert({ contents, tableName }) {
-  if (!tableWhiteList.includes(tableName)) {
-    console.log("tableWhiteList:");
-    console.log(tableWhiteList);
+export async function pgUpsert({
+  contents,
+  tableName,
+  replace,
+}: {
+  contents: Forecast[];
+  tableName: string;
+  replace: boolean;
+}) {
+  if (!forecastTableNames.includes(tableName)) {
     throw Error(
       `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
     );
@@ -474,19 +432,12 @@ export async function pgUpsert({ contents, tableName }) {
   const client = await readWritePool.connect();
   try {
     await client.query("BEGIN");
-    if (schema === "latest") {
-      client.query(`DELETE FROM latest.${tableName}`);
+    if (replace) {
+      client.query(`DELETE FROM ${tableName}`);
     }
     console.log(
       `Upserting ${contents.length} rows into postgres table ${tableName}.`
     );
-    console.log(
-      `Expected to take ${Number((contents.length * 831.183) / 4422).toFixed(
-        2
-      )} seconds or ${Number((contents.length * 13.85305) / 4422).toFixed(
-        2
-      )} minutes`
-    );
-
     await pgBulkInsert({ data: contents, tableName, client });
     console.log(
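Note: the schema check becomes an explicit replace flag. A sketch of the two write modes, matching the call sites later in this commit:

    import { pgUpsert } from "./pg-wrapper";

    async function writeModes(latest: any[]) {
      // replace: true — DELETE FROM combined first, so the table mirrors this snapshot.
      await pgUpsert({ contents: latest, tableName: "combined", replace: true });
      // replace: false — rows are only appended, so history accumulates snapshots.
      await pgUpsert({ contents: latest, tableName: "history", replace: false });
    }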
@@ -4,6 +4,7 @@ export async function updateHistory() {
   let latest = await pgReadWithReadCredentials({ tableName: "combined" });
   await pgUpsert({
     contents: latest,
-    tableName: "h2022",
+    tableName: "history",
+    replace: false,
   });
 }
@@ -23,6 +23,6 @@ export async function mergeEverythingInner() {
 
 export async function mergeEverything() {
   let merged = await mergeEverythingInner();
-  await pgUpsert({ contents: merged, tableName: "combined" });
+  await pgUpsert({ contents: merged, tableName: "combined", replace: true });
   console.log("Done");
 }
@@ -2,10 +2,10 @@ import "dotenv/config";
 
 import fs from "fs";
 
-import { databaseReadWithReadCredentials } from "../database/database-wrapper";
+import { pgReadWithReadCredentials } from "../database/pg-wrapper";
 
 let main = async () => {
-  let json = await databaseReadWithReadCredentials({ group: "combined" });
+  let json = await pgReadWithReadCredentials({ tableName: "combined" });
   let string = JSON.stringify(json, null, 2);
   let filename = "metaforecasts.json";
   fs.writeFileSync(filename, string);
src/backend/manual/noSchemaMigrate.ts (new file, +38)
@@ -0,0 +1,38 @@
+import "dotenv/config";
+
+import { readWritePool } from "../database/pg-wrapper";
+import { platforms } from "../platforms";
+
+const migrate = async () => {
+  const client = await readWritePool.connect();
+
+  const execQuery = async (q: string) => {
+    console.log(q);
+    await client.query(q);
+  };
+
+  try {
+    await client.query("BEGIN");
+    const copyTable = async (from: string, to: string) => {
+      await execQuery(`DROP TABLE IF EXISTS ${to}`);
+      await execQuery(`CREATE TABLE ${to} (LIKE ${from} INCLUDING ALL)`);
+      await execQuery(`INSERT INTO ${to} SELECT * FROM ${from}`);
+    };
+
+    for (const platform of platforms) {
+      await copyTable(`latest.${platform.name}`, platform.name);
+    }
+    await copyTable("latest.dashboards", "dashboards");
+    await copyTable("latest.combined", "combined");
+    await copyTable("latest.frontpage", "frontpage");
+    await copyTable("history.h2022", "history");
+    await client.query("COMMIT");
+  } catch (e) {
+    await client.query("ROLLBACK");
+    throw e;
+  } finally {
+    client.release();
+  }
+};
+
+migrate();
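Note: the one-off migration copies every latest.* table plus history.h2022 into the public schema inside a single transaction. A sketch that previews the exact SQL copyTable issues (previewCopyTable is a hypothetical helper, not part of the commit):

    // The three statements copyTable(from, to) executes, in order.
    const previewCopyTable = (from: string, to: string): string[] => [
      `DROP TABLE IF EXISTS ${to}`,
      `CREATE TABLE ${to} (LIKE ${from} INCLUDING ALL)`,
      `INSERT INTO ${to} SELECT * FROM ${from}`,
    ];

    // e.g. the SQL that merges the 2022 history table into the public schema:
    console.log(previewCopyTable("history.h2022", "history").join(";\n"));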
@@ -1,8 +1,8 @@
 /* Imports */
 import axios from "axios";
 
-import { databaseUpsert } from "../database/database-wrapper";
 import { calculateStars } from "../utils/stars";
+import { Platform } from "./";
 
 /* Definitions */
 let endpoint = "https://example.com/";
@@ -59,12 +59,11 @@ async function processPredictions(predictions) {
 
 /* Body */
 
-export async function example() {
-  let data = await fetchData();
-  let results = await processPredictions(data); // somehow needed
-  // console.log(results)
-  // let string = JSON.stringify(results, null, 2)
-  await databaseUpsert({ contents: results, group: "example" });
-  console.log("Done");
-}
-//example()
+export const example: Platform = {
+  name: "example",
+  async fetcher() {
+    let data = await fetchData();
+    let results = await processPredictions(data); // somehow needed
+    return results;
+  },
+};
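Note: platforms now export a Platform object instead of a bare function, and writing results moves out of the platform into processPlatform. The interface itself is not shown in this diff; the following sketch is inferred from usage (platform.name, platform.fetcher() returning rows for pgUpsert) and may differ from the real definition:

    // Assumed shapes, inferred from this commit's call sites.
    interface Forecast {
      [key: string]: unknown;
    }

    interface Platform {
      name: string;
      fetcher: () => Promise<Forecast[]>;
    }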
@@ -2,7 +2,6 @@
 import axios from "axios";
 import fs from "fs";
 
-import { databaseUpsert } from "../database/database-wrapper";
 import { calculateStars } from "../utils/stars";
 import { Platform } from "./";
 
@@ -59,10 +58,10 @@ async function main1() {
     // console.log(result)
     results.push(result);
   }
-  await databaseUpsert({
-    contents: results,
-    group: "givewell-questions-unprocessed",
-  });
+  // await databaseUpsert({
+  //   contents: results,
+  //   group: "givewell-questions-unprocessed",
+  // });
 }
 
 export const givewellopenphil: Platform = {
@@ -1,4 +1,4 @@
-import { databaseUpsert } from "../database/database-wrapper";
+import { pgUpsert } from "../database/pg-wrapper";
 import { betfair } from "./betfair";
 import { fantasyscotus } from "./fantasyscotus";
 import { foretold } from "./foretold";
@@ -112,7 +112,11 @@ export const processPlatform = async (platform: Platform) => {
   }
   let results = await platform.fetcher();
   if (results && results.length) {
-    await databaseUpsert({ contents: results, group: platform.name });
+    await pgUpsert({
+      contents: results,
+      tableName: platform.name,
+      replace: true,
+    });
     console.log("Done");
   } else {
     console.log(`Platform ${platform.name} didn't return any results`);
@@ -1,6 +1,6 @@
 import algoliasearch from "algoliasearch";
 
-import { databaseReadWithReadCredentials } from "../database/database-wrapper";
+import { pgReadWithReadCredentials } from "../database/pg-wrapper";
 import { mergeEverythingInner } from "../flow/mergeEverything";
 
 let cookie = process.env.ALGOLIA_MASTER_API_KEY;
@@ -41,8 +41,8 @@ let getoptionsstringforsearch = (record: any) => {
 };
 
 export async function rebuildAlgoliaDatabaseTheEasyWay() {
-  let records: any[] = await databaseReadWithReadCredentials({
-    group: "combined",
+  let records: any[] = await pgReadWithReadCredentials({
+    tableName: "combined",
   });
 
   records = records.map((record, index: number) => ({
@@ -1,7 +1,7 @@
 /* Imports */
 import fs from "fs";
 
-import { databaseReadWithReadCredentials } from "../../database/database-wrapper";
+import { pgReadWithReadCredentials } from "../../database/pg-wrapper";
 
 /* Definitions */
 
@@ -24,7 +24,7 @@ let main = async () => {
     "PredictIt",
     "Rootclaim",
   ];
-  let json = await databaseReadWithReadCredentials({ group: "combined" });
+  let json = await pgReadWithReadCredentials({ tableName: "combined" });
   console.log(json.length);
   //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
   //console.log(uniquePlatforms)
@@ -1,7 +1,7 @@
 /* Imports */
 import fs from "fs";
 
-import { databaseReadWithReadCredentials } from "../../database/database-wrapper";
+import { pgReadWithReadCredentials } from "../../database/pg-wrapper";
 
 /* Definitions */
 
@@ -26,7 +26,7 @@ let shuffleArray = (array) => {
 
 let main = async () => {
   let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
-  let json = await databaseReadWithReadCredentials({ group: "combined" });
+  let json = await pgReadWithReadCredentials({ tableName: "combined" });
   console.log(json.length);
   //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
   //console.log(uniquePlatforms)
@@ -1,7 +1,7 @@
 /* Imports */
 import fs from "fs";
 
-import { databaseReadWithReadCredentials } from "../../database/database-wrapper";
+import { pgReadWithReadCredentials } from "../../database/pg-wrapper";
 
 /* Definitions */
 let locationData = "./data/";
@@ -9,7 +9,7 @@ let locationData = "./data/";
 /* Body */
 // let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
 async function main() {
-  let data = await databaseReadWithReadCredentials({ group: "combined" }); //JSON.parse(rawdata)
+  let data = await pgReadWithReadCredentials({ tableName: "combined" }); //JSON.parse(rawdata)
   let processDescription = (description) => {
     if (description == null || description == undefined || description == "") {
       return "";
@@ -27,7 +27,6 @@ export default async function handler(
       creator: body.creator || "",
       extra: [],
     },
-    tableName: "dashboards",
   });
   res.status(200).send({
     dashboardId: id,
@@ -17,7 +17,8 @@
     "incremental": true,
     "moduleResolution": "node",
     "resolveJsonModule": true,
-    "isolatedModules": true
+    "isolatedModules": true,
+    "allowJs": true
   },
   "include": [
     "next-env.d.ts",