refactor: prisma everywhere, drop unused columns and tables
This commit is contained in: parent da83eabdbe, commit fd2c39435d
@@ -0,0 +1,12 @@
/*
  Warnings:

  - You are about to drop the column `stars` on the `history` table. All the data in the column will be lost.
  - You are about to drop the column `stars` on the `questions` table. All the data in the column will be lost.

*/
-- AlterTable
ALTER TABLE "history" DROP COLUMN "stars";

-- AlterTable
ALTER TABLE "questions" DROP COLUMN "stars";
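This migration drops the redundant `stars` column from both tables; the star rating remains available inside the `qualityindicators` JSON value, as the schema comments further down in this diff show. A minimal sketch (not part of this commit) of reading it back through the generated Prisma client, assuming the `Question` model below:

```typescript
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// Sketch only: after the column drop, the star rating lives inside the
// `qualityindicators` JSON value rather than a dedicated `stars` column.
async function starsFor(id: string): Promise<number | undefined> {
  const question = await prisma.question.findUnique({ where: { id } });
  if (!question) return undefined;
  const qi = question.qualityindicators as object as { stars?: number };
  return qi.stars;
}
```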
@@ -0,0 +1,8 @@
/*
  Warnings:

  - You are about to drop the `frontpage` table. If the table is not empty, all the data it contains will be lost.

*/
-- DropTable
DROP TABLE "frontpage";
@@ -23,14 +23,6 @@ model Dashboard {
  @@map("dashboards")
}

model Frontpage {
  id Int @id @default(autoincrement())
  frontpage_full Json
  frontpage_sliced Json

  @@map("frontpage")
}

model History {
  id String
  title String
@@ -39,7 +31,6 @@ model History {
  description String
  options Json
  timestamp DateTime @db.Timestamp(6)
  stars Int
  qualityindicators Json
  extra Json
  pk Int @id @default(autoincrement())
@@ -49,14 +40,37 @@ model History {
}

model Question {
  id String @id
  title String
  url String
  platform String
  description String
  options Json
  timestamp DateTime @db.Timestamp(6)
  stars Int
  /// E.g. "fantasyscotus-580"
  id String @id
  /// E.g. "In Wooden v. U.S., the SCOTUS will affirm the lower court's decision"
  title String
  /// E.g. "https://fantasyscotus.net/user-predictions/case/wooden-v-us/"
  url String
  /// E.g. "fantasyscotus"
  platform String
  /// E.g. "62.50% (75 out of 120) of FantasySCOTUS players predict that the lower court's decision will be affirmed. FantasySCOTUS overall predicts an outcome of Affirm 6-3. Historically, FantasySCOTUS has chosen the correct side 50.00% of the time."
  description String

  // E.g.:
  // [
  //   {
  //     "name": "Yes",
  //     "probability": 0.625,
  //     "type": "PROBABILITY"
  //   },
  //   {
  //     "name": "No",
  //     "probability": 0.375,
  //     "type": "PROBABILITY"
  //   }
  // ]
  options Json
  timestamp DateTime @db.Timestamp(6)

  // {
  //   "numforecasts": 120,
  //   "stars": 2
  // }
  qualityindicators Json
  extra Json
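For readers unfamiliar with Prisma, a minimal sketch (not part of this commit) of querying the documented `Question` model through the generated client; the `prisma` export path follows the imports used elsewhere in this diff:

```typescript
import { prisma } from "../database/prisma"; // path as used elsewhere in this commit

// Sketch: fetch all questions for one platform; `options` and
// `qualityindicators` come back as plain JSON values.
async function exampleQuery() {
  const questions = await prisma.question.findMany({
    where: { platform: "fantasyscotus" },
  });
  console.log(questions.length, questions[0]?.title);
}
```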
@@ -1,163 +0,0 @@
import { Pool, PoolClient } from "pg";

import { Question } from "../platforms";
import { measureTime } from "../utils/measureTime";
import { roughSizeOfObject } from "../utils/roughSize";

const questionTableNames = ["questions", "history"];

const allTableNames = [...questionTableNames, "dashboards", "frontpage"];

/* Postgres database connection code */
const databaseURL = process.env.DIGITALOCEAN_POSTGRES;
export const pool = new Pool({
  connectionString: databaseURL,
  ssl: process.env.POSTGRES_NO_SSL
    ? false
    : {
        rejectUnauthorized: false,
      },
});

// Read
export async function pgRead({ tableName }: { tableName: string }) {
  if (!allTableNames.includes(tableName)) {
    throw Error(
      `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }
  let command = `SELECT * from ${tableName}`;
  return (await pool.query(command)).rows;
}

export async function pgBulkInsert({
  data,
  tableName,
  client,
}: {
  data: Question[];
  tableName: string;
  client: PoolClient;
}) {
  if (!questionTableNames.includes(tableName)) {
    throw Error(
      `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }

  const generateQuery = (rows: number) => {
    let text = `INSERT INTO ${tableName} VALUES`;
    const cols = 10;
    const parts: string[] = [];
    for (let r = 0; r < rows; r++) {
      const bits = [];
      for (let c = 1; c <= cols; c++) {
        bits.push(`$${cols * r + c}`);
      }
      parts.push("(" + bits.join(", ") + ")");
    }

    text += parts.join(", ");
    return text;
  };

  let from = 0;
  const chunkSize = 20;
  while (from < data.length - 1) {
    const take = Math.min(chunkSize, data.length - from);
    const query = generateQuery(take);

    const chunk = [];
    for (let i = from; i < from + take; i++) {
      const datum = data[i];
      let timestamp =
        datum.timestamp &&
        !!datum.timestamp.slice &&
        !isNaN(Date.parse(datum.timestamp))
          ? datum.timestamp
          : new Date().toISOString();
      timestamp = timestamp.slice(0, 19).replace("T", " ");
      const values = [
        datum.id,
        datum.title,
        datum.url,
        datum.platform,
        datum.description || "",
        JSON.stringify(datum.options || []),
        timestamp, // fix
        datum.stars ||
          (datum.qualityindicators ? datum.qualityindicators.stars : 2),
        JSON.stringify(datum.qualityindicators || []),
        JSON.stringify(datum.extra || []),
      ];
      chunk.push(...values);
    }

    console.log(`Inserting ${from + 1}..${from + take}`);
    from += take;
    await client.query(query, chunk);
  }
}

export async function pgUpsert({
  contents,
  tableName,
  replacePlatform,
}: {
  contents: Question[];
  tableName: string;
  replacePlatform?: string;
}) {
  if (!questionTableNames.includes(tableName)) {
    throw Error(
      `Table ${tableName} not in whitelist; stopping to avoid tricky sql injections`
    );
  }

  await measureTime(async () => {
    const client = await pool.connect();
    try {
      await client.query("BEGIN");
      if (replacePlatform) {
        await client.query(`DELETE FROM ${tableName} WHERE platform = $1`, [
          replacePlatform,
        ]);
      }
      console.log(
        `Upserting ${contents.length} rows into postgres table ${tableName}.`
      );

      await pgBulkInsert({ data: contents, tableName, client });
      console.log(
        `Inserted ${
          contents.length
        } rows with approximate cummulative size ${roughSizeOfObject(
          contents
        )} MB into ${tableName}.`
      );

      console.log("Sample: ");
      console.log(
        JSON.stringify(
          // only show the first three options
          contents.slice(0, 1).map((question) => ({
            ...question,
            options: question.options
              ? question.options.length > 3
                ? question.options.slice(0, 3).concat("...")
                : question.options
              : null,
          })),
          null,
          4
        )
      );
      await client.query("COMMIT");
    } catch (e) {
      await client.query("ROLLBACK");
      throw e;
    } finally {
      client.release();
    }
  });
}
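With `pg-wrapper.ts` deleted, there is no longer a hand-rolled pool, table whitelist, or chunked bulk insert. The Prisma equivalent of `pgUpsert`, as this commit uses it in `processPlatform` further down, is a delete-by-platform plus `createMany` inside a single transaction. A rough sketch, assuming the generated client types:

```typescript
import { Prisma } from "@prisma/client";

import { prisma } from "../database/prisma";

// Sketch of what pgUpsert({ contents, tableName: "questions", replacePlatform })
// becomes with Prisma: replace one platform's rows atomically.
export async function replacePlatformQuestions(
  platform: string,
  questions: Prisma.QuestionCreateManyInput[]
) {
  await prisma.$transaction([
    prisma.question.deleteMany({ where: { platform } }),
    prisma.question.createMany({ data: questions }),
  ]);
}
```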
@@ -1,9 +1,8 @@
import { pgRead, pgUpsert } from "../../database/pg-wrapper";
import { prisma } from "../../database/prisma";

export async function updateHistory() {
  let latest = await pgRead({ tableName: "questions" });
  await pgUpsert({
    contents: latest,
    tableName: "history",
  const questions = await prisma.question.findMany({});
  await prisma.history.createMany({
    data: questions,
  });
}
@@ -2,10 +2,10 @@ import "dotenv/config";

import fs from "fs";

import { pgRead } from "../database/pg-wrapper";
import { prisma } from "../database/prisma";

let main = async () => {
  let json = await pgRead({ tableName: "questions" });
  let json = await prisma.question.findMany({});
  let string = JSON.stringify(json, null, 2);
  let filename = "metaforecasts.json";
  fs.writeFileSync(filename, string);
@@ -1,92 +0,0 @@
import "dotenv/config";

import { pool } from "../database/pg-wrapper";

const migrate = async () => {
  const client = await pool.connect();

  const execQuery = async (q: string) => {
    console.log(q);
    await client.query(q);
  };

  const platformTitleToName = {
    Betfair: "betfair",
    FantasySCOTUS: "fantasyscotus",
    Foretold: "foretold",
    "GiveWell/OpenPhilanthropy": "givewellopenphil",
    "Good Judgment": "goodjudgment",
    "Good Judgment Open": "goodjudgmentopen",
    Infer: "infer",
    Kalshi: "kalshi",
    "Manifold Markets": "manifold",
    Metaculus: "metaculus",
    "Peter Wildeford": "wildeford",
    PolyMarket: "polymarket",
    PredictIt: "predictit",
    Rootclaim: "rootclaim",
    Smarkets: "smarkets",
    "X-risk estimates": "xrisk",
  };

  try {
    await client.query("BEGIN");
    const copyTable = async (from: string, to: string) => {
      await execQuery(`DROP TABLE IF EXISTS ${to}`);
      await execQuery(`CREATE TABLE ${to} (LIKE ${from} INCLUDING ALL)`);
      await execQuery(`INSERT INTO ${to} SELECT * FROM ${from}`);
    };

    await copyTable("latest.dashboards", "dashboards");
    await copyTable("latest.combined", "questions");
    await copyTable("latest.frontpage", "frontpage");
    await copyTable("history.h2022", "history");

    for (const [title, name] of Object.entries(platformTitleToName)) {
      console.log(`Updating ${title} -> ${name}`);
      for (const table of ["questions", "history"]) {
        await client.query(
          `UPDATE ${table} SET platform=$1 WHERE platform=$2`,
          [name, title]
        );
      }
    }

    console.log("Fixing GJOpen ids in questions and history");
    for (const table of ["questions", "history"]) {
      await client.query(
        `UPDATE ${table} SET id=REPLACE(id, 'goodjudmentopen-', 'goodjudgmentopen-') WHERE id LIKE 'goodjudmentopen-%'`
      );
    }

    const fixId = (id: string) =>
      id.replace("goodjudmentopen-", "goodjudgmentopen-");

    console.log(
      "Please rebuild frontpage manually - current version includes invalid GJOpen and xrisk ids"
    );

    const updateDashboards = async () => {
      const res = await client.query("SELECT id, contents FROM dashboards");
      for (const row of res.rows) {
        let { id, contents } = row;
        contents = contents.map(fixId);
        await client.query(
          "UPDATE dashboards SET contents = $1 WHERE id = $2",
          [JSON.stringify(contents), id]
        );
      }
    };
    console.log("Updating dashboards");
    await updateDashboards();

    await client.query("COMMIT");
  } catch (e) {
    await client.query("ROLLBACK");
    throw e;
  } finally {
    client.release();
  }
};

migrate();
@@ -1,92 +0,0 @@
import "dotenv/config";

import { pool } from "../database/pg-wrapper";

const migrate = async () => {
  const client = await pool.connect();

  const execQuery = async (q: string) => {
    console.log(q);
    await client.query(q);
  };

  try {
    await client.query("BEGIN");

    const notNullColumn = async (table: string, column: string) => {
      await execQuery(
        `ALTER TABLE ${table} ALTER COLUMN ${column} SET NOT NULL`
      );
    };

    const jsonbColumn = async (table: string, column: string) => {
      await execQuery(
        `ALTER TABLE ${table} ALTER COLUMN ${column} SET DATA TYPE jsonb USING ${column}::jsonb`
      );
    };

    const t2c = {
      dashboards: [
        "id",
        "title",
        "description",
        "contents",
        "timestamp",
        "creator",
        "extra",
      ],
      frontpage: ["frontpage_sliced", "frontpage_full"],
      history: [
        "id",
        "title",
        "url",
        "platform",
        "description",
        "options",
        "timestamp",
        "stars",
        "qualityindicators",
        "extra",
      ],
      questions: [
        "id",
        "title",
        "url",
        "platform",
        "description",
        "options",
        "timestamp",
        "stars",
        "qualityindicators",
        "extra",
      ],
    };
    for (const [table, columns] of Object.entries(t2c)) {
      for (const column of columns) {
        await notNullColumn(table, column);
      }
    }

    await execQuery("ALTER TABLE history ADD COLUMN pk SERIAL PRIMARY KEY");
    await execQuery("ALTER TABLE dashboards ADD PRIMARY KEY (id)");
    await execQuery("ALTER TABLE questions ADD PRIMARY KEY (id)");

    await jsonbColumn("dashboards", "contents");
    await jsonbColumn("dashboards", "extra");

    for (const table of ["history", "questions"]) {
      await jsonbColumn(table, "options");
      await jsonbColumn(table, "qualityindicators");
      await jsonbColumn(table, "extra");
    }

    await client.query("COMMIT");
  } catch (e) {
    await client.query("ROLLBACK");
    throw e;
  } finally {
    client.release();
  }
};

migrate();
@@ -2,7 +2,7 @@
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Platform } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "example";
@@ -24,9 +24,9 @@ async function fetchData() {

async function processPredictions(predictions) {
  let results = await predictions.map((prediction) => {
    let id = `${platformName}-${prediction.id}`;
    let probability = prediction.probability;
    let options = [
    const id = `${platformName}-${prediction.id}`;
    const probability = prediction.probability;
    const options = [
      {
        name: "Yes",
        probability: probability,
@@ -38,19 +38,19 @@ async function processPredictions(predictions) {
        type: "PROBABILITY",
      },
    ];
    let result = {
    const result: FetchedQuestion = {
      id,
      title: prediction.title,
      url: `https://example.com`,
      platform: platformName,
      description: prediction.description,
      options: options,
      timestamp: new Date().toISOString(),
      options,
      qualityindicators: {
        stars: calculateStars(platformName, {
          /* some: somex, factors: factors */
        }),
        other: prediction.otherx,
        indicators: prediction.indicatorx,
        // other: prediction.otherx,
        // indicators: prediction.indicatorx,
      },
    };
    return result;
@@ -3,7 +3,7 @@ import axios from "axios";
import https from "https";

import { calculateStars } from "../utils/stars";
import { Platform, Question } from "./";
import { FetchedQuestion, Platform } from "./";

const platformName = "betfair";

@@ -80,7 +80,7 @@ async function whipIntoShape(data) {
async function processPredictions(data) {
  let predictions = await whipIntoShape(data);
  // console.log(JSON.stringify(predictions, null, 4))
  let results: Question[] = predictions.map((prediction) => {
  let results: FetchedQuestion[] = predictions.map((prediction) => {
    /* if(Math.floor(Math.random() * 10) % 20 ==0){
      console.log(JSON.stringify(prediction, null, 4))
    } */
@@ -126,7 +126,6 @@ async function processPredictions(data) {
      platform: platformName,
      description: description,
      options: options,
      timestamp: new Date().toISOString(),
      qualityindicators: {
        stars: calculateStars(platformName, {
          volume: prediction.totalMatched,
@@ -2,7 +2,7 @@
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Platform, Question } from "./";
import { FetchedQuestion, Platform } from "./";

const platformName = "fantasyscotus";

@@ -67,7 +67,7 @@ async function processData(data) {
  let historicalPercentageCorrect = data.stats.pcnt_correct;
  let historicalProbabilityCorrect =
    Number(historicalPercentageCorrect.replace("%", "")) / 100;
  let results: Question[] = [];
  let results: FetchedQuestion[] = [];
  for (let event of events) {
    if (event.accuracy == "") {
      let id = `${platformName}-${event.id}`;
@@ -75,7 +75,7 @@ async function processData(data) {
      let predictionData = await getPredictionsData(event.docket_url);
      let pAffirm = predictionData.proportionAffirm;
      //let trackRecord = event.prediction.includes("Affirm") ? historicalProbabilityCorrect : 1-historicalProbabilityCorrect
      let eventObject: Question = {
      let eventObject: FetchedQuestion = {
        id: id,
        title: `In ${event.short_name}, the SCOTUS will affirm the lower court's decision`,
        url: `https://fantasyscotus.net/user-predictions${event.docket_url}`,
@@ -99,7 +99,6 @@ async function processData(data) {
            type: "PROBABILITY",
          },
        ],
        timestamp: new Date().toISOString(),
        qualityindicators: {
          numforecasts: Number(predictionData.numForecasts),
          stars: calculateStars(platformName, {}),
@@ -2,7 +2,7 @@
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Platform } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */

@@ -61,7 +61,7 @@ export const foretold: Platform = {
  label: "Foretold",
  color: "#62520b",
  async fetcher() {
    let results = [];
    let results: FetchedQuestion[] = [];
    for (let community of highQualityCommunities) {
      let questions = await fetchAllCommunityQuestions(community);
      questions = questions.map((question) => question.node);
@@ -84,14 +84,13 @@ export const foretold: Platform = {
          },
        ];
      }
      let result = {
        id: id,
      let result: FetchedQuestion = {
        id,
        title: question.name,
        url: `https://www.foretold.io/c/${community}/m/${question.id}`,
        platform: platformName,
        description: "",
        options: options,
        timestamp: new Date().toISOString(),
        options,
        qualityindicators: {
          numforecasts: Math.floor(Number(question.measurementCount) / 2),
          stars: calculateStars(platformName, {}),
@@ -47,12 +47,12 @@ async function main1() {
    );
    let description = "<h2 " + internalforecasts[1];

    let result = {
      title: title,
      url: url,
    const result = {
      title,
      url,
      platform: platformName,
      description: description,
      timestamp: new Date().toISOString(),
      description,
      options: [],
      qualityindicators: {
        stars: calculateStars(platformName, {}),
      },
@@ -80,7 +80,7 @@ export const givewellopenphil: Platform = {
    const dataWithDate = data.map((datum: any) => ({
      ...datum,
      platform: platformName,
      timestamp: "2021-02-23",
      timestamp: new Date("2021-02-23"),
    }));
    return dataWithDate;
  },
@@ -5,7 +5,7 @@ import tunnel from "tunnel";

import { hash } from "../utils/hash";
import { calculateStars } from "../utils/stars";
import { Platform } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "goodjudgment";
@@ -57,7 +57,7 @@ export const goodjudgment: Platform = {
      .then((query) => query.data);

    // Processing
    let results = [];
    let results: FetchedQuestion[] = [];
    let jsonTable = Tabletojson.convert(content, { stripHtmlFromCells: false });
    jsonTable.shift(); // deletes first element
    jsonTable.pop(); // deletes last element
@@ -100,14 +100,13 @@ export const goodjudgment: Platform = {
        analysis = analysis ? analysis[0] : "";
        analysis = analysis ? analysis[0] : ""; // not a duplicate
        // console.log(analysis)
        let standardObj = {
          id: id,
          title: title,
        let standardObj: FetchedQuestion = {
          id,
          title,
          url: endpoint,
          platform: platformName,
          description: description,
          options: options,
          timestamp: new Date().toISOString(),
          description,
          options,
          qualityindicators: {
            stars: calculateStars(platformName, {}),
          },
@@ -114,7 +114,6 @@ async function fetchStats(questionUrl, cookie) {
  let result = {
    description: description,
    options: options,
    timestamp: new Date().toISOString(),
    qualityindicators: {
      numforecasts: Number(numforecasts),
      numforecasters: Number(numforecasters),
@@ -1,4 +1,6 @@
import { pgUpsert } from "../database/pg-wrapper";
import { Question } from "@prisma/client";

import { prisma } from "../database/prisma";
import { betfair } from "./betfair";
import { fantasyscotus } from "./fantasyscotus";
import { foretold } from "./foretold";
@@ -28,57 +30,23 @@ export interface QualityIndicators {
  tradevolume?: string;
  pool?: any;
  createdTime?: any;
  shares_volume?: any;
  yes_bid?: any;
  yes_ask?: any;
  spread?: any;
}

export interface Question {
  id: string;
  // "fantasyscotus-580"

  title: string;
  // "In Wooden v. U.S., the SCOTUS will affirm the lower court's decision"

  url: string;
  // "https://fantasyscotus.net/user-predictions/case/wooden-v-us/"

  description: string;
  // "62.50% (75 out of 120) of FantasySCOTUS players predict that the lower court's decision will be affirmed. FantasySCOTUS overall predicts an outcome of Affirm 6-3. Historically, FantasySCOTUS has chosen the correct side 50.00% of the time."
  platform: string;
  // "FantasySCOTUS"

  options: any[];
  /*
  [
    {
      "name": "Yes",
      "probability": 0.625,
      "type": "PROBABILITY"
    },
    {
      "name": "No",
      "probability": 0.375,
      "type": "PROBABILITY"
    }
  ]
  */

  timestamp: string;
  // "2022-02-11T21:42:19.291Z"

  stars?: number;
  // 2

  qualityindicators: QualityIndicators;
  /*
  {
    "numforecasts": 120,
    "stars": 2
  }
  */
  extra?: any;
}
export type FetchedQuestion = Omit<
  Question,
  "extra" | "qualityindicators" | "timestamp"
> & {
  timestamp?: Date;
  extra?: object; // required in DB but annoying to return empty; also this is slightly stricter than Prisma's JsonValue
  qualityindicators: QualityIndicators; // slightly stronger type than Prisma's JsonValue
};

// fetcher should return null if platform failed to fetch questions for some reason
export type PlatformFetcher = () => Promise<Question[] | null>;
export type PlatformFetcher = () => Promise<FetchedQuestion[] | null>;

export interface Platform {
  name: string; // short name for ids and `platform` db column, e.g. "xrisk"
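Fetchers now return the narrower `FetchedQuestion` shape and leave `timestamp` and `extra` for the persistence layer to default. A purely illustrative value that satisfies the type (the id and URL are made up):

```typescript
import { FetchedQuestion } from "../platforms";

// Illustrative only: `timestamp` and `extra` may be omitted;
// processPlatform (below) fills in defaults before createMany.
const sample: FetchedQuestion = {
  id: "example-123", // hypothetical id
  title: "Will it rain tomorrow?",
  url: "https://example.com/markets/123",
  platform: "example",
  description: "",
  options: [
    { name: "Yes", probability: 0.6, type: "PROBABILITY" },
    { name: "No", probability: 0.4, type: "PROBABILITY" },
  ],
  qualityindicators: { stars: 2 },
};
```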
@@ -95,13 +63,6 @@ export interface Platform {

// export type PlatformFetcher = (options: FetchOptions) => Promise<void>;

// interface Platform {
//   name: string;
//   color?: string;
//   longName: string;
//   fetcher: PlatformFetcher;
// }

export const platforms: Platform[] = [
  betfair,
  fantasyscotus,
@@ -126,13 +87,23 @@ export const processPlatform = async (platform: Platform) => {
    console.log(`Platform ${platform.name} doesn't have a fetcher, skipping`);
    return;
  }
  let results = await platform.fetcher();
  const results = await platform.fetcher();
  if (results && results.length) {
    await pgUpsert({
      contents: results,
      tableName: "questions",
      replacePlatform: platform.name,
    });
    await prisma.$transaction([
      prisma.question.deleteMany({
        where: {
          platform: platform.name,
        },
      }),
      prisma.question.createMany({
        data: results.map((q) => ({
          extra: {},
          timestamp: new Date(),
          ...q,
          qualityindicators: q.qualityindicators as object, // fighting typescript
        })),
      }),
    ]);
    console.log("Done");
  } else {
    console.log(`Platform ${platform.name} didn't return any results`);
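Design note: wrapping `deleteMany` and `createMany` in `prisma.$transaction` keeps the per-platform replacement atomic, matching the old BEGIN/DELETE/INSERT/COMMIT flow in `pg-wrapper.ts`. A hypothetical driver (not part of this commit) that refreshes every registered platform:

```typescript
import { platforms, processPlatform } from "./platforms"; // hypothetical import path

// Hypothetical: run every platform fetcher and persist results through the
// new Prisma-based processPlatform.
export async function processAllPlatforms() {
  for (const platform of platforms) {
    await processPlatform(platform);
  }
}
```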
@@ -5,7 +5,7 @@ import { applyIfSecretExists } from "../utils/getSecrets";
import { measureTime } from "../utils/measureTime";
import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";
import { Platform, Question } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "infer";
@@ -105,7 +105,6 @@ async function fetchStats(questionUrl, cookie) {
  let result = {
    description: description,
    options: options,
    timestamp: new Date().toISOString(),
    qualityindicators: {
      numforecasts: Number(numforecasts),
      numforecasters: Number(numforecasters),
@@ -147,7 +146,7 @@ function sleep(ms) {
async function infer_inner(cookie: string) {
  let i = 1;
  let response = await fetchPage(i, cookie);
  let results: Question[] = [];
  let results: FetchedQuestion[] = [];

  await measureTime(async () => {
    // console.log("Downloading... This might take a couple of minutes. Results will be shown.")
@@ -178,7 +177,7 @@ async function infer_inner(cookie: string) {
      let questionNumRegex = new RegExp("questions/([0-9]+)");
      let questionNum = url.match(questionNumRegex)[1]; //.split("questions/")[1].split("-")[0];
      let id = `${platformName}-${questionNum}`;
      let question: Question = {
      let question: FetchedQuestion = {
        id: id,
        title: title,
        description: moreinfo.description,
@@ -2,7 +2,7 @@
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Platform } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "kalshi";
@@ -22,8 +22,8 @@ async function processMarkets(markets) {
  // console.log(markets)
  markets = markets.filter((market) => market.close_date > dateNow);
  let results = await markets.map((market) => {
    let probability = market.last_price / 100;
    let options = [
    const probability = market.last_price / 100;
    const options = [
      {
        name: "Yes",
        probability: probability,
@@ -35,15 +35,14 @@ async function processMarkets(markets) {
        type: "PROBABILITY",
      },
    ];
    let id = `${platformName}-${market.id}`;
    let result = {
      id: id,
    const id = `${platformName}-${market.id}`;
    const result: FetchedQuestion = {
      id,
      title: market.title.replaceAll("*", ""),
      url: `https://kalshi.com/markets/${market.ticker_name}`,
      platform: platformName,
      description: `${market.settle_details}. The resolution source is: ${market.ranged_group_name} (${market.settle_source_url})`,
      options: options,
      timestamp: new Date().toISOString(),
      options,
      qualityindicators: {
        stars: calculateStars(platformName, {
          shares_volume: market.volume,
@@ -2,7 +2,7 @@
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Platform, Question } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "manifold";
@@ -23,7 +23,7 @@ async function fetchData() {
  return response;
}

function showStatistics(results: Question[]) {
function showStatistics(results: FetchedQuestion[]) {
  console.log(`Num unresolved markets: ${results.length}`);
  let sum = (arr) => arr.reduce((tally, a) => tally + a, 0);
  let num2StarsOrMore = results.filter(
@@ -44,7 +44,7 @@ function showStatistics(results: Question[]) {
}

async function processPredictions(predictions) {
  let results: Question[] = await predictions.map((prediction) => {
  let results: FetchedQuestion[] = await predictions.map((prediction) => {
    let id = `${platformName}-${prediction.id}`; // oops, doesn't match platform name
    let probability = prediction.probability;
    let options = [
@@ -59,14 +59,13 @@ async function processPredictions(predictions) {
        type: "PROBABILITY",
      },
    ];
    const result: Question = {
    const result: FetchedQuestion = {
      id: id,
      title: prediction.question,
      url: prediction.url,
      platform: platformName,
      description: prediction.description,
      options: options,
      timestamp: new Date().toISOString(),
      qualityindicators: {
        stars: calculateStars(platformName, {
          volume7Days: prediction.volume7Days,
@@ -86,7 +85,7 @@ async function processPredictions(predictions) {
  });

  const unresolvedResults = results.filter(
    (result) => !result.extra.isResolved
    (result) => !(result.extra as any).isResolved
  );
  return unresolvedResults;
}
@@ -3,7 +3,7 @@ import axios from "axios";

import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";
import { Platform } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "metaculus";
@@ -148,14 +148,13 @@ export const metaculus: Platform = {
        ];
      }
      let id = `${platformName}-${result.id}`;
      let interestingInfo = {
        id: id,
      let interestingInfo: FetchedQuestion = {
        id,
        title: result.title,
        url: "https://www.metaculus.com" + result.page_url,
        platform: platformName,
        description: description,
        options: options,
        timestamp: new Date().toISOString(),
        description,
        options,
        qualityindicators: {
          numforecasts: Number(result.number_of_predictions),
          stars: calculateStars(platformName, {
@@ -2,7 +2,7 @@
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Platform, Question } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "polymarket";
@@ -68,7 +68,7 @@ export const polymarket: Platform = {
  label: "PolyMarket",
  color: "#00314e",
  async fetcher() {
    let results: Question[] = [];
    let results: FetchedQuestion[] = [];
    let webpageEndpointData = await fetchAllContractInfo();
    for (let marketInfo of webpageEndpointData) {
      let address = marketInfo.marketMakerAddress;
@@ -102,14 +102,13 @@ export const polymarket: Platform = {
        });
      }

      let result: Question = {
      let result: FetchedQuestion = {
        id: id,
        title: marketInfo.question,
        url: "https://polymarket.com/market/" + marketInfo.slug,
        platform: platformName,
        description: marketInfo.description,
        options: options,
        timestamp: new Date().toISOString(),
        qualityindicators: {
          numforecasts: numforecasts.toFixed(0),
          liquidity: liquidity.toFixed(2),
@@ -2,7 +2,7 @@ import axios from "axios";

import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";
import { Platform } from "./";
import { FetchedQuestion, Platform } from "./";

const platformName = "predictit";

@@ -53,7 +53,7 @@ export const predictit: Platform = {
    }));
    // console.log(markets)

    let results = [];
    let results: FetchedQuestion[] = [];
    for (let market of markets) {
      // console.log(market.name)
      let id = `${platformName}-${market.id}`;
@@ -96,17 +96,16 @@ export const predictit: Platform = {
        ];
      }

      let obj = {
        id: id,
      const obj: FetchedQuestion = {
        id,
        title: market["name"],
        url: market.url,
        platform: platformName,
        description: description,
        options: options,
        timestamp: new Date().toISOString(),
        description,
        options,
        qualityindicators: {
          stars: calculateStars(platformName, {}),
          shares_volume: shares_volume,
          shares_volume,
        },
      };
      // console.log(obj)
@@ -3,7 +3,7 @@ import { JSDOM } from "jsdom";

import { calculateStars } from "../utils/stars";
import toMarkdown from "../utils/toMarkdown";
import { Platform, Question } from "./";
import { FetchedQuestion, Platform } from "./";

const platformName = "rootclaim";
const jsonEndpoint =
@@ -50,7 +50,7 @@ export const rootclaim: Platform = {
  color: "#0d1624",
  async fetcher() {
    const claims = await fetchAllRootclaims();
    const results: Question[] = [];
    const results: FetchedQuestion[] = [];

    for (const claim of claims) {
      const id = `${platformName}-${claim.slug.toLowerCase()}`;
@@ -71,14 +71,13 @@ export const rootclaim: Platform = {

      const description = await fetchDescription(url, claim.isclaim);

      let obj: Question = {
      let obj: FetchedQuestion = {
        id,
        title: toMarkdown(claim.question).replace("\n", ""),
        url,
        platform: platformName,
        description: toMarkdown(description).replace("'", "'"),
        options: options,
        timestamp: new Date().toISOString(),
        qualityindicators: {
          numforecasts: 1,
          stars: calculateStars(platformName, {}),
@@ -1,7 +1,7 @@
import axios from "axios";

import { calculateStars } from "../utils/stars";
import { Platform, Question } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "smarkets";
@@ -159,14 +159,14 @@ export const smarkets: Platform = {
      name = name+ (contractName=="Yes"?'':` (${contracts["contracts"][0].name})`)
    }
    */
    let result: Question = {
    let result: FetchedQuestion = {
      id: id,
      title: name,
      url: "https://smarkets.com/event/" + market.event_id + market.slug,
      platform: platformName,
      description: market.description,
      options: options,
      timestamp: new Date().toISOString(),
      timestamp: new Date(),
      qualityindicators: {
        stars: calculateStars(platformName, {}),
      },
@@ -4,7 +4,7 @@ import { GoogleSpreadsheet } from "google-spreadsheet";
import { applyIfSecretExists } from "../utils/getSecrets";
import { hash } from "../utils/hash";
import { calculateStars } from "../utils/stars";
import { Platform } from "./";
import { FetchedQuestion, Platform } from "./";

/* Definitions */
const platformName = "wildeford";
@@ -88,16 +88,14 @@ async function processPredictions(predictions) {
        type: "PROBABILITY",
      },
    ];
    let result = {
      id: id,
      title: title,
    let result: FetchedQuestion = {
      id,
      title,
      url: prediction["url"],
      platform: platformName,
      description: prediction["Notes"] || "",
      options: options,
      timestamp: new Date(
        Date.parse(prediction["Prediction Date"] + "Z")
      ).toISOString(),
      options,
      timestamp: new Date(Date.parse(prediction["Prediction Date"] + "Z")),
      qualityindicators: {
        stars: calculateStars(platformName, null),
      },
@@ -1,7 +1,7 @@
/* Imports */
import fs from "fs";

import { pgRead } from "../../database/pg-wrapper";
import { prisma } from "../../database/prisma";

/* Definitions */

@@ -24,7 +24,7 @@ const main = async () => {
    "PredictIt",
    "Rootclaim",
  ];
  const json = await pgRead({ tableName: "questions" });
  const json = await prisma.question.findMany({});
  console.log(json.length);
  //let uniquePlatforms = [...new Set(json.map(forecast => forecast.platform))]
  //console.log(uniquePlatforms)
@@ -2,7 +2,7 @@
import fs from "fs";

import { shuffleArray } from "../../../utils";
import { pgRead } from "../../database/pg-wrapper";
import { prisma } from "../../database/prisma";

/* Definitions */

@@ -18,7 +18,7 @@ let getQualityIndicators = (question) =>

let main = async () => {
  let highQualityPlatforms = ["Metaculus"]; // ['CSET-foretell', 'Foretold', 'Good Judgment Open', 'Metaculus', 'PredictIt', 'Rootclaim']
  let json = await pgRead({ tableName: "questions" });
  let json = await prisma.question.findMany({});
  console.log(json.length);
  //let uniquePlatforms = [...new Set(json.map(question => question.platform))]
  //console.log(uniquePlatforms)
@@ -38,7 +38,7 @@ for (let datum of data) {
    */
    timestamp: "2021-02-23T15∶21∶37.005Z", //new Date().toISOString(),
    qualityindicators: {
      stars: datum.qualityindicators.stars, //datum["stars"],
      stars: datum.qualityindicators.stars,
    },
  };
  results.push(result);
@@ -40,7 +40,7 @@ ${datum["description"]}`
    ],
    timestamp: new Date().toISOString(),
    qualityindicators: {
      stars: 2, //datum["stars"]
      stars: 2,
    },
  };
  results.push(result);
@@ -23,7 +23,7 @@ for (let datum of data) {
    options: datum.options,
    timestamp: datum.timestamps,
    qualityindicators: {
      stars: 2, //datum["stars"]
      stars: 2,
    },
  };
  results.push(result);
@@ -1,7 +1,8 @@
/* Imports */
import fs from "fs";

import { pgRead } from "../../database/pg-wrapper";
import { prisma } from "../../database/prisma";
import { QualityIndicators } from "../../platforms";

/* Definitions */
let locationData = "./data/";
@@ -9,8 +10,8 @@ let locationData = "./data/";
/* Body */
// let rawdata = fs.readFileSync("./data/merged-questions.json") // run from topmost folder, not from src
async function main() {
  let data = await pgRead({ tableName: "questions" }); //JSON.parse(rawdata)
  let processDescription = (description) => {
  const data = await prisma.question.findMany({});
  const processDescription = (description) => {
    if (description == null || description == undefined || description == "") {
      return "";
    } else {
@@ -32,14 +33,14 @@ async function main() {
  };

  let results = [];
  for (let datum of data) {
  for (const datum of data) {
    // do something
    let description = processDescription(datum["description"]);
    let forecasts = datum["qualityindicators"]
      ? datum["qualityindicators"].numforecasts
    const description = processDescription(datum["description"]);
    const forecasts = datum["qualityindicators"]
      ? (datum["qualityindicators"] as object as QualityIndicators).numforecasts
      : "unknown";
    let stars = datum["qualityindicators"]
      ? datum["qualityindicators"].stars
    const stars = datum["qualityindicators"]
      ? (datum["qualityindicators"] as object as QualityIndicators).stars
      : 2;
    results.push("Title: " + datum["title"]);
    results.push("URL: " + datum["url"]);
@@ -43,11 +43,10 @@ export default async function searchGuesstimate(
      description,
      options: [],
      qualityindicators: {
        stars: stars,
        stars,
        numforecasts: 1,
        numforecasters: 1,
      },
      stars,
      extra: {
        visualization: model.big_screenshot,
      },
@@ -116,7 +116,6 @@ export default async function searchWithAlgolia({
      },
    ],
    timestamp: `${new Date().toISOString().slice(0, 10)}`,
    stars: 5, // legacy
    qualityindicators: {
      numforecasts: 1,
      numforecasters: 1,
@@ -148,7 +147,6 @@ export default async function searchWithAlgolia({
      },
    ],
    timestamp: `${new Date().toISOString().slice(0, 10)}`,
    stars: 5, // legacy
    qualityindicators: {
      numforecasts: 1,
      numforecasters: 1,
@@ -183,7 +181,6 @@ export default async function searchWithAlgolia({
      },
    ],
    timestamp: `${new Date().toISOString().slice(0, 10)}`,
    stars: 1, // legacy
    qualityindicators: {
      numforecasts: 1,
      numforecasters: 1,