cleanup: more ts, delete unused api endpoints

This commit is contained in:
Vyacheslav Matyukhin 2022-03-27 12:15:27 +03:00
parent cf1162faed
commit 41b314b84a
No known key found for this signature in database
GPG Key ID: 3D2A774C5489F96C
21 changed files with 61 additions and 67 deletions

View File

@ -1,9 +1,5 @@
import {
mongoRead,
mongoReadWithReadCredentials,
mongoUpsert,
} from "./mongo-wrapper";
import { pgRead, pgReadWithReadCredentials, pgUpsert } from "./pg-wrapper";
import { mongoRead, mongoReadWithReadCredentials, mongoUpsert } from './mongo-wrapper';
import { pgRead, pgReadWithReadCredentials, pgUpsert } from './pg-wrapper';
export async function databaseUpsert({ contents, group }) {
// No, this should be more rational, ({contents, group, schema})? Or should this be managed by this layer? Unclear.

View File

@ -1,5 +1,7 @@
import pkg from "mongodb";
import { roughSizeOfObject } from "../utils/roughSize";
import pkg from 'mongodb';
import { roughSizeOfObject } from '../utils/roughSize';
const { MongoClient } = pkg;
export async function mongoUpsert(

View File

@ -1,7 +1,9 @@
import pkg from "pg";
import { platformNames } from "../platforms/all/platformNames";
import { hash } from "../utils/hash";
import { roughSizeOfObject } from "../utils/roughSize";
import pkg from 'pg';
import { platformNames } from '../platforms/all/platformNames';
import { hash } from '../utils/hash';
import { roughSizeOfObject } from '../utils/roughSize';
const { Pool } = pkg;
// Definitions

View File

@ -1,8 +1,9 @@
import { platformFetchers } from "../platforms/all-platforms";
import { rebuildAlgoliaDatabase } from "../utils/algolia";
import { updateHistory } from "./history/updateHistory";
import { mergeEverything } from "./mergeEverything";
import { rebuildNetlifySiteWithNewData } from "./rebuildNetliftySiteWithNewData";
import { platformFetchers } from '../platforms/all-platforms';
import { rebuildAlgoliaDatabase } from '../utils/algolia';
import { updateHistory } from './history/updateHistory';
import { mergeEverything } from './mergeEverything';
import { rebuildNetlifySiteWithNewData } from './rebuildNetliftySiteWithNewData';
/* Do everything */
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));

View File

@ -1,3 +0,0 @@
// Entry-point script: run the full fetch/merge/rebuild pipeline once.
import { doEverything } from "./doEverything";

// Fire-and-forget top-level call; the process stays alive until the pipeline settles.
void doEverything();

View File

@ -0,0 +1,3 @@
// Entry-point script: run the full fetch/merge/rebuild pipeline once.
import { doEverything } from './doEverything';

// Fire-and-forget top-level call; the process stays alive until the pipeline settles.
void doEverything();

View File

@ -1,7 +1,4 @@
import {
databaseReadWithReadCredentials,
databaseUpsert,
} from "../../database/database-wrapper";
import { databaseReadWithReadCredentials, databaseUpsert } from '../../database/database-wrapper';
export async function updateHistory() {
let latest = await databaseReadWithReadCredentials({ group: "combined" });

View File

@ -1,5 +1,6 @@
import { databaseRead, databaseUpsert } from "../database/database-wrapper";
import { platformNames } from "../platforms/all-platforms";
import { databaseRead, databaseUpsert } from '../database/database-wrapper';
import { platformNames } from '../platforms/all-platforms';
/* Merge everything */
export async function mergeEverythingInner() {

View File

@ -1,5 +1,6 @@
import axios from "axios";
import { applyIfSecretExists } from "../utils/getSecrets";
import axios from 'axios';
import { applyIfSecretExists } from '../utils/getSecrets';
async function rebuildNetlifySiteWithNewData_inner(cookie) {
let payload = {};

View File

@ -1,6 +1,8 @@
import "dotenv/config";
import fs from "fs";
import { databaseReadWithReadCredentials } from "../database/database-wrapper";
import 'dotenv/config';
import fs from 'fs';
import { databaseReadWithReadCredentials } from '../database/database-wrapper';
let main = async () => {
let json = await databaseReadWithReadCredentials({ group: "combined" });

View File

@ -1,3 +0,0 @@
// One-off setup script: create/initialize the postgres tables.
import { pgInitialize } from "../database/pg-wrapper";

// Fire-and-forget top-level call; exits once initialization settles.
void pgInitialize();

View File

@ -0,0 +1,3 @@
// One-off setup script: create/initialize the postgres tables.
import { pgInitialize } from '../database/pg-wrapper';

// Fire-and-forget top-level call; exits once initialization settles.
void pgInitialize();

View File

@ -1,5 +1,6 @@
import fs from "fs";
import { databaseUpsert } from "../database/database-wrapper";
import fs from 'fs';
import { databaseUpsert } from '../database/database-wrapper';
/* This is necessary for estimize, the database of x-risk estimates, and for the OpenPhil/GiveWell predictions. Unlike the others, I'm not fetching them constantly, but only once. */
@ -9,7 +10,9 @@ let suffixMongo = "-questions";
let main = async () => {
for (let file of pushManualFiles) {
let fileRaw = fs.readFileSync(`./input/${file + suffixFiles}`);
let fileRaw = fs.readFileSync(`./input/${file + suffixFiles}`, {
encoding: "utf-8",
});
let fileContents = JSON.parse(fileRaw);
console.log(fileContents);
await databaseUpsert({ contents: fileContents, group: file });

View File

@ -1,5 +1,5 @@
/* Imports */
import { goodjudgment } from "../platforms/goodjudgment-fetch";
import { goodjudgment } from '../platforms/goodjudgment-fetch';
/* Definitions */

View File

@ -1,17 +1,17 @@
import { betfair } from "../betfair-fetch";
import { fantasyscotus } from "../fantasyscotus-fetch";
import { foretold } from "../foretold-fetch";
import { goodjudgment } from "../goodjudgment-fetch";
import { goodjudgmentopen } from "../goodjudmentopen-fetch";
import { infer } from "../infer-fetch";
import { kalshi } from "../kalshi-fetch";
import { manifoldmarkets } from "../manifoldmarkets-fetch";
import { metaculus } from "../metaculus-fetch";
import { polymarket } from "../polymarket-fetch";
import { predictit } from "../predictit-fetch";
import { rootclaim } from "../rootclaim-fetch";
import { smarkets } from "../smarkets-fetch";
import { wildeford } from "../wildeford-fetch";
import { betfair } from '../betfair-fetch';
import { fantasyscotus } from '../fantasyscotus-fetch';
import { foretold } from '../foretold-fetch';
import { goodjudgment } from '../goodjudgment-fetch';
import { goodjudgmentopen } from '../goodjudmentopen-fetch';
import { infer } from '../infer-fetch';
import { kalshi } from '../kalshi-fetch';
import { manifoldmarkets } from '../manifoldmarkets-fetch';
import { metaculus } from '../metaculus-fetch';
import { polymarket } from '../polymarket-fetch';
import { predictit } from '../predictit-fetch';
import { rootclaim } from '../rootclaim-fetch';
import { smarkets } from '../smarkets-fetch';
import { wildeford } from '../wildeford-fetch';
/* Deprecated
import { astralcodexten } from "../platforms/astralcodexten-fetch"

View File

@ -1,7 +1,8 @@
/* Imports */
import axios from "axios";
import { databaseUpsert } from "../utils/database-wrapper";
import { calculateStars } from "../utils/stars";
import axios from 'axios';
import { databaseUpsert } from '../utils/database-wrapper';
import { calculateStars } from '../utils/stars';
/* Definitions */
let graphQLendpoint = "https://api.foretold.io/graphql";

View File

@ -1,6 +0,0 @@
// Next.js dynamic API route (see https://nextjs.org/docs/api-routes/dynamic-api-routes).
// Echoes the [platform] path segment back to the caller as plain text.
export default function handler(req, res) {
  res.end(`Platform: ${req.query.platform}`);
}

View File

@ -1,6 +0,0 @@
// Next.js dynamic API route (see https://nextjs.org/docs/api-routes/dynamic-api-routes).
// Echoes the [question] path segment back to the caller as plain text.
export default function handler(req, res) {
  const { question } = req.query;
  // Fix: label the echoed value as a question — the original said "Platform:",
  // a copy-paste leftover from the [platform] route.
  res.end(`Question: ${question}`);
}