feat: Added postgres database in parallel to the mongo one
parent 65fbddf0cf · commit 6bb3da0e32
.gitignore (vendored, 2 changes)
@ -4,7 +4,7 @@ node_modules/*
 ## Security
 **/betfaircertificates/
-**/privatekeys.json
+**/secrets.json

 ## Personal notes
 notes/*
package-lock.json (generated, 237 changes)
Adds "pg": "^8.7.3" to the top-level dependency list and lockfile entries for pg@8.7.3 plus its transitive dependencies (buffer-writer@2.0.0, packet-reader@1.0.0, pg-connection-string@2.5.0, pg-int8@1.0.1, pg-pool@3.5.1, pg-protocol@1.5.0, pg-types@2.2.0, pgpass@1.0.5, postgres-array@2.0.0, postgres-bytea@1.0.0, postgres-date@1.0.7, postgres-interval@1.2.0, split2@4.1.0, xtend@4.0.2), in both the "packages" and legacy "dependencies" sections.
package.json
@ -35,6 +35,7 @@
     "mongodb": "^3.6.6",
     "open": "^7.3.1",
     "papaparse": "^5.3.0",
+    "pg": "^8.7.3",
     "tabletojson": "^2.0.4",
     "textversionjs": "^1.1.3",
     "tunnel": "^0.0.6"
database-wrapper.js
@ -1,6 +1,29 @@
-import {mongoUpsert, mongoRead, mongoReadWithReadCredentials, mongoGetAllElements} from "./mongo-wrapper.js"
+import { mongoUpsert, mongoRead, mongoReadWithReadCredentials, mongoGetAllElements } from "./mongo-wrapper.js"
+import { pgUpsert } from "./pg-wrapper.js"
 
-export const databaseUpsert = mongoUpsert;
+export async function databaseUpsert({ contents, group }) {
+  // previously: (contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
+  let mongoDocName;
+  switch (group) {
+    case 'combined':
+      mongoDocName = "metaforecasts"
+      await mongoUpsert(contents, mongoDocName, "metaforecastCollection", "metaforecastDatabase")
+      await pgUpsert({ contents, schema: "latest", tableName: "combined" })
+      break;
+    case 'history':
+      let currentDate = new Date()
+      let dateUpToMonth = currentDate.toISOString().slice(0, 7).replace("-", "_")
+      mongoDocName = `metaforecast_history_${dateUpToMonth}`
+      await mongoUpsert(contents, mongoDocName, "metaforecastHistory", "metaforecastDatabase") // upsert the passed contents into the monthly history document
+      await pgUpsert({ contents, schema: "history", tableName: "combined" })
+      break;
+    default:
+      mongoDocName = `${group}-questions`
+      await mongoUpsert(contents, mongoDocName, "metaforecastCollection", "metaforecastDatabase")
+      await pgUpsert({ contents, schema: "latest", tableName: group })
+  }
+}
 // databaseUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase")
 
 export const databaseRead = mongoRead;
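For orientation, a minimal usage sketch of the new single-entry-point signature. This is a hypothetical call site, not part of the commit; mergedForecasts and betfairQuestions are placeholder arrays of question objects.

import { databaseUpsert } from "./database-wrapper.js"

export async function pushEverywhere(mergedForecasts, betfairQuestions) {
  // "combined" goes to the metaforecasts Mongo document and to the postgres table latest.combined.
  await databaseUpsert({ contents: mergedForecasts, group: "combined" })
  // "history" goes to the monthly metaforecast_history_YYYY_MM document and to history.combined.
  await databaseUpsert({ contents: mergedForecasts, group: "history" })
  // Any other group, e.g. "betfair", goes to the betfair-questions document and to latest.betfair.
  await databaseUpsert({ contents: betfairQuestions, group: "betfair" })
}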
mongo-wrapper.js
@ -1,42 +1,10 @@
 import pkg from 'mongodb';
 const { MongoClient } = pkg;
-import { getCookie } from "../utils/getCookies.js"
-
-function roughSizeOfObject(object) {
-  /* ~30-line local helper, now imported from ../utils/roughSize.js (see the new file below) */
-}
+import { getSecret } from "../utils/getSecrets.js"
+import { roughSizeOfObject } from "../utils/roughSize.js"
 
 export async function mongoUpsert(contents, documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") {
-  const url = process.env.MONGODB_URL || getCookie("mongodb");
+  const url = process.env.MONGODB_URL || getSecret("mongodb");
   const client = new MongoClient(url);
   try {
     await client.connect();
@ -58,7 +26,7 @@ export async function mongoUpsert(contents, documentName, collectionName = "meta
   // Insert a single document, wait for promise so we can read it back
   // const p = await collection.insertOne(metaforecastDocument);
-  await collection.replaceOne(filter, document, { databaseUpsert: true });
+  await collection.replaceOne(filter, document, { upsert: true });
   console.log(`Pushed document ${documentName} in collection ${collectionName} in database ${databaseName} with approximate size ${roughSizeOfObject(document)} MB`)
 
   // Find one document
@ -76,7 +44,7 @@ export async function mongoUpsert(contents, documentName, collectionName = "meta
 }
 
 export async function mongoRead(documentName, collectionName = "metaforecastCollection", databaseName = "metaforecastDatabase") {
-  const url = process.env.MONGODB_URL || getCookie("mongodb");
+  const url = process.env.MONGODB_URL || getSecret("mongodb");
 
   const client = new MongoClient(url, {
     useNewUrlParser: true,
@ -152,7 +120,7 @@ export async function mongoReadWithReadCredentials(documentName, collectionName
 }
 
 export async function mongoGetAllElements(databaseName = "metaforecastDatabase", collectionName = "metaforecastCollection") {
-  const url = process.env.MONGODB_URL || getCookie("mongodb");
+  const url = process.env.MONGODB_URL || getSecret("mongodb");
   const client = new MongoClient(url, {
     useNewUrlParser: true,
     useUnifiedTopology: true,
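The corrected option name matters: MongoDB's replaceOne only inserts a missing document when the options object contains upsert: true; the earlier databaseUpsert: true key is not a recognized option. A minimal sketch of the corrected call, with placeholder filter and document values:

// Sketch only: `collection` is an open MongoDB collection handle; `filter` and `doc` are placeholders.
const filter = { name: documentName }
const doc = { name: documentName, contentsArray: contents } // hypothetical document shape
await collection.replaceOne(filter, doc, { upsert: true })  // inserts the doc if nothing matches the filter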
pg-wrapper.js
@ -1,49 +1,173 @@
(The previous 49-line version hard-coded tableWhiteList = ["latest.combined"], configured the pool only from process.env.DATABASE_URL, and exposed pgRead(tableName) and pgInsert(data, tableName) with no schema handling. The commit replaces the file wholesale with the version below.)

import pkg from 'pg';
const { Pool } = pkg;
import { platformNames } from "../platforms/all/platformNames.js"
import { getSecret } from '../utils/getSecrets.js';
import { roughSizeOfObject } from "../utils/roughSize.js"

// Definitions
const schemas = ["latest", "history"]
const tableNamesWhitelist = ["combined", ...platformNames]
const createFullName = (schemaName, namesArray) => namesArray.map(name => `${schemaName}.${name}`)
const tableWhiteList = [...createFullName("latest", tableNamesWhitelist), ...createFullName("history", tableNamesWhitelist)]

/* Postgres database connection code */
const pool = new Pool({
  connectionString: process.env.DATABASE_URL || getSecret("heroku-postgres"),
  ssl: {
    rejectUnauthorized: false
  }
});

// Helpers
const runPgCommand = async (query) => {
  console.log(query)
  const client = await pool.connect();
  const result = await client.query(query);
  const results = { 'results': (result) ? result.rows : null };
  client.release();
  return results
}

// Initialize
let dropTable = (schema, table) => `DROP TABLE IF EXISTS ${schema}.${table}`
let buildMetaforecastTable = (schema, table) => `CREATE TABLE ${schema}.${table} (
  id text,
  title text,
  url text,
  platform text,
  description text,
  options json,
  timestamp timestamp,
  stars int,
  qualityindicators json,
  extra json
);`
let createIndex = (schema, table) => `CREATE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`
let createUniqueIndex = (schema, table) => `CREATE UNIQUE INDEX ${schema}_${table}_id_index ON ${schema}.${table} (id);`

export async function pgInitialize() {
  // Make sure both schemas exist before creating tables in them.
  for (let schema of schemas) {
    await runPgCommand(`CREATE SCHEMA IF NOT EXISTS ${schema}`)
  }
  await runPgCommand(`SET search_path TO ${schemas.join(",")},public;`)

  for (let schema of schemas) {
    for (let table of tableNamesWhitelist) {
      await runPgCommand(dropTable(schema, table))
      await runPgCommand(buildMetaforecastTable(schema, table))
      if (schema == "history") {
        await runPgCommand(createIndex(schema, table))
      } else {
        await runPgCommand(createUniqueIndex(schema, table))
      }
    }
  }
}
// pgInitialize()

// Read
export async function pgRead({ schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let command = `SELECT * from ${schema}.${tableName}`
    let response = await runPgCommand(command)
    let results = response.results
    return results
  } else {
    throw Error(`Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`)
  }
}

export async function pgInsert({ datum, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    let text = `INSERT INTO ${schema}.${tableName} VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`
    let timestamp = datum.timestamp || new Date().toISOString()
    timestamp = timestamp.slice(0, 19).replace("T", " ")
    let values = [
      datum.id,
      datum.title,
      datum.url,
      datum.platform,
      datum.description || '',
      JSON.stringify(datum.options || []),
      timestamp, // fix
      datum.stars || (datum.qualityindicators ? datum.qualityindicators.stars : 2),
      JSON.stringify(datum.qualityindicators || []),
      JSON.stringify(datum.extra || [])
    ]

    const client = await pool.connect();
    const result = await client.query(text, values);
    client.release();
    return result
  } else {
    throw Error(`Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`)
  }
}

/*
pgInsert({
  datum: {
    "id": "fantasyscotus-580",
    "title": "In Wooden v. U.S., the SCOTUS will affirm the lower court's decision",
    "url": "https://fantasyscotus.net/user-predictions/case/wooden-v-us/",
    "platform": "FantasySCOTUS",
    "description": "62.50% (75 out of 120) of FantasySCOTUS players predict that the lower court's decision will be affirmed. FantasySCOTUS overall predicts an outcome of Affirm 6-3. Historically, FantasySCOTUS has chosen the correct side 50.00% of the time.",
    "options": [
      { "name": "Yes", "probability": 0.625, "type": "PROBABILITY" },
      { "name": "No", "probability": 0.375, "type": "PROBABILITY" }
    ],
    "timestamp": "2022-02-11T21:42:19.291Z",
    "qualityindicators": { "numforecasts": 120, "stars": 2 }
  },
  schema: "latest",
  tableName: "fantasyscotus"
})
*/

export async function pgUpsert({ contents, schema, tableName }) {
  if (tableWhiteList.includes(`${schema}.${tableName}`)) {
    if (schema == "latest") {
      // dropTable & co. only build SQL strings, so they have to be executed through runPgCommand.
      await runPgCommand(dropTable(schema, tableName));
      await runPgCommand(buildMetaforecastTable(schema, tableName));
      await runPgCommand(createUniqueIndex(schema, tableName))
    }
    console.log(`Inserting into postgres table ${schema}.${tableName}`)
    let i = 0
    for (let datum of contents) {
      await pgInsert({ datum, schema, tableName })
      if (i < 10) {
        console.log(`Inserted ${datum.id}`)
        i++
      } else if (i == 10) {
        console.log("...")
        i++
      }
    }
    console.log(`Inserted rows with approximate cumulative size ${roughSizeOfObject(contents)} MB into ${schema}.${tableName}.`)
    let check = await pgRead({ schema, tableName })
    console.log(`Received rows with approximate cumulative size ${roughSizeOfObject(check)} MB from ${schema}.${tableName}.`)
    console.log("Sample: ")
    console.log(JSON.stringify(check.slice(0, 1), null, 4));
  } else {
    throw Error(`Table ${schema}.${tableName} not in whitelist; stopping to avoid tricky sql injections`)
  }
}
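A minimal sketch of how the new wrapper is meant to be driven end to end. This is a hypothetical call site (not in the commit); it assumes DATABASE_URL or a "heroku-postgres" entry in secrets.json is configured, and `questions` is a placeholder array of question objects.

import { pgInitialize, pgRead, pgUpsert } from "./database/pg-wrapper.js"

const demo = async (questions) => {
  // One-off: create the "latest" and "history" schemas and one table per whitelisted name.
  await pgInitialize()
  // Push an array of question objects into latest.betfair, then read it back.
  await pgUpsert({ contents: questions, schema: "latest", tableName: "betfair" })
  const rows = await pgRead({ schema: "latest", tableName: "betfair" })
  console.log(rows.length)
}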
doEverything.js
@ -4,11 +4,10 @@ import { updateHistory } from "./history/updateHistory.js"
 import { rebuildAlgoliaDatabase } from "../utils/algolia.js"
 import { rebuildNetlifySiteWithNewData } from "./rebuildNetliftySiteWithNewData.js"
 import { platformFetchers } from "../platforms/all-platforms.js"

 /* Do everything */
 function sleep(ms) {
   return new Promise(resolve => setTimeout(resolve, ms));
 }

 export async function tryCatchTryAgain(fun) {
   try {
@ -59,7 +59,7 @@ export async function addToHistory(){
     newHistoryJSON.push(newHistoryElement)
   }

-  await databaseUpsert(newHistoryJSON, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory")
+  await databaseUpsert({contents: newHistoryJSON, group: "history"})

   // console.log(newHistoryJSON.slice(0,5))
   // writeFileSync("metaforecast_history.json", JSON.stringify(newHistoryJSON, null, 2))
@ -21,7 +21,7 @@ export async function createHistoryForMonth(){
     })
   }).filter(element => element.platform != "Metaculus" && element.platform != "Estimize")
   //console.log(metaforecastsHistorySeed)
-  await databaseUpsert(metaforecastsHistorySeed, `metaforecast_history_${dateUpToMonth}`, "metaforecastHistory")
+  await databaseUpsert({contents: metaforecastsHistorySeed, group: "history"})

 }
 ////createInitialHistory()
@ -22,6 +22,6 @@ export async function mergeEverythingInner() {

 export async function mergeEverything() {
   let merged = await mergeEverythingInner();
-  await databaseUpsert(merged, "metaforecasts");
+  await databaseUpsert({ contents: merged, group: "combined" });
   console.log("Done");
 }
rebuildNetliftySiteWithNewData.js
@ -1,5 +1,5 @@
 import axios from "axios"
-import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"
+import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js"

 export async function rebuildNetlifySiteWithNewData_inner(cookie) {
   let payload = ({});
@ -9,6 +9,6 @@ export async function rebuildNetlifySiteWithNewData_inner(cookie) {
 }

 export async function rebuildNetlifySiteWithNewData() {
-  let cookie = process.env.REBUIDNETLIFYHOOKURL || getCookie("netlify");
+  let cookie = process.env.REBUIDNETLIFYHOOKURL || getSecret("netlify");
-  await applyIfCookieExists(cookie, rebuildNetlifySiteWithNewData_inner)
+  await applyIfSecretExists(cookie, rebuildNetlifySiteWithNewData_inner)
 }
src/index.js (10 changes)
@ -7,19 +7,21 @@ import { mergeEverything } from "./flow/mergeEverything.js"
 import { updateHistory } from "./flow/history/updateHistory.js"
 import { rebuildAlgoliaDatabase } from "./utils/algolia.js"
 import { rebuildNetlifySiteWithNewData } from "./flow/rebuildNetliftySiteWithNewData.js"
+import { pgInitialize } from "./database/pg-wrapper.js"
 import { doEverything, tryCatchTryAgain } from "./flow/doEverything.js"

 /* Support functions */
-let functions = [...platformFetchers, mergeEverything, updateHistory, rebuildAlgoliaDatabase, rebuildNetlifySiteWithNewData, doEverything]
+let functions = [...platformFetchers, mergeEverything, updateHistory, rebuildAlgoliaDatabase, pgInitialize, rebuildNetlifySiteWithNewData, doEverything]
 let functionNames = functions.map(fun => fun.name)

 let whattodoMessage = functionNames
   .slice(0, platformFetchers.length)
   .map((functionName, i) => `[${i}]: Download predictions from ${functionName}`)
   .join('\n') +
-  `\n[${functionNames.length - 5}]: Merge jsons them into one big json (and push it to mongodb database)` +
+  `\n[${functionNames.length - 6}]: Merge jsons them into one big json (and push it to mongodb database)` +
-  `\n[${functionNames.length - 4}]: Update history` +
+  `\n[${functionNames.length - 5}]: Update history` +
-  `\n[${functionNames.length - 3}]: Rebuild algolia database ("index")` +
+  `\n[${functionNames.length - 4}]: Rebuild algolia database ("index")` +
+  `\n[${functionNames.length - 3}]: Rebuild postgres database` +
   `\n[${functionNames.length - 2}]: Rebuild netlify site with new data` +
   // `\n[${functionNames.length-1}]: Add to history` +
   `\n[${functionNames.length - 1}]: All of the above` +
|
||||||
let fileRaw = fs.readFileSync(`./src/input/${file + suffixFiles}`);
|
let fileRaw = fs.readFileSync(`./src/input/${file + suffixFiles}`);
|
||||||
let fileContents = JSON.parse(fileRaw);
|
let fileContents = JSON.parse(fileRaw);
|
||||||
console.log(fileContents);
|
console.log(fileContents);
|
||||||
await databaseUpsert(fileContents, file + suffixMongo);
|
await databaseUpsert({contents: fileContents, group: file });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
main();
|
main();
|
||||||
|
|
|
all-platforms.js
@ -1,61 +1,2 @@
-(The previous 61-line body, i.e. the platform fetcher imports, the deprecated-platform comment block, and the platformFetchers and platformNames arrays, moves into the two new files below.)
+export { platformFetchers } from "./all/platformFetchers.js";
+export { platformNames } from "./all/platformNames.js";
src/platforms/all/platformFetchers.js (new file, 42 lines)
@ -0,0 +1,42 @@
import { betfair } from "../betfair-fetch.js";
import { fantasyscotus } from "../fantasyscotus-fetch.js";
import { foretold } from "../foretold-fetch.js";
import { goodjudgment } from "../goodjudgment-fetch.js";
import { goodjudgmentopen } from "../goodjudmentopen-fetch.js";
import { infer } from "../infer-fetch.js";
import { kalshi } from "../kalshi-fetch.js";
import { manifoldmarkets } from "../manifoldmarkets-fetch.js";
import { metaculus } from "../metaculus-fetch.js";
import { polymarket } from "../polymarket-fetch.js";
import { predictit } from "../predictit-fetch.js";
import { rootclaim } from "../rootclaim-fetch.js";
import { smarkets } from "../smarkets-fetch.js";
import { wildeford } from "../wildeford-fetch.js";

/* Deprecated
import { astralcodexten } from "../platforms/astralcodexten-fetch.js"
import { coupcast } from "../platforms/coupcast-fetch.js"
import { csetforetell } from "../platforms/csetforetell-fetch.js"
import { elicit } from "../platforms/elicit-fetch.js"
import { estimize } from "../platforms/estimize-fetch.js"
import { hypermind } from "../platforms/hypermind-fetch.js"
import { ladbrokes } from "../platforms/ladbrokes-fetch.js";
import { williamhill } from "../platforms/williamhill-fetch.js";
*/

export const platformFetchers = [
  betfair,
  fantasyscotus,
  foretold,
  goodjudgment,
  goodjudgmentopen,
  infer,
  kalshi,
  manifoldmarkets,
  metaculus,
  polymarket,
  predictit,
  rootclaim,
  smarkets,
  wildeford,
];
src/platforms/all/platformNames.js (new file, 20 lines)
@ -0,0 +1,20 @@
// This needs to be its own file to avoid cyclical dependencies.
export const platformNames = [
  "betfair",
  "fantasyscotus",
  "foretold",
  "givewellopenphil",
  "goodjudgment",
  "goodjudmentopen",
  "infer",
  "kalshi",
  "manifoldmarkets",
  "metaculus",
  "polymarket",
  "predictit",
  "rootclaim",
  "smarkets",
  "wildeford",
  "xrisk",
];
// deprecated: "astralcodexten", "csetforetell", "coupcast", "elicit", "estimize", "hypermind", "ladbrokes", "omen", "williamhill", etc
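These names do double duty: pg-wrapper.js builds its table whitelist from them. A small illustrative sketch (not part of the commit) of what that expansion produces:

import { platformNames } from "../platforms/all/platformNames.js"

// Mirrors the createFullName helper in pg-wrapper.js.
const createFullName = (schemaName, namesArray) => namesArray.map(name => `${schemaName}.${name}`)
const tableNamesWhitelist = ["combined", ...platformNames]
console.log(createFullName("latest", tableNamesWhitelist))
// -> [ "latest.combined", "latest.betfair", "latest.fantasyscotus", ..., "latest.xrisk" ]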
@ -143,7 +143,7 @@ export async function betfair() {
   // console.log(results.map(result => ({title: result.title, description: result.description})))
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('polyprediction-questions.json', string);
-  await databaseUpsert(results, "betfair-questions");
+  await databaseUpsert({ contents: results, group: "betfair" });
   console.log("Done");
 }
 // betfair()
@ -1,6 +1,6 @@
 /* Imports */
 import axios from "axios"
-import { getCookie, applyIfCookieExists } from "../utils/getCookies.js"
+import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js"
 import { Tabletojson } from "tabletojson"
 import toMarkdown from "../utils/toMarkdown.js"
 import { calculateStars } from "../utils/stars.js"
@ -249,6 +249,6 @@ async function csetforetell_inner(cookie) {

 export async function csetforetell() {
-  let cookie = process.env.CSETFORETELL_COOKIE || getCookie("csetforetell")
+  let cookie = process.env.CSETFORETELL_COOKIE || getSecret("csetforetell")
-  await applyIfCookieExists(cookie, csetforetell_inner)
+  await applyIfSecretExists(cookie, csetforetell_inner)
 }
@ -3,7 +3,7 @@ import fs from 'fs'
 import axios from "axios"
 import https from "https"
 import fetch from "isomorphic-fetch"
-import {getCookie, applyIfCookieExists} from "../utils/getCookies.js"
+import {getSecret, applyIfSecretExists} from "../utils/getSecrets.js"
 import toMarkdown from "../utils/toMarkdown.js"
 import { calculateStars } from "../utils/stars.js"
 import { databaseUpsert } from "../utils/database-wrapper.js"
@ -178,6 +178,6 @@ async function hypermind_inner(cookie) {
 //hypermind()

 export async function hypermind() {
-  let cookie = process.env.HYPERMINDCOOKIE || getCookie("hypermind")
+  let cookie = process.env.HYPERMINDCOOKIE || getSecret("hypermind")
-  await applyIfCookieExists(cookie, hypermind_inner)
+  await applyIfSecretExists(cookie, hypermind_inner)
 }
@ -64,7 +64,7 @@ export async function example() {
   // console.log(results)
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('polyprediction-questions.json', string);
-  await databaseUpsert(results, "example-questions");
+  await databaseUpsert({ contents: results, group: "example" });
   console.log("Done");
 }
 //example()
@ -118,7 +118,7 @@ export async function fantasyscotus() {
   //console.log(results)
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('./data/fantasyscotus-questions.json', string);
-  await databaseUpsert(results, "fantasyscotus-questions");
+  await databaseUpsert({ contents: results, group: "fantasyscotus" });
   console.log("Done");
 }
 //fantasyscotus()
@ -101,7 +101,8 @@ export async function foretold() {
   }
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('./data/foretold-questions.json', string);
-  await databaseUpsert(results, "foretold-questions");
+  await databaseUpsert({ contents: results, group: "foretold" });
+
   console.log("Done");
 }
 // foretold()
@ -64,6 +64,7 @@ async function main() {
   }
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('./data/givewell-questions-unprocessed.json', string);
-  await databaseUpsert(results, "givewell-questions-unprocessed");
+  await databaseUpsert({ contents: results, group: "givewell-questions-unprocessed" });
+
 }
 main();
@ -122,7 +122,8 @@ export async function goodjudgment() {
   // fs.writeFileSync('./data/goodjudgment-questions.json', string);
   // fs.writeFileSync('./goodjudgment-questions-test.json', string);
   console.log(results);
-  await databaseUpsert(results, "goodjudgment-questions");
+  await databaseUpsert({ contents: results, group: "goodjudgment" });
+
   console.log(
     "Failing is not unexpected; see utils/pullSuperforecastsManually.sh/js"
   );
@ -1,7 +1,7 @@
 /* Imports */
 import fs from "fs";
 import axios from "axios";
-import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
+import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
 import { Tabletojson } from "tabletojson";
 import { calculateStars } from "../utils/stars.js";
 import toMarkdown from "../utils/toMarkdown.js";
@ -223,7 +223,8 @@ async function goodjudgmentopen_inner(cookie) {
   // fs.writeFileSync('./data/goodjudmentopen-questions.json', string);
   console.log(results);
   if (results.length > 0) {
-    await databaseUpsert(results, "goodjudmentopen-questions");
+    await databaseUpsert({ contents: results, group: "goodjudmentopen" });
+
   } else {
     console.log("Not updating results, as process was not signed in");
   }
@ -237,6 +238,6 @@ async function goodjudgmentopen_inner(cookie) {

 export async function goodjudgmentopen() {
   let cookie =
-    process.env.GOODJUDGMENTOPENCOOKIE || getCookie("goodjudmentopen");
+    process.env.GOODJUDGMENTOPENCOOKIE || getSecret("goodjudmentopen");
-  await applyIfCookieExists(cookie, goodjudgmentopen_inner);
+  await applyIfSecretExists(cookie, goodjudgmentopen_inner);
 }
@ -1,6 +1,6 @@
 /* Imports */
 import axios from "axios";
-import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
+import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
 import { Tabletojson } from "tabletojson";
 import toMarkdown from "../utils/toMarkdown.js";
 import { calculateStars } from "../utils/stars.js";
@ -269,7 +269,7 @@ async function infer_inner(cookie) {
   // fs.writeFileSync('./data/infer-questions.json', string);
   // console.log(results)
   if (results.length > 0) {
-    await databaseUpsert(results, "infer-questions");
+    await databaseUpsert({ contents: results, group: "infer" });
   } else {
     console.log("Not updating results, as process was not signed in");
   }
@ -282,6 +282,6 @@ async function infer_inner(cookie) {
 }

 export async function infer() {
-  let cookie = process.env.INFER_COOKIE || getCookie("infer");
+  let cookie = process.env.INFER_COOKIE || getSecret("infer");
-  await applyIfCookieExists(cookie, infer_inner);
+  await applyIfSecretExists(cookie, infer_inner);
 }
@ -83,7 +83,8 @@ export async function kalshi() {
   // console.log(results)
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('polymarket-questions.json', string);
-  await databaseUpsert(results, "kalshi-questions");
+  await databaseUpsert({ contents: results, group: "kalshi" });
+
   console.log("Done");
 }
 // kalshi()
@ -96,7 +96,8 @@ export async function manifoldmarkets() {
   // console.log(results)
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('polyprediction-questions.json', string);
-  await databaseUpsert(results, "manifoldmarkets-questions");
+  await databaseUpsert({ contents: results, group: "manifoldmarkets" });
+
   console.log("Done");
 }
 // manifoldmarkets()
@ -154,7 +154,7 @@ export async function metaculus() {

   // let string = JSON.stringify(all_questions, null, 2)
   // fs.writeFileSync('./metaculus-questions.json', string);
-  await databaseUpsert(all_questions, "metaculus-questions");
+  await databaseUpsert({ contents: all_questions, group: "metaculus" });

   console.log("Done");
 }
@ -152,7 +152,8 @@ export async function polymarket() {
   // console.log(results)
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('polymarket-questions.json', string);
-  await databaseUpsert(results, "polymarket-questions");
+  await databaseUpsert({ contents: results, group: "polymarket" });
+
   console.log("Done");
 }
 // polymarket()
@ -110,7 +110,7 @@ export async function predictit() {
   //console.log(results)
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('./data/predictit-questions.json', string);
-  await databaseUpsert(results, "predictit-questions");
+  await databaseUpsert({ contents: results, group: "predictit" });

   console.log("Done");
 }
@ -69,7 +69,8 @@ export async function rootclaim() {
   //console.log(JSON.stringify(results, null, 4))
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('rootclaim-questions.json', string);
-  await databaseUpsert(results, "rootclaim-questions");
+  await databaseUpsert({ contents: results, group: "rootclaim" });
+
   console.log("Done");
 }
 //rootclaim()
@ -153,6 +153,7 @@ export async function smarkets() {

   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('./data/smarkets-questions.json', string);
-  await databaseUpsert(results, "smarkets-questions");
+  await databaseUpsert({ contents: results, group: "smarkets" });
+
 }
 //smarkets()
@ -2,7 +2,7 @@
 import fs from "fs";
 // import axios from "axios"
 import { GoogleSpreadsheet } from "google-spreadsheet";
-import { getCookie, applyIfCookieExists } from "../utils/getCookies.js";
+import { getSecret, applyIfSecretExists } from "../utils/getSecrets.js";
 import toMarkdown from "../utils/toMarkdown.js";
 import { calculateStars } from "../utils/stars.js";
 import { hash } from "../utils/hash.js";
@ -125,12 +125,13 @@ export async function wildeford_inner(google_api_key) {
   // console.log(results.sort((a,b) => (a.title > b.title)))
   // let string = JSON.stringify(results, null, 2)
   // fs.writeFileSync('polyprediction-questions.json', string);
-  await databaseUpsert(results, "wildeford-questions");
+  await databaseUpsert({ contents: results, group: "wildeford" });
+
   console.log("Done");
 }
 //example()

 export async function wildeford() {
-  const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || getCookie("google-api"); // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey
+  const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || getSecret("google-api"); // See: https://developers.google.com/sheets/api/guides/authorizing#APIKey
-  await applyIfCookieExists(GOOGLE_API_KEY, wildeford_inner);
+  await applyIfSecretExists(GOOGLE_API_KEY, wildeford_inner);
 }
algolia.js
@ -1,10 +1,10 @@
 import algoliasearch from 'algoliasearch';
 import fs from "fs"
-import {getCookie} from "./getCookies.js"
+import {getSecret} from "./getSecrets.js"
 import { databaseReadWithReadCredentials } from "../database/database-wrapper.js"
 import { mergeEverythingInner } from '../flow/mergeEverything.js';

-let cookie = process.env.ALGOLIA_MASTER_API_KEY || getCookie("algolia")
+let cookie = process.env.ALGOLIA_MASTER_API_KEY || getSecret("algolia")
 const client = algoliasearch('96UD3NTQ7L', cookie);
 const index = client.initIndex('metaforecast');
getSecrets.js
@ -1,9 +1,9 @@
 import fs from 'fs'

-export function getCookie(property){
+export function getSecret(property){
   let answer = 0
   try {
-    let rawcookie = fs.readFileSync("./src/input/privatekeys.json")
+    let rawcookie = fs.readFileSync("./src/input/secrets.json")
     let cookie = JSON.parse(rawcookie)
     if (cookie[property]){
       answer = cookie[property]
@ -15,7 +15,7 @@ export function getCookie(property){
   return answer
 }

-export async function applyIfCookieExists(cookie, fun){
+export async function applyIfSecretExists(cookie, fun){
   if(cookie){
     await fun(cookie)
   }else if(!cookie){
src/utils/roughSize.js (new file, 32 lines)
@ -0,0 +1,32 @@
export function roughSizeOfObject(object) {
  var objectList = [];
  var stack = [object];
  var bytes = 0;

  while (stack.length) {
    var value = stack.pop();
    if (typeof value === 'boolean') {
      bytes += 4;
    }
    else if (typeof value === 'string') {
      bytes += value.length * 2;
    }
    else if (typeof value === 'number') {
      bytes += 8;
    }
    else if (typeof value === 'object' && objectList.indexOf(value) === -1) {
      objectList.push(value);
      for (var i in value) {
        stack.push(value[i]);
      }
    }
  }
  let megaBytes = bytes / (1024) ** 2
  let megaBytesRounded = Math.round(megaBytes * 10) / 10
  return megaBytesRounded;
}
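A quick sketch of what the helper reports (illustrative object and numbers only, not part of the commit):

import { roughSizeOfObject } from "../utils/roughSize.js"

// Walks the object graph, counting ~2 bytes per string character, 8 per number and 4 per boolean,
// then returns the total in MB rounded to one decimal place.
const sample = { title: "x".repeat(1024 * 1024), stars: 2 } // hypothetical object with ~2 MB of string data
console.log(roughSizeOfObject(sample)) // prints roughly 2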