From 83d7bad1caad1903a32caa450abaffcc357ca1b4 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Fri, 1 Dec 2023 03:02:47 +0000 Subject: [PATCH] convert dev scripts to es modules, delete unused ones --- dev/{accountHelper.js => accountHelper.mjs} | 0 dev/allMmr.js | 61 --------- dev/bootstrapMmr.js | 41 ------ dev/cassandraRandom.js | 42 ------ dev/cassandraTest.js | 35 ----- dev/{checkAccounts.js => checkAccounts.mjs} | 8 +- dev/{createAccounts.js => createAccounts.mjs} | 7 +- dev/export-example.js | 13 -- dev/{findProMatches.js => findProMatches.mjs} | 9 +- ...FakeRatings.js => generateFakeRatings.mjs} | 4 +- dev/getMatch.js | 45 ------- dev/getMatchesSeq.js | 94 ------------- dev/{keyTest.js => keyTest.mjs} | 6 +- dev/loadTest.js | 31 ----- dev/{lobby.js => lobby.mjs} | 0 dev/{loginTest.js => loginTest.mjs} | 4 +- dev/{scratch.js => metaParse.mjs} | 6 +- dev/mmrToPostgres.js | 31 ----- dev/moveMmrEstimate.js | 35 ----- dev/postgresToCassandra.js | 125 ------------------ dev/reGcData.js | 32 ----- dev/reParse.js | 68 ---------- dev/{rePatch.js => rePatch.mjs} | 10 +- dev/{teamElo.js => teamElo.mjs} | 7 +- dev/{teamMatch.js => teamMatch.mjs} | 6 +- dev/{wordcount.js => wordcount.mjs} | 8 +- 26 files changed, 35 insertions(+), 693 deletions(-) rename dev/{accountHelper.js => accountHelper.mjs} (100%) delete mode 100644 dev/allMmr.js delete mode 100644 dev/bootstrapMmr.js delete mode 100644 dev/cassandraRandom.js delete mode 100644 dev/cassandraTest.js rename dev/{checkAccounts.js => checkAccounts.mjs} (91%) rename dev/{createAccounts.js => createAccounts.mjs} (85%) delete mode 100644 dev/export-example.js rename dev/{findProMatches.js => findProMatches.mjs} (92%) rename dev/{generateFakeRatings.js => generateFakeRatings.mjs} (93%) delete mode 100644 dev/getMatch.js delete mode 100644 dev/getMatchesSeq.js rename dev/{keyTest.js => keyTest.mjs} (83%) delete mode 100644 dev/loadTest.js rename dev/{lobby.js => lobby.mjs} (100%) rename dev/{loginTest.js => loginTest.mjs} (92%) rename dev/{scratch.js => metaParse.mjs} (91%) delete mode 100644 dev/mmrToPostgres.js delete mode 100644 dev/moveMmrEstimate.js delete mode 100644 dev/postgresToCassandra.js delete mode 100644 dev/reGcData.js delete mode 100644 dev/reParse.js rename dev/{rePatch.js => rePatch.mjs} (78%) rename dev/{teamElo.js => teamElo.mjs} (96%) rename dev/{teamMatch.js => teamMatch.mjs} (90%) rename dev/{wordcount.js => wordcount.mjs} (73%) diff --git a/dev/accountHelper.js b/dev/accountHelper.mjs similarity index 100% rename from dev/accountHelper.js rename to dev/accountHelper.mjs diff --git a/dev/allMmr.js b/dev/allMmr.js deleted file mode 100644 index 24ac04a95..000000000 --- a/dev/allMmr.js +++ /dev/null @@ -1,61 +0,0 @@ -const async = require("async"); -const db = require("../store/db"); -const utility = require("../util/utility"); -const queries = require("../store/queries"); -const config = require("../config"); - -const { generateJob, getData } = utility; -const retrieverArr = config.RETRIEVER_HOST.split(","); -let count = 0; -const args = process.argv.slice(2); -const startId = Number(args[0]) || 0; -db.select("account_id") - .from("players") - .where("account_id", ">", startId) - .orderByRaw(startId ? 
"account_id asc" : "random()") - .asCallback((err, players) => { - if (err) { - process.exit(1); - } - async.eachLimit( - players, - 5, - (p, cb) => { - const job = { - data: generateJob("mmr", { - account_id: p.account_id, - url: retrieverArr.map( - (r) => - `http://${r}?key=${config.RETRIEVER_SECRET}&account_id=${p.account_id}` - )[p.account_id % retrieverArr.length], - }), - }; - getData( - { - url: job.data.url, - noRetry: true, - }, - (err, data) => { - if (err) { - console.error(err); - } - count += 1; - console.log(count, p.account_id); - if (data && (data.solo_competitive_rank || data.competitive_rank)) { - console.log(data); - data.account_id = job.data.payload.account_id; - data.match_id = job.data.payload.match_id; - data.time = new Date(); - queries.insertPlayerRating(db, data, cb); - } else { - cb(); - } - } - ); - }, - (err) => { - console.log(err); - process.exit(Number(err)); - } - ); - }); diff --git a/dev/bootstrapMmr.js b/dev/bootstrapMmr.js deleted file mode 100644 index ccbaae658..000000000 --- a/dev/bootstrapMmr.js +++ /dev/null @@ -1,41 +0,0 @@ -const JSONStream = require("JSONStream"); -const db = require("../store/db"); - -const args = process.argv.slice(2); -const startId = Number(args[0]) || 0; -let conc = 0; -const stream = db - .raw( - ` -SELECT pr.account_id, solo_competitive_rank from player_ratings pr -JOIN -(select account_id, max(time) as maxtime from player_ratings GROUP by account_id) grouped -ON pr.account_id = grouped.account_id -AND pr.time = grouped.maxtime -WHERE pr.account_id > ? -AND solo_competitive_rank > 0 -AND solo_competitive_rank IS NOT NULL -ORDER BY account_id asc -`, - [startId] - ) - .stream(); - -function exit(err) { - if (err) { - console.error(err); - } - process.exit(Number(err)); -} - -stream.on("end", exit); -stream.pipe(JSONStream.parse()); -stream.on("data", (player) => { - conc += 1; - if (conc > 10) { - stream.pause(); - } - console.log(player.account_id); - conc -= 1; - stream.resume(); -}); diff --git a/dev/cassandraRandom.js b/dev/cassandraRandom.js deleted file mode 100644 index 0a0ea92a4..000000000 --- a/dev/cassandraRandom.js +++ /dev/null @@ -1,42 +0,0 @@ -const cassandra = require("../store/cassandra"); -const db = require("../store/db"); - -function randomInteger(min, max) { - return Math.floor(Math.random() * (max - min + 1)) + min; -} - -let total = 0; -let haveRows = 0; - -async function start() { - // Get the current max_match_id from postgres, subtract 200000000 - const max = (await db.raw("select max(match_id) from public_matches")) - ?.rows?.[0]?.max; - const limit = max - 200000000; - while (true) { - // Test a random match ID - const rand = randomInteger(1, limit); - - const result = await cassandra.execute( - "select match_id, player_slot, stuns from player_matches where match_id = ?", - [rand.toString()], - { - prepare: true, - fetchSize: 10, - autoPage: true, - } - ); - total += 1; - // Check if there are rows - if (result.rows.length) { - haveRows += 1; - console.log(result.rows[0].match_id.toString(), "has rows"); - } - if (total % 100 === 0) { - // Log number that have rows/don't have rows - console.log(haveRows, "/", total, "have rows"); - } - } -} - -start(); diff --git a/dev/cassandraTest.js b/dev/cassandraTest.js deleted file mode 100644 index 1d459c25b..000000000 --- a/dev/cassandraTest.js +++ /dev/null @@ -1,35 +0,0 @@ -const cassandra = require("../store/cassandra"); - -const myArgs = process.argv.slice(2); - -// 5000000000 -const test = async () => { - let ok = 0; - let noResult = 0; - let error 
= 0; - const query = "SELECT match_id FROM player_matches WHERE match_id = ?"; - for (let i = Number(myArgs[0]); i < Number(myArgs[1]); i++) { - try { - const result = await cassandra.execute(query, [i], { - prepare: true, - fetchSize: 24, - autoPage: true, - }); - // console.log(result.rows); - if (result.rows[0] != null) { - ok += 1; - } else { - noResult += 1; - } - } catch (e) { - console.error(i); - console.error(e.message); - error += 1; - // Remediate by deleting and requesting - // await cassandra.execute(`DELETE from player_matches where match_id = ?`, [ i ]); - } - } - console.log("ok: %s, noResult: %s, error: %s", ok, noResult, error); -}; - -test(); diff --git a/dev/checkAccounts.js b/dev/checkAccounts.mjs similarity index 91% rename from dev/checkAccounts.js rename to dev/checkAccounts.mjs index f3b7025df..63a8a9372 100644 --- a/dev/checkAccounts.js +++ b/dev/checkAccounts.mjs @@ -1,8 +1,6 @@ -/* eslint-disable */ -const fs = require("fs"); -const Steam = require("steam"); -const async = require("async"); - +import fs from 'fs'; +import Steam from 'steam'; +import async from 'async'; const accountData = fs.readFileSync("./STEAM_ACCOUNT_DATA_BAD.txt", "utf8"); const accountArray = accountData.split(require("os").EOL); diff --git a/dev/createAccounts.js b/dev/createAccounts.mjs similarity index 85% rename from dev/createAccounts.js rename to dev/createAccounts.mjs index 7b25190df..d6ec95ef6 100644 --- a/dev/createAccounts.js +++ b/dev/createAccounts.mjs @@ -1,7 +1,6 @@ -/* eslint-disable */ -const Steam = require("steam"); -const SteamUser = require("steam-user"); -const async = require("async"); +import Steam from 'steam'; +import SteamUser from 'steam-user'; +import async from 'async'; const steam = new SteamUser(); const time = Math.floor(new Date() / 1000); diff --git a/dev/export-example.js b/dev/export-example.js deleted file mode 100644 index 9c1cf1e04..000000000 --- a/dev/export-example.js +++ /dev/null @@ -1,13 +0,0 @@ -const zlib = require("zlib"); -const JSONStream = require("JSONStream"); -const fs = require("fs"); - -const fileName = "../export/dump.json.gz"; -const write = fs.createReadStream(fileName); -const stream = JSONStream.parse("*.match_id"); - -stream.on("data", (d) => { - console.log(d); -}); - -write.pipe(zlib.createGunzip()).pipe(JSONStream); diff --git a/dev/findProMatches.js b/dev/findProMatches.mjs similarity index 92% rename from dev/findProMatches.js rename to dev/findProMatches.mjs index b4d36b891..ed181f4ab 100644 --- a/dev/findProMatches.js +++ b/dev/findProMatches.mjs @@ -1,9 +1,8 @@ -const async = require("async"); -const queries = require("../store/queries"); -const db = require("../store/db"); -const utility = require("../util/utility"); +import async from 'async'; +import queries from '../store/queries.js'; +import db from '../store/db.js'; +import { generateJob, getData } from '../util/utility.js'; -const { generateJob, getData } = utility; // const leagueUrl = generateJob('api_leagues', {}).url; function getPage(url, leagueid, cb) { diff --git a/dev/generateFakeRatings.js b/dev/generateFakeRatings.mjs similarity index 93% rename from dev/generateFakeRatings.js rename to dev/generateFakeRatings.mjs index d0a5ea079..636056b06 100644 --- a/dev/generateFakeRatings.js +++ b/dev/generateFakeRatings.mjs @@ -1,5 +1,5 @@ -const async = require("async"); -const db = require("../store/db"); +import async from 'async'; +import db from '../store/db.js'; function randByCentralLimitTheorem() { let v = 0; diff --git a/dev/getMatch.js b/dev/getMatch.js 
deleted file mode 100644 index a58ea9cbf..000000000 --- a/dev/getMatch.js +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Load a single match by ID from the steam API, insert, and request a parse - * */ -const utility = require("../util/utility"); -const queries = require("../store/queries"); - -const { generateJob, getData } = utility; -const { insertMatch } = queries; -const args = process.argv.slice(2); -const matchId = Number(args[0]); -const delay = 1000; -const job = generateJob("api_details", { - match_id: matchId, -}); -const { url } = job; -getData( - { - url, - delay, - }, - (err, body) => { - if (err) { - throw err; - } - if (body.result) { - const match = body.result; - insertMatch( - match, - { - skipCounts: true, - forceParse: true, - attempts: 1, - }, - (err) => { - if (err) { - throw err; - } - process.exit(0); - } - ); - } else { - throw body; - } - } -); diff --git a/dev/getMatchesSeq.js b/dev/getMatchesSeq.js deleted file mode 100644 index 91eaf4261..000000000 --- a/dev/getMatchesSeq.js +++ /dev/null @@ -1,94 +0,0 @@ -/** - * Load a range of matches by match_seq_num from the Steam API, without replay parsing - * */ -const async = require("async"); -const cluster = require("cluster"); -const utility = require("../util/utility"); -const redis = require("../store/redis"); -const queries = require("../store/queries"); - -const { generateJob, getData } = utility; -const { insertMatch } = queries; -const args = process.argv.slice(2); -const startSeqNum = Number(args[0]) || 0; -const endSeqNum = Number(args[1]) || 0; -const delay = Number(args[2]) || 1000; -const bucketSize = 100000000; -// match seq num 59622 has a 32-bit unsigned int max (4294967295) in tower damage -// match seq num 239190 for hero_healing -// match seq num 542284 for hero_healing -// may need to cap values down to 2.1b if we encounter them -// postgres int type only supports up to 2.1b (signed int) -// bucket idspace into groups of 100000000 -// save progress to redis key complete_history:n - -function getPage(matchSeqNum, bucket) { - if (matchSeqNum > bucket + bucketSize || matchSeqNum > endSeqNum) { - process.exit(0); - } - const job = generateJob("api_sequence", { - start_at_match_seq_num: matchSeqNum, - }); - const { url } = job; - getData( - { - url, - delay, - }, - (err, body) => { - if (err) { - throw err; - } - if (body.result) { - const { matches } = body.result; - async.each( - matches, - (match, cb) => { - insertMatch( - match, - { - skipCounts: true, - skipParse: true, - }, - cb - ); - }, - (err) => { - if (err) { - throw err; - } - const nextSeqNum = matches[matches.length - 1].match_seq_num + 1; - redis.set(`complete_history:${bucket}`, nextSeqNum); - return getPage(nextSeqNum, bucket); - } - ); - } else { - throw body; - } - } - ); -} - -if (cluster.isMaster) { - // Fork workers. - for (let i = startSeqNum; i < endSeqNum; i += bucketSize) { - cluster.fork({ - BUCKET: i, - }); - } - cluster.on("exit", (worker, code) => { - if (code !== 0) { - throw new Error("worker died"); - } - console.log("worker exited successfully"); - }); -} else { - const bucket = Number(process.env.BUCKET); - redis.get(`complete_history:${bucket}`, (err, result) => { - if (err) { - throw err; - } - result = result ? 
Number(result) : bucket; - getPage(result, bucket); - }); -} diff --git a/dev/keyTest.js b/dev/keyTest.mjs similarity index 83% rename from dev/keyTest.js rename to dev/keyTest.mjs index 0de936f8f..d147ed43f 100644 --- a/dev/keyTest.js +++ b/dev/keyTest.mjs @@ -1,6 +1,6 @@ -const request = require("request"); -const async = require("async"); -const config = require("../config"); +import request from 'request'; +import async from 'async'; +import config from '../config.js'; const output = []; diff --git a/dev/loadTest.js b/dev/loadTest.js deleted file mode 100644 index b98666049..000000000 --- a/dev/loadTest.js +++ /dev/null @@ -1,31 +0,0 @@ -const async = require("async"); -const request = require("request"); -const db = require("../store/db"); - -const host = "localhost:5000"; -function cb(err) { - process.exit(Number(err)); -} - -db.select("account_id", "last_login") - .from("players") - .whereNotNull("last_login") - .orderBy("last_login") - .orderBy("account_id") - .asCallback((err, results) => { - if (err) { - return cb(err); - } - return async.eachLimit( - results, - 10, - (r, cb) => { - console.time(r.account_id); - request(`http://${host}/players/${r.account_id}`, (err) => { - console.timeEnd(r.account_id); - cb(err); - }); - }, - cb - ); - }); diff --git a/dev/lobby.js b/dev/lobby.mjs similarity index 100% rename from dev/lobby.js rename to dev/lobby.mjs diff --git a/dev/loginTest.js b/dev/loginTest.mjs similarity index 92% rename from dev/loginTest.js rename to dev/loginTest.mjs index 33d085814..882b60671 100644 --- a/dev/loginTest.js +++ b/dev/loginTest.mjs @@ -1,5 +1,5 @@ -const Steam = require("steam"); -const Dota2 = require("dota2"); +import Steam from 'steam'; +import Dota2 from 'dota2'; const client = new Steam.SteamClient(); const user = process.env.STEAM_USER; diff --git a/dev/scratch.js b/dev/metaParse.mjs similarity index 91% rename from dev/scratch.js rename to dev/metaParse.mjs index e4b5521e3..a8358e006 100644 --- a/dev/scratch.js +++ b/dev/metaParse.mjs @@ -1,7 +1,5 @@ -/* eslint-disable */ -const request = require("request"); -const ProtoBuf = require("protobufjs"); -const fs = require("fs"); +import ProtoBuf from 'protobufjs'; +import fs from 'fs'; /* const files = fs.readdirSync('./proto'); diff --git a/dev/mmrToPostgres.js b/dev/mmrToPostgres.js deleted file mode 100644 index 619530671..000000000 --- a/dev/mmrToPostgres.js +++ /dev/null @@ -1,31 +0,0 @@ -const async = require("async"); -const redis = require("../store/redis"); -const db = require("../store/db"); - -redis.zrange("solo_competitive_rank", 0, -1, "WITHSCORES", (err, ids) => { - const inserts = []; - for (let i = 0; i < ids.length; i += 2) { - inserts.push({ account_id: ids[i], rating: ids[i + 1] }); - } - async.eachSeries(inserts, (ins, cb) => { - console.log(ins); - db.raw( - "INSERT INTO solo_competitive_rank(account_id, rating) VALUES (?, ?) ON CONFLICT(account_id) DO NOTHING", - [ins.account_id, ins.rating] - ).asCallback(cb); - }); -}); - -redis.zrange("competitive_rank", 0, -1, "WITHSCORES", (err, ids) => { - const inserts = []; - for (let i = 0; i < ids.length; i += 2) { - inserts.push({ account_id: ids[i], rating: ids[i + 1] }); - } - async.eachSeries(inserts, (ins, cb) => { - console.log(ins); - db.raw( - "INSERT INTO competitive_rank(account_id, rating) VALUES (?, ?) 
ON CONFLICT(account_id) DO NOTHING", - [ins.account_id, ins.rating] - ).asCallback(cb); - }); -}); diff --git a/dev/moveMmrEstimate.js b/dev/moveMmrEstimate.js deleted file mode 100644 index 5f76314af..000000000 --- a/dev/moveMmrEstimate.js +++ /dev/null @@ -1,35 +0,0 @@ -const async = require("async"); -const redis = require("../store/redis"); -const db = require("../store/db"); -const utility = require("../util/utility"); - -db.transaction((trx) => { - redis.keys("mmr_estimates:*", (err, keys) => { - async.eachLimit( - keys, - 1000, - (key, cb) => { - console.log(key); - redis.lrange(key, 0, -1, (err, result) => { - const accountId = key.split(":")[1]; - const data = result.filter((d) => d).map((d) => Number(d)); - const estimate = utility.average(data); - if (accountId && estimate) { - db.raw( - "INSERT INTO mmr_estimates VALUES (?, ?) ON CONFLICT(account_id) DO UPDATE SET estimate = ?", - [accountId, estimate, estimate] - ).asCallback(cb); - } else { - cb(); - } - }); - }, - (err) => { - if (err) { - return trx.rollback(err); - } - return trx.commit(); - } - ); - }); -}); diff --git a/dev/postgresToCassandra.js b/dev/postgresToCassandra.js deleted file mode 100644 index 5845fdd42..000000000 --- a/dev/postgresToCassandra.js +++ /dev/null @@ -1,125 +0,0 @@ -/* -const args = process.argv.slice(2); -const start_id = Number(args[0]) || 0; -const end_id = Number(args[1]) || 2400000000; -const JSONStream = require('JSONStream'); -const constants = require('dotaconstants'); -const db = require('../store/db'); -const cassandra = require('../store/cassandra'); -const redis = require('../store/redis'); -const utility = require('../util/utility'); -const async = require('async'); -const serialize = utility.serialize; -const cluster = require('cluster'); -const bucket_size = 100000000; -if (cluster.isMaster) { - // Fork workers. - for (let i = start_id; i < end_id; i += bucket_size) { - cluster.fork( - { - BUCKET: i, - }); - } - cluster.on('exit', (worker, code, signal) => { - if (code !== 0) { - throw 'worker died'; - } else { - console.log('worker exited successfully'); - } - }); -} else { - var bucket = Number(process.env.BUCKET); - redis.get(`postgresToCassandra:${bucket}`, (err, result) => { - if (err) { - throw err; - } - result = result ? 
Number(result) : bucket; - run(result); - }); -} - -function run(start_id) { - const stream = db.select() - .from('matches') - .where('match_id', '>=', start_id) - .where('match_id', '<', bucket + bucket_size) - .orderBy('match_id', 'asc') - .stream(); - stream.on('end', exit); - stream.pipe(JSONStream.parse()); - stream.on('data', (match) => { - stream.pause(); - redis.set(`postgresToCassandra:${bucket}`, match.match_id); - delete match.parse_status; - insertMatch(match, (err) => { - if (err) { - return exit(err); - } - - db.select([ - 'player_matches.match_id', - 'player_matches.account_id', - 'player_slot', - 'hero_id', - 'item_0', 'item_1', 'item_2', 'item_3', 'item_4', 'item_5', - 'kills', 'deaths', 'assists', 'leaver_status', 'gold', 'last_hits', 'denies', - 'gold_per_min', 'xp_per_min', 'gold_spent', 'hero_damage', 'tower_damage', 'hero_healing', - 'level', 'additional_units', 'stuns', 'max_hero_hit', 'times', 'gold_t', 'lh_t', 'xp_t', - 'obs_log', 'sen_log', 'purchase_log', 'kills_log', 'buyback_log', 'lane_pos', 'obs', 'sen', - 'actions', 'pings', 'purchase', 'gold_reasons', 'xp_reasons', 'killed', - 'item_uses', 'ability_uses', 'hero_hits', 'damage', 'damage_taken', 'damage_inflictor', - 'runes', 'killed_by', 'kill_streaks', 'multi_kills', 'life_state']) - .from('player_matches') - .join('matches', 'player_matches.match_id', 'matches.match_id') - .where('matches.match_id', '=', match.match_id) - .asCallback((err, pms) => { - if (err) { - return exit(err); - } - async.each(pms, insertPlayerMatch, (err) => { - if (err) { - return exit(err); - } - match.players = pms; - updateCache(match, (err) => { - if (err) { - return exit(err); - } - console.log(match.match_id); - stream.resume(); - }); - }); - }); - }); - }); - - function exit(err) { - if (err) { - console.error(err); - } - process.exit(err ? 
1 : 0); - } - - function insertMatch(match, cb) { - const obj = serialize(match); - delete obj.pgroup; - const query = 'INSERT INTO matches JSON ?'; - cassandra.execute(query, [JSON.stringify(obj)], - { - prepare: true, - }, cb); - } - - function insertPlayerMatch(pm, cb) { - if (pm.account_id === constants.anonymous_account_id) { - delete pm.account_id; - } - const obj2 = serialize(pm); - const query2 = 'INSERT INTO player_matches JSON ?'; - cassandra.execute(query2, [JSON.stringify(obj2)], - { - prepare: true, - }, cb); - } -} -*/ diff --git a/dev/reGcData.js b/dev/reGcData.js deleted file mode 100644 index cc16b845e..000000000 --- a/dev/reGcData.js +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Call getGcData for all matches in match table - * */ -const async = require("async"); -const db = require("../store/db"); -const getGcData = require("../util/getGcData"); - -db.select(["match_id"]) - .from("matches") - .asCallback((err, matches) => { - if (err) { - throw err; - } - async.eachSeries( - matches, - (match, cb) => { - console.log(match.match_id); - getGcData(match, (err) => { - if (err) { - console.error(err); - } - cb(); - }); - }, - (err) => { - if (err) { - console.error(err); - } - process.exit(Number(err)); - } - ); - }); diff --git a/dev/reParse.js b/dev/reParse.js deleted file mode 100644 index 4f8bd1f58..000000000 --- a/dev/reParse.js +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Load match IDs from database, then issue re-insert and re-parse on all of them - * */ -const async = require("async"); -const utility = require("../util/utility"); -const queries = require("../store/queries"); -const db = require("../store/db"); - -const { generateJob, getData } = utility; -const { insertMatch } = queries; -const delay = 50; -const args = process.argv.slice(2); -const matchId = Number(args[0]) || 0; -const targetVersion = Number(args[1]) || 0; - -db.select("match_id") - .from("matches") - .where("match_id", ">", matchId) - .where("version", "!=", targetVersion) - .orWhereNull("version") - .orderBy("match_id") - .asCallback((err, result) => { - if (err) { - throw err; - } - async.eachSeries( - result, - (row, cb) => { - const job = generateJob("api_details", { - match_id: row.match_id, - }); - const { url } = job; - getData( - { - url, - delay, - }, - (err, body) => { - if (err) { - throw err; - } - if (body.result) { - const match = body.result; - insertMatch( - match, - { - skipCounts: true, - forceParse: true, - attempts: 1, - }, - (err) => { - if (err) { - throw err; - } - cb(err); - } - ); - } else { - throw body; - } - } - ); - }, - (err) => { - process.exit(Number(err)); - } - ); - }); diff --git a/dev/rePatch.js b/dev/rePatch.mjs similarity index 78% rename from dev/rePatch.js rename to dev/rePatch.mjs index aa0d9a526..41a4f83f2 100644 --- a/dev/rePatch.js +++ b/dev/rePatch.mjs @@ -1,11 +1,11 @@ /** * Recalculate patch ID for matches in match table * */ -const async = require("async"); -const constants = require("dotaconstants"); -const db = require("../store/db"); -const queries = require("../store/queries"); -const utility = require("../util/utility"); +import async from 'async'; +import constants from 'dotaconstants'; +import db from '../store/db.js'; +import queries from '../store/queries.js'; +import utility from '../util/utility.js'; db.select(["match_id", "start_time"]) .from("matches") diff --git a/dev/teamElo.js b/dev/teamElo.mjs similarity index 96% rename from dev/teamElo.js rename to dev/teamElo.mjs index 4b8ba6b73..3e7c55a2c 100644 --- a/dev/teamElo.js +++ b/dev/teamElo.mjs @@ 
-1,9 +1,10 @@
 /**
  * Computes team Elo ratings by game
  * */
-const JSONStream = require("JSONStream");
-const async = require("async");
-const db = require("../store/db");
+import JSONStream from 'JSONStream';
+import async from 'async';
+import db from '../store/db.js';
+
 // Keep each team's rating in memory and update
 const teams = {};
 const wins = {};
diff --git a/dev/teamMatch.js b/dev/teamMatch.mjs
similarity index 90%
rename from dev/teamMatch.js
rename to dev/teamMatch.mjs
index 9cab3c9e4..f16efd1d4 100644
--- a/dev/teamMatch.js
+++ b/dev/teamMatch.mjs
@@ -1,6 +1,6 @@
-const async = require("async");
-const db = require("../store/db");
-const queries = require("../store/queries");
+import async from 'async';
+import db from '../store/db.js';
+import queries from '../store/queries.js';
 
 db.select(["radiant_team_id", "dire_team_id", "match_id"])
   .from("matches")
diff --git a/dev/wordcount.js b/dev/wordcount.mjs
similarity index 73%
rename from dev/wordcount.js
rename to dev/wordcount.mjs
index 2a6e32a48..1873d3ea3 100644
--- a/dev/wordcount.js
+++ b/dev/wordcount.mjs
@@ -1,7 +1,7 @@
-const JSONStream = require("JSONStream");
-const db = require("../store/db");
-const utility = require("../util/utility");
-const compute = require("../util/compute");
+import JSONStream from 'JSONStream';
+import db from '../store/db.js';
+import utility from '../util/utility.js';
+import compute from '../util/compute.js';
 
 const args = process.argv.slice(2);
 const limit = Number(args[0]) || 1;
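
All of the renames above apply the same mechanical conversion: require() calls become import statements, the file gets an .mjs extension so Node treats it as an ES module, and relative specifiers carry an explicit .js extension (CommonJS resolution tolerated the omission; ESM resolution does not). As a rough sketch of the resulting style, assuming ../store/db.js exports a knex instance and ../util/utility.js is a CommonJS module exposing generateJob (both suggested by the scripts above, not verified here), a converted dev script would look something like the following; the file name and query are invented for illustration.

// dev/exampleQuery.mjs (hypothetical, not part of this patch)
import db from '../store/db.js';
import { generateJob } from '../util/utility.js';

// knex query builders are thenable, so top-level await (available in .mjs on
// Node 14.8+) replaces the asCallback() chains used by the deleted scripts.
const [latest] = await db
  .select('match_id')
  .from('matches')
  .orderBy('match_id', 'desc')
  .limit(1);

if (latest) {
  // Mirrors how the deleted getMatch.js built its Steam API request.
  const job = generateJob('api_details', { match_id: latest.match_id });
  console.log(job.url);
}

// Exit explicitly; the knex connection pool otherwise keeps the process alive.
process.exit(0);

An alternative to the .mjs renames would be setting "type": "module" in package.json, which lets the scripts keep their .js names but also flips every other .js file in the package to ESM.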
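
One leftover in the checkAccounts.mjs hunk above is the unchanged context line accountData.split(require("os").EOL): require is not defined inside an ES module, so the renamed script would throw as soon as it reads the account list. A minimal sketch of the ESM equivalent, reusing the file path shown in that hunk:

// Sketch of an ESM replacement for the leftover require("os") call.
import fs from 'fs';
import os from 'os';

const accountData = fs.readFileSync('./STEAM_ACCOUNT_DATA_BAD.txt', 'utf8');
const accountArray = accountData.split(os.EOL);
console.log('loaded %s account entries', accountArray.length);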