diff --git a/package-lock.json b/package-lock.json
index 147846f..07ed7b0 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -214,11 +214,12 @@
       }
     },
     "@firestone-hs/aws-lambda-utils": {
-      "version": "0.0.20",
-      "resolved": "https://registry.npmjs.org/@firestone-hs/aws-lambda-utils/-/aws-lambda-utils-0.0.20.tgz",
-      "integrity": "sha512-xcmd0r4e7yY9nerCP9xskiP+xXF4b9Rqpru31rsqi/v9DEvSnW/Coi/JXnk7Q1wbAp7KbDWm6HQ6kxgkOAg1Ww==",
+      "version": "0.0.26",
+      "resolved": "https://registry.npmjs.org/@firestone-hs/aws-lambda-utils/-/aws-lambda-utils-0.0.26.tgz",
+      "integrity": "sha512-5R23GmEbOppj26taNDiqk3QNGxi/IlNqAUjymIhsc6VLSlCZWfVg9gNqSZVApsec45IXe90Y6kzEAUCiRMq7ew==",
       "requires": {
         "@firestone-hs/reference-data": "^0.1.196",
+        "JSONStream": "^1.3.5",
         "aws-lambda": "^1.0.7",
         "aws-sdk": "^2.888.0",
         "cross-fetch": "^3.1.5",
@@ -909,6 +910,15 @@
       "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==",
       "dev": true
     },
+    "JSONStream": {
+      "version": "1.3.5",
+      "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz",
+      "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==",
+      "requires": {
+        "jsonparse": "^1.2.0",
+        "through": ">=2.2.7 <3"
+      }
+    },
     "abab": {
       "version": "2.0.1",
       "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.1.tgz",
@@ -983,7 +993,8 @@
     },
     "ansi-regex": {
       "version": "3.0.0",
-      "resolved": "",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
+      "integrity": "sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==",
       "dev": true
     },
     "ansi-styles": {
@@ -3235,7 +3246,8 @@
     },
     "ini": {
       "version": "1.3.5",
-      "resolved": "",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
+      "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
       "dev": true,
       "optional": true
     },
@@ -3434,7 +3446,8 @@
       "dependencies": {
         "minimist": {
           "version": "1.2.5",
-          "resolved": "",
+          "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
+          "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
           "dev": true,
           "optional": true
         }
      },
@@ -4917,6 +4930,11 @@
         "graceful-fs": "^4.1.6"
       }
     },
+    "jsonparse": {
+      "version": "1.3.1",
+      "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz",
+      "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg=="
+    },
     "jsprim": {
       "version": "1.4.2",
       "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz",
@@ -7172,9 +7190,9 @@
       }
     },
     "terser": {
-      "version": "4.8.0",
-      "resolved": "https://registry.npmjs.org/terser/-/terser-4.8.0.tgz",
-      "integrity": "sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw==",
+      "version": "4.8.1",
+      "resolved": "https://registry.npmjs.org/terser/-/terser-4.8.1.tgz",
+      "integrity": "sha512-4GnLC0x667eJG0ewJTa6z/yXrbLGv80D9Ru6HIpCQmO+Q4PfEtBFi0ObSckqwL6VyQv/7ENJieXHo2ANmdQwgw==",
       "dev": true,
       "requires": {
         "commander": "^2.20.0",
@@ -7277,8 +7295,7 @@
     "through": {
       "version": "2.3.8",
       "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
-      "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
-      "dev": true
+      "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
    },
    "through2": {
      "version": "2.0.5",
diff --git a/package.json b/package.json
index a9ccdc7..f2c38a6 100644
--- a/package.json
+++ b/package.json
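JSONStream (with its jsonparse and through dependencies) enters the lock file both through the @firestone-hs/aws-lambda-utils bump and as a direct dependency in package.json below. It parses large JSON documents incrementally instead of buffering them whole; a minimal sketch of the API this pulls in (the local file name is illustrative only, not part of this change):

import { createReadStream } from 'fs';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const JSONStream = require('JSONStream'); // no bundled typings, hence require

// Emit each element of a top-level JSON array as soon as it is parsed,
// without ever holding the full document in memory.
createReadStream('working-rows.json')
	.pipe(JSONStream.parse('*'))
	.on('data', row => console.log('row', row))
	.on('end', () => console.log('done'));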
@@ -24,10 +24,11 @@
 		"dist/**/*"
 	],
 	"dependencies": {
-		"@firestone-hs/aws-lambda-utils": "0.0.20",
+		"@firestone-hs/aws-lambda-utils": "0.0.26",
 		"@firestone-hs/hs-replay-xml-parser": "0.0.78",
 		"@firestone-hs/reference-data": "^1.0.1",
 		"@types/elementtree": "^0.1.0",
+		"JSONStream": "^1.3.5",
 		"aws-sdk": "^2.1040.0",
 		"elementtree": "^0.1.7",
 		"immutable": "3.8.2",
diff --git a/src/build-battlegrounds-hero-stats-new.ts b/src/build-battlegrounds-hero-stats-new.ts
index a58f139..937f02a 100644
--- a/src/build-battlegrounds-hero-stats-new.ts
+++ b/src/build-battlegrounds-hero-stats-new.ts
@@ -1,64 +1,172 @@
 /* eslint-disable @typescript-eslint/no-use-before-define */
-import { groupByFunction } from '@firestone-hs/aws-lambda-utils';
+import { getConnection, groupByFunction, http, logBeforeTimeout, logger, S3 } from '@firestone-hs/aws-lambda-utils';
 import { AllCardsService, CardIds, Race } from '@firestone-hs/reference-data';
+import { Context } from 'aws-lambda';
+import AWS from 'aws-sdk';
 import { ServerlessMysql } from 'serverless-mysql';
+import { Readable } from 'stream';
 import { gzipSync } from 'zlib';
 import { BgsGlobalHeroStat2, BgsGlobalStats2, MmrPercentile } from './bgs-global-stats';
-import { getConnection as getConnectionStats } from './db/rds';
-import { S3 } from './db/s3';
-import { formatDate, http, normalizeHeroCardId } from './utils/util-functions';
+import { formatDate, normalizeHeroCardId } from './utils/util-functions';
 
 const s3 = new S3();
 const allCards = new AllCardsService();
+const lambda = new AWS.Lambda();
+
+// const tableName = 'temp_cron_build_bgs_hero_stats';
 
 // This example demonstrates a NodeJS 8.10 async handler[1], however of course you could use
 // the more traditional callback-style handler.
 // [1]: https://aws.amazon.com/blogs/compute/node-js-8-10-runtime-now-available-in-aws-lambda/
-export default async (event): Promise<any> => {
-	await handleNewStats();
+export default async (event, context: Context): Promise<any> => {
+	await handleNewStats(event, context);
 };
 
-export const handleNewStats = async () => {
+export const handleNewStats = async (event, context: Context) => {
+	logger.log('event', event);
+	// const test = await readRowsFromS3();
+	// logger.log('test', test?.length, test);
+	// throw new Error('stopping');
 	await allCards.initializeCardsDb();
 	const lastPatch = await getLastBattlegroundsPatch();
-	const mysql = await getConnectionStats();
-	const rows: readonly InternalBgsRow[] = await loadRows(mysql, lastPatch);
-	const mmrPercentiles: readonly MmrPercentile[] = buildMmrPercentiles(rows);
+	if (event.permutation) {
+		// const mysql = await getConnection();
+		// const rows: readonly InternalBgsRow[] = await loadRows(mysql);
+		const rows: readonly InternalBgsRow[] = await readRowsFromS3();
+		// await mysql.end();
+		await handlePermutation(event.permutation, event.allTribes, rows, lastPatch);
+	} else {
+		const mysql = await getConnection();
+		// await extractData(mysql);
+		const rows: readonly InternalBgsRow[] = await loadRows(mysql);
+		await mysql.end();
+		await saveRowsOnS3(rows);
+		await dispatchNewLambdas(rows, context);
+	}
+
+	return { statusCode: 200, body: null };
+};
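+
+// Implied invocation contract, sketched for reference (names mirror the fields
+// read above): a scheduled or manual run carries no `permutation` field and
+// takes the orchestrator branch; each fanned-out worker is re-invoked with
+// { permutation: 'all' | Race[], allTribes: Race[] } and builds the stats file
+// for that single tribe combination.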
+
+const dispatchNewLambdas = async (rows: readonly InternalBgsRow[], context: Context) => {
 	// A basic approach could be to generate all files for all tribe combinations. That way,
 	// the app will only query static files with all the info for the specific tribe combination
 	const allTribes = extractAllTribes(rows);
 	console.log('all tribes', allTribes);
-
-	const tribePermutations = [null, ...combine(allTribes, 5)];
+	const tribePermutations: ('all' | Race[])[] = ['all', ...combine(allTribes, 5)];
 	// const tribePermutations = [null];
 	console.log('tribe permutations, should be 127 (126 + 1), because 9 tribes', tribePermutations.length);
 	// console.log('tribe permutations', tribePermutations);
 	for (const tribes of tribePermutations) {
-		// console.log('handling tribes', index++, tribes);
-		const tribesStr = !!tribes?.length ? tribes.join(',') : null;
-		const statsForTribes: BgsGlobalStats2 = {
-			lastUpdateDate: formatDate(new Date()),
-			mmrPercentiles: mmrPercentiles,
-			heroStats: buildHeroes(rows, lastPatch, mmrPercentiles, tribesStr),
+		console.log('handling tribes', tribes);
+		const newEvent = {
+			permutation: tribes,
 			allTribes: allTribes,
 		};
-		// console.log('\tstats for tribes', tribes, );
-		const stringResults = JSON.stringify(statsForTribes);
-		const gzippedResults = gzipSync(stringResults);
-		await s3.writeFile(
-			gzippedResults,
-			'static.zerotoheroes.com',
-			`api/bgs/bgs-global-stats-${!!tribes?.length ? tribes.join('-') : 'all-tribes'}.gz.json`,
-			'application/json',
-			'gzip',
-		);
+		const params = {
+			FunctionName: context.functionName,
+			InvocationType: 'Event',
+			LogType: 'Tail',
+			Payload: JSON.stringify(newEvent),
+		};
+		logger.log('\tinvoking lambda', params);
+		const result = await lambda
+			.invoke({
+				FunctionName: context.functionName,
+				InvocationType: 'Event',
+				LogType: 'Tail',
+				Payload: JSON.stringify(newEvent),
+			})
+			.promise();
+		logger.log('\tinvocation result', result);
+		// throw new Error('stopping process');
 	}
+};
 
-	await mysql.end();
+const handlePermutation = async (
+	tribes: 'all' | readonly Race[],
+	allTribes: readonly Race[],
+	rows: readonly InternalBgsRow[],
+	lastPatch: PatchInfo,
+) => {
+	const mmrPercentiles: readonly MmrPercentile[] = buildMmrPercentiles(rows);
+	logger.log('handling permutation', tribes);
+	const tribesStr = tribes === 'all' ? null : tribes.join(',');
+	const statsForTribes: BgsGlobalStats2 = {
+		lastUpdateDate: formatDate(new Date()),
+		mmrPercentiles: mmrPercentiles,
+		heroStats: buildHeroes(rows, lastPatch, mmrPercentiles, tribesStr),
+		allTribes: allTribes,
+	};
+	console.log('\tbuilt stats', statsForTribes.heroStats?.length);
+	await s3.writeFile(
+		gzipSync(JSON.stringify(statsForTribes)),
+		'static.zerotoheroes.com',
+		`api/bgs/bgs-global-stats-${tribes === 'all' ? 'all-tribes' : tribes.join('-')}.gz.json`,
+		'application/json',
+		'gzip',
+	);
+};
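+
+// Sanity check on the permutation count logged in dispatchNewLambdas above:
+// combine(allTribes, 5) over 9 tribes yields C(9,5) = 9! / (5! * 4!) = 126
+// combinations, plus the 'all' entry = 127 invocations per run.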
 
-	return { statusCode: 200, body: null };
+// const extractData = async (mysql: ServerlessMysql) => {
+// 	await mysql.query(`DROP TABLE IF EXISTS ${tableName};`);
+// 	const persistQuery = `
+// 		CREATE TABLE ${tableName}
+// 		AS SELECT * FROM bgs_run_stats WHERE creationDate > DATE_SUB(NOW(), INTERVAL 30 DAY);
+// 	`;
+// 	logger.log('creating work table', persistQuery);
+// 	await mysql.query(persistQuery);
+// };
+
+const saveRowsOnS3 = async (rows: readonly InternalBgsRow[]) => {
+	logger.log('saving rows on s3', rows.length);
+	await s3.writeArrayAsMultipart(rows, 'static.zerotoheroes.com', `api/bgs/working-rows.json`, 'application/json');
+	logger.log('file saved');
+};
+
+const readRowsFromS3 = async (): Promise<readonly InternalBgsRow[]> => {
+	return new Promise<readonly InternalBgsRow[]>((resolve, reject) => {
+		let parseErrors = 0;
+		let totalParsed = 0;
+		const stream: Readable = s3.readStream('static.zerotoheroes.com', `api/bgs/working-rows.json`);
+		const result: InternalBgsRow[] = [];
+		let previousString = '';
+		stream
+			.on('data', chunk => {
+				// logger.log('received data', chunk);
+				const str = Buffer.from(chunk).toString('utf-8');
+				// logger.log('string', str);
+				const newStr = previousString + str;
+				const split = newStr.split('\n');
+				// logger.log('splits', split.length);
+				// split.forEach(s => logger.log('split item', s));
+				// logger.log('leftover', split[split.length - 1]);
+				// const cleanPrefixStr = str.startsWith('[') ? str.slice(1) : str;
+				const rows: readonly InternalBgsRow[] = split.slice(0, split.length - 1).map(row => {
+					try {
+						const result = JSON.parse(row);
+						totalParsed++;
+						return result;
+					} catch (e) {
+						logger.warn('could not parse row', row);
+						parseErrors++;
+						// throw e;
+					}
+				});
+				previousString = split[split.length - 1];
+				// logger.log('rows', rows);
+				// logger.log('parsing errors', parseErrors, 'and successes', totalParsed);
+				result.push(...rows);
+			})
+			.on('end', () => {
+				const finalResult = result.filter(row => !!row);
+				logger.log('stream end', result.length, finalResult.length);
+				logger.log('parsing errors', parseErrors, 'and successes', totalParsed);
+				resolve(finalResult);
+			});
+	});
+	// const str = await s3.readContentAsString('static.zerotoheroes.com', `api/bgs/working-rows.json`);
+	// return JSON.parse(str);
 };
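+
+// Note on readRowsFromS3 above: the S3 object is read as line-delimited JSON,
+// and stream chunk boundaries rarely coincide with line boundaries. Only the
+// complete lines (everything before the last '\n') are parsed; the trailing
+// partial line is carried over in `previousString`. E.g. if one chunk ends with
+// '{"rank":3' and the next starts with ',"rating":6500}', the two halves are
+// re-joined before JSON.parse sees them.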
 
 // https://stackoverflow.com/a/47204248/548701
@@ -132,13 +240,13 @@ const buildHeroesForMmr = (
 	const rowsWithTribes = !!tribesStr
 		? rows.filter(row => !!row.tribes).filter(row => row.tribes === tribesStr)
 		: rows;
-	console.log(
-		'\tNumber of total rows matching tribes',
-		tribesStr,
-		mmr.percentile,
-		rowsWithTribes.length,
-		rows.length,
-	);
+	// console.log(
+	// 	'\tNumber of total rows matching tribes',
+	// 	tribesStr,
+	// 	mmr.percentile,
+	// 	rowsWithTribes.length,
+	// 	rows.length,
+	// );
 
 	const allTimeHeroes = buildHeroStats(rowsWithTribes, 'all-time', tribesStr);
 	const rowsForLastPatch = rowsWithTribes.filter(
@@ -146,25 +254,25 @@
 		row =>
 			row.buildNumber >= lastPatch.number ||
 			row.creationDate > new Date(new Date(lastPatch.date).getTime() + 24 * 60 * 60 * 1000),
 	);
-	console.log(
-		'\tNumber of last patch rows matching tribes',
-		tribesStr,
-		mmr.percentile,
-		rowsForLastPatch.length,
-		lastPatch.number,
-	);
+	// console.log(
+	// 	'\tNumber of last patch rows matching tribes',
+	// 	tribesStr,
+	// 	mmr.percentile,
+	// 	rowsForLastPatch.length,
+	// 	lastPatch.number,
+	// );
 	const lastPatchHeroes = buildHeroStats(rowsForLastPatch, 'last-patch', tribesStr);
 
 	const rowsForLastThree = rowsWithTribes.filter(
 		row => row.creationDate >= new Date(new Date().getTime() - 3 * 24 * 60 * 60 * 1000),
 	);
-	console.log('\tNumber of last three rows matching tribes', tribesStr, mmr.percentile, rowsForLastThree.length);
+	// console.log('\tNumber of last three rows matching tribes', tribesStr, mmr.percentile, rowsForLastThree.length);
 	const threeDaysHeroes = buildHeroStats(rowsForLastThree, 'past-three', tribesStr);
 
 	const rowsForLastSeven = rowsWithTribes.filter(
 		row => row.creationDate >= new Date(new Date().getTime() - 7 * 24 * 60 * 60 * 1000),
 	);
-	console.log('\tNumber of last seven rows matching tribes', tribesStr, mmr.percentile, rowsForLastSeven.length);
+	// console.log('\tNumber of last seven rows matching tribes', tribesStr, mmr.percentile, rowsForLastSeven.length);
 	const sevenDaysHeroes = buildHeroStats(rowsForLastSeven, 'past-seven', tribesStr);
 
 	// console.log('\tbuilt heroes for mmr', tribesStr);
@@ -318,13 +426,10 @@ const buildPlacementDistribution = (
 	return placementDistribution;
 };
 
-const loadRows = async (mysql: ServerlessMysql, patch: PatchInfo): Promise<readonly InternalBgsRow[]> => {
-	console.log('loading rows', patch);
+const loadRows = async (mysql: ServerlessMysql): Promise<readonly InternalBgsRow[]> => {
 	const query = `
 		SELECT * FROM bgs_run_stats
-		WHERE creationDate > DATE_SUB(NOW(), INTERVAL 30 DAY)
-		ORDER BY id DESC
-		LIMIT 1000000
+		WHERE creationDate > DATE_SUB(NOW(), INTERVAL 30 DAY);
 	`;
 	console.log('running query', query);
 	const rows: readonly InternalBgsRow[] = await mysql.query(query);
diff --git a/src/build-bgs-stats-slices.ts b/src/build-bgs-stats-slices.ts
deleted file mode 100644
index 8b9bb5b..0000000
--- a/src/build-bgs-stats-slices.ts
+++ /dev/null
@@ -1,207 +0,0 @@
-/* eslint-disable @typescript-eslint/no-use-before-define */
-import { getConnection, http, logBeforeTimeout, logger, S3 } from '@firestone-hs/aws-lambda-utils';
-import { AllCardsService, CardIds, Race } from '@firestone-hs/reference-data';
-import { ObjectList } from 'aws-sdk/clients/s3';
-import SqlString from 'sqlstring';
-import { constants, gzipSync } from 'zlib';
-import { InternalBgsGlobalStats, InternalBgsRow, RankGroup, Slice } from './internal-model';
-import { buildStatsForTribes } from './slices/merger';
-import { buildAllRankGroups, buildMmrPercentiles } from './slices/rank-groups';
-import { buildNewSlice } from './slices/slice-builder';
-import { combine, normalizeHeroCardId } from './utils/util-functions';
-
-export const allCards = new AllCardsService();
-const s3 = new S3();
-
-const S3_BUCKET_NAME = 'static.zerotoheroes.com';
-const S3_FOLDER = `api/bgs/heroes`;
-const S3_FOLDER_SLICE = `${S3_FOLDER}/slices`;
-
-// This example demonstrates a NodeJS 8.10 async handler[1], however of course you could use
-// the more traditional callback-style handler.
-// [1]: https://aws.amazon.com/blogs/compute/node-js-8-10-runtime-now-available-in-aws-lambda/
-export default async (event, context): Promise<any> => {
-	const cleanup = logBeforeTimeout(context);
-	await allCards.initializeCardsDb();
-
-	const existingSlices: readonly Slice[] = await loadExistingSlices();
-	logger.log('existingSlices', existingSlices.length, existingSlices[0]);
-	const lastDataTimestamp: number = !existingSlices?.length
-		? null
-		: Math.max(...existingSlices.map(data => data.lastUpdateDate.getTime()));
-	logger.log('lastDataTimestamp', lastDataTimestamp);
-	const lastDataDate: Date = lastDataTimestamp ? new Date(lastDataTimestamp) : null;
-	logger.log('lastDataDate', lastDataDate);
-
-	const rows: readonly InternalBgsRow[] = await loadRows(lastDataDate);
-	const validRows = rows
-		.filter(row => row.heroCardId.startsWith('TB_BaconShop_') || row.heroCardId.startsWith('BG'))
-		.filter(
-			row =>
-				row.heroCardId !== CardIds.ArannaStarseeker_ArannaUnleashedTokenBattlegrounds &&
-				row.heroCardId !== CardIds.QueenAzshara_NagaQueenAzsharaToken,
-		)
-		.map(row => ({
-			...row,
-			heroCardId: normalizeHeroCardId(row.heroCardId),
-		}));
-	logger.log('rows', validRows.length);
-
-	const allTribes: readonly Race[] = extractAllTribes(rows);
-	const allRankGroups: readonly RankGroup[] = buildAllRankGroups(existingSlices, rows);
-	logger.log('allRankGroups', allRankGroups);
-	const newSlice = buildNewSlice(validRows, allRankGroups, allTribes);
-	logger.log('newSlice', newSlice);
-	await saveSingleSlice(newSlice);
-
-	const allSlices = [...existingSlices, newSlice];
-	await buildFinalStats(allSlices, allTribes);
-
-	cleanup();
-	return { statusCode: 200, body: null };
-};
-
-const buildFinalStats = async (allSlices: readonly Slice[], allTribes: readonly Race[]) => {
-	const lastPatch = await getLastBattlegroundsPatch();
-	const allTimePeriods: ('all-time' | 'past-three' | 'past-seven' | 'last-patch')[] = [
-		'all-time',
-		'past-three',
-		'past-seven',
-		'last-patch',
-	];
-
-	for (const timePeriod of allTimePeriods) {
-		logger.log('building stats for time period', timePeriod);
-		const tribePermutations = [null, ...combine(allTribes, 5)];
-		for (const tribes of tribePermutations) {
-			// logger.log('\thandling tribes', tribes);
-			const relevantSlices = allSlices.filter(slice => isValidDate(slice.lastUpdateDate, timePeriod, lastPatch));
-			const mmrPercentiles = buildMmrPercentiles(relevantSlices);
-			const stats: InternalBgsGlobalStats = buildStatsForTribes(
-				relevantSlices,
-				tribes,
-				mmrPercentiles,
-				timePeriod,
-				allTribes,
-			);
-			await saveFinalFile(stats, tribes, timePeriod);
-		}
-	}
-};
-
-const isValidDate = (
-	theDate: Date,
-	timePeriod: 'all-time' | 'past-three' | 'past-seven' | 'last-patch',
-	lastPatch: PatchInfo,
-): boolean => {
-	switch (timePeriod) {
-		case 'all-time':
-			return true;
-		case 'past-seven':
-			return new Date().getTime() - theDate.getTime() <= 7 * 24 * 60 * 60 * 1000;
-		case 'past-three':
-			return new Date().getTime() - theDate.getTime() <= 3 * 24 * 60 * 60 * 1000;
-		case 'last-patch':
-			const lastPatchDate = new Date(lastPatch.date);
-			return lastPatchDate.getTime() < theDate.getTime();
-	}
-};
-
-const saveFinalFile = async (
-	stats: InternalBgsGlobalStats,
-	tribes: readonly Race[],
-	timePeriod: string,
-): Promise<void> => {
-	const stringResults = JSON.stringify(stats);
-	const gzippedResults = gzipSync(stringResults);
-	const tribesText = !!tribes?.length ? tribes.join('-') : 'all-tribes';
-	await s3.writeFile(
-		gzippedResults,
-		S3_BUCKET_NAME,
-		`${S3_FOLDER}/bgs-global-stats-${tribesText}-${timePeriod}.gz.json`,
-		'application/json',
-		'gzip',
-	);
-};
-
-const saveSingleSlice = async (slice: Slice): Promise<void> => {
-	const dataStr = JSON.stringify(slice, null, 4);
-	const gzipped = gzipSync(dataStr, {
-		level: constants.Z_BEST_COMPRESSION,
-	});
-	logger.log('gzipped buckets');
-	await s3.writeFile(
-		gzipped,
-		S3_BUCKET_NAME,
-		`${S3_FOLDER_SLICE}/hero-stats-${new Date().toISOString()}.gz.json`,
-		'application/json',
-		'gzip',
-	);
-	logger.log('slice saved', `${S3_FOLDER_SLICE}/hero-stats-${new Date().toISOString()}.gz.json`);
-};
-
-const loadRows = async (lastDataDate: Date): Promise<readonly InternalBgsRow[]> => {
-	const mysql = await getConnection();
-	const query = `
-		SELECT creationDate, buildNumber, rating, heroCardId, rank, tribes, combatWinrate, warbandStats
-		FROM bgs_run_stats
-		WHERE creationDate >= ${!!lastDataDate ? SqlString.escape(lastDataDate) : 'DATE_SUB(NOW(), INTERVAL 4 HOUR)'};
-	`;
-	logger.log('\n', new Date().toLocaleString(), 'running query', query);
-	const result: readonly InternalBgsRow[] = await mysql.query(query);
-	logger.log(new Date().toLocaleString(), 'result', result?.length);
-	await mysql.end();
-	logger.log(new Date().toLocaleString(), 'connection closed');
-	return result;
-};
-
-const loadExistingSlices = async (): Promise<readonly Slice[]> => {
-	const files: ObjectList = await s3.loadAllFileKeys(S3_BUCKET_NAME, S3_FOLDER_SLICE);
-	logger.log('fileKeys', files.length, files[0]);
-	const allContent = await Promise.all(
-		files.filter(file => !file.Key.endsWith('/')).map(file => s3.readGzipContent(S3_BUCKET_NAME, file.Key, 1)),
-	);
-	// Delete old data. The main goal is to keep the number of keys below 1000
-	// so that we don't have to handle pagination in the replies.
-	// Keeping a history of 40 days also allows us to move to hourly updates
-	// if we want to get fresh data after patches
-	const keysToDelete = files
-		.filter(file => Date.now() - file.LastModified.getTime() > 40 * 24 * 60 * 60 * 1000)
-		.map(file => file.Key);
-	await s3.deleteFiles(S3_BUCKET_NAME, keysToDelete);
-	return allContent
-		.map(content => JSON.parse(content))
-		.map(
-			data =>
-				({
-					...data,
-					lastUpdateDate: new Date(data.lastUpdateDate),
-				} as Slice),
-		);
-};
-
-const getLastBattlegroundsPatch = async (): Promise<PatchInfo> => {
-	const patchInfo = await http(`https://static.zerotoheroes.com/hearthstone/data/patches.json`);
-	const structuredPatch = JSON.parse(patchInfo);
-	const patchNumber = structuredPatch.currentBattlegroundsMetaPatch;
-	return structuredPatch.patches.find(patch => patch.number === patchNumber);
-};
-
-const extractAllTribes = (rows: readonly InternalBgsRow[]): readonly Race[] => {
-	return [
-		...new Set(
-			rows
-				.map(row => row.tribes)
-				.filter(tribes => !!tribes?.length)
-				.map(tribes => tribes.split(',').map(strTribe => parseInt(strTribe) as Race))
-				.reduce((a, b) => [...new Set(a.concat(b))], []),
-		),
-	];
-};
-
-export interface PatchInfo {
-	readonly number: number;
-	readonly version: string;
-	readonly name: string;
-	readonly date: string;
-}
diff --git a/src/db/rds.ts b/src/db/rds.ts
deleted file mode 100644
index ec6be2d..0000000
--- a/src/db/rds.ts
+++ /dev/null
@@ -1,56 +0,0 @@
-/* eslint-disable @typescript-eslint/no-use-before-define */
-/* eslint-disable @typescript-eslint/no-var-requires */
-import { SecretsManager } from 'aws-sdk';
-import { GetSecretValueRequest, GetSecretValueResponse } from 'aws-sdk/clients/secretsmanager';
-import { default as MySQLServerless, default as serverlessMysql } from 'serverless-mysql';
-
-const secretsManager = new SecretsManager({ region: 'us-west-2' });
-let connection, connectionPromise;
-
-const connect = async (): Promise<serverlessMysql.ServerlessMysql> => {
-	const secretRequest: GetSecretValueRequest = {
-		SecretId: 'rds-connection',
-	};
-	const secret: SecretInfo = await getSecret(secretRequest);
-	const config = {
-		host: secret.host,
-		user: secret.username,
-		password: secret.password,
-		database: 'replay_summary',
-		port: secret.port,
-	};
-	connection = MySQLServerless({ config });
-
-	return connection;
-};
-
-const getConnection = async (): Promise<serverlessMysql.ServerlessMysql> => {
-	if (connection) {
-		return connection;
-	}
-	if (connectionPromise) {
-		return connectionPromise;
-	}
-	connectionPromise = connect();
-
-	return connectionPromise;
-};
-
-export { getConnection };
-
-const getSecret = (secretRequest: GetSecretValueRequest) => {
-	return new Promise<SecretInfo>(resolve => {
-		secretsManager.getSecretValue(secretRequest, (err, data: GetSecretValueResponse) => {
-			const secretInfo: SecretInfo = JSON.parse(data.SecretString);
-			resolve(secretInfo);
-		});
-	});
-};
-
-interface SecretInfo {
-	readonly username: string;
-	readonly password: string;
-	readonly host: string;
-	readonly port: number;
-	readonly dbClusterIdentifier: string;
-}
diff --git a/src/db/s3.ts b/src/db/s3.ts
deleted file mode 100644
index 89e4201..0000000
--- a/src/db/s3.ts
+++ /dev/null
@@ -1,130 +0,0 @@
-import { S3 as S3AWS } from 'aws-sdk';
-import { GetObjectRequest, Metadata, PutObjectRequest } from 'aws-sdk/clients/s3';
-import * as JSZip from 'jszip';
-import { loadAsync } from 'jszip';
-
-export class S3 {
-	private readonly s3: S3AWS;
-
-	constructor() {
-		this.s3 = new S3AWS({ region: 'us-west-2' });
-	}
-
-	public async getObjectMetaData(bucketName: string, key: string): Promise<Metadata> {
-		return new Promise<Metadata>(resolve => {
-			const params: GetObjectRequest = {
-				Bucket: bucketName,
-				Key: key,
-			};
-			this.s3.getObject(params, (err, data) => {
-				resolve(data.Metadata);
-			});
-		});
-	}
-
-	// Since S3 is only eventually consistent, it's possible that we try to read a file that is not
-	// available yet
-	public async readContentAsString(bucketName: string, key: string): Promise<string> {
-		return new Promise<string>(resolve => {
-			this.readContentAsStringInternal(bucketName, key, result => resolve(result));
-		});
-	}
-
-	private readContentAsStringInternal(bucketName: string, key: string, callback, retriesLeft = 10) {
-		if (retriesLeft <= 0) {
-			console.error('could not read s3 object', bucketName, key);
-			callback(null);
-			return;
-		}
-		const input = { Bucket: bucketName, Key: key };
-		this.s3.getObject(input, (err, data) => {
-			if (err) {
-				console.warn('could not read s3 object', bucketName, key, err, retriesLeft);
-				setTimeout(() => {
-					this.readContentAsStringInternal(bucketName, key, callback, retriesLeft - 1);
-				}, 3000);
-				return;
-			}
-			const objectContent = data.Body.toString('utf8');
-			callback(objectContent);
-		});
-	}
-
-	public async readZippedContent(bucketName: string, key: string): Promise<string> {
-		return new Promise<string>(resolve => {
-			this.readZippedContentInternal(bucketName, key, result => resolve(result));
-		});
-	}
-
-	private readZippedContentInternal(bucketName: string, key: string, callback, retriesLeft = 10) {
-		if (retriesLeft <= 0) {
-			console.error('could not read s3 object', bucketName, key);
-			callback(null);
-			return;
-		}
-		const input = { Bucket: bucketName, Key: key };
-		this.s3.getObject(input, async (err, data) => {
-			if (err) {
-				console.warn('could not read s3 object', bucketName, key, err, retriesLeft);
-				setTimeout(() => {
-					this.readZippedContentInternal(bucketName, key, callback, retriesLeft - 1);
-				}, 1000);
-				return;
-			}
-			try {
-				const zipContent = await loadAsync(data.Body as any);
-				const file = Object.keys(zipContent.files)[0];
-				const objectContent = await zipContent.file(file).async('string');
-				callback(objectContent);
-			} catch (e) {
-				console.warn('could not read s3 object', bucketName, key, err, retriesLeft, e);
-				setTimeout(() => {
-					this.readZippedContentInternal(bucketName, key, callback, retriesLeft - 1);
-				}, 1000);
-				return;
-			}
-		});
-	}
-
-	public async writeCompressedFile(content: any, bucket: string, fileName: string): Promise<boolean> {
-		const jszip = new JSZip.default();
-		jszip.file('replay.xml', content);
-		const blob: Buffer = await jszip.generateAsync({
-			type: 'nodebuffer',
-			compression: 'DEFLATE',
-			compressionOptions: {
-				level: 9,
-			},
-		});
-		return this.writeFile(blob, bucket, fileName, 'application/zip');
-	}
-
-	public async writeFile(
-		content: any,
-		bucket: string,
-		fileName: string,
-		type = 'application/json',
-		encoding?: 'gzip' | null,
-	): Promise<boolean> {
-		return new Promise<boolean>((resolve, reject) => {
-			const input: PutObjectRequest = {
-				Body: type === 'application/json' && encoding !== 'gzip' ? JSON.stringify(content) : content,
-				Bucket: bucket,
-				Key: fileName,
-				ACL: 'public-read',
-				ContentType: type,
-			};
-			if (encoding) {
-				input.ContentEncoding = encoding;
-			}
-			this.s3.upload(input, (err, data) => {
-				if (err) {
-					console.error('could not upload file to S3', err, input);
-					resolve(false);
-					return;
-				}
-				resolve(true);
-			});
-		});
-	}
-}
diff --git a/src/utils/util-functions.ts b/src/utils/util-functions.ts
index 3776cc3..3bf6cf0 100644
--- a/src/utils/util-functions.ts
+++ b/src/utils/util-functions.ts
@@ -1,35 +1,4 @@
 import { AllCardsService, CardIds, ReferenceCard } from '@firestone-hs/reference-data';
-import fetch, { RequestInfo } from 'node-fetch';
-
-function partitionArray<T>(array: readonly T[], partitionSize: number): readonly T[][] {
-	const workingCopy: T[] = [...array];
-	const result: T[][] = [];
-	while (workingCopy.length) {
-		result.push(workingCopy.splice(0, partitionSize));
-	}
-	return result;
-}
-
-async function http(request: RequestInfo): Promise<string> {
-	return new Promise<string>(resolve => {
-		fetch(request)
-			.then(
-				response => {
-					return response.text();
-				},
-				error => {
-					console.warn('could not retrieve review', error);
-				},
-			)
-			.then(body => {
-				resolve(body);
-			});
-	});
-}
-
-async function sleep(ms) {
-	return new Promise(resolve => setTimeout(resolve, ms));
-}
 
 export const toCreationDate = (today: Date): string => {
 	return `${today
@@ -51,8 +20,6 @@ export const getCardFromCardId = (cardId: number | string, cards: AllCardsServic
 	return card;
 };
 
-export { partitionArray, http, sleep };
-
 export const normalizeHeroCardId = (heroCardId: string, allCards: AllCardsService = null): string => {
 	if (!heroCardId) {
 		return heroCardId;
diff --git a/template.yaml b/template.yaml
index 1748909..732eb73 100644
--- a/template.yaml
+++ b/template.yaml
@@ -12,43 +12,20 @@ Resources:
             Runtime: nodejs12.x
             Timeout: 880
             CodeUri: ./dist/build-battlegrounds-hero-stats-new
-            MemorySize: 3000
+            MemorySize: 5000
             Policies:
                 - AWSLambdaVPCAccessExecutionRole
                 - AmazonSESFullAccess
                 - SecretsManagerReadWrite
                 - AmazonSNSReadOnlyAccess
                 - AmazonS3FullAccess
+                - AWSLambda_FullAccess
             # - arn:aws:iam::478062583808:policy/CloudWatchPolicyForLambda # See https://stackoverflow.com/questions/51166504/disable-cloudwatch-to-monitor-logs-for-lambda-function
             Tags:
                 LambdaName: cron-build-bgs-hero-stats
-            Events:
-                # Populate the info on the last hour
-                LastPeriodTrigger:
-                    Type: Schedule
-                    Properties:
-                        Schedule: cron(0 4 * * ? *) # Triggers every day at 4 AM
-
-    BuildBgsHeroStatsSliceFunction:
-        Type: AWS::Serverless::Function
-        Properties:
-            Handler: build-bgs-stats-slices.default
-            Runtime: nodejs12.x
-            Timeout: 880
-            CodeUri: ./dist/build-bgs-stats-slices
-            MemorySize: 3000
-            Policies:
-                - AWSLambdaVPCAccessExecutionRole
-                - AmazonSESFullAccess
-                - SecretsManagerReadWrite
-                - AmazonSNSReadOnlyAccess
-                - AmazonS3FullAccess
-            # - arn:aws:iam::478062583808:policy/CloudWatchPolicyForLambda # See https://stackoverflow.com/questions/51166504/disable-cloudwatch-to-monitor-logs-for-lambda-function
-            Tags:
-                LambdaName: cron-build-bgs-hero-stats-slice
-            Events:
-                # Populate the info on the last hour
-                LastPeriodTrigger:
-                    Type: Schedule
-                    Properties:
-                        Schedule: cron(30 */4 * * ? *)
+            # Events:
+            #     # Populate the info on the last hour
+            #     LastPeriodTrigger:
+            #         Type: Schedule
+            #         Properties:
+            #             Schedule: cron(0 4 * * ? *) # Triggers every day at 4 AM
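With both schedules commented out and AWSLambda_FullAccess added, the function is now meant to be triggered directly; the orchestrator branch then fans out to one worker per tribe permutation via lambda.invoke. A minimal sketch for kicking off a full run by hand, using the same aws-sdk v2 API as the diff (the deployed function name and region are assumptions, not taken from the diff):

import AWS from 'aws-sdk';

const lambda = new AWS.Lambda({ region: 'us-west-2' }); // assumed region

const kickOffFullBuild = async () => {
	const result = await lambda
		.invoke({
			FunctionName: 'cron-build-bgs-hero-stats', // hypothetical deployed name
			InvocationType: 'Event', // async invocation, as in dispatchNewLambdas
			Payload: JSON.stringify({}), // no `permutation` field => orchestrator branch
		})
		.promise();
	console.log('invocation accepted, status', result.StatusCode);
};

kickOffFullBuild().catch(err => console.error('invocation failed', err));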