Skip to content

Commit

Permalink
fix inserting pro data
Browse files — browse the repository at this point in the history
  • Loading branch information
howardchung committed Dec 1, 2023
1 parent be2dce9 commit b5be678
Show file tree
Hide file tree
Showing 3 changed files with 34 additions and 30 deletions.
8 changes: 8 additions & 0 deletions sql/create_tables.sql
Original file line number Diff line number Diff line change
Expand Up @@ -451,6 +451,14 @@ CREATE TABLE IF NOT EXISTS subscriber (
status varchar(100)
);

-- Per-match blob storage: one row per match, with each processing stage's
-- payload kept in its own JSON column (basic ingest, GC data, parsed replay).
CREATE TABLE IF NOT EXISTS match_blobs (
  match_id bigint,
  basic json,
  gcdata json,
  replay json,
  PRIMARY KEY (match_id)
);

DO $$
BEGIN
IF EXISTS (SELECT 1 FROM pg_roles WHERE rolname = 'readonly') THEN
Expand Down
6 changes: 3 additions & 3 deletions store/queries.js
Original file line number Diff line number Diff line change
Expand Up @@ -1095,12 +1095,12 @@ function insertMatch(match, options, cb) {
async function upsertMatchPostgres(cb) {
// Check if leagueid is premium/professional
const result =
match.leagueid &&
match.leagueid ?
(await db.raw(
`select leagueid from leagues where leagueid = ? and (tier = 'premium' OR tier = 'professional')`,
[match.leagueid]
));
const pass = result?.rows?.length > 0 && utility.isProMatch(match);
)) : null;
const pass = result?.rows?.length > 0;
if (!pass) {
// Skip this if not a pro match
return cb();
Expand Down
50 changes: 23 additions & 27 deletions svc/cassandraDelete.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@ function genRandomNumber(byteCount, radix) {
);
}

const PARSED_DATA_DELETE_ID = 0;

async function start() {
// Get the current max_match_id from postgres, subtract 200000000
const max = (await db.raw('select max(match_id) from public_matches'))
Expand All @@ -29,39 +27,32 @@ async function start() {
[randomBigint.toString()],
{
prepare: true,
fetchSize: 500,
fetchSize: 100,
autoPage: true,
}
);

// Put the ones that don't have parsed data or are too old into an array
const ids = result.rows
const unparsedIds = result.rows
.filter(
(result) =>
(result.version == null ||
result.match_id < PARSED_DATA_DELETE_ID) &&
result.version == null &&
result.match_id < limit
)
.map((result) => result.match_id);
console.log(
ids.length,
'out of',
const parsedIds = result.rows
.filter((result) => result.version != null && result.match_id < limit)
.map((result) => result.match_id);
console.log('%s unparsed to delete, %s parsed to archive, %s total, del ID: %s',
unparsedIds.length,
parsedIds.length,
result.rows.length,
'to delete, ex:',
ids[0]?.toString()
);

// Delete matches
await Promise.all(
ids.map((id) =>
cassandra.execute('DELETE from matches where match_id = ?', [id], {
prepare: true,
})
)
unparsedIds[0]?.toString()
);
// NOTE: Due to lack of transactions there might be some orphaned player_matches without match
// Delete player_matches
await Promise.all(
ids.map((id) =>
unparsedIds.map((id) =>
cassandra.execute(
'DELETE from player_matches where match_id = ?',
[id],
Expand All @@ -71,9 +62,14 @@ async function start() {
)
)
);
const parsedIds = result.rows
.filter((result) => result.version != null)
.map((result) => result.match_id);
// Delete matches
await Promise.all(
unparsedIds.map((id) =>
cassandra.execute('DELETE from matches where match_id = ?', [id], {
prepare: true,
})
)
);
config.MATCH_ARCHIVE_S3_ENDPOINT &&
(await Promise.all(parsedIds.map((id) => doArchive(id))));

Expand Down Expand Up @@ -102,16 +98,16 @@ async function doArchive(matchId) {
const result = await archivePut(matchId.toString(), blob);
if (result) {
// TODO Delete from Cassandra after archival
// await cassandra.execute("DELETE from matches where match_id = ?", [matchId], {
// prepare: true,
// });
// await cassandra.execute(
// "DELETE from player_matches where match_id = ?",
// [matchId],
// {
// prepare: true,
// }
// );
// await cassandra.execute("DELETE from matches where match_id = ?", [matchId], {
// prepare: true,
// });
}
return;
}
Expand Down

0 comments on commit b5be678

Please sign in to comment.