From 60687dbd78509f0c1e9273e65edf7307824e1cf9 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Mon, 15 May 2023 13:54:30 -0600 Subject: [PATCH 01/56] chore: add brc20 migrations --- migrations/1684174644336_brc20-deploys.ts | 53 ++++++++++++++++++++ migrations/1684175792528_brc20-mints.ts | 49 +++++++++++++++++++ migrations/1684175795592_brc20-transfers.ts | 54 +++++++++++++++++++++ migrations/1684175810998_brc20-balances.ts | 31 ++++++++++++ 4 files changed, 187 insertions(+) create mode 100644 migrations/1684174644336_brc20-deploys.ts create mode 100644 migrations/1684175792528_brc20-mints.ts create mode 100644 migrations/1684175795592_brc20-transfers.ts create mode 100644 migrations/1684175810998_brc20-balances.ts diff --git a/migrations/1684174644336_brc20-deploys.ts b/migrations/1684174644336_brc20-deploys.ts new file mode 100644 index 00000000..3f1de7a1 --- /dev/null +++ b/migrations/1684174644336_brc20-deploys.ts @@ -0,0 +1,53 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('brc20_deploys', { + id: { + type: 'bigserial', + primaryKey: true, + }, + inscription_id: { + type: 'bigint', + notNull: true, + }, + block_height: { + type: 'bigint', + notNull: true, + }, + tx_id: { + type: 'text', + notNull: true, + }, + address: { + type: 'text', + notNull: true, + }, + ticker: { + type: 'text', + notNull: true, + }, + max: { + type: 'numeric', + notNull: true, + }, + limit: { + type: 'numeric', + notNull: true, + }, + decimals: { + type: 'numeric', + notNull: true, + }, + }); + pgm.createConstraint( + 'brc20_deploys', + 'brc20_deploys_inscription_id_fk', + 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' + ); + pgm.createConstraint('brc20_deploys', 'brc20_deploys_ticker_unique', 'UNIQUE(ticker)'); + pgm.createIndex('brc20_deploys', ['block_height']); + pgm.createIndex('brc20_deploys', ['address']); +} diff --git a/migrations/1684175792528_brc20-mints.ts b/migrations/1684175792528_brc20-mints.ts new file mode 100644 index 00000000..794cadc3 --- /dev/null +++ b/migrations/1684175792528_brc20-mints.ts @@ -0,0 +1,49 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('brc20_mints', { + id: { + type: 'bigserial', + primaryKey: true, + }, + inscription_id: { + type: 'bigint', + notNull: true, + }, + brc20_deploy_id: { + type: 'bigint', + notNull: true, + }, + block_height: { + type: 'bigint', + notNull: true, + }, + tx_id: { + type: 'text', + notNull: true, + }, + address: { + type: 'text', + notNull: true, + }, + amount: { + type: 'numeric', + notNull: true, + }, + }); + pgm.createConstraint( + 'brc20_mints', + 'brc20_mints_inscription_id_fk', + 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'brc20_mints', + 'brc20_mints_brc20_deploy_id_fk', + 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' + ); + pgm.createIndex('brc20_mints', ['block_height']); + pgm.createIndex('brc20_mints', ['address']); +} diff --git a/migrations/1684175795592_brc20-transfers.ts b/migrations/1684175795592_brc20-transfers.ts new file mode 100644 index 
00000000..c6a0e0f2 --- /dev/null +++ b/migrations/1684175795592_brc20-transfers.ts @@ -0,0 +1,54 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('brc20_transfers', { + id: { + type: 'bigserial', + primaryKey: true, + }, + inscription_id: { + type: 'bigint', + notNull: true, + }, + brc20_deploy_id: { + type: 'bigint', + notNull: true, + }, + block_height: { + type: 'bigint', + notNull: true, + }, + tx_id: { + type: 'text', + notNull: true, + }, + from_address: { + type: 'text', + notNull: true, + }, + to_address: { + type: 'text', + notNull: true, + }, + amount: { + type: 'numeric', + notNull: true, + }, + }); + pgm.createConstraint( + 'brc20_transfers', + 'brc20_transfers_inscription_id_fk', + 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'brc20_transfers', + 'brc20_transfers_brc20_deploy_id_fk', + 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' + ); + pgm.createIndex('brc20_transfers', ['block_height']); + pgm.createIndex('brc20_transfers', ['from_address']); + pgm.createIndex('brc20_transfers', ['to_address']); +} diff --git a/migrations/1684175810998_brc20-balances.ts b/migrations/1684175810998_brc20-balances.ts new file mode 100644 index 00000000..c3f1d991 --- /dev/null +++ b/migrations/1684175810998_brc20-balances.ts @@ -0,0 +1,31 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('brc20_balances', { + id: { + type: 'bigserial', + primaryKey: true, + }, + brc20_deploy_id: { + type: 'bigint', + notNull: true, + }, + address: { + type: 'text', + notNull: true, + }, + balance: { + type: 'numeric', + notNull: true, + }, + }); + pgm.createConstraint( + 'brc20_balances', + 'brc20_balances_brc20_deploy_id_fk', + 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' + ); + pgm.createIndex('brc20_balances', ['address']); +} From bf4c7f6f27903f18d30ddb7fc2b1a779cc991114 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 16 May 2023 11:39:02 -0600 Subject: [PATCH 02/56] feat: start storing token deploys --- migrations/1684174644336_brc20-deploys.ts | 5 +- src/pg/helpers.ts | 56 +++++- src/pg/pg-store.ts | 117 +++++++++++-- src/pg/types.ts | 24 +++ tests/brc20.test.ts | 200 ++++++++++++++++++++++ tests/helpers.ts | 24 +++ 6 files changed, 405 insertions(+), 21 deletions(-) create mode 100644 tests/brc20.test.ts diff --git a/migrations/1684174644336_brc20-deploys.ts b/migrations/1684174644336_brc20-deploys.ts index 3f1de7a1..6532ad51 100644 --- a/migrations/1684174644336_brc20-deploys.ts +++ b/migrations/1684174644336_brc20-deploys.ts @@ -35,10 +35,9 @@ export function up(pgm: MigrationBuilder): void { }, limit: { type: 'numeric', - notNull: true, }, decimals: { - type: 'numeric', + type: 'int', notNull: true, }, }); @@ -47,7 +46,7 @@ export function up(pgm: MigrationBuilder): void { 'brc20_deploys_inscription_id_fk', 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' ); - pgm.createConstraint('brc20_deploys', 'brc20_deploys_ticker_unique', 'UNIQUE(ticker)'); + pgm.createIndex('brc20_deploys', 
'LOWER(ticker)', { unique: true }); pgm.createIndex('brc20_deploys', ['block_height']); pgm.createIndex('brc20_deploys', ['address']); } diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index ed5ba7b5..04521828 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -7,16 +7,54 @@ import { DbInscriptionInsert, } from './types'; -const OpJson = Type.Object( +const OpJsonSchema = Type.Object( { p: Type.String(), op: Type.String(), }, { additionalProperties: true } ); -const OpJsonC = TypeCompiler.Compile(OpJson); -export type OpJson = Static; +const OpJsonC = TypeCompiler.Compile(OpJsonSchema); +export type OpJson = Static; +const Brc20DeploySchema = Type.Object({ + p: Type.Literal('brc-20'), + op: Type.Literal('deploy'), + tick: Type.String(), + max: Type.String(), + lim: Type.Optional(Type.String()), + dec: Type.Optional(Type.String()), +}); +const Brc20DeployC = TypeCompiler.Compile(Brc20DeploySchema); +export type Brc20Deploy = Static; + +const Brc20MintSchema = Type.Object({ + p: Type.Literal('brc-20'), + op: Type.Literal('mint'), + tick: Type.String(), + amt: Type.String(), +}); +const Brc20MintC = TypeCompiler.Compile(Brc20MintSchema); +export type Brc20Mint = Static; + +const Brc20TransferSchema = Type.Object({ + p: Type.Literal('brc-20'), + op: Type.Literal('transfer'), + tick: Type.String(), + amt: Type.String(), +}); +const Brc20TransferC = TypeCompiler.Compile(Brc20TransferSchema); +export type Brc20Transfer = Static; + +const Brc20Schema = Type.Union([Brc20DeploySchema, Brc20MintSchema, Brc20TransferSchema]); +// const Brc20C = TypeCompiler.Compile(Brc20Schema); +export type Brc20 = Static; + +/** + * Tries to parse a text inscription into an OpJson schema. + * @param inscription - Inscription content + * @returns OpJson + */ export function inscriptionContentToJson(inscription: DbInscriptionInsert): OpJson | undefined { if ( inscription.mime_type.startsWith('text/plain') || @@ -37,6 +75,18 @@ export function inscriptionContentToJson(inscription: DbInscriptionInsert): OpJs } } +export function brc20DeployFromOpJson(json: OpJson): Brc20Deploy | undefined { + if (Brc20DeployC.Check(json)) { + return json; + } +} + +export function brc20MintFromOpJson(json: OpJson): Brc20Mint | undefined { + if (Brc20MintC.Check(json)) { + return json; + } +} + /** * Returns which inscription count is required based on filters sent to the index endpoint. 
* @param filters - DbInscriptionIndexFilters diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 66d31683..ec603416 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -1,14 +1,23 @@ import { Order, OrderBy } from '../api/schemas'; import { normalizedHexString } from '../api/util/helpers'; import { OrdinalSatoshi, SatoshiRarity } from '../api/util/ordinal-satoshi'; -import { ChainhookPayload, InscriptionEvent } from '../chainhook/schemas'; +import { ChainhookPayload } from '../chainhook/schemas'; import { ENV } from '../env'; import { logger } from '../logger'; -import { getIndexResultCountType, inscriptionContentToJson } from './helpers'; +import { + Brc20Deploy, + Brc20Mint, + brc20DeployFromOpJson, + brc20MintFromOpJson, + getIndexResultCountType, + inscriptionContentToJson, +} from './helpers'; import { runMigrations } from './migrations'; import { connectPostgres } from './postgres-tools'; import { BasePgStore } from './postgres-tools/base-pg-store'; import { + BRC20_DEPLOYS_COLUMNS, + DbBrc20Deploy, DbFullyLocatedInscriptionResult, DbInscriptionContent, DbInscriptionIndexFilters, @@ -474,6 +483,18 @@ export class PgStore extends BasePgStore { } } + async getBrc20Deploy(args: { ticker: string }): Promise { + const results = await this.sql` + SELECT ${this.sql(BRC20_DEPLOYS_COLUMNS)} + FROM brc20_deploys + WHERE LOWER(ticker) = LOWER(${args.ticker}) + LIMIT 1 + `; + if (results.count === 1) { + return results[0]; + } + } + async refreshMaterializedView(viewName: string) { const isProd = process.env.NODE_ENV === 'production'; await this.sql`REFRESH MATERIALIZED VIEW ${ @@ -553,21 +574,20 @@ export class PgStore extends BasePgStore { sat_coinbase_height = EXCLUDED.sat_coinbase_height, timestamp = EXCLUDED.timestamp `; + // TODO: No valid action can occur via the spending of an ordinal via transaction fee. If it + // occurs during the inscription process then the resulting inscription is ignored. If it + // occurs during the second phase of the transfer process, the balance is returned to the + // senders available balance. const json = inscriptionContentToJson(args.inscription); if (json) { - const values = { - inscription_id, - p: json.p, - op: json.op, - content: json, - }; - await sql` - INSERT INTO json_contents ${sql(values)} - ON CONFLICT ON CONSTRAINT json_contents_inscription_id_unique DO UPDATE SET - p = EXCLUDED.p, - op = EXCLUDED.op, - content = EXCLUDED.content - `; + // Is this a BRC-20 operation? + const deploy = brc20DeployFromOpJson(json); + if (deploy) { + await this.insertBrc20Deploy({ deploy, inscription_id, location: args.location }); + } else { + const mint = brc20MintFromOpJson(json); + if (mint) await this.insertBrc20Mint({ mint, inscription_id, location: args.location }); + } } }); return inscription_id; @@ -671,4 +691,71 @@ export class PgStore extends BasePgStore { } }); } + + private async insertBrc20Deploy(args: { + deploy: Brc20Deploy; + inscription_id: number; + location: DbLocationInsert; + }): Promise { + const deploy = { + inscription_id: args.inscription_id, + block_height: args.location.block_height, + tx_id: args.location.tx_id, + address: args.location.address, + ticker: args.deploy.tick, + max: args.deploy.max, + limit: args.deploy.lim ?? null, + decimals: args.deploy.dec ?? 
18, + }; + const insertion = await this.sql` + INSERT INTO brc20_deploys ${this.sql(deploy)} + ON CONFLICT (LOWER(ticker)) DO NOTHING + `; + if (insertion.count > 0) { + logger.info( + `PgStore [BRC-20] inserted deploy for ${args.deploy.tick} at block ${args.location.block_height}` + ); + } else { + logger.debug( + `PgStore [BRC-20] attempted to insert deploy for ${args.deploy.tick} at block ${args.location.block_height} but a previous entry existed` + ); + } + } + + private async insertBrc20Mint(args: { + mint: Brc20Mint; + inscription_id: number; + location: DbLocationInsert; + }): Promise { + await this.sqlWriteTransaction(async sql => { + // Get which token this belongs to + const deploy = await sql<{ id: number }[]>` + SELECT id FROM brc20_deploys WHERE ticker = ${args.mint.tick} + `; + if (deploy.count === 0) { + logger.debug( + `PgStore [BRC-20] attempted to insert mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` + ); + return; + } + const deploy_id = deploy[0].id; + // TODO: The first mint to exceed the maximum supply will receive the fraction that is valid. + // (ex. 21,000,000 maximum supply, 20,999,242 circulating supply, and 1000 mint inscription = + // 758 balance state applied) + + // TODO: Check limit per mint + const mint = { + inscription_id: args.inscription_id, + brc20_deploy_id: deploy_id, + block_height: args.location.block_height, + tx_id: args.location.tx_id, + address: args.location.address, + amount: args.mint.amt, + }; + await this.sql`INSERT INTO brc20_mints ${this.sql(mint)}`; + logger.info( + `PgStore [BRC-20] inserted mint for ${args.mint.tick} (${args.mint.amt}) at block ${args.location.block_height}` + ); + }); + } } diff --git a/src/pg/types.ts b/src/pg/types.ts index eb542b89..e01aa2e5 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -212,3 +212,27 @@ export enum DbInscriptionIndexResultCountType { /** Filtered by custom arguments */ custom, } + +export type DbBrc20Deploy = { + id: string; + inscription_id: string; + block_height: string; + tx_id: string; + address: string; + ticker: string; + max: string; + limit?: string; + decimals: number; +}; + +export const BRC20_DEPLOYS_COLUMNS = [ + 'id', + 'inscription_id', + 'block_height', + 'tx_id', + 'address', + 'ticker', + 'max', + 'limit', + 'decimals', +]; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts new file mode 100644 index 00000000..65835a62 --- /dev/null +++ b/tests/brc20.test.ts @@ -0,0 +1,200 @@ +import { cycleMigrations } from '../src/pg/migrations'; +import { PgStore } from '../src/pg/pg-store'; +import { TestChainhookPayloadBuilder, brc20Reveal } from './helpers'; + +describe('BRC-20', () => { + let db: PgStore; + + beforeEach(async () => { + db = await PgStore.connect({ skipMigrations: true }); + await cycleMigrations(); + }); + + afterEach(async () => { + await db.close(); + }); + + describe('deploy', () => { + test('deploy is saved', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + const 
deploy = await db.getBrc20Deploy({ ticker: 'PEPE' }); + expect(deploy).toStrictEqual({ + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + block_height: '775617', + decimals: 18, + id: '1', + inscription_id: '1', + limit: null, + max: '21000000', + ticker: 'PEPE', + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }); + }); + + test('ignores deploys for existing token', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '000000000000000000021a0207fa97024506baaa74396822fb0a07ac20e70148', + }) + .transaction({ + hash: '3f8067a6e9b45308b5a090c2987feeb2d08cbaf814ef2ffabad7c381b62f5f7e', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '19000000', + }, + number: 6, + tx_id: '3f8067a6e9b45308b5a090c2987feeb2d08cbaf814ef2ffabad7c381b62f5f7e', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + const deploy = await db.getBrc20Deploy({ ticker: 'PEPE' }); + expect(deploy).toStrictEqual({ + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + block_height: '775617', + decimals: 18, + id: '1', + inscription_id: '1', + limit: null, + max: '21000000', + ticker: 'PEPE', + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }); + }); + + test('ignores case insensitive deploy for existing token', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '000000000000000000021a0207fa97024506baaa74396822fb0a07ac20e70148', + }) + .transaction({ + hash: '3f8067a6e9b45308b5a090c2987feeb2d08cbaf814ef2ffabad7c381b62f5f7e', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'pepe', + max: '19000000', + }, + number: 6, + tx_id: '3f8067a6e9b45308b5a090c2987feeb2d08cbaf814ef2ffabad7c381b62f5f7e', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + const deploy = await db.getBrc20Deploy({ ticker: 'PEPE' }); + expect(deploy).toStrictEqual({ + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + block_height: '775617', + decimals: 18, + id: '1', + inscription_id: '1', + 
limit: null, + max: '21000000', + ticker: 'PEPE', + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }); + const deploy2 = await db.getBrc20Deploy({ ticker: 'pepe' }); // Lowercase + expect(deploy2).toStrictEqual({ + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + block_height: '775617', + decimals: 18, + id: '1', + inscription_id: '1', + limit: null, + max: '21000000', + ticker: 'PEPE', + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }); + }); + }); +}); diff --git a/tests/helpers.ts b/tests/helpers.ts index ab059308..cf6cba4d 100644 --- a/tests/helpers.ts +++ b/tests/helpers.ts @@ -8,6 +8,7 @@ import { InscriptionTransferred, Transaction, } from '../src/chainhook/schemas'; +import { Brc20 } from '../src/pg/helpers'; export type TestFastifyServer = FastifyInstance< Server, @@ -92,3 +93,26 @@ export class TestChainhookPayloadBuilder { return this.payload; } } + +export function brc20Reveal(args: { + json: Brc20; + number: number; + address: string; + tx_id: string; +}): InscriptionRevealed { + const content = Buffer.from(JSON.stringify(args.json), 'utf-8'); + return { + content_bytes: `0x${content.toString('hex')}`, + content_type: 'text/plain;charset=utf-8', + content_length: content.length, + inscription_number: args.number, + inscription_fee: 2000, + inscription_id: `${args.tx_id}i0`, + inscription_output_value: 10000, + inscriber_address: args.address, + ordinal_number: 0, + ordinal_block_height: 0, + ordinal_offset: 0, + satpoint_post_inscription: `${args.tx_id}:0:0`, + }; +} From 32e90f73696aa403417869f0c71fa76da115048e Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 17 May 2023 10:44:50 -0600 Subject: [PATCH 03/56] feat: mints with balance changes --- migrations/1684175810998_brc20-balances.ts | 15 ++- src/pg/pg-store.ts | 60 ++++++++--- src/pg/types.ts | 9 ++ tests/brc20.test.ts | 120 +++++++++++++++++++++ 4 files changed, 191 insertions(+), 13 deletions(-) diff --git a/migrations/1684175810998_brc20-balances.ts b/migrations/1684175810998_brc20-balances.ts index c3f1d991..e75e4b79 100644 --- a/migrations/1684175810998_brc20-balances.ts +++ b/migrations/1684175810998_brc20-balances.ts @@ -13,11 +13,19 @@ export function up(pgm: MigrationBuilder): void { type: 'bigint', notNull: true, }, + block_height: { + type: 'bigint', + notNull: true, + }, address: { type: 'text', notNull: true, }, - balance: { + avail_balance: { + type: 'numeric', + notNull: true, + }, + trans_balance: { type: 'numeric', notNull: true, }, @@ -27,5 +35,10 @@ export function up(pgm: MigrationBuilder): void { 'brc20_balances_brc20_deploy_id_fk', 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' ); + pgm.createConstraint( + 'brc20_balances', + 'brc20_balances_brc20_deploy_id_address_unique', + 'UNIQUE(brc20_deploy_id, address)' + ); pgm.createIndex('brc20_balances', ['address']); } diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index ec603416..6ddded76 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -1,3 +1,4 @@ +import BigNumber from 'bignumber.js'; import { Order, OrderBy } from '../api/schemas'; import { normalizedHexString } from '../api/util/helpers'; import { OrdinalSatoshi, SatoshiRarity } from '../api/util/ordinal-satoshi'; @@ -17,6 +18,7 @@ import { connectPostgres } from './postgres-tools'; import { BasePgStore } from './postgres-tools/base-pg-store'; import { BRC20_DEPLOYS_COLUMNS, + DbBrc20Balance, DbBrc20Deploy, DbFullyLocatedInscriptionResult, 
DbInscriptionContent, @@ -187,7 +189,7 @@ export class PgStore extends BasePgStore { return result[0].count; } - async geSatRarityInscriptionCount(satRarity?: SatoshiRarity[]): Promise { + async getSatRarityInscriptionCount(satRarity?: SatoshiRarity[]): Promise { if (!satRarity) return 0; const result = await this.sql<{ count: number }[]>` SELECT SUM(count) AS count @@ -384,7 +386,7 @@ export class PgStore extends BasePgStore { total = await this.getMimeTypeInscriptionCount(filters?.mime_type); break; case DbInscriptionIndexResultCountType.satRarity: - total = await this.geSatRarityInscriptionCount(filters?.sat_rarity); + total = await this.getSatRarityInscriptionCount(filters?.sat_rarity); break; } return { @@ -495,6 +497,21 @@ export class PgStore extends BasePgStore { } } + async getBrc20Balance(args: { + ticker: string; + address: string; + }): Promise { + const results = await this.sql` + SELECT d.ticker, d.decimals, b.address, b.block_height, b.avail_balance, b.trans_balance + FROM brc20_balances AS b + INNER JOIN brc20_deploys AS d ON d.id = b.brc20_deploy_id + WHERE LOWER(d.ticker) = LOWER(${args.ticker}) AND b.address = ${args.address} + `; + if (results.count === 1) { + return results[0]; + } + } + async refreshMaterializedView(viewName: string) { const isProd = process.env.NODE_ENV === 'production'; await this.sql`REFRESH MATERIALIZED VIEW ${ @@ -707,6 +724,7 @@ export class PgStore extends BasePgStore { limit: args.deploy.lim ?? null, decimals: args.deploy.dec ?? 18, }; + // TODO: Maximum supply cannot exceed uint64_max const insertion = await this.sql` INSERT INTO brc20_deploys ${this.sql(deploy)} ON CONFLICT (LOWER(ticker)) DO NOTHING @@ -728,34 +746,52 @@ export class PgStore extends BasePgStore { location: DbLocationInsert; }): Promise { await this.sqlWriteTransaction(async sql => { - // Get which token this belongs to - const deploy = await sql<{ id: number }[]>` - SELECT id FROM brc20_deploys WHERE ticker = ${args.mint.tick} - `; - if (deploy.count === 0) { + // Is the token deployed? + const deploy = await this.getBrc20Deploy({ ticker: args.mint.tick }); + if (!deploy) { logger.debug( - `PgStore [BRC-20] attempted to insert mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` + `PgStore [BRC-20] ignoring mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` ); return; } - const deploy_id = deploy[0].id; // TODO: The first mint to exceed the maximum supply will receive the fraction that is valid. // (ex. 21,000,000 maximum supply, 20,999,242 circulating supply, and 1000 mint inscription = // 758 balance state applied) - // TODO: Check limit per mint + // Is the mint amount within the allowed token limits? 
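// Aside (illustration, not part of this patch): the check that follows only guards the per-mint
// limit; the supply-clipping rule from the TODO above is still pending at this point in the series.
// A minimal sketch of both rules with bignumber.js is below — the helper name `clampMintAmount`
// and its `mintedSoFar` argument are hypothetical and only meant to show the arithmetic.
import BigNumber from 'bignumber.js';

/** Returns the creditable mint amount, or undefined if the mint should be ignored. */
function clampMintAmount(args: {
  amt: string; // amount requested by the mint inscription
  limit: string | null; // per-mint limit from the deploy (`lim`), if any
  max: string; // maximum supply from the deploy
  mintedSoFar: string; // circulating supply before this mint (hypothetical input)
}): BigNumber | undefined {
  const amount = BigNumber(args.amt);
  // A mint larger than the declared per-mint limit is invalid and ignored.
  if (args.limit && amount.isGreaterThan(args.limit)) return undefined;
  const remaining = BigNumber(args.max).minus(args.mintedSoFar);
  // Token is fully minted: nothing left to credit.
  if (remaining.isLessThanOrEqualTo(0)) return undefined;
  // The first mint to exceed max supply only credits the valid fraction,
  // e.g. max 21,000,000 with 20,999,242 minted and a 1,000 mint credits 758.
  return BigNumber.min(amount, remaining);
}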
+ if (deploy.limit && BigNumber(args.mint.amt).isGreaterThan(deploy.limit)) { + logger.debug( + `PgStore [BRC-20] ignoring mint for ${args.mint.tick} that exceeds mint limit of ${deploy.limit} at block ${args.location.block_height}` + ); + return; + } const mint = { inscription_id: args.inscription_id, - brc20_deploy_id: deploy_id, + brc20_deploy_id: deploy.id, block_height: args.location.block_height, tx_id: args.location.tx_id, address: args.location.address, amount: args.mint.amt, }; - await this.sql`INSERT INTO brc20_mints ${this.sql(mint)}`; + await sql`INSERT INTO brc20_mints ${sql(mint)}`; logger.info( `PgStore [BRC-20] inserted mint for ${args.mint.tick} (${args.mint.amt}) at block ${args.location.block_height}` ); + + // Upsert available balance for minting address + const balance = { + brc20_deploy_id: deploy.id, + block_height: args.location.block_height, + address: args.location.address, + avail_balance: args.mint.amt, + trans_balance: 0, + }; + await sql` + INSERT INTO brc20_balances ${sql(balance)} + ON CONFLICT ON CONSTRAINT brc20_balances_brc20_deploy_id_address_unique DO UPDATE SET + block_height = EXCLUDED.block_height, + avail_balance = brc20_balances.avail_balance + EXCLUDED.avail_balance + `; }); } } diff --git a/src/pg/types.ts b/src/pg/types.ts index e01aa2e5..a570b1aa 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -236,3 +236,12 @@ export const BRC20_DEPLOYS_COLUMNS = [ 'limit', 'decimals', ]; + +export type DbBrc20Balance = { + ticker: string; + decimals: number; + address: string; + block_height: string; + avail_balance: string; + trans_balance: string; +}; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 65835a62..3698772a 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -196,5 +196,125 @@ describe('BRC-20', () => { tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }); }); + + test.skip('deploy exceeds decimal limit', async () => {}); + + test.skip('deploy exceeds supply limit', async () => {}); + }); + + describe('mint', () => { + test('valid mint is saved and balance reflected', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '250000', + }, + number: 6, + tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + + const balance = await db.getBrc20Balance({ + ticker: 'pepe', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }); + expect(balance).toStrictEqual({ + address: 
'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + avail_balance: '250000', + block_height: '775618', + decimals: 18, + ticker: 'PEPE', + trans_balance: '0', + }); + + // New mint + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775619, + hash: '0000000000000000000077163227125e51d838787d6af031bc9b55a3a1cc1b2c', + }) + .transaction({ + hash: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'pepe', + amt: '100000', + }, + number: 6, + tx_id: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + + const balance2 = await db.getBrc20Balance({ + ticker: 'pepe', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }); + expect(balance2).toStrictEqual({ + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + avail_balance: '350000', + block_height: '775619', + decimals: 18, + ticker: 'PEPE', + trans_balance: '0', + }); + }); + + test('mint exceeds token supply', async () => {}); + + test('ignores mint for non-existent token', async () => {}); + + test('mint exceeds token mint limit', async () => {}); + + test('ignores mint for token with no more supply', async () => {}); }); }); From 70982983631dbccd6233b73abfc69e465dee8cd5 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 17 May 2023 15:40:50 -0600 Subject: [PATCH 04/56] chore: events table draft --- migrations/1684344022290_brc20-events.ts | 60 +++++++++++++++ src/pg/pg-store.ts | 96 ++++++++++++++++++------ src/pg/types.ts | 28 +++++++ tests/brc20.test.ts | 8 +- 4 files changed, 166 insertions(+), 26 deletions(-) create mode 100644 migrations/1684344022290_brc20-events.ts diff --git a/migrations/1684344022290_brc20-events.ts b/migrations/1684344022290_brc20-events.ts new file mode 100644 index 00000000..dfc0befc --- /dev/null +++ b/migrations/1684344022290_brc20-events.ts @@ -0,0 +1,60 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('brc20_events', { + id: { + type: 'bigserial', + primaryKey: true, + }, + inscription_id: { + type: 'bigint', + notNull: true, + }, + brc20_deploy_id: { + type: 'bigint', + notNull: true, + }, + deploy_id: { + type: 'bigint', + }, + mint_id: { + type: 'bigint', + }, + transfer_id: { + type: 'bigint', + }, + }); + pgm.createConstraint( + 'brc20_events', + 'brc20_events_inscription_id_fk', + 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'brc20_events', + 'brc20_events_brc20_deploy_id_fk', + 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'brc20_events', + 'brc20_events_deploy_id_fk', + 'FOREIGN KEY(deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'brc20_events', + 'brc20_events_mint_id_fk', + 'FOREIGN KEY(mint_id) REFERENCES brc20_mints(id) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'brc20_events', + 'brc20_events_transfer_id_fk', + 'FOREIGN KEY(transfer_id) REFERENCES brc20_transfers(id) ON DELETE CASCADE' + ); + pgm.createConstraint( + 'brc20_events', + 'brc20_valid_event', + 
'CHECK(NUM_NONNULLS(deploy_id, mint_id, transfer_id) = 1)' + ); +} diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 6ddded76..52725960 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -18,6 +18,7 @@ import { connectPostgres } from './postgres-tools'; import { BasePgStore } from './postgres-tools/base-pg-store'; import { BRC20_DEPLOYS_COLUMNS, + DbBrc20DeployInsert, DbBrc20Balance, DbBrc20Deploy, DbFullyLocatedInscriptionResult, @@ -34,6 +35,8 @@ import { DbPaginatedResult, JSON_CONTENTS_COLUMNS, LOCATIONS_COLUMNS, + DbBrc20EventInsert, + BRC20_EVENTS_COLUMNS, } from './types'; type InscriptionIdentifier = { genesis_id: string } | { number: number }; @@ -497,6 +500,12 @@ export class PgStore extends BasePgStore { } } + /** + * Returns an address balance for a BRC-20 token. + * @param ticker - BRC-20 ticker + * @param address - Owner address + * @returns `DbBrc20Balance` + */ async getBrc20Balance(args: { ticker: string; address: string; @@ -512,6 +521,24 @@ export class PgStore extends BasePgStore { } } + async getBrc20History(args: { ticker: string } & DbInscriptionIndexPaging): Promise { + const results = await this.sql` + WITH events AS ( + SELECT ${this.sql(BRC20_EVENTS_COLUMNS)} + FROM brc20_events AS e + INNER JOIN brc20_deploys AS d ON d.id = e.brc20_deploy_id + INNER JOIN inscriptions AS i ON i.id = e.inscription_id + WHERE LOWER(d.ticker) = LOWER(${args.ticker}) + ORDER BY i.number DESC + LIMIT ${args.limit} + OFFSET ${args.offset} + ) + SELECT * + FROM events + INNER JOIN + `; + } + async refreshMaterializedView(viewName: string) { const isProd = process.env.NODE_ENV === 'production'; await this.sql`REFRESH MATERIALIZED VIEW ${ @@ -714,30 +741,51 @@ export class PgStore extends BasePgStore { inscription_id: number; location: DbLocationInsert; }): Promise { - const deploy = { - inscription_id: args.inscription_id, - block_height: args.location.block_height, - tx_id: args.location.tx_id, - address: args.location.address, - ticker: args.deploy.tick, - max: args.deploy.max, - limit: args.deploy.lim ?? null, - decimals: args.deploy.dec ?? 18, - }; - // TODO: Maximum supply cannot exceed uint64_max - const insertion = await this.sql` - INSERT INTO brc20_deploys ${this.sql(deploy)} - ON CONFLICT (LOWER(ticker)) DO NOTHING - `; - if (insertion.count > 0) { - logger.info( - `PgStore [BRC-20] inserted deploy for ${args.deploy.tick} at block ${args.location.block_height}` - ); - } else { - logger.debug( - `PgStore [BRC-20] attempted to insert deploy for ${args.deploy.tick} at block ${args.location.block_height} but a previous entry existed` - ); - } + await this.sqlWriteTransaction(async sql => { + const address = args.location.address; + if (!address) { + logger.debug( + `PgStore [BRC-20] ignoring deploy with null address for ${args.deploy.tick} at block ${args.location.block_height}` + ); + return; + } + const deploy: DbBrc20DeployInsert = { + inscription_id: args.inscription_id, + block_height: args.location.block_height, + tx_id: args.location.tx_id, + address: address, + ticker: args.deploy.tick, + max: args.deploy.max, + limit: args.deploy.lim ?? null, + decimals: args.deploy.dec ?? 
'18', + }; + // TODO: Maximum supply cannot exceed uint64_max + const insertion = await sql<{ id: string }[]>` + INSERT INTO brc20_deploys ${sql(deploy)} + ON CONFLICT (LOWER(ticker)) DO NOTHING + RETURNING id + `; + if (insertion.count > 0) { + // Add to history + const event: DbBrc20EventInsert = { + inscription_id: args.inscription_id, + brc20_deploy_id: insertion[0].id, + deploy_id: insertion[0].id, + mint_id: null, + transfer_id: null, + }; + await sql` + INSERT INTO brc20_events ${sql(event)} + `; + logger.info( + `PgStore [BRC-20] inserted deploy for ${args.deploy.tick} at block ${args.location.block_height}` + ); + } else { + logger.debug( + `PgStore [BRC-20] ignoring duplicate deploy for ${args.deploy.tick} at block ${args.location.block_height}` + ); + } + }); } private async insertBrc20Mint(args: { diff --git a/src/pg/types.ts b/src/pg/types.ts index a570b1aa..2b205887 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -213,6 +213,17 @@ export enum DbInscriptionIndexResultCountType { custom, } +export type DbBrc20DeployInsert = { + inscription_id: number; + block_height: number; + tx_id: string; + address: string; + ticker: string; + max: string; + decimals: string; + limit: string | null; +}; + export type DbBrc20Deploy = { id: string; inscription_id: string; @@ -245,3 +256,20 @@ export type DbBrc20Balance = { avail_balance: string; trans_balance: string; }; + +export type DbBrc20EventInsert = { + inscription_id: number; + brc20_deploy_id: string; + deploy_id: string | null; + mint_id: string | null; + transfer_id: string | null; +}; + +export const BRC20_EVENTS_COLUMNS = [ + 'id', + 'inscription_id', + 'brc20_deploy_id', + 'deploy_id', + 'mint_id', + 'transfer_id', +]; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 3698772a..c3ce0e49 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -203,7 +203,7 @@ describe('BRC-20', () => { }); describe('mint', () => { - test('valid mint is saved and balance reflected', async () => { + test('valid mints are saved and balance reflected', async () => { await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -309,7 +309,11 @@ describe('BRC-20', () => { }); }); - test('mint exceeds token supply', async () => {}); + test('rollback mints deduct balance correctly', async () => {}); + + test.skip('mint exceeds token supply', async () => {}); + + test('mints in same block are applied in order', async () => {}); test('ignores mint for non-existent token', async () => {}); From f9c66540b9d173d2981bc2af5ee13fd082dc5547 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 17 May 2023 16:27:10 -0600 Subject: [PATCH 05/56] feat: first balance endpoint --- src/api/init.ts | 2 ++ src/api/routes/brc20.ts | 59 +++++++++++++++++++++++++++++++++++++++ src/api/schemas.ts | 14 ++++++++++ src/api/util/helpers.ts | 12 ++++++++ src/pg/pg-store.ts | 32 +++++++++++++-------- tests/brc20.test.ts | 61 ++++++++++++++++++++++++----------------- 6 files changed, 144 insertions(+), 36 deletions(-) create mode 100644 src/api/routes/brc20.ts diff --git a/src/api/init.ts b/src/api/init.ts index f9c796dd..5c701116 100644 --- a/src/api/init.ts +++ b/src/api/init.ts @@ -8,6 +8,7 @@ import { PgStore } from '../pg/pg-store'; import { SatRoutes } from './routes/sats'; import { StatusRoutes } from './routes/status'; import FastifyMetrics from 'fastify-metrics'; +import { Brc20Routes } from './routes/brc20'; export const Api: FastifyPluginAsync< Record, @@ -17,6 +18,7 @@ export const Api: FastifyPluginAsync< await 
fastify.register(StatusRoutes); await fastify.register(InscriptionsRoutes); await fastify.register(SatRoutes); + await fastify.register(Brc20Routes); }; export async function buildApiServer(args: { db: PgStore }) { diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts new file mode 100644 index 00000000..1f1d496c --- /dev/null +++ b/src/api/routes/brc20.ts @@ -0,0 +1,59 @@ +import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; +import { Type } from '@sinclair/typebox'; +import { FastifyPluginCallback } from 'fastify'; +import { Server } from 'http'; +import { + AddressParam, + Brc20BalanceResponseSchema, + Brc20TickersParam, + LimitParam, + OffsetParam, + PaginatedResponse, +} from '../schemas'; +import { DEFAULT_API_LIMIT, parseBrc20Balances } from '../util/helpers'; + +export const Brc20Routes: FastifyPluginCallback< + Record, + Server, + TypeBoxTypeProvider +> = (fastify, options, done) => { + fastify.get( + '/brc-20/balances', + { + schema: { + operationId: 'getBrc20Balances', + summary: 'BRC-20 Balances', + description: 'Retrieves BRC-20 token balances for a Bitcoin address', + tags: ['BRC-20'], + querystring: Type.Object({ + address: AddressParam, + ticker: Type.Optional(Brc20TickersParam), + // Pagination + offset: Type.Optional(OffsetParam), + limit: Type.Optional(LimitParam), + }), + response: { + 200: PaginatedResponse(Brc20BalanceResponseSchema, 'Paginated BRC-20 Balance Response'), + }, + }, + }, + async (request, reply) => { + const limit = request.query.limit ?? DEFAULT_API_LIMIT; + const offset = request.query.offset ?? 0; + const balances = await fastify.db.getBrc20Balances({ + limit, + offset, + address: request.query.address, + ticker: request.query.ticker, + }); + await reply.send({ + limit, + offset, + total: balances.total, + results: parseBrc20Balances(balances.results), + }); + } + ); + + done(); +}; diff --git a/src/api/schemas.ts b/src/api/schemas.ts index 970efdad..49f0979e 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -30,6 +30,10 @@ export const OpenApiSchemaOptions: SwaggerOptions = { name: 'Satoshis', description: 'Endpoints to query Satoshi ordinal and rarity information', }, + { + name: 'BRC-20', + description: 'Endpoints to query BRC-20 token balances and events', + }, ], }, }; @@ -57,6 +61,8 @@ export const AddressesParam = Type.Array(AddressParam, { ], }); +export const Brc20TickersParam = Type.Array(Type.String()); + export const InscriptionIdParam = Type.RegEx(/^[a-fA-F0-9]{64}i[0-9]+$/, { title: 'Inscription ID', description: 'Inscription ID', @@ -323,6 +329,14 @@ export const BlockInscriptionTransferSchema = Type.Object({ }); export type BlockInscriptionTransfer = Static; +export const Brc20BalanceResponseSchema = Type.Object({ + ticker: Type.String({ examples: ['PEPE'] }), + available_balance: Type.String({ examples: ['1500.00000'] }), + transferrable_balance: Type.String({ examples: ['500.00000'] }), + overall_balance: Type.String({ examples: ['2000.00000'] }), +}); +export type Brc20BalanceResponse = Static; + export const NotFoundResponse = Type.Object( { error: Type.Literal('Not found'), diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index b4739ca0..dcd4c3aa 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -1,10 +1,13 @@ +import BigNumber from 'bignumber.js'; import { + DbBrc20Balance, DbFullyLocatedInscriptionResult, DbInscriptionLocationChange, DbLocation, } from '../../pg/types'; import { BlockInscriptionTransfer, + Brc20BalanceResponse, InscriptionLocationResponse, 
InscriptionResponseType, } from '../schemas'; @@ -87,6 +90,15 @@ export function parseBlockTransfers( })); } +export function parseBrc20Balances(items: DbBrc20Balance[]): Brc20BalanceResponse[] { + return items.map(i => ({ + ticker: i.ticker, + available_balance: i.avail_balance, + transferrable_balance: i.trans_balance, + overall_balance: BigNumber(i.avail_balance).plus(i.trans_balance).toString(), + })); +} + /** * Decodes a `0x` prefixed hex string to a buffer. * @param hex - A hex string with a `0x` prefix. diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 52725960..4a5f1879 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -502,23 +502,33 @@ export class PgStore extends BasePgStore { /** * Returns an address balance for a BRC-20 token. - * @param ticker - BRC-20 ticker * @param address - Owner address + * @param ticker - BRC-20 tickers * @returns `DbBrc20Balance` */ - async getBrc20Balance(args: { - ticker: string; - address: string; - }): Promise { - const results = await this.sql` - SELECT d.ticker, d.decimals, b.address, b.block_height, b.avail_balance, b.trans_balance + async getBrc20Balances( + args: { + address: string; + ticker?: string[]; + } & DbInscriptionIndexPaging + ): Promise> { + const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; + const results = await this.sql<(DbBrc20Balance & { total: number })[]>` + SELECT + d.ticker, d.decimals, b.address, b.block_height, b.avail_balance, b.trans_balance, + COUNT(*) OVER() as total FROM brc20_balances AS b INNER JOIN brc20_deploys AS d ON d.id = b.brc20_deploy_id - WHERE LOWER(d.ticker) = LOWER(${args.ticker}) AND b.address = ${args.address} + WHERE + b.address = ${args.address} + ${lowerTickers ? this.sql`AND LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} + LIMIT ${args.limit} + OFFSET ${args.offset} `; - if (results.count === 1) { - return results[0]; - } + return { + total: results[0]?.total ?? 0, + results: results ?? 
[], + }; } async getBrc20History(args: { ticker: string } & DbInscriptionIndexPaging): Promise { diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index c3ce0e49..1c613857 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1,16 +1,20 @@ +import { buildApiServer } from '../src/api/init'; import { cycleMigrations } from '../src/pg/migrations'; import { PgStore } from '../src/pg/pg-store'; -import { TestChainhookPayloadBuilder, brc20Reveal } from './helpers'; +import { TestChainhookPayloadBuilder, TestFastifyServer, brc20Reveal } from './helpers'; describe('BRC-20', () => { let db: PgStore; + let fastify: TestFastifyServer; beforeEach(async () => { db = await PgStore.connect({ skipMigrations: true }); + fastify = await buildApiServer({ db }); await cycleMigrations(); }); afterEach(async () => { + await fastify.close(); await db.close(); }); @@ -204,6 +208,7 @@ describe('BRC-20', () => { describe('mint', () => { test('valid mints are saved and balance reflected', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -224,7 +229,7 @@ describe('BRC-20', () => { }, number: 5, tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + address: address, }) ) .build() @@ -249,24 +254,27 @@ describe('BRC-20', () => { }, number: 6, tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + address: address, }) ) .build() ); - const balance = await db.getBrc20Balance({ - ticker: 'pepe', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }); - expect(balance).toStrictEqual({ - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - avail_balance: '250000', - block_height: '775618', - decimals: 18, - ticker: 'PEPE', - trans_balance: '0', + const response1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, }); + expect(response1.statusCode).toBe(200); + const responseJson1 = response1.json(); + expect(responseJson1.total).toBe(1); + expect(responseJson1.results).toStrictEqual([ + { + ticker: 'PEPE', + available_balance: '250000', + overall_balance: '250000', + transferrable_balance: '0', + }, + ]); // New mint await db.updateInscriptions( @@ -295,18 +303,21 @@ describe('BRC-20', () => { .build() ); - const balance2 = await db.getBrc20Balance({ - ticker: 'pepe', - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - }); - expect(balance2).toStrictEqual({ - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - avail_balance: '350000', - block_height: '775619', - decimals: 18, - ticker: 'PEPE', - trans_balance: '0', + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, }); + expect(response2.statusCode).toBe(200); + const responseJson2 = response2.json(); + expect(responseJson2.total).toBe(1); + expect(responseJson2.results).toStrictEqual([ + { + ticker: 'PEPE', + available_balance: '350000', + overall_balance: '350000', + transferrable_balance: '0', + }, + ]); }); test('rollback mints deduct balance correctly', async () => {}); From 61b413955f6ce1428a6a3b1c6b023ae4464c111d Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 18 May 2023 10:19:49 -0600 Subject: [PATCH 06/56] fix: 
balances and rollbacks --- migrations/1684175810998_brc20-balances.ts | 12 ++- src/api/util/helpers.ts | 3 +- src/pg/pg-store.ts | 14 ++-- src/pg/types.ts | 4 +- tests/brc20.test.ts | 93 +++++++++++++++++++++- 5 files changed, 108 insertions(+), 18 deletions(-) diff --git a/migrations/1684175810998_brc20-balances.ts b/migrations/1684175810998_brc20-balances.ts index e75e4b79..2dafc556 100644 --- a/migrations/1684175810998_brc20-balances.ts +++ b/migrations/1684175810998_brc20-balances.ts @@ -9,6 +9,10 @@ export function up(pgm: MigrationBuilder): void { type: 'bigserial', primaryKey: true, }, + inscription_id: { + type: 'bigint', + notNull: true, + }, brc20_deploy_id: { type: 'bigint', notNull: true, @@ -32,13 +36,13 @@ export function up(pgm: MigrationBuilder): void { }); pgm.createConstraint( 'brc20_balances', - 'brc20_balances_brc20_deploy_id_fk', - 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' + 'brc20_balances_inscription_id_fk', + 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' ); pgm.createConstraint( 'brc20_balances', - 'brc20_balances_brc20_deploy_id_address_unique', - 'UNIQUE(brc20_deploy_id, address)' + 'brc20_balances_brc20_deploy_id_fk', + 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' ); pgm.createIndex('brc20_balances', ['address']); } diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index dcd4c3aa..af3a9e10 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -1,4 +1,3 @@ -import BigNumber from 'bignumber.js'; import { DbBrc20Balance, DbFullyLocatedInscriptionResult, @@ -95,7 +94,7 @@ export function parseBrc20Balances(items: DbBrc20Balance[]): Brc20BalanceRespons ticker: i.ticker, available_balance: i.avail_balance, transferrable_balance: i.trans_balance, - overall_balance: BigNumber(i.avail_balance).plus(i.trans_balance).toString(), + overall_balance: i.total_balance, })); } diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 4a5f1879..1dbb17ba 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -515,13 +515,17 @@ export class PgStore extends BasePgStore { const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; const results = await this.sql<(DbBrc20Balance & { total: number })[]>` SELECT - d.ticker, d.decimals, b.address, b.block_height, b.avail_balance, b.trans_balance, + d.ticker, + SUM(b.avail_balance) AS avail_balance, + SUM(b.trans_balance) AS trans_balance, + SUM(b.avail_balance + b.trans_balance) AS total_balance, COUNT(*) OVER() as total FROM brc20_balances AS b INNER JOIN brc20_deploys AS d ON d.id = b.brc20_deploy_id WHERE b.address = ${args.address} - ${lowerTickers ? this.sql`AND LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} + ${lowerTickers ? 
this.sql`AND LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} + GROUP BY d.ticker LIMIT ${args.limit} OFFSET ${args.offset} `; @@ -836,8 +840,9 @@ export class PgStore extends BasePgStore { `PgStore [BRC-20] inserted mint for ${args.mint.tick} (${args.mint.amt}) at block ${args.location.block_height}` ); - // Upsert available balance for minting address + // Insert balance change for minting address const balance = { + inscription_id: args.inscription_id, brc20_deploy_id: deploy.id, block_height: args.location.block_height, address: args.location.address, @@ -846,9 +851,6 @@ export class PgStore extends BasePgStore { }; await sql` INSERT INTO brc20_balances ${sql(balance)} - ON CONFLICT ON CONSTRAINT brc20_balances_brc20_deploy_id_address_unique DO UPDATE SET - block_height = EXCLUDED.block_height, - avail_balance = brc20_balances.avail_balance + EXCLUDED.avail_balance `; }); } diff --git a/src/pg/types.ts b/src/pg/types.ts index 2b205887..0fb93916 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -250,11 +250,9 @@ export const BRC20_DEPLOYS_COLUMNS = [ export type DbBrc20Balance = { ticker: string; - decimals: number; - address: string; - block_height: string; avail_balance: string; trans_balance: string; + total_balance: string; }; export type DbBrc20EventInsert = { diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 1c613857..53ae3897 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -295,7 +295,7 @@ describe('BRC-20', () => { tick: 'pepe', amt: '100000', }, - number: 6, + number: 7, tx_id: '7a1adbc3e93ddf8d7c4e0ba75aa11c98c431521dd850be8b955feedb716d8bec', address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', }) @@ -320,11 +320,98 @@ describe('BRC-20', () => { ]); }); - test('rollback mints deduct balance correctly', async () => {}); + test('rollback mints deduct balance correctly', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '250000', + }, + number: 6, + tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + address: address, + }) + ) + .build() + ); + // Rollback + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .rollback() + .block({ + height: 775619, + hash: '0000000000000000000077163227125e51d838787d6af031bc9b55a3a1cc1b2c', + }) + .transaction({ + hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '250000', + }, + number: 6, + tx_id: 
'8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + address: address, + }) + ) + .build() + ); + + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response2.statusCode).toBe(200); + const responseJson2 = response2.json(); + expect(responseJson2.total).toBe(0); + expect(responseJson2.results).toStrictEqual([]); + }); test.skip('mint exceeds token supply', async () => {}); - test('mints in same block are applied in order', async () => {}); + test.skip('mints in same block are applied in order', async () => {}); test('ignores mint for non-existent token', async () => {}); From 8fad6b96c0fffc302a3e61922677bdfb56b74b85 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 19 May 2023 09:03:44 -0600 Subject: [PATCH 07/56] feat: token info endpoint --- src/api/routes/brc20.ts | 33 +++++++++++++++++- src/api/schemas.ts | 26 +++++++++++++- src/api/util/helpers.ts | 16 +++++++++ src/pg/pg-store.ts | 14 ++++---- src/pg/types.ts | 15 ++------ tests/brc20.test.ts | 76 ++++++++++++++++++++++++++--------------- 6 files changed, 131 insertions(+), 49 deletions(-) diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts index 1f1d496c..88cfd3c7 100644 --- a/src/api/routes/brc20.ts +++ b/src/api/routes/brc20.ts @@ -5,18 +5,49 @@ import { Server } from 'http'; import { AddressParam, Brc20BalanceResponseSchema, + Brc20TickerParam, Brc20TickersParam, + Brc20TokenResponseSchema, LimitParam, + NotFoundResponse, OffsetParam, PaginatedResponse, } from '../schemas'; -import { DEFAULT_API_LIMIT, parseBrc20Balances } from '../util/helpers'; +import { DEFAULT_API_LIMIT, parseBrc20Balances, parseBrc20Token } from '../util/helpers'; +import { Value } from '@sinclair/typebox/value'; export const Brc20Routes: FastifyPluginCallback< Record, Server, TypeBoxTypeProvider > = (fastify, options, done) => { + fastify.get( + '/brc-20/tokens', + { + schema: { + operationId: 'getBrc20Tokens', + summary: 'BRC-20 Tokens', + description: 'Retrieves deployment and supply info for BRC-20 tokens', + tags: ['BRC-20'], + querystring: Type.Object({ + ticker: Brc20TickerParam, + }), + response: { + 200: Brc20TokenResponseSchema, + 404: NotFoundResponse, + }, + }, + }, + async (request, reply) => { + const response = await fastify.db.getBrc20Token({ ticker: request.query.ticker }); + if (response) { + await reply.send(parseBrc20Token(response)); + } else { + await reply.code(404).send(Value.Create(NotFoundResponse)); + } + } + ); + fastify.get( '/brc-20/balances', { diff --git a/src/api/schemas.ts b/src/api/schemas.ts index 49f0979e..06623e5e 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -61,7 +61,9 @@ export const AddressesParam = Type.Array(AddressParam, { ], }); -export const Brc20TickersParam = Type.Array(Type.String()); +export const Brc20TickerParam = Type.String(); + +export const Brc20TickersParam = Type.Array(Brc20TickerParam); export const InscriptionIdParam = Type.RegEx(/^[a-fA-F0-9]{64}i[0-9]+$/, { title: 'Inscription ID', @@ -337,6 +339,28 @@ export const Brc20BalanceResponseSchema = Type.Object({ }); export type Brc20BalanceResponse = Static; +export const Brc20TokenResponseSchema = Type.Object( + { + id: Type.String({ + examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0'], + }), + number: Type.Integer({ examples: [248751] }), + block_height: Type.Integer({ examples: [752860] }), + tx_id: Type.String({ + examples: 
['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218'], + }), + address: Type.String({ + examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'], + }), + ticker: Type.String({ examples: ['PEPE'] }), + max_supply: Type.String({ examples: ['21000000'] }), + mint_limit: Nullable(Type.String({ examples: ['100000'] })), + decimals: Type.Integer({ examples: [18] }), + }, + { title: 'BRC-20 Token Response' } +); +export type Brc20TokenResponse = Static; + export const NotFoundResponse = Type.Object( { error: Type.Literal('Not found'), diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index af3a9e10..141f562d 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -1,5 +1,6 @@ import { DbBrc20Balance, + DbBrc20Deploy, DbFullyLocatedInscriptionResult, DbInscriptionLocationChange, DbLocation, @@ -7,6 +8,7 @@ import { import { BlockInscriptionTransfer, Brc20BalanceResponse, + Brc20TokenResponse, InscriptionLocationResponse, InscriptionResponseType, } from '../schemas'; @@ -89,6 +91,20 @@ export function parseBlockTransfers( })); } +export function parseBrc20Token(item: DbBrc20Deploy): Brc20TokenResponse { + return { + id: item.genesis_id, + number: parseInt(item.number), + block_height: parseInt(item.block_height), + tx_id: item.tx_id, + address: item.address, + ticker: item.ticker, + max_supply: item.max, + mint_limit: item.limit ?? null, + decimals: item.decimals, + }; +} + export function parseBrc20Balances(items: DbBrc20Balance[]): Brc20BalanceResponse[] { return items.map(i => ({ ticker: i.ticker, diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 1dbb17ba..9f5337e2 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -17,7 +17,6 @@ import { runMigrations } from './migrations'; import { connectPostgres } from './postgres-tools'; import { BasePgStore } from './postgres-tools/base-pg-store'; import { - BRC20_DEPLOYS_COLUMNS, DbBrc20DeployInsert, DbBrc20Balance, DbBrc20Deploy, @@ -488,11 +487,14 @@ export class PgStore extends BasePgStore { } } - async getBrc20Deploy(args: { ticker: string }): Promise { + async getBrc20Token(args: { ticker: string }): Promise { const results = await this.sql` - SELECT ${this.sql(BRC20_DEPLOYS_COLUMNS)} - FROM brc20_deploys - WHERE LOWER(ticker) = LOWER(${args.ticker}) + SELECT + d.id, i.genesis_id, i.number, d.block_height, d.tx_id, d.address, d.ticker, d.max, d.limit, + d.decimals + FROM brc20_deploys AS d + INNER JOIN inscriptions AS i ON i.id = d.inscription_id + WHERE LOWER(d.ticker) = LOWER(${args.ticker}) LIMIT 1 `; if (results.count === 1) { @@ -809,7 +811,7 @@ export class PgStore extends BasePgStore { }): Promise { await this.sqlWriteTransaction(async sql => { // Is the token deployed? 
- const deploy = await this.getBrc20Deploy({ ticker: args.mint.tick }); + const deploy = await this.getBrc20Token({ ticker: args.mint.tick }); if (!deploy) { logger.debug( `PgStore [BRC-20] ignoring mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` diff --git a/src/pg/types.ts b/src/pg/types.ts index 0fb93916..98767d66 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -226,7 +226,8 @@ export type DbBrc20DeployInsert = { export type DbBrc20Deploy = { id: string; - inscription_id: string; + genesis_id: string; + number: string; block_height: string; tx_id: string; address: string; @@ -236,18 +237,6 @@ export type DbBrc20Deploy = { decimals: number; }; -export const BRC20_DEPLOYS_COLUMNS = [ - 'id', - 'inscription_id', - 'block_height', - 'tx_id', - 'address', - 'ticker', - 'max', - 'limit', - 'decimals', -]; - export type DbBrc20Balance = { ticker: string; avail_balance: string; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 53ae3897..8dec2180 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -45,15 +45,20 @@ describe('BRC-20', () => { ) .build() ); - const deploy = await db.getBrc20Deploy({ ticker: 'PEPE' }); - expect(deploy).toStrictEqual({ + const response1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?ticker=PEPE`, + }); + expect(response1.statusCode).toBe(200); + const responseJson1 = response1.json(); + expect(responseJson1).toStrictEqual({ address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: '775617', + block_height: 775617, decimals: 18, - id: '1', - inscription_id: '1', - limit: null, - max: '21000000', + id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + number: 5, + mint_limit: null, + max_supply: '21000000', ticker: 'PEPE', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }); @@ -110,15 +115,20 @@ describe('BRC-20', () => { ) .build() ); - const deploy = await db.getBrc20Deploy({ ticker: 'PEPE' }); - expect(deploy).toStrictEqual({ + const response1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?ticker=PEPE`, + }); + expect(response1.statusCode).toBe(200); + const responseJson1 = response1.json(); + expect(responseJson1).toStrictEqual({ address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: '775617', + block_height: 775617, decimals: 18, - id: '1', - inscription_id: '1', - limit: null, - max: '21000000', + id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + max_supply: '21000000', + mint_limit: null, + number: 5, ticker: 'PEPE', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }); @@ -175,27 +185,37 @@ describe('BRC-20', () => { ) .build() ); - const deploy = await db.getBrc20Deploy({ ticker: 'PEPE' }); - expect(deploy).toStrictEqual({ + const response1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?ticker=PEPE`, + }); + expect(response1.statusCode).toBe(200); + const responseJson1 = response1.json(); + expect(responseJson1).toStrictEqual({ address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: '775617', + block_height: 775617, decimals: 18, - id: '1', - inscription_id: '1', - limit: null, - max: '21000000', + id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + max_supply: '21000000', + mint_limit: null, + number: 5, ticker: 'PEPE', tx_id: 
'38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }); - const deploy2 = await db.getBrc20Deploy({ ticker: 'pepe' }); // Lowercase - expect(deploy2).toStrictEqual({ + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?ticker=pepe`, // Lowercase + }); + expect(response2.statusCode).toBe(200); + const responseJson2 = response2.json(); + expect(responseJson2).toStrictEqual({ address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: '775617', + block_height: 775617, decimals: 18, - id: '1', - inscription_id: '1', - limit: null, - max: '21000000', + id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + max_supply: '21000000', + mint_limit: null, + number: 5, ticker: 'PEPE', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }); From ae2049baf04950d810aa997bc0f31b585aaf3391 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 19 May 2023 09:45:54 -0600 Subject: [PATCH 08/56] feat: tokens endpoint as paginated index --- src/api/routes/brc20.ts | 28 ++++++++++++++++------------ src/api/util/helpers.ts | 26 +++++++++++++------------- src/pg/pg-store.ts | 35 ++++++++++++++++++++--------------- src/pg/types.ts | 2 +- 4 files changed, 50 insertions(+), 41 deletions(-) diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts index 88cfd3c7..ea59f5fb 100644 --- a/src/api/routes/brc20.ts +++ b/src/api/routes/brc20.ts @@ -5,7 +5,6 @@ import { Server } from 'http'; import { AddressParam, Brc20BalanceResponseSchema, - Brc20TickerParam, Brc20TickersParam, Brc20TokenResponseSchema, LimitParam, @@ -13,7 +12,7 @@ import { OffsetParam, PaginatedResponse, } from '../schemas'; -import { DEFAULT_API_LIMIT, parseBrc20Balances, parseBrc20Token } from '../util/helpers'; +import { DEFAULT_API_LIMIT, parseBrc20Balances, parseBrc20Tokens } from '../util/helpers'; import { Value } from '@sinclair/typebox/value'; export const Brc20Routes: FastifyPluginCallback< @@ -27,24 +26,29 @@ export const Brc20Routes: FastifyPluginCallback< schema: { operationId: 'getBrc20Tokens', summary: 'BRC-20 Tokens', - description: 'Retrieves deployment and supply info for BRC-20 tokens', + description: 'Retrieves information for BRC-20 tokens', tags: ['BRC-20'], querystring: Type.Object({ - ticker: Brc20TickerParam, + ticker: Type.Optional(Brc20TickersParam), + // Pagination + offset: Type.Optional(OffsetParam), + limit: Type.Optional(LimitParam), }), response: { - 200: Brc20TokenResponseSchema, - 404: NotFoundResponse, + 200: PaginatedResponse(Brc20TokenResponseSchema, 'Paginated BRC-20 Token Response'), }, }, }, async (request, reply) => { - const response = await fastify.db.getBrc20Token({ ticker: request.query.ticker }); - if (response) { - await reply.send(parseBrc20Token(response)); - } else { - await reply.code(404).send(Value.Create(NotFoundResponse)); - } + const limit = request.query.limit ?? DEFAULT_API_LIMIT; + const offset = request.query.offset ?? 
0; + const response = await fastify.db.getBrc20Tokens({ ticker: request.query.ticker }); + await reply.send({ + limit, + offset, + total: response.total, + results: parseBrc20Tokens(response.results), + }); } ); diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index 141f562d..681b6618 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -1,6 +1,6 @@ import { DbBrc20Balance, - DbBrc20Deploy, + DbBrc20Token, DbFullyLocatedInscriptionResult, DbInscriptionLocationChange, DbLocation, @@ -91,18 +91,18 @@ export function parseBlockTransfers( })); } -export function parseBrc20Token(item: DbBrc20Deploy): Brc20TokenResponse { - return { - id: item.genesis_id, - number: parseInt(item.number), - block_height: parseInt(item.block_height), - tx_id: item.tx_id, - address: item.address, - ticker: item.ticker, - max_supply: item.max, - mint_limit: item.limit ?? null, - decimals: item.decimals, - }; +export function parseBrc20Tokens(items: DbBrc20Token[]): Brc20TokenResponse[] { + return items.map(i => ({ + id: i.genesis_id, + number: parseInt(i.number), + block_height: parseInt(i.block_height), + tx_id: i.tx_id, + address: i.address, + ticker: i.ticker, + max_supply: i.max, + mint_limit: i.limit ?? null, + decimals: i.decimals, + })); } export function parseBrc20Balances(items: DbBrc20Balance[]): Brc20BalanceResponse[] { diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 9f5337e2..21b58754 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -19,7 +19,7 @@ import { BasePgStore } from './postgres-tools/base-pg-store'; import { DbBrc20DeployInsert, DbBrc20Balance, - DbBrc20Deploy, + DbBrc20Token, DbFullyLocatedInscriptionResult, DbInscriptionContent, DbInscriptionIndexFilters, @@ -487,19 +487,20 @@ export class PgStore extends BasePgStore { } } - async getBrc20Token(args: { ticker: string }): Promise { - const results = await this.sql` + async getBrc20Tokens(args: { ticker?: string[] }): Promise> { + const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; + const results = await this.sql<(DbBrc20Token & { total: number })[]>` SELECT d.id, i.genesis_id, i.number, d.block_height, d.tx_id, d.address, d.ticker, d.max, d.limit, - d.decimals + d.decimals, COUNT(*) OVER() as total FROM brc20_deploys AS d INNER JOIN inscriptions AS i ON i.id = d.inscription_id - WHERE LOWER(d.ticker) = LOWER(${args.ticker}) - LIMIT 1 + ${lowerTickers ? this.sql`WHERE LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} `; - if (results.count === 1) { - return results[0]; - } + return { + total: results[0]?.total ?? 0, + results: results ?? [], + }; } /** @@ -811,27 +812,31 @@ export class PgStore extends BasePgStore { }): Promise { await this.sqlWriteTransaction(async sql => { // Is the token deployed? - const deploy = await this.getBrc20Token({ ticker: args.mint.tick }); - if (!deploy) { + const deploy = await sql<{ id: string; limit?: string }[]>` + SELECT id, "limit" FROM brc20_deploys WHERE LOWER(ticker) = LOWER(${args.mint.tick}) + `; + if (deploy.count === 0) { logger.debug( `PgStore [BRC-20] ignoring mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` ); return; } + const token = deploy[0]; + // TODO: The first mint to exceed the maximum supply will receive the fraction that is valid. // (ex. 21,000,000 maximum supply, 20,999,242 circulating supply, and 1000 mint inscription = // 758 balance state applied) // Is the mint amount within the allowed token limits? 
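      // Note: a mint whose amt is greater than the token's mint limit is ignored entirely
      // rather than being clamped to the limit (see the 'mint exceeds token mint limit' test).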
- if (deploy.limit && BigNumber(args.mint.amt).isGreaterThan(deploy.limit)) { + if (token.limit && BigNumber(args.mint.amt).isGreaterThan(token.limit)) { logger.debug( - `PgStore [BRC-20] ignoring mint for ${args.mint.tick} that exceeds mint limit of ${deploy.limit} at block ${args.location.block_height}` + `PgStore [BRC-20] ignoring mint for ${args.mint.tick} that exceeds mint limit of ${token.limit} at block ${args.location.block_height}` ); return; } const mint = { inscription_id: args.inscription_id, - brc20_deploy_id: deploy.id, + brc20_deploy_id: token.id, block_height: args.location.block_height, tx_id: args.location.tx_id, address: args.location.address, @@ -845,7 +850,7 @@ export class PgStore extends BasePgStore { // Insert balance change for minting address const balance = { inscription_id: args.inscription_id, - brc20_deploy_id: deploy.id, + brc20_deploy_id: token.id, block_height: args.location.block_height, address: args.location.address, avail_balance: args.mint.amt, diff --git a/src/pg/types.ts b/src/pg/types.ts index 98767d66..267d4a22 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -224,7 +224,7 @@ export type DbBrc20DeployInsert = { limit: string | null; }; -export type DbBrc20Deploy = { +export type DbBrc20Token = { id: string; genesis_id: string; number: string; From f09eb5be644be827c8d19c921b46420b653a6078 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 19 May 2023 10:54:16 -0600 Subject: [PATCH 09/56] test: paginated index --- tests/brc20.test.ts | 100 +++++++++++++++++++++++++------------------- 1 file changed, 56 insertions(+), 44 deletions(-) diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 8dec2180..907937c1 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -51,17 +51,20 @@ describe('BRC-20', () => { }); expect(response1.statusCode).toBe(200); const responseJson1 = response1.json(); - expect(responseJson1).toStrictEqual({ - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: 775617, - decimals: 18, - id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - number: 5, - mint_limit: null, - max_supply: '21000000', - ticker: 'PEPE', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }); + expect(responseJson1.total).toBe(1); + expect(responseJson1.results).toStrictEqual([ + { + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + block_height: 775617, + decimals: 18, + id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + number: 5, + mint_limit: null, + max_supply: '21000000', + ticker: 'PEPE', + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }, + ]); }); test('ignores deploys for existing token', async () => { @@ -121,17 +124,20 @@ describe('BRC-20', () => { }); expect(response1.statusCode).toBe(200); const responseJson1 = response1.json(); - expect(responseJson1).toStrictEqual({ - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: 775617, - decimals: 18, - id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - max_supply: '21000000', - mint_limit: null, - number: 5, - ticker: 'PEPE', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }); + expect(responseJson1.total).toBe(1); + expect(responseJson1.results).toStrictEqual([ + { + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + block_height: 775617, + decimals: 18, + id: 
'38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + max_supply: '21000000', + mint_limit: null, + number: 5, + ticker: 'PEPE', + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }, + ]); }); test('ignores case insensitive deploy for existing token', async () => { @@ -191,34 +197,40 @@ describe('BRC-20', () => { }); expect(response1.statusCode).toBe(200); const responseJson1 = response1.json(); - expect(responseJson1).toStrictEqual({ - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: 775617, - decimals: 18, - id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - max_supply: '21000000', - mint_limit: null, - number: 5, - ticker: 'PEPE', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }); + expect(responseJson1.total).toBe(1); + expect(responseJson1.results).toStrictEqual([ + { + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + block_height: 775617, + decimals: 18, + id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + max_supply: '21000000', + mint_limit: null, + number: 5, + ticker: 'PEPE', + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }, + ]); const response2 = await fastify.inject({ method: 'GET', url: `/ordinals/brc-20/tokens?ticker=pepe`, // Lowercase }); expect(response2.statusCode).toBe(200); const responseJson2 = response2.json(); - expect(responseJson2).toStrictEqual({ - address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - block_height: 775617, - decimals: 18, - id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', - max_supply: '21000000', - mint_limit: null, - number: 5, - ticker: 'PEPE', - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }); + expect(responseJson2.total).toBe(1); + expect(responseJson2.results).toStrictEqual([ + { + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + block_height: 775617, + decimals: 18, + id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + max_supply: '21000000', + mint_limit: null, + number: 5, + ticker: 'PEPE', + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }, + ]); }); test.skip('deploy exceeds decimal limit', async () => {}); From b114f6efdb44d78cfa6b7c025153c05e3d81335b Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 19 May 2023 14:44:23 -0600 Subject: [PATCH 10/56] test: brc20 format --- src/pg/helpers.ts | 81 ++++++++---------- src/pg/pg-store.ts | 38 ++++----- tests/brc20.test.ts | 204 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 256 insertions(+), 67 deletions(-) diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index 04521828..2977255f 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -17,45 +17,48 @@ const OpJsonSchema = Type.Object( const OpJsonC = TypeCompiler.Compile(OpJsonSchema); export type OpJson = Static; -const Brc20DeploySchema = Type.Object({ - p: Type.Literal('brc-20'), - op: Type.Literal('deploy'), - tick: Type.String(), - max: Type.String(), - lim: Type.Optional(Type.String()), - dec: Type.Optional(Type.String()), -}); -const Brc20DeployC = TypeCompiler.Compile(Brc20DeploySchema); +const Brc20TickerSchema = Type.String({ minLength: 1, maxLength: 4 }); + +const Brc20DeploySchema = Type.Object( + { + p: Type.Literal('brc-20'), + op: Type.Literal('deploy'), + tick: Brc20TickerSchema, + max: Type.String({ minLength: 
1 }), + lim: Type.Optional(Type.String({ minLength: 1 })), + dec: Type.Optional(Type.String({ minLength: 1 })), + }, + { additionalProperties: true } +); export type Brc20Deploy = Static; -const Brc20MintSchema = Type.Object({ - p: Type.Literal('brc-20'), - op: Type.Literal('mint'), - tick: Type.String(), - amt: Type.String(), -}); -const Brc20MintC = TypeCompiler.Compile(Brc20MintSchema); +const Brc20MintSchema = Type.Object( + { + p: Type.Literal('brc-20'), + op: Type.Literal('mint'), + tick: Brc20TickerSchema, + amt: Type.String({ minLength: 1 }), + }, + { additionalProperties: true } +); export type Brc20Mint = Static; -const Brc20TransferSchema = Type.Object({ - p: Type.Literal('brc-20'), - op: Type.Literal('transfer'), - tick: Type.String(), - amt: Type.String(), -}); -const Brc20TransferC = TypeCompiler.Compile(Brc20TransferSchema); +const Brc20TransferSchema = Type.Object( + { + p: Type.Literal('brc-20'), + op: Type.Literal('transfer'), + tick: Brc20TickerSchema, + amt: Type.String({ minLength: 1 }), + }, + { additionalProperties: true } +); export type Brc20Transfer = Static; const Brc20Schema = Type.Union([Brc20DeploySchema, Brc20MintSchema, Brc20TransferSchema]); -// const Brc20C = TypeCompiler.Compile(Brc20Schema); +const Brc20C = TypeCompiler.Compile(Brc20Schema); export type Brc20 = Static; -/** - * Tries to parse a text inscription into an OpJson schema. - * @param inscription - Inscription content - * @returns OpJson - */ -export function inscriptionContentToJson(inscription: DbInscriptionInsert): OpJson | undefined { +export function brc20FromInscription(inscription: DbInscriptionInsert): Brc20 | undefined { if ( inscription.mime_type.startsWith('text/plain') || inscription.mime_type.startsWith('application/json') @@ -66,27 +69,13 @@ export function inscriptionContentToJson(inscription: DbInscriptionInsert): OpJs ? hexToBuffer(inscription.content) : inscription.content; const result = JSON.parse(buf.toString('utf-8')); - if (OpJsonC.Check(result)) { - return result; - } + if (Brc20C.Check(result)) return result; } catch (error) { - // Not a JSON inscription. + // Not a BRC-20 inscription. } } } -export function brc20DeployFromOpJson(json: OpJson): Brc20Deploy | undefined { - if (Brc20DeployC.Check(json)) { - return json; - } -} - -export function brc20MintFromOpJson(json: OpJson): Brc20Mint | undefined { - if (Brc20MintC.Check(json)) { - return json; - } -} - /** * Returns which inscription count is required based on filters sent to the index endpoint. 
* @param filters - DbInscriptionIndexFilters diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 21b58754..6e173382 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -5,14 +5,7 @@ import { OrdinalSatoshi, SatoshiRarity } from '../api/util/ordinal-satoshi'; import { ChainhookPayload } from '../chainhook/schemas'; import { ENV } from '../env'; import { logger } from '../logger'; -import { - Brc20Deploy, - Brc20Mint, - brc20DeployFromOpJson, - brc20MintFromOpJson, - getIndexResultCountType, - inscriptionContentToJson, -} from './helpers'; +import { Brc20Deploy, Brc20Mint, brc20FromInscription, getIndexResultCountType } from './helpers'; import { runMigrations } from './migrations'; import { connectPostgres } from './postgres-tools'; import { BasePgStore } from './postgres-tools/base-pg-store'; @@ -635,19 +628,22 @@ export class PgStore extends BasePgStore { sat_coinbase_height = EXCLUDED.sat_coinbase_height, timestamp = EXCLUDED.timestamp `; - // TODO: No valid action can occur via the spending of an ordinal via transaction fee. If it - // occurs during the inscription process then the resulting inscription is ignored. If it - // occurs during the second phase of the transfer process, the balance is returned to the - // senders available balance. - const json = inscriptionContentToJson(args.inscription); - if (json) { - // Is this a BRC-20 operation? - const deploy = brc20DeployFromOpJson(json); - if (deploy) { - await this.insertBrc20Deploy({ deploy, inscription_id, location: args.location }); - } else { - const mint = brc20MintFromOpJson(json); - if (mint) await this.insertBrc20Mint({ mint, inscription_id, location: args.location }); + + // Is this a BRC-20 operation? + // TODO: No valid action can occur via the spending of an ordinal via transaction fee. 
+ const brc20 = brc20FromInscription(args.inscription); + if (brc20) { + switch (brc20.op) { + case 'deploy': + await this.insertBrc20Deploy({ + deploy: brc20, + inscription_id, + location: args.location, + }); + break; + case 'mint': + await this.insertBrc20Mint({ mint: brc20, inscription_id, location: args.location }); + break; } } }); diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 907937c1..f1db0923 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1,6 +1,8 @@ import { buildApiServer } from '../src/api/init'; +import { brc20FromInscription } from '../src/pg/helpers'; import { cycleMigrations } from '../src/pg/migrations'; import { PgStore } from '../src/pg/pg-store'; +import { DbInscriptionInsert } from '../src/pg/types'; import { TestChainhookPayloadBuilder, TestFastifyServer, brc20Reveal } from './helpers'; describe('BRC-20', () => { @@ -18,6 +20,208 @@ describe('BRC-20', () => { await db.close(); }); + describe('token standard validation', () => { + const testInsert = (json: any): DbInscriptionInsert => { + const content = Buffer.from(JSON.stringify(json), 'utf-8'); + return { + genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + number: 1, + mime_type: 'application/json', + content_type: 'application/json', + content_length: content.length, + content: `0x${content.toString('hex')}`, + fee: '200', + }; + }; + + test('ignores incorrect MIME type', () => { + const content = Buffer.from( + JSON.stringify({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }), + 'utf-8' + ); + const insert: DbInscriptionInsert = { + genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + number: 1, + mime_type: 'foo/bar', + content_type: 'foo/bar;x=1', + content_length: content.length, + content: `0x${content.toString('hex')}`, + fee: '200', + }; + expect(brc20FromInscription(insert)).toBeUndefined(); + insert.content_type = 'application/json'; + insert.mime_type = 'application/json'; + expect(brc20FromInscription(insert)).not.toBeUndefined(); + insert.content_type = 'text/plain;charset=utf-8'; + insert.mime_type = 'text/plain'; + expect(brc20FromInscription(insert)).not.toBeUndefined(); + }); + + test('ignores invalid JSON', () => { + const content = Buffer.from( + '{"p": "brc-20", "op": "deploy", "tick": "PEPE", "max": "21000000"', + 'utf-8' + ); + const insert: DbInscriptionInsert = { + genesis_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + number: 1, + mime_type: 'application/json', + content_type: 'application/json', + content_length: content.length, + content: `0x${content.toString('hex')}`, + fee: '200', + }; + expect(brc20FromInscription(insert)).toBeUndefined(); + }); + + test('ignores incorrect p field', () => { + const insert = testInsert({ + p: 'brc20', // incorrect + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }); + expect(brc20FromInscription(insert)).toBeUndefined(); + }); + + test('ignores incorrect op field', () => { + const insert = testInsert({ + p: 'brc-20', + op: 'deploi', // incorrect + tick: 'PEPE', + max: '21000000', + }); + expect(brc20FromInscription(insert)).toBeUndefined(); + }); + + test('ignores invalid tick fields', () => { + const insert = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPETESTER', // incorrect length + max: '21000000', + }); + expect(brc20FromInscription(insert)).toBeUndefined(); + const insert2 = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'Pé P', // valid + max: '21000000', + }); + 
expect(brc20FromInscription(insert2)).not.toBeUndefined(); + }); + + test('all fields must be strings', () => { + const insert1 = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: 21000000, + }); + expect(brc20FromInscription(insert1)).toBeUndefined(); + const insert1a = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: 300, + }); + expect(brc20FromInscription(insert1a)).toBeUndefined(); + const insert1b = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '300', + dec: 2, + }); + expect(brc20FromInscription(insert1b)).toBeUndefined(); + const insert2 = testInsert({ + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: 2, + }); + expect(brc20FromInscription(insert2)).toBeUndefined(); + const insert3 = testInsert({ + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: 2, + }); + expect(brc20FromInscription(insert3)).toBeUndefined(); + }); + + test('ignores empty strings', () => { + const insert1 = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: '', + max: '21000000', + }); + expect(brc20FromInscription(insert1)).toBeUndefined(); + const insert1a = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '', + }); + expect(brc20FromInscription(insert1a)).toBeUndefined(); + const insert1b = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '', + }); + expect(brc20FromInscription(insert1b)).toBeUndefined(); + const insert1c = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '200', + dec: '', + }); + expect(brc20FromInscription(insert1c)).toBeUndefined(); + const insert2 = testInsert({ + p: 'brc-20', + op: 'mint', + tick: '', + }); + expect(brc20FromInscription(insert2)).toBeUndefined(); + const insert2a = testInsert({ + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '', + }); + expect(brc20FromInscription(insert2a)).toBeUndefined(); + const insert3 = testInsert({ + p: 'brc-20', + op: 'transfer', + tick: '', + }); + expect(brc20FromInscription(insert3)).toBeUndefined(); + const insert3a = testInsert({ + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '', + }); + expect(brc20FromInscription(insert3a)).toBeUndefined(); + }); + + test.skip('numeric strings must be valid', () => {}); + + test.skip('valid JSONs can have additional properties', () => {}); + }); + describe('deploy', () => { test('deploy is saved', async () => { await db.updateInscriptions( From 5c58ddedd89452a459399452e84b13be4cabff68 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 31 May 2023 16:29:13 -0600 Subject: [PATCH 11/56] chore: more standard validation --- src/api/routes/brc20.ts | 2 - src/pg/helpers.ts | 36 +++++++-- tests/brc20.test.ts | 167 ++++++++++++++++++++++++++++++++++++++-- 3 files changed, 190 insertions(+), 15 deletions(-) diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts index ea59f5fb..90c71296 100644 --- a/src/api/routes/brc20.ts +++ b/src/api/routes/brc20.ts @@ -8,12 +8,10 @@ import { Brc20TickersParam, Brc20TokenResponseSchema, LimitParam, - NotFoundResponse, OffsetParam, PaginatedResponse, } from '../schemas'; import { DEFAULT_API_LIMIT, parseBrc20Balances, parseBrc20Tokens } from '../util/helpers'; -import { Value } from '@sinclair/typebox/value'; export const Brc20Routes: FastifyPluginCallback< Record, diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index 2977255f..d24cc003 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -6,6 +6,7 @@ import { DbInscriptionIndexResultCountType, 
DbInscriptionInsert, } from './types'; +import BigNumber from 'bignumber.js'; const OpJsonSchema = Type.Object( { @@ -18,15 +19,16 @@ const OpJsonC = TypeCompiler.Compile(OpJsonSchema); export type OpJson = Static; const Brc20TickerSchema = Type.String({ minLength: 1, maxLength: 4 }); +const Brc20NumberSchema = Type.RegEx(/^((\d+)|(\d*\.?\d+))$/); const Brc20DeploySchema = Type.Object( { p: Type.Literal('brc-20'), op: Type.Literal('deploy'), tick: Brc20TickerSchema, - max: Type.String({ minLength: 1 }), - lim: Type.Optional(Type.String({ minLength: 1 })), - dec: Type.Optional(Type.String({ minLength: 1 })), + max: Brc20NumberSchema, + lim: Type.Optional(Brc20NumberSchema), + dec: Type.Optional(Type.RegEx(/^\d+$/)), }, { additionalProperties: true } ); @@ -37,7 +39,7 @@ const Brc20MintSchema = Type.Object( p: Type.Literal('brc-20'), op: Type.Literal('mint'), tick: Brc20TickerSchema, - amt: Type.String({ minLength: 1 }), + amt: Brc20NumberSchema, }, { additionalProperties: true } ); @@ -48,7 +50,7 @@ const Brc20TransferSchema = Type.Object( p: Type.Literal('brc-20'), op: Type.Literal('transfer'), tick: Brc20TickerSchema, - amt: Type.String({ minLength: 1 }), + amt: Brc20NumberSchema, }, { additionalProperties: true } ); @@ -68,8 +70,28 @@ export function brc20FromInscription(inscription: DbInscriptionInsert): Brc20 | typeof inscription.content === 'string' ? hexToBuffer(inscription.content) : inscription.content; - const result = JSON.parse(buf.toString('utf-8')); - if (Brc20C.Check(result)) return result; + const json = JSON.parse(buf.toString('utf-8')); + if (Brc20C.Check(json)) { + // Check numeric values. + const uint64_max = BigNumber('18446744073709551615'); + if (json.op === 'deploy') { + const max = BigNumber(json.max); + if (max.isNaN() || max.isZero() || max.isGreaterThan(uint64_max)) return; + if (json.lim) { + const lim = BigNumber(json.lim); + if (lim.isNaN() || lim.isZero() || lim.isGreaterThan(uint64_max)) return; + } + if (json.dec) { + // `dec` can have a value of 0 but must be no more than 18. + const dec = BigNumber(json.dec); + if (dec.isNaN() || dec.isGreaterThan(18)) return; + } + } else { + const amt = BigNumber(json.amt); + if (amt.isNaN() || amt.isZero() || amt.isGreaterThan(uint64_max)) return; + } + return json; + } } catch (error) { // Not a BRC-20 inscription. 
} diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index f1db0923..ea8c040d 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -99,7 +99,7 @@ describe('BRC-20', () => { expect(brc20FromInscription(insert)).toBeUndefined(); }); - test('ignores invalid tick fields', () => { + test('tick must be 4 bytes wide', () => { const insert = testInsert({ p: 'brc-20', op: 'deploy', @@ -217,9 +217,162 @@ describe('BRC-20', () => { expect(brc20FromInscription(insert3a)).toBeUndefined(); }); - test.skip('numeric strings must be valid', () => {}); + test('numeric strings must not be zero', () => { + const insert1 = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '0', + }); + expect(brc20FromInscription(insert1)).toBeUndefined(); + const insert1b = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '0.0', + }); + expect(brc20FromInscription(insert1b)).toBeUndefined(); + const insert1c = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '200', + dec: '0', + }); + // `dec` can have a value of 0 + expect(brc20FromInscription(insert1c)).not.toBeUndefined(); + const insert2a = testInsert({ + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '0', + }); + expect(brc20FromInscription(insert2a)).toBeUndefined(); + const insert3a = testInsert({ + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '.0000', + }); + expect(brc20FromInscription(insert3a)).toBeUndefined(); + }); + + test('numeric fields are not stripped/trimmed', () => { + const insert1 = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: ' 200 ', + }); + expect(brc20FromInscription(insert1)).toBeUndefined(); + const insert1b = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '+10000', + }); + expect(brc20FromInscription(insert1b)).toBeUndefined(); + const insert1c = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '200', + dec: ' 0 ', + }); + expect(brc20FromInscription(insert1c)).toBeUndefined(); + const insert2a = testInsert({ + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '.05 ', + }); + expect(brc20FromInscription(insert2a)).toBeUndefined(); + const insert3a = testInsert({ + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '-25.00', + }); + expect(brc20FromInscription(insert3a)).toBeUndefined(); + }); - test.skip('valid JSONs can have additional properties', () => {}); + test('max value of dec is 18', () => { + const insert1c = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '200', + dec: '20', + }); + expect(brc20FromInscription(insert1c)).toBeUndefined(); + }); + + test('max value of any numeric field is uint64_max', () => { + const insert1 = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '18446744073709551999', + }); + expect(brc20FromInscription(insert1)).toBeUndefined(); + const insert1b = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + lim: '18446744073709551999', + }); + expect(brc20FromInscription(insert1b)).toBeUndefined(); + const insert2a = testInsert({ + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '18446744073709551999', + }); + expect(brc20FromInscription(insert2a)).toBeUndefined(); + const insert3a = testInsert({ + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '18446744073709551999', + }); + expect(brc20FromInscription(insert3a)).toBeUndefined(); + }); + + test('valid JSONs can have additional properties', () => { + const 
insert1 = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '200', + foo: 'bar', + test: 1, + }); + expect(brc20FromInscription(insert1)).not.toBeUndefined(); + const insert2a = testInsert({ + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '5', + foo: 'bar', + test: 1, + }); + expect(brc20FromInscription(insert2a)).not.toBeUndefined(); + const insert3a = testInsert({ + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '25', + foo: 'bar', + test: 1, + }); + expect(brc20FromInscription(insert3a)).not.toBeUndefined(); + }); }); describe('deploy', () => { @@ -645,14 +798,16 @@ describe('BRC-20', () => { expect(responseJson2.results).toStrictEqual([]); }); + test.skip('numbers should not have more decimal digits than "dec" of ticker', async () => {}); + test.skip('mint exceeds token supply', async () => {}); test.skip('mints in same block are applied in order', async () => {}); - test('ignores mint for non-existent token', async () => {}); + test.skip('ignores mint for non-existent token', async () => {}); - test('mint exceeds token mint limit', async () => {}); + test.skip('mint exceeds token mint limit', async () => {}); - test('ignores mint for token with no more supply', async () => {}); + test.skip('ignores mint for token with no more supply', async () => {}); }); }); From f6fd0a656d6520f90eda4d6610c04a077fa70354 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 31 May 2023 16:34:15 -0600 Subject: [PATCH 12/56] fix: tick must be 4 bytes or less --- src/pg/helpers.ts | 22 +++++++++++++++++----- tests/brc20.test.ts | 11 +++++++++-- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index d24cc003..208ef018 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -18,7 +18,7 @@ const OpJsonSchema = Type.Object( const OpJsonC = TypeCompiler.Compile(OpJsonSchema); export type OpJson = Static; -const Brc20TickerSchema = Type.String({ minLength: 1, maxLength: 4 }); +const Brc20TickerSchema = Type.String({ minLength: 1 }); const Brc20NumberSchema = Type.RegEx(/^((\d+)|(\d*\.?\d+))$/); const Brc20DeploySchema = Type.Object( @@ -72,23 +72,35 @@ export function brc20FromInscription(inscription: DbInscriptionInsert): Brc20 | : inscription.content; const json = JSON.parse(buf.toString('utf-8')); if (Brc20C.Check(json)) { + // Check ticker byte length + if (Buffer.from(json.tick).length > 4) { + return; + } // Check numeric values. const uint64_max = BigNumber('18446744073709551615'); if (json.op === 'deploy') { const max = BigNumber(json.max); - if (max.isNaN() || max.isZero() || max.isGreaterThan(uint64_max)) return; + if (max.isNaN() || max.isZero() || max.isGreaterThan(uint64_max)) { + return; + } if (json.lim) { const lim = BigNumber(json.lim); - if (lim.isNaN() || lim.isZero() || lim.isGreaterThan(uint64_max)) return; + if (lim.isNaN() || lim.isZero() || lim.isGreaterThan(uint64_max)) { + return; + } } if (json.dec) { // `dec` can have a value of 0 but must be no more than 18. 
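          // For example, dec values of '0' and '18' pass this check, while '20' is rejected
          // (see the 'max value of dec is 18' test).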
const dec = BigNumber(json.dec); - if (dec.isNaN() || dec.isGreaterThan(18)) return; + if (dec.isNaN() || dec.isGreaterThan(18)) { + return; + } } } else { const amt = BigNumber(json.amt); - if (amt.isNaN() || amt.isZero() || amt.isGreaterThan(uint64_max)) return; + if (amt.isNaN() || amt.isZero() || amt.isGreaterThan(uint64_max)) { + return; + } } return json; } diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index ea8c040d..b580bb1f 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -103,17 +103,24 @@ describe('BRC-20', () => { const insert = testInsert({ p: 'brc-20', op: 'deploy', - tick: 'PEPETESTER', // incorrect length + tick: 'PEPETESTER', // more than 4 bytes max: '21000000', }); expect(brc20FromInscription(insert)).toBeUndefined(); const insert2 = testInsert({ p: 'brc-20', op: 'deploy', - tick: 'Pé P', // valid + tick: 'Pe P', // valid max: '21000000', }); expect(brc20FromInscription(insert2)).not.toBeUndefined(); + const insert3 = testInsert({ + p: 'brc-20', + op: 'deploy', + tick: '🤬😉', // more than 4 bytes + max: '21000000', + }); + expect(brc20FromInscription(insert3)).toBeUndefined(); }); test('all fields must be strings', () => { From aa15b0e4843435cacfa12856b881566ba0c2f3a3 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Mon, 12 Jun 2023 14:31:59 -0600 Subject: [PATCH 13/56] fix: invalid decimal count --- src/pg/pg-store.ts | 15 ++++++++-- tests/brc20.test.ts | 67 ++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 79 insertions(+), 3 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index a154705c..98491460 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -891,8 +891,8 @@ export class PgStore extends BasePgStore { }): Promise { await this.sqlWriteTransaction(async sql => { // Is the token deployed? - const deploy = await sql<{ id: string; limit?: string }[]>` - SELECT id, "limit" FROM brc20_deploys WHERE LOWER(ticker) = LOWER(${args.mint.tick}) + const deploy = await sql<{ id: string; limit?: string; decimals: string }[]>` + SELECT id, "limit", decimals FROM brc20_deploys WHERE LOWER(ticker) = LOWER(${args.mint.tick}) `; if (deploy.count === 0) { logger.debug( @@ -913,6 +913,17 @@ export class PgStore extends BasePgStore { ); return; } + // Is the number of decimals correct? 
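+      // For example, with dec = '1' a mint amt of '250000.0' is accepted, but '250000.000'
+      // is ignored because it carries more decimal digits than the token allows.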
+ if ( + args.mint.amt.includes('.') && + args.mint.amt.split('.')[1].length > parseInt(token.decimals) + ) { + logger.debug( + `PgStore [BRC-20] ignoring mint for ${args.mint.tick} because amount ${args.mint.amt} exceeds token decimals at block ${args.location.block_height}` + ); + return; + } + const mint = { inscription_id: args.inscription_id, brc20_deploy_id: token.id, diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index b580bb1f..211d0ab0 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -31,6 +31,7 @@ describe('BRC-20', () => { content_length: content.length, content: `0x${content.toString('hex')}`, fee: '200', + curse_type: null, }; }; @@ -52,6 +53,7 @@ describe('BRC-20', () => { content_length: content.length, content: `0x${content.toString('hex')}`, fee: '200', + curse_type: null, }; expect(brc20FromInscription(insert)).toBeUndefined(); insert.content_type = 'application/json'; @@ -75,6 +77,7 @@ describe('BRC-20', () => { content_length: content.length, content: `0x${content.toString('hex')}`, fee: '200', + curse_type: null, }; expect(brc20FromInscription(insert)).toBeUndefined(); }); @@ -805,7 +808,69 @@ describe('BRC-20', () => { expect(responseJson2.results).toStrictEqual([]); }); - test.skip('numbers should not have more decimal digits than "dec" of ticker', async () => {}); + test('numbers should not have more decimal digits than "dec" of ticker', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + dec: '1', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '250000.000', // Invalid decimal count + }, + number: 6, + tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + address: address, + }) + ) + .build() + ); + + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response2.statusCode).toBe(200); + const responseJson2 = response2.json(); + expect(responseJson2.total).toBe(0); + expect(responseJson2.results).toStrictEqual([]); + }); test.skip('mint exceeds token supply', async () => {}); From c8e582055956c9381d14d5ec1bae5a70c0a4d4a8 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Mon, 12 Jun 2023 15:12:45 -0600 Subject: [PATCH 14/56] feat: mint within supply --- src/pg/pg-store.ts | 39 +++++++++---- src/pg/types.ts | 24 ++++++++ tests/brc20.test.ts | 138 +++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 189 insertions(+), 12 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 98491460..03c731e6 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -29,6 +29,8 @@ import { LOCATIONS_COLUMNS, 
DbBrc20EventInsert, BRC20_EVENTS_COLUMNS, + DbBrc20Deploy, + BRC20_DEPLOYS_COLUMNS, } from './types'; type InscriptionIdentifier = { genesis_id: string } | { number: number }; @@ -884,6 +886,15 @@ export class PgStore extends BasePgStore { }); } + private async getBrc20Deploy(args: { ticker: string }): Promise { + const deploy = await this.sql` + SELECT ${this.sql(BRC20_DEPLOYS_COLUMNS)} + FROM brc20_deploys + WHERE LOWER(ticker) = LOWER(${args.ticker}) + `; + if (deploy.count) return deploy[0]; + } + private async insertBrc20Mint(args: { mint: Brc20Mint; inscription_id: number; @@ -891,20 +902,13 @@ export class PgStore extends BasePgStore { }): Promise { await this.sqlWriteTransaction(async sql => { // Is the token deployed? - const deploy = await sql<{ id: string; limit?: string; decimals: string }[]>` - SELECT id, "limit", decimals FROM brc20_deploys WHERE LOWER(ticker) = LOWER(${args.mint.tick}) - `; - if (deploy.count === 0) { + const token = await this.getBrc20Deploy({ ticker: args.mint.tick }); + if (!token) { logger.debug( `PgStore [BRC-20] ignoring mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` ); return; } - const token = deploy[0]; - - // TODO: The first mint to exceed the maximum supply will receive the fraction that is valid. - // (ex. 21,000,000 maximum supply, 20,999,242 circulating supply, and 1000 mint inscription = - // 758 balance state applied) // Is the mint amount within the allowed token limits? if (token.limit && BigNumber(args.mint.amt).isGreaterThan(token.limit)) { @@ -923,6 +927,19 @@ export class PgStore extends BasePgStore { ); return; } + // Does the mint amount exceed remaining supply? + const mintedSupply = await sql<{ minted: string }[]>` + SELECT COALESCE(SUM(amount), 0) AS minted FROM brc20_mints WHERE brc20_deploy_id = ${token.id} + `; + const minted = new BigNumber(mintedSupply[0].minted); + const availSupply = new BigNumber(token.max).minus(minted); + if (availSupply.isLessThanOrEqualTo(0)) { + logger.debug( + `PgStore [BRC-20] ignoring mint for ${args.mint.tick} because token has been completely minted at block ${args.location.block_height}` + ); + return; + } + const mintAmt = BigNumber.min(availSupply, args.mint.amt); const mint = { inscription_id: args.inscription_id, @@ -930,7 +947,7 @@ export class PgStore extends BasePgStore { block_height: args.location.block_height, tx_id: args.location.tx_id, address: args.location.address, - amount: args.mint.amt, + amount: args.mint.amt, // Original requested amount }; await sql`INSERT INTO brc20_mints ${sql(mint)}`; logger.info( @@ -943,7 +960,7 @@ export class PgStore extends BasePgStore { brc20_deploy_id: token.id, block_height: args.location.block_height, address: args.location.address, - avail_balance: args.mint.amt, + avail_balance: mintAmt, // Real minted balance trans_balance: 0, }; await sql` diff --git a/src/pg/types.ts b/src/pg/types.ts index a0f6412b..437d55bb 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -231,6 +231,18 @@ export type DbBrc20DeployInsert = { limit: string | null; }; +export type DbBrc20Deploy = { + id: string; + inscription_id: number; + block_height: number; + tx_id: string; + address: string; + ticker: string; + max: string; + decimals: string; + limit?: string; +}; + export type DbBrc20Token = { id: string; genesis_id: string; @@ -259,6 +271,18 @@ export type DbBrc20EventInsert = { transfer_id: string | null; }; +export const BRC20_DEPLOYS_COLUMNS = [ + 'id', + 'inscription_id', + 'block_height', + 'tx_id', + 'address', + 
'ticker', + 'max', + 'decimals', + 'limit', +]; + export const BRC20_EVENTS_COLUMNS = [ 'id', 'inscription_id', diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 211d0ab0..63cad7ae 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -872,7 +872,143 @@ describe('BRC-20', () => { expect(responseJson2.results).toStrictEqual([]); }); - test.skip('mint exceeds token supply', async () => {}); + test('mint exceeds token supply', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '2500', + dec: '1', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '1000', + }, + number: 6, + tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + address: address, + }) + ) + .transaction({ + hash: '7e09bda2cba34bca648cca6d79a074940d39b6137150d3a3edcf80c0e01419a5', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '1000', + }, + number: 6, + tx_id: '7e09bda2cba34bca648cca6d79a074940d39b6137150d3a3edcf80c0e01419a5', + address: address, + }) + ) + .transaction({ + hash: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '250000', // Exceeds supply + }, + number: 8, + tx_id: '8aec77f855549d98cb9fb5f35e02a03f9a2354fd05a5f89fc610b32c3b01f99f', + address: address, + }) + ) + .build() + ); + + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response2.statusCode).toBe(200); + const responseJson2 = response2.json(); + expect(responseJson2.total).toBe(1); + expect(responseJson2.results).toStrictEqual([ + { + available_balance: '2500', // Max capacity + overall_balance: '2500', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + + // No more mints allowed + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775619, + hash: '000000000000000000001f14513d722146fddab04a1855665a5eca22df288c3c', + }) + .transaction({ + hash: 'bf7a3e1a0647ca88f6539119b2defaec302683704ea270b3302e709597643548', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '1000', + }, + number: 9, + tx_id: 'bf7a3e1a0647ca88f6539119b2defaec302683704ea270b3302e709597643548', + address: address, + }) + ) + .build() + ); + + const response3 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response3.statusCode).toBe(200); + const responseJson3 = response3.json(); + 
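+      // Balances should be unchanged: the supply was fully minted above, so the extra mint is ignored.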
expect(responseJson3).toStrictEqual(responseJson2); + }); test.skip('mints in same block are applied in order', async () => {}); From 187d2d77e6e606155e273ed1ce80cf3a0d0db16a Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Mon, 12 Jun 2023 15:15:53 -0600 Subject: [PATCH 15/56] test: mint limit --- tests/brc20.test.ts | 103 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 99 insertions(+), 4 deletions(-) diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 63cad7ae..e3371fe4 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1010,12 +1010,107 @@ describe('BRC-20', () => { expect(responseJson3).toStrictEqual(responseJson2); }); - test.skip('mints in same block are applied in order', async () => {}); + test('ignores mint for non-existent token', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '1000', + }, + number: 6, + tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + address: address, + }) + ) + .build() + ); - test.skip('ignores mint for non-existent token', async () => {}); + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response2.statusCode).toBe(200); + const responseJson2 = response2.json(); + expect(responseJson2.total).toBe(0); + expect(responseJson2.results).toStrictEqual([]); + }); - test.skip('mint exceeds token mint limit', async () => {}); + test('mint exceeds token mint limit', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '2500', + dec: '1', + lim: '100', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '1000', // Greater than limit + }, + number: 6, + tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + address: address, + }) + ) + .build() + ); - test.skip('ignores mint for token with no more supply', async () => {}); + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response2.statusCode).toBe(200); + const responseJson2 = response2.json(); + expect(responseJson2.total).toBe(0); + expect(responseJson2.results).toStrictEqual([]); + }); }); }); From e612dc0c8ee49ffccd2f78c52a38068923ffd01d Mon Sep 
17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jun 2023 10:28:31 -0600 Subject: [PATCH 16/56] chore: first transfers --- migrations/1684175795592_brc20-transfers.ts | 1 - src/pg/pg-store.ts | 60 +++++++++++- tests/brc20.test.ts | 103 +++++++++++++++++++- 3 files changed, 158 insertions(+), 6 deletions(-) diff --git a/migrations/1684175795592_brc20-transfers.ts b/migrations/1684175795592_brc20-transfers.ts index c6a0e0f2..3ad5e0ae 100644 --- a/migrations/1684175795592_brc20-transfers.ts +++ b/migrations/1684175795592_brc20-transfers.ts @@ -31,7 +31,6 @@ export function up(pgm: MigrationBuilder): void { }, to_address: { type: 'text', - notNull: true, }, amount: { type: 'numeric', diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 03c731e6..fb3f483d 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -5,7 +5,13 @@ import { OrdinalSatoshi, SatoshiRarity } from '../api/util/ordinal-satoshi'; import { ChainhookPayload } from '../chainhook/schemas'; import { ENV } from '../env'; import { logger } from '../logger'; -import { Brc20Deploy, Brc20Mint, brc20FromInscription, getIndexResultCountType } from './helpers'; +import { + Brc20Deploy, + Brc20Mint, + Brc20Transfer, + brc20FromInscription, + getIndexResultCountType, +} from './helpers'; import { runMigrations } from './migrations'; import { connectPostgres } from './postgres-tools'; import { BasePgStore } from './postgres-tools/base-pg-store'; @@ -727,6 +733,13 @@ export class PgStore extends BasePgStore { case 'mint': await this.insertBrc20Mint({ mint: brc20, inscription_id, location: args.location }); break; + case 'transfer': + await this.insertBrc20Transfer({ + transfer: brc20, + inscription_id, + location: args.location, + }); + break; } } }); @@ -968,4 +981,49 @@ export class PgStore extends BasePgStore { `; }); } + + private async insertBrc20Transfer(args: { + transfer: Brc20Transfer; + inscription_id: number; + location: DbLocationInsert; + }): Promise { + await this.sqlWriteTransaction(async sql => { + // Is the token deployed? 
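+      // As with mints, a transfer that references a ticker that was never deployed is ignored.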
+ const token = await this.getBrc20Deploy({ ticker: args.transfer.tick }); + if (!token) { + logger.debug( + `PgStore [BRC-20] ignoring transfer for non-deployed token ${args.transfer.tick} at block ${args.location.block_height}` + ); + return; + } + + const transfer = { + inscription_id: args.inscription_id, + brc20_deploy_id: token.id, + block_height: args.location.block_height, + tx_id: args.location.tx_id, + from_address: args.location.address, + to_address: null, // We don't know the receiver address yet + amount: args.transfer.amt, + }; + await sql`INSERT INTO brc20_transfers ${sql(transfer)}`; + logger.info( + `PgStore [BRC-20] inserted transfer for ${args.transfer.tick} (${args.transfer.amt}) at block ${args.location.block_height}` + ); + + // Insert balance change for minting address + const transAmt = new BigNumber(args.transfer.amt); + const balance = { + inscription_id: args.inscription_id, + brc20_deploy_id: token.id, + block_height: args.location.block_height, + address: args.location.address, + avail_balance: transAmt.negated(), + trans_balance: transAmt, + }; + await sql` + INSERT INTO brc20_balances ${sql(balance)} + `; + }); + } } diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index e3371fe4..f533ad8a 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -599,10 +599,6 @@ describe('BRC-20', () => { }, ]); }); - - test.skip('deploy exceeds decimal limit', async () => {}); - - test.skip('deploy exceeds supply limit', async () => {}); }); describe('mint', () => { @@ -1113,4 +1109,103 @@ describe('BRC-20', () => { expect(responseJson2.results).toStrictEqual([]); }); }); + + describe('transfer', () => { + test('available balance decreases on transfer inscription', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '250000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '10000', + }, + number: 6, + tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775619, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', + }) + .transaction({ + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '2000', + }, + number: 7, + tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + address: address, + }) + ) + .build() + ); + + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + 
}); + expect(response.statusCode).toBe(200); + const json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toStrictEqual([ + { + available_balance: '8000', + overall_balance: '10000', + ticker: 'PEPE', + transferrable_balance: '2000', + }, + ]); + }); + + test.skip('cannot transfer more than available balance', async () => {}); + }); }); From f8780d041b3e47ab05d2009d93079b3aceb930a5 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jun 2023 10:41:39 -0600 Subject: [PATCH 17/56] test: dont exceed avail balance for transfer --- src/pg/pg-store.ts | 28 +++++++++++-- tests/brc20.test.ts | 95 ++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 119 insertions(+), 4 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index fb3f483d..bd92d0e7 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -988,6 +988,13 @@ export class PgStore extends BasePgStore { location: DbLocationInsert; }): Promise { await this.sqlWriteTransaction(async sql => { + // Is the destination a valid address? + if (!args.location.address) { + logger.debug( + `PgStore [BRC-20] ignoring transfer spent as fee for ${args.transfer.tick} at block ${args.location.block_height}` + ); + return; + } // Is the token deployed? const token = await this.getBrc20Deploy({ ticker: args.transfer.tick }); if (!token) { @@ -996,6 +1003,22 @@ export class PgStore extends BasePgStore { ); return; } + // Get balance for this address and this token + const balanceResult = await this.getBrc20Balances({ + address: args.location.address, + ticker: [args.transfer.tick], + limit: 1, + offset: 0, + }); + // Do we have enough available balance to do this transfer? + const transAmt = new BigNumber(args.transfer.amt); + const available = new BigNumber(balanceResult.results[0]?.avail_balance ?? 
0); + if (transAmt.gt(available)) { + logger.debug( + `PgStore [BRC-20] ignoring transfer for token ${args.transfer.tick} due to unavailable balance at block ${args.location.block_height}` + ); + return; + } const transfer = { inscription_id: args.inscription_id, @@ -1012,8 +1035,7 @@ export class PgStore extends BasePgStore { ); // Insert balance change for minting address - const transAmt = new BigNumber(args.transfer.amt); - const balance = { + const values = { inscription_id: args.inscription_id, brc20_deploy_id: token.id, block_height: args.location.block_height, @@ -1022,7 +1044,7 @@ export class PgStore extends BasePgStore { trans_balance: transAmt, }; await sql` - INSERT INTO brc20_balances ${sql(balance)} + INSERT INTO brc20_balances ${sql(values)} `; }); } diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index f533ad8a..349de76f 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1206,6 +1206,99 @@ describe('BRC-20', () => { ]); }); - test.skip('cannot transfer more than available balance', async () => {}); + test('cannot transfer more than available balance', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '250000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '10000', + }, + number: 6, + tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775619, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', + }) + .transaction({ + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '20000', // More than was minted + }, + number: 7, + tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + address: address, + }) + ) + .build() + ); + + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response.statusCode).toBe(200); + const json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toStrictEqual([ + { + available_balance: '10000', + overall_balance: '10000', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + }); }); }); From edde5d17b0775a91fd5f98b3a0388aae253005c9 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jun 2023 11:28:32 -0600 Subject: [PATCH 18/56] test: multiple transfers in block --- tests/brc20.test.ts | 111 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) diff --git 
a/tests/brc20.test.ts b/tests/brc20.test.ts index 349de76f..978a798e 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1300,5 +1300,116 @@ describe('BRC-20', () => { }, ]); }); + + test('multiple transfers in block', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775617, + hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + }) + .transaction({ + hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '250000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775618, + hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', + }) + .transaction({ + hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'mint', + tick: 'PEPE', + amt: '10000', + }, + number: 6, + tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775619, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', + }) + .transaction({ + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '9000', + }, + number: 7, + tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + address: address, + }) + ) + .transaction({ + hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '2000', // Will exceed available balance + }, + number: 8, + tx_id: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', + address: address, + }) + ) + .build() + ); + + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response.statusCode).toBe(200); + const json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toStrictEqual([ + { + available_balance: '1000', + overall_balance: '10000', + ticker: 'PEPE', + transferrable_balance: '9000', + }, + ]); + }); }); }); From dd8ec07d366e6bf15e74b528077c8fa1836958e9 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jun 2023 14:37:27 -0600 Subject: [PATCH 19/56] feat: first balance transfers --- src/pg/pg-store.ts | 60 +++++++++++++++ src/pg/types.ts | 22 ++++++ tests/brc20.test.ts | 180 +++++++++++++++++++++----------------------- 3 files changed, 169 insertions(+), 93 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index bd92d0e7..d10dfa6f 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -37,6 +37,8 @@ import { BRC20_EVENTS_COLUMNS, DbBrc20Deploy, BRC20_DEPLOYS_COLUMNS, + BRC20_TRANSFERS_COLUMNS, + DbBrc20Transfer, } from './types'; type InscriptionIdentifier = { genesis_id: string } | { number: number }; @@ -792,10 +794,31 @@ export class PgStore extends BasePgStore { sat_coinbase_height = 
EXCLUDED.sat_coinbase_height, timestamp = EXCLUDED.timestamp `; + + // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by + // this address that hasn't been sent to another address before. + const brc20Transfer = await sql` + SELECT ${sql(BRC20_TRANSFERS_COLUMNS.map(c => `t.${c}`))} + FROM locations AS l + INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id + WHERE + l.inscription_id = ${inscription_id} + AND l.address = ${args.location.address} + AND l.genesis = TRUE + AND l.current = TRUE + LIMIT 1 + `; + if (brc20Transfer.count > 0) { + await this.applyBrc20BalanceTransfer({ + transfer: brc20Transfer[0], + location: args.location, + }); + } }); return inscription_id; } + // TODO: Roll back BRC20 transfers private async rollBackInscriptionGenesis(args: { genesis_id: string }): Promise { // This will cascade into dependent tables. await this.sql`DELETE FROM inscriptions WHERE genesis_id = ${args.genesis_id}`; @@ -1048,4 +1071,41 @@ export class PgStore extends BasePgStore { `; }); } + + private async applyBrc20BalanceTransfer(args: { + transfer: DbBrc20Transfer; + location: DbLocationInsert; + }): Promise { + await this.sqlWriteTransaction(async sql => { + // Reflect balance transfer + const amount = new BigNumber(args.transfer.amount); + const changes = [ + { + inscription_id: args.transfer.inscription_id, + brc20_deploy_id: args.transfer.brc20_deploy_id, + block_height: args.location.block_height, + address: args.transfer.from_address, + avail_balance: 0, + trans_balance: amount.negated(), + }, + { + inscription_id: args.transfer.inscription_id, + brc20_deploy_id: args.transfer.brc20_deploy_id, + block_height: args.location.block_height, + address: args.location.address, + avail_balance: amount, + trans_balance: 0, + }, + ]; + await sql` + INSERT INTO brc20_balances ${sql(changes)} + `; + // Keep the new valid owner of the transfer inscription + await sql` + UPDATE brc20_transfers + SET to_address = ${args.location.address} + WHERE id = ${args.transfer.id} + `; + }); + } } diff --git a/src/pg/types.ts b/src/pg/types.ts index 437d55bb..40931c27 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -243,6 +243,17 @@ export type DbBrc20Deploy = { limit?: string; }; +export type DbBrc20Transfer = { + id: string; + inscription_id: number; + brc20_deploy_id: number; + block_height: number; + tx_id: string; + from_address: string; + to_address?: string; + amount: string; +}; + export type DbBrc20Token = { id: string; genesis_id: string; @@ -283,6 +294,17 @@ export const BRC20_DEPLOYS_COLUMNS = [ 'limit', ]; +export const BRC20_TRANSFERS_COLUMNS = [ + 'id', + 'inscription_id', + 'brc20_deploy_id', + 'block_height', + 'tx_id', + 'from_address', + 'to_address', + 'amount', +]; + export const BRC20_EVENTS_COLUMNS = [ 'id', 'inscription_id', diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 978a798e..a7a260af 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1111,8 +1111,7 @@ describe('BRC-20', () => { }); describe('transfer', () => { - test('available balance decreases on transfer inscription', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + const deployAndMintPEPE = async (address: string) => { await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -1163,6 +1162,11 @@ describe('BRC-20', () => { ) .build() ); + }; + + test('available balance decreases on transfer inscription', async () => { + const address = 
'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await deployAndMintPEPE(address); await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -1208,56 +1212,7 @@ describe('BRC-20', () => { test('cannot transfer more than available balance', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: 775617, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', - }) - .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'deploy', - tick: 'PEPE', - max: '250000', - }, - number: 5, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', - address: address, - }) - ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: 775618, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) - .transaction({ - hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'mint', - tick: 'PEPE', - amt: '10000', - }, - number: 6, - tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', - address: address, - }) - ) - .build() - ); + await deployAndMintPEPE(address); await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -1303,56 +1258,70 @@ describe('BRC-20', () => { test('multiple transfers in block', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await deployAndMintPEPE(address); await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() .block({ - height: 775617, - hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + height: 775619, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', }) .transaction({ - hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', }) .inscriptionRevealed( brc20Reveal({ json: { p: 'brc-20', - op: 'deploy', + op: 'transfer', tick: 'PEPE', - max: '250000', + amt: '9000', }, - number: 5, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + number: 7, + tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', address: address, }) ) - .build() - ); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: 775618, - hash: '0000000000000000000098d8f2663891d439f6bb7de230d4e9f6bcc2e85452bf', - }) .transaction({ - hash: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', }) .inscriptionRevealed( brc20Reveal({ json: { p: 'brc-20', - op: 'mint', + op: 'transfer', tick: 'PEPE', - amt: '10000', + amt: '2000', // Will exceed available balance }, - number: 6, - tx_id: '3b55f624eaa4f8de6c42e0c490176b67123a83094384f658611faf7bfb85dd0f', + number: 8, + tx_id: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', address: address, }) ) .build() ); + + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response.statusCode).toBe(200); + const json = response.json(); + expect(json.total).toBe(1); + 
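        // Expected accounting for this case, restated for clarity: the mint credits 10000 PEPE,
        // the first transfer inscription (9000) fits within the available balance and is applied,
        // and the second transfer (2000) is ignored because only 1000 remains available. The
        // assertion below therefore expects 1000 available, 9000 transferrable, 10000 overall.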
expect(json.results).toStrictEqual([ + { + available_balance: '1000', + overall_balance: '10000', + ticker: 'PEPE', + transferrable_balance: '9000', + }, + ]); + }); + + test('send balance to address', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; + await deployAndMintPEPE(address); await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -1376,40 +1345,65 @@ describe('BRC-20', () => { address: address, }) ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775619, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', + }) .transaction({ hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '2000', // Will exceed available balance - }, - number: 8, - tx_id: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', - address: address, - }) - ) + .inscriptionTransferred({ + inscription_number: 7, + inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + ordinal_number: 0, + updated_address: address2, + satpoint_pre_transfer: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', + satpoint_post_transfer: + '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', + post_transfer_output_value: null, + }) .build() ); - const response = await fastify.inject({ + const response1 = await fastify.inject({ method: 'GET', url: `/ordinals/brc-20/balances?address=${address}`, }); - expect(response.statusCode).toBe(200); - const json = response.json(); - expect(json.total).toBe(1); - expect(json.results).toStrictEqual([ + expect(response1.statusCode).toBe(200); + const json1 = response1.json(); + expect(json1.total).toBe(1); + expect(json1.results).toStrictEqual([ { available_balance: '1000', - overall_balance: '10000', + overall_balance: '1000', ticker: 'PEPE', - transferrable_balance: '9000', + transferrable_balance: '0', + }, + ]); + + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address2}`, + }); + expect(response2.statusCode).toBe(200); + const json2 = response2.json(); + expect(json2.total).toBe(1); + expect(json2.results).toStrictEqual([ + { + available_balance: '9000', + overall_balance: '9000', + ticker: 'PEPE', + transferrable_balance: '0', }, ]); }); + + test.skip('cannot spend valid transfer twice', async () => {}); }); }); From 542ec34292d7535d01f62832b270e11b80b59da4 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 14 Jun 2023 18:20:01 -0600 Subject: [PATCH 20/56] fix: transfers only usable once --- src/pg/pg-store.ts | 24 ++++++---- tests/brc20.test.ts | 114 +++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 127 insertions(+), 11 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index d10dfa6f..c2979fac 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -217,8 +217,10 @@ export class PgStore extends BasePgStore { } } } + await this.normalizeInscriptionLocations({ + inscription_id: Array.from(updatedInscriptionIds), + }); }); - await this.normalizeInscriptionLocations({ inscription_id: Array.from(updatedInscriptionIds) }); await this.refreshMaterializedView('chain_tip'); await this.refreshMaterializedView('inscription_count'); await 
this.refreshMaterializedView('mime_type_counts'); @@ -796,23 +798,27 @@ export class PgStore extends BasePgStore { `; // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by - // this address that hasn't been sent to another address before. + // this address that hasn't been sent to another address before. Use `LIMIT 3` as a quick way + // of checking if we have just inserted the first transfer for this inscription (genesis + + // transfer). const brc20Transfer = await sql` SELECT ${sql(BRC20_TRANSFERS_COLUMNS.map(c => `t.${c}`))} FROM locations AS l INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id - WHERE - l.inscription_id = ${inscription_id} - AND l.address = ${args.location.address} - AND l.genesis = TRUE - AND l.current = TRUE - LIMIT 1 + WHERE l.inscription_id = ${inscription_id} + LIMIT 3 `; - if (brc20Transfer.count > 0) { + if (brc20Transfer.count === 2) { + // This is the first time this BRC-20 transfer is being used. Apply the balance change. await this.applyBrc20BalanceTransfer({ transfer: brc20Transfer[0], location: args.location, }); + } else { + logger.debug( + { genesis_id: args.location.genesis_id, block_height: args.location.block_height }, + `PgStore [BRC-20] ignoring balance change for transfer that was already used` + ); } }); return inscription_id; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index a7a260af..09ae371b 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1347,6 +1347,67 @@ describe('BRC-20', () => { ) .build() ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775620, + hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', + }) + .transaction({ + hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', + }) + .inscriptionTransferred({ + inscription_number: 7, + inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + ordinal_number: 0, + updated_address: address2, + satpoint_pre_transfer: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', + satpoint_post_transfer: + '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', + post_transfer_output_value: null, + }) + .build() + ); + + const response1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address}`, + }); + expect(response1.statusCode).toBe(200); + const json1 = response1.json(); + expect(json1.total).toBe(1); + expect(json1.results).toStrictEqual([ + { + available_balance: '1000', + overall_balance: '1000', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances?address=${address2}`, + }); + expect(response2.statusCode).toBe(200); + const json2 = response2.json(); + expect(json2.total).toBe(1); + expect(json2.results).toStrictEqual([ + { + available_balance: '9000', + overall_balance: '9000', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + }); + + test('cannot spend valid transfer twice', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; + await deployAndMintPEPE(address); await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() @@ -1354,6 +1415,31 @@ describe('BRC-20', () => { height: 775619, hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', }) + .transaction({ + hash: 
'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '9000', + }, + number: 7, + tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775620, + hash: '000000000000000000016ddf56d0fe72476165acee9500d48d3e2aaf8412f489', + }) .transaction({ hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', }) @@ -1370,7 +1456,33 @@ describe('BRC-20', () => { }) .build() ); + // Attempt to transfer the same inscription back to the original address to change its + // balance. + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775621, + hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', + }) + .transaction({ + hash: '55bec906eadc9f5c120cc39555ba46e85e562eacd6217e4dd0b8552783286d0e', + }) + .inscriptionTransferred({ + inscription_number: 7, + inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + ordinal_number: 0, + updated_address: address, + satpoint_pre_transfer: + '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', + satpoint_post_transfer: + '55bec906eadc9f5c120cc39555ba46e85e562eacd6217e4dd0b8552783286d0e:0:0', + post_transfer_output_value: null, + }) + .build() + ); + // Balances only reflect the first transfer. const response1 = await fastify.inject({ method: 'GET', url: `/ordinals/brc-20/balances?address=${address}`, @@ -1403,7 +1515,5 @@ describe('BRC-20', () => { }, ]); }); - - test.skip('cannot spend valid transfer twice', async () => {}); }); }); From da5b4b0d39f3bc1cb650864abd1a4786aebecb98 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 15 Jun 2023 10:55:59 -0600 Subject: [PATCH 21/56] refactor: extract to separate functions --- src/pg/pg-store.ts | 128 ++++++++++++++++++++++++++++----------------- 1 file changed, 80 insertions(+), 48 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index c2979fac..971810f5 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -722,30 +722,12 @@ export class PgStore extends BasePgStore { timestamp = EXCLUDED.timestamp `; - // Is this a BRC-20 operation? - // TODO: No valid action can occur via the spending of an ordinal via transaction fee. - const brc20 = brc20FromInscription(args.inscription); - if (brc20) { - switch (brc20.op) { - case 'deploy': - await this.insertBrc20Deploy({ - deploy: brc20, - inscription_id, - location: args.location, - }); - break; - case 'mint': - await this.insertBrc20Mint({ mint: brc20, inscription_id, location: args.location }); - break; - case 'transfer': - await this.insertBrc20Transfer({ - transfer: brc20, - inscription_id, - location: args.location, - }); - break; - } - } + // Insert BRC-20 op genesis (if any). + await this.insertBrc20OperationGenesis({ + inscription_id, + inscription: args.inscription, + location: args.location, + }); }); return inscription_id; } @@ -797,29 +779,8 @@ export class PgStore extends BasePgStore { timestamp = EXCLUDED.timestamp `; - // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by - // this address that hasn't been sent to another address before. Use `LIMIT 3` as a quick way - // of checking if we have just inserted the first transfer for this inscription (genesis + - // transfer). 
- const brc20Transfer = await sql` - SELECT ${sql(BRC20_TRANSFERS_COLUMNS.map(c => `t.${c}`))} - FROM locations AS l - INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id - WHERE l.inscription_id = ${inscription_id} - LIMIT 3 - `; - if (brc20Transfer.count === 2) { - // This is the first time this BRC-20 transfer is being used. Apply the balance change. - await this.applyBrc20BalanceTransfer({ - transfer: brc20Transfer[0], - location: args.location, - }); - } else { - logger.debug( - { genesis_id: args.location.genesis_id, block_height: args.location.block_height }, - `PgStore [BRC-20] ignoring balance change for transfer that was already used` - ); - } + // Insert BRC-20 balance transfers (if any). + await this.insertBrc20OperationTransfer({ inscription_id, location: args.location }); }); return inscription_id; } @@ -876,6 +837,78 @@ export class PgStore extends BasePgStore { }); } + private async insertBrc20OperationGenesis(args: { + inscription_id: number; + inscription: DbInscriptionInsert; + location: DbLocationInsert; + }): Promise { + // Is this a BRC-20 operation? Is it being inscribed to a valid address? + const brc20 = brc20FromInscription(args.inscription); + if (brc20) { + if (args.location.address) { + switch (brc20.op) { + case 'deploy': + await this.insertBrc20Deploy({ + deploy: brc20, + inscription_id: args.inscription_id, + location: args.location, + }); + break; + case 'mint': + await this.insertBrc20Mint({ + mint: brc20, + inscription_id: args.inscription_id, + location: args.location, + }); + break; + case 'transfer': + await this.insertBrc20Transfer({ + transfer: brc20, + inscription_id: args.inscription_id, + location: args.location, + }); + break; + } + } else { + logger.debug( + { block_height: args.location.block_height, tick: brc20.tick }, + `PgStore [BRC-20] ignoring operation spent as fee` + ); + } + } + } + + private async insertBrc20OperationTransfer(args: { + inscription_id: number; + location: DbLocationInsert; + }): Promise { + // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by + // this address that hasn't been sent to another address before. Use `LIMIT 3` as a quick way + // of checking if we have just inserted the first transfer for this inscription (genesis + + // transfer). + await this.sqlWriteTransaction(async sql => { + const brc20Transfer = await sql` + SELECT ${sql(BRC20_TRANSFERS_COLUMNS.map(c => `t.${c}`))} + FROM locations AS l + INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id + WHERE l.inscription_id = ${args.inscription_id} + LIMIT 3 + `; + if (brc20Transfer.count === 2) { + // This is the first time this BRC-20 transfer is being used. Apply the balance change. + await this.applyBrc20BalanceTransfer({ + transfer: brc20Transfer[0], + location: args.location, + }); + } else { + logger.debug( + { genesis_id: args.location.genesis_id, block_height: args.location.block_height }, + `PgStore [BRC-20] ignoring balance change for transfer that was already used` + ); + } + }); + } + private async insertBrc20Deploy(args: { deploy: Brc20Deploy; inscription_id: number; @@ -899,7 +932,6 @@ export class PgStore extends BasePgStore { limit: args.deploy.lim ?? null, decimals: args.deploy.dec ?? 
'18', }; - // TODO: Maximum supply cannot exceed uint64_max const insertion = await sql<{ id: string }[]>` INSERT INTO brc20_deploys ${sql(deploy)} ON CONFLICT (LOWER(ticker)) DO NOTHING From 07320489889b85c881ab49a4ce10d0d21a750114 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 15 Jun 2023 10:58:40 -0600 Subject: [PATCH 22/56] fix: remove old json content tables --- src/pg/pg-store.ts | 20 ----------- src/pg/types.ts | 16 --------- tests/server.test.ts | 79 -------------------------------------------- 3 files changed, 115 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 971810f5..3f57753b 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -27,11 +27,9 @@ import { DbInscriptionIndexResultCountType, DbInscriptionInsert, DbInscriptionLocationChange, - DbJsonContent, DbLocation, DbLocationInsert, DbPaginatedResult, - JSON_CONTENTS_COLUMNS, LOCATIONS_COLUMNS, DbBrc20EventInsert, BRC20_EVENTS_COLUMNS, @@ -526,24 +524,6 @@ export class PgStore extends BasePgStore { }; } - async getJsonContent(args: InscriptionIdentifier): Promise { - const results = await this.sql` - SELECT ${this.sql(JSON_CONTENTS_COLUMNS.map(c => `j.${c}`))} - FROM json_contents AS j - INNER JOIN inscriptions AS i ON j.inscription_id = i.id - WHERE - ${ - 'number' in args - ? this.sql`i.number = ${args.number}` - : this.sql`i.genesis_id = ${args.genesis_id}` - } - LIMIT 1 - `; - if (results.count === 1) { - return results[0]; - } - } - async getBrc20Tokens(args: { ticker?: string[] }): Promise> { const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; const results = await this.sql<(DbBrc20Token & { total: number })[]>` diff --git a/src/pg/types.ts b/src/pg/types.ts index 40931c27..b9daab6a 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -160,22 +160,6 @@ export const INSCRIPTIONS_COLUMNS = [ 'curse_type', ]; -export type DbJsonContent = { - id: string; - inscription_id: string; - p?: string; - op?: string; - content: OpJson; -}; - -export type DbJsonContentInsert = { - p: string | null; - op: string | null; - content: PgJsonb; -}; - -export const JSON_CONTENTS_COLUMNS = ['id', 'inscription_id', 'p', 'op', 'content']; - export type DbInscriptionIndexPaging = { limit: number; offset: number; diff --git a/tests/server.test.ts b/tests/server.test.ts index 5f53527b..0af81cad 100644 --- a/tests/server.test.ts +++ b/tests/server.test.ts @@ -385,84 +385,5 @@ describe('EventServer', () => { const c2 = await db.sql<{ count: number }[]>`SELECT COUNT(*)::int FROM locations`; expect(c2[0].count).toBe(1); }); - - test('saves p/op json content', async () => { - const reveal = { - block_identifier: { - index: 107, - hash: '0x163de66dc9c0949905bfe8e148bde04600223cf88d19f26fdbeba1d6e6fa0f88', - }, - parent_block_identifier: { - index: 106, - hash: '0x117374e7078440835a744b6b1b13dd2c48c4eff8c58dde07162241a8f15d1e03', - }, - timestamp: 1677803510, - transactions: [ - { - transaction_identifier: { - hash: '0x0268dd9743c862d80ab02cb1d0228036cfe172522850eb96be60cfee14b31fb8', - }, - operations: [], - metadata: { - ordinal_operations: [ - { - inscription_revealed: { - // { "p": "sns", "op": "reg", "name": "treysongz.sats" } - content_bytes: - '0x7b202270223a2022736e73222c20226f70223a2022726567222c20226e616d65223a202274726579736f6e677a2e7361747322207d', - content_type: 'text/plain;charset=utf-8', - content_length: 12, - inscription_number: 100, - inscription_fee: 3425, - inscription_id: - '0268dd9743c862d80ab02cb1d0228036cfe172522850eb96be60cfee14b31fb8i0', - 
inscriber_address: - 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', - ordinal_number: 125348773618236, - ordinal_block_height: 566462, - ordinal_offset: 0, - satpoint_post_inscription: - '0268dd9743c862d80ab02cb1d0228036cfe172522850eb96be60cfee14b31fb8:0:0', - inscription_output_value: 10000, - }, - }, - ], - proof: '0x12341234', - }, - }, - ], - metadata: {}, - }; - - // Apply - const payload1 = { - apply: [reveal], - rollback: [], - chainhook: { - uuid: '1', - predicate: { - scope: 'ordinals_protocol', - operation: 'inscription_feed', - }, - }, - }; - const response = await fastify.inject({ - method: 'POST', - url: '/chainhook/inscription_feed', - headers: { authorization: `Bearer ${ENV.CHAINHOOK_NODE_AUTH_TOKEN}` }, - payload: payload1, - }); - expect(response.statusCode).toBe(200); - - const json = await db.getJsonContent({ number: 100 }); - expect(json).not.toBeUndefined(); - expect(json?.p).toBe('sns'); - expect(json?.op).toBe('reg'); - expect(json?.content).toStrictEqual({ - name: 'treysongz.sats', - op: 'reg', - p: 'sns', - }); - }); }); }); From 687c2e43cc5782a2521c3442c0d7fcfe90943b67 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 16 Jun 2023 09:52:05 -0600 Subject: [PATCH 23/56] fix: balances/:address --- src/api/routes/brc20.ts | 8 +++++--- tests/brc20.test.ts | 30 +++++++++++++++--------------- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts index 90c71296..681b84dc 100644 --- a/src/api/routes/brc20.ts +++ b/src/api/routes/brc20.ts @@ -51,15 +51,17 @@ export const Brc20Routes: FastifyPluginCallback< ); fastify.get( - '/brc-20/balances', + '/brc-20/balances/:address', { schema: { operationId: 'getBrc20Balances', summary: 'BRC-20 Balances', description: 'Retrieves BRC-20 token balances for a Bitcoin address', tags: ['BRC-20'], - querystring: Type.Object({ + params: Type.Object({ address: AddressParam, + }), + querystring: Type.Object({ ticker: Type.Optional(Brc20TickersParam), // Pagination offset: Type.Optional(OffsetParam), @@ -76,7 +78,7 @@ export const Brc20Routes: FastifyPluginCallback< const balances = await fastify.db.getBrc20Balances({ limit, offset, - address: request.query.address, + address: request.params.address, ticker: request.query.ticker, }); await reply.send({ diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 09ae371b..7281efe3 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -657,7 +657,7 @@ describe('BRC-20', () => { const response1 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response1.statusCode).toBe(200); const responseJson1 = response1.json(); @@ -700,7 +700,7 @@ describe('BRC-20', () => { const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response2.statusCode).toBe(200); const responseJson2 = response2.json(); @@ -796,7 +796,7 @@ describe('BRC-20', () => { const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response2.statusCode).toBe(200); const responseJson2 = response2.json(); @@ -860,7 +860,7 @@ describe('BRC-20', () => { const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); 
expect(response2.statusCode).toBe(200); const responseJson2 = response2.json(); @@ -956,7 +956,7 @@ describe('BRC-20', () => { const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response2.statusCode).toBe(200); const responseJson2 = response2.json(); @@ -999,7 +999,7 @@ describe('BRC-20', () => { const response3 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response3.statusCode).toBe(200); const responseJson3 = response3.json(); @@ -1036,7 +1036,7 @@ describe('BRC-20', () => { const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response2.statusCode).toBe(200); const responseJson2 = response2.json(); @@ -1101,7 +1101,7 @@ describe('BRC-20', () => { const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response2.statusCode).toBe(200); const responseJson2 = response2.json(); @@ -1195,7 +1195,7 @@ describe('BRC-20', () => { const response = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response.statusCode).toBe(200); const json = response.json(); @@ -1241,7 +1241,7 @@ describe('BRC-20', () => { const response = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response.statusCode).toBe(200); const json = response.json(); @@ -1303,7 +1303,7 @@ describe('BRC-20', () => { const response = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response.statusCode).toBe(200); const json = response.json(); @@ -1373,7 +1373,7 @@ describe('BRC-20', () => { const response1 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response1.statusCode).toBe(200); const json1 = response1.json(); @@ -1389,7 +1389,7 @@ describe('BRC-20', () => { const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address2}`, + url: `/ordinals/brc-20/balances/${address2}`, }); expect(response2.statusCode).toBe(200); const json2 = response2.json(); @@ -1485,7 +1485,7 @@ describe('BRC-20', () => { // Balances only reflect the first transfer. 
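      // This follows from the transfer-location handling in pg-store: only the first movement of a
      // transfer inscription after its genesis applies a balance change, while any later movement of
      // the same inscription hits the "transfer that was already used" branch and leaves balances
      // untouched. Sending the inscription back to the original owner therefore changes nothing.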
const response1 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address}`, + url: `/ordinals/brc-20/balances/${address}`, }); expect(response1.statusCode).toBe(200); const json1 = response1.json(); @@ -1501,7 +1501,7 @@ describe('BRC-20', () => { const response2 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances?address=${address2}`, + url: `/ordinals/brc-20/balances/${address2}`, }); expect(response2.statusCode).toBe(200); const json2 = response2.json(); From 5d35d5b0eefb46eeac91ead52f4909279e39404d Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 20 Jun 2023 22:06:39 -0600 Subject: [PATCH 24/56] feat: token details --- src/api/routes/brc20.ts | 48 ++++++++++++++++++++++++++++++++++++++++- src/api/schemas.ts | 16 ++++++++++++++ src/api/util/helpers.ts | 10 +++++++++ src/pg/pg-store.ts | 30 ++++++++++++++++++++++++++ src/pg/types.ts | 6 ++++++ 5 files changed, 109 insertions(+), 1 deletion(-) diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts index 681b84dc..777ef942 100644 --- a/src/api/routes/brc20.ts +++ b/src/api/routes/brc20.ts @@ -5,13 +5,22 @@ import { Server } from 'http'; import { AddressParam, Brc20BalanceResponseSchema, + Brc20TickerParam, Brc20TickersParam, + Brc20TokenDetailsSchema, Brc20TokenResponseSchema, LimitParam, + NotFoundResponse, OffsetParam, PaginatedResponse, } from '../schemas'; -import { DEFAULT_API_LIMIT, parseBrc20Balances, parseBrc20Tokens } from '../util/helpers'; +import { + DEFAULT_API_LIMIT, + parseBrc20Balances, + parseBrc20Supply, + parseBrc20Tokens, +} from '../util/helpers'; +import { Value } from '@sinclair/typebox/value'; export const Brc20Routes: FastifyPluginCallback< Record, @@ -50,6 +59,43 @@ export const Brc20Routes: FastifyPluginCallback< } ); + fastify.get( + '/brc-20/tokens/:ticker', + { + schema: { + operationId: 'getBrc20TokenDetails', + summary: 'BRC-20 Token Details', + description: 'Retrieves information for a BRC-20 token including supply and holders', + tags: ['BRC-20'], + params: Type.Object({ + ticker: Brc20TickerParam, + }), + response: { + 200: Brc20TokenDetailsSchema, + 404: NotFoundResponse, + }, + }, + }, + async (request, reply) => { + await fastify.db.sqlTransaction(async sql => { + const token = await fastify.db.getBrc20Tokens({ ticker: [request.params.ticker] }); + if (!token) { + await reply.code(404).send(Value.Create(NotFoundResponse)); + return; + } + const supply = await fastify.db.getBrc20TokenSupply({ ticker: request.params.ticker }); + if (!supply) { + await reply.code(404).send(Value.Create(NotFoundResponse)); + return; + } + await reply.send({ + token: parseBrc20Tokens(token.results)[0], + supply: parseBrc20Supply(supply), + }); + }); + } + ); + fastify.get( '/brc-20/balances/:address', { diff --git a/src/api/schemas.ts b/src/api/schemas.ts index b8669990..b3c1d60d 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -363,6 +363,22 @@ export const Brc20TokenResponseSchema = Type.Object( ); export type Brc20TokenResponse = Static; +export const Brc20SupplySchema = Type.Object({ + max_supply: Type.String({ examples: ['21000000'] }), + minted_supply: Type.String({ examples: ['1000000'] }), + holders: Type.Integer({ examples: [240] }), +}); +export type Brc20Supply = Static; + +export const Brc20TokenDetailsSchema = Type.Object( + { + token: Brc20TokenResponseSchema, + supply: Brc20SupplySchema, + }, + { title: 'BRC-20 Token Details Response' } +); +export type Brc20TokenDetails = Static; + export const NotFoundResponse = 
Type.Object( { error: Type.Literal('Not found'), diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index a682cbc5..05c13b19 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -1,5 +1,6 @@ import { DbBrc20Balance, + DbBrc20Supply, DbBrc20Token, DbFullyLocatedInscriptionResult, DbInscriptionLocationChange, @@ -8,6 +9,7 @@ import { import { BlockInscriptionTransfer, Brc20BalanceResponse, + Brc20Supply, Brc20TokenResponse, InscriptionLocationResponse, InscriptionResponseType, @@ -114,6 +116,14 @@ export function parseBrc20Tokens(items: DbBrc20Token[]): Brc20TokenResponse[] { })); } +export function parseBrc20Supply(item: DbBrc20Supply): Brc20Supply { + return { + max_supply: item.max_supply, + minted_supply: item.minted_supply, + holders: parseInt(item.holders), + }; +} + export function parseBrc20Balances(items: DbBrc20Balance[]): Brc20BalanceResponse[] { return items.map(i => ({ ticker: i.ticker, diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 3f57753b..933c7b53 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -37,6 +37,7 @@ import { BRC20_DEPLOYS_COLUMNS, BRC20_TRANSFERS_COLUMNS, DbBrc20Transfer, + DbBrc20Supply, } from './types'; type InscriptionIdentifier = { genesis_id: string } | { number: number }; @@ -593,6 +594,35 @@ export class PgStore extends BasePgStore { `; } + async getBrc20TokenSupply(args: { ticker: string }): Promise { + return await this.sqlTransaction(async sql => { + const deploy = await this.getBrc20Deploy(args); + if (!deploy) { + return; + } + const minted = await sql<{ total: string }[]>` + SELECT SUM(avail_balance + trans_balance) AS total + FROM brc20_balances + WHERE brc20_deploy_id = ${deploy.id} + GROUP BY ticker + `; + const holders = await sql<{ count: string }[]>` + SELECT SUM(avail_balance + trans_balance) AS balance, COUNT(*) OVER() AS count + FROM brc20_balances + WHERE brc20_deploy_id = ${deploy.id} AND balance > 0 + GROUP BY address + `; + const supply = await sql<{ max: string }[]>` + SELECT max FROM brc20_deploys WHERE id = ${deploy.id} + `; + return { + max_supply: supply[0].max, + minted_supply: minted[0].total, + holders: holders[0].count, + }; + }); + } + async refreshMaterializedView(viewName: string) { const isProd = process.env.NODE_ENV === 'production'; await this.sql`REFRESH MATERIALIZED VIEW ${ diff --git a/src/pg/types.ts b/src/pg/types.ts index b9daab6a..2368f416 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -251,6 +251,12 @@ export type DbBrc20Token = { decimals: number; }; +export type DbBrc20Supply = { + max_supply: string; + minted_supply: string; + holders: string; +}; + export type DbBrc20Balance = { ticker: string; avail_balance: string; From a01f77ef6c9c03576a07a7cdc14d0279afc44cbb Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Sun, 25 Jun 2023 12:22:23 -0600 Subject: [PATCH 25/56] feat: holders endpoint --- src/api/routes/brc20.ts | 60 +++++++++++++++++++++++++++++++++++++++-- src/api/schemas.ts | 8 ++++++ src/api/util/helpers.ts | 9 +++++++ src/chainhook/server.ts | 6 ++--- src/pg/pg-store.ts | 49 ++++++++++++++++++++++++++++----- src/pg/types.ts | 5 ++++ 6 files changed, 126 insertions(+), 11 deletions(-) diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts index 777ef942..14eb4bb6 100644 --- a/src/api/routes/brc20.ts +++ b/src/api/routes/brc20.ts @@ -5,6 +5,7 @@ import { Server } from 'http'; import { AddressParam, Brc20BalanceResponseSchema, + Brc20HolderResponseSchema, Brc20TickerParam, Brc20TickersParam, Brc20TokenDetailsSchema, @@ -17,16 +18,20 @@ 
import { import { DEFAULT_API_LIMIT, parseBrc20Balances, + parseBrc20Holders, parseBrc20Supply, parseBrc20Tokens, } from '../util/helpers'; import { Value } from '@sinclair/typebox/value'; +import { handleInscriptionTransfersCache } from '../util/cache'; export const Brc20Routes: FastifyPluginCallback< Record, Server, TypeBoxTypeProvider > = (fastify, options, done) => { + fastify.addHook('preHandler', handleInscriptionTransfersCache); + fastify.get( '/brc-20/tokens', { @@ -49,7 +54,11 @@ export const Brc20Routes: FastifyPluginCallback< async (request, reply) => { const limit = request.query.limit ?? DEFAULT_API_LIMIT; const offset = request.query.offset ?? 0; - const response = await fastify.db.getBrc20Tokens({ ticker: request.query.ticker }); + const response = await fastify.db.getBrc20Tokens({ + limit, + offset, + ticker: request.query.ticker, + }); await reply.send({ limit, offset, @@ -78,7 +87,11 @@ export const Brc20Routes: FastifyPluginCallback< }, async (request, reply) => { await fastify.db.sqlTransaction(async sql => { - const token = await fastify.db.getBrc20Tokens({ ticker: [request.params.ticker] }); + const token = await fastify.db.getBrc20Tokens({ + limit: 1, + offset: 0, + ticker: [request.params.ticker], + }); if (!token) { await reply.code(404).send(Value.Create(NotFoundResponse)); return; @@ -96,6 +109,49 @@ export const Brc20Routes: FastifyPluginCallback< } ); + fastify.get( + '/brc-20/tokens/:ticker/holders', + { + schema: { + operationId: 'getBrc20TokenHolders', + summary: 'BRC-20 Token Holders', + description: 'Retrieves a list of holders and their balances for a BRC-20 token', + tags: ['BRC-20'], + params: Type.Object({ + ticker: Brc20TickerParam, + }), + querystring: Type.Object({ + // Pagination + offset: Type.Optional(OffsetParam), + limit: Type.Optional(LimitParam), + }), + response: { + 200: PaginatedResponse(Brc20HolderResponseSchema, 'Paginated BRC-20 Holders Response'), + 404: NotFoundResponse, + }, + }, + }, + async (request, reply) => { + const limit = request.query.limit ?? DEFAULT_API_LIMIT; + const offset = request.query.offset ?? 
0; + const holders = await fastify.db.getBrc20TokenHolders({ + limit, + offset, + ticker: request.params.ticker, + }); + if (!holders) { + await reply.code(404).send(Value.Create(NotFoundResponse)); + return; + } + await reply.send({ + limit, + offset, + total: holders.total, + results: parseBrc20Holders(holders.results), + }); + } + ); + fastify.get( '/brc-20/balances/:address', { diff --git a/src/api/schemas.ts b/src/api/schemas.ts index b3c1d60d..d48c4ff9 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -370,6 +370,14 @@ export const Brc20SupplySchema = Type.Object({ }); export type Brc20Supply = Static; +export const Brc20HolderResponseSchema = Type.Object({ + address: Type.String({ + examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'], + }), + overall_balance: Type.String({ examples: ['2000.00000'] }), +}); +export type Brc20HolderResponse = Static; + export const Brc20TokenDetailsSchema = Type.Object( { token: Brc20TokenResponseSchema, diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index 05c13b19..926c8dbe 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -1,5 +1,6 @@ import { DbBrc20Balance, + DbBrc20Holder, DbBrc20Supply, DbBrc20Token, DbFullyLocatedInscriptionResult, @@ -9,6 +10,7 @@ import { import { BlockInscriptionTransfer, Brc20BalanceResponse, + Brc20HolderResponse, Brc20Supply, Brc20TokenResponse, InscriptionLocationResponse, @@ -133,6 +135,13 @@ export function parseBrc20Balances(items: DbBrc20Balance[]): Brc20BalanceRespons })); } +export function parseBrc20Holders(items: DbBrc20Holder[]): Brc20HolderResponse[] { + return items.map(i => ({ + address: i.address, + overall_balance: i.total_balance, + })); +} + export function parseSatPoint(satpoint: string): { tx_id: string; vout: string; diff --git a/src/chainhook/server.ts b/src/chainhook/server.ts index 14e6db5c..31af9219 100644 --- a/src/chainhook/server.ts +++ b/src/chainhook/server.ts @@ -146,9 +146,9 @@ export async function buildChainhookServer(args: { db: PgStore }) { }).withTypeProvider(); fastify.decorate('db', args.db); - fastify.addHook('onReady', waitForChainhookNode); - fastify.addHook('onReady', registerChainhookPredicates); - fastify.addHook('onClose', removeChainhookPredicates); + // fastify.addHook('onReady', waitForChainhookNode); + // fastify.addHook('onReady', registerChainhookPredicates); + // fastify.addHook('onClose', removeChainhookPredicates); await fastify.register(Chainhook); return fastify; diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 933c7b53..0d29a618 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -38,6 +38,7 @@ import { BRC20_TRANSFERS_COLUMNS, DbBrc20Transfer, DbBrc20Supply, + DbBrc20Holder, } from './types'; type InscriptionIdentifier = { genesis_id: string } | { number: number }; @@ -525,7 +526,9 @@ export class PgStore extends BasePgStore { }; } - async getBrc20Tokens(args: { ticker?: string[] }): Promise> { + async getBrc20Tokens( + args: { ticker?: string[] } & DbInscriptionIndexPaging + ): Promise> { const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; const results = await this.sql<(DbBrc20Token & { total: number })[]>` SELECT @@ -534,6 +537,8 @@ export class PgStore extends BasePgStore { FROM brc20_deploys AS d INNER JOIN inscriptions AS i ON i.id = d.inscription_id ${lowerTickers ? this.sql`WHERE LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} + OFFSET ${args.offset} + LIMIT ${args.limit} `; return { total: results[0]?.total ?? 
0, @@ -604,13 +609,18 @@ export class PgStore extends BasePgStore { SELECT SUM(avail_balance + trans_balance) AS total FROM brc20_balances WHERE brc20_deploy_id = ${deploy.id} - GROUP BY ticker + GROUP BY brc20_deploy_id `; const holders = await sql<{ count: string }[]>` - SELECT SUM(avail_balance + trans_balance) AS balance, COUNT(*) OVER() AS count - FROM brc20_balances - WHERE brc20_deploy_id = ${deploy.id} AND balance > 0 - GROUP BY address + WITH historical_holders AS ( + SELECT SUM(avail_balance + trans_balance) AS balance + FROM brc20_balances + WHERE brc20_deploy_id = ${deploy.id} + GROUP BY address + ) + SELECT COUNT(*) AS count + FROM historical_holders + WHERE balance > 0 `; const supply = await sql<{ max: string }[]>` SELECT max FROM brc20_deploys WHERE id = ${deploy.id} @@ -623,6 +633,33 @@ export class PgStore extends BasePgStore { }); } + async getBrc20TokenHolders( + args: { + ticker: string; + } & DbInscriptionIndexPaging + ): Promise | undefined> { + return await this.sqlTransaction(async sql => { + const deploy = await this.getBrc20Deploy(args); + if (!deploy) { + return; + } + const results = await this.sql<(DbBrc20Holder & { total: number })[]>` + SELECT + address, SUM(avail_balance + trans_balance) AS total_balance, COUNT(*) OVER() AS total + FROM brc20_balances + WHERE brc20_deploy_id = ${deploy.id} + GROUP BY address + ORDER BY total_balance DESC + LIMIT ${args.limit} + OFFSET ${args.offset} + `; + return { + total: results[0]?.total ?? 0, + results: results ?? [], + }; + }); + } + async refreshMaterializedView(viewName: string) { const isProd = process.env.NODE_ENV === 'production'; await this.sql`REFRESH MATERIALIZED VIEW ${ diff --git a/src/pg/types.ts b/src/pg/types.ts index 2368f416..50c6a814 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -257,6 +257,11 @@ export type DbBrc20Supply = { holders: string; }; +export type DbBrc20Holder = { + address: string; + total_balance: string; +}; + export type DbBrc20Balance = { ticker: string; avail_balance: string; From 44d918453c5adc27f75abcb88270cbd9512cfae7 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 20 Jul 2023 12:06:14 -0600 Subject: [PATCH 26/56] refactor: move brc20 pg to separate file --- src/api/routes/brc20.ts | 10 +- src/pg/brc20-pg-store.ts | 477 ++++++++++++++++++++++++++++++++++++++ src/pg/pg-store.ts | 484 ++------------------------------------- 3 files changed, 500 insertions(+), 471 deletions(-) create mode 100644 src/pg/brc20-pg-store.ts diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts index 14eb4bb6..d2cfc5aa 100644 --- a/src/api/routes/brc20.ts +++ b/src/api/routes/brc20.ts @@ -54,7 +54,7 @@ export const Brc20Routes: FastifyPluginCallback< async (request, reply) => { const limit = request.query.limit ?? DEFAULT_API_LIMIT; const offset = request.query.offset ?? 
0; - const response = await fastify.db.getBrc20Tokens({ + const response = await fastify.db.brc20.getTokens({ limit, offset, ticker: request.query.ticker, @@ -87,7 +87,7 @@ export const Brc20Routes: FastifyPluginCallback< }, async (request, reply) => { await fastify.db.sqlTransaction(async sql => { - const token = await fastify.db.getBrc20Tokens({ + const token = await fastify.db.brc20.getTokens({ limit: 1, offset: 0, ticker: [request.params.ticker], @@ -96,7 +96,7 @@ export const Brc20Routes: FastifyPluginCallback< await reply.code(404).send(Value.Create(NotFoundResponse)); return; } - const supply = await fastify.db.getBrc20TokenSupply({ ticker: request.params.ticker }); + const supply = await fastify.db.brc20.getTokenSupply({ ticker: request.params.ticker }); if (!supply) { await reply.code(404).send(Value.Create(NotFoundResponse)); return; @@ -134,7 +134,7 @@ export const Brc20Routes: FastifyPluginCallback< async (request, reply) => { const limit = request.query.limit ?? DEFAULT_API_LIMIT; const offset = request.query.offset ?? 0; - const holders = await fastify.db.getBrc20TokenHolders({ + const holders = await fastify.db.brc20.getTokenHolders({ limit, offset, ticker: request.params.ticker, @@ -177,7 +177,7 @@ export const Brc20Routes: FastifyPluginCallback< async (request, reply) => { const limit = request.query.limit ?? DEFAULT_API_LIMIT; const offset = request.query.offset ?? 0; - const balances = await fastify.db.getBrc20Balances({ + const balances = await fastify.db.brc20.getBalances({ limit, offset, address: request.params.address, diff --git a/src/pg/brc20-pg-store.ts b/src/pg/brc20-pg-store.ts new file mode 100644 index 00000000..bf59b8b9 --- /dev/null +++ b/src/pg/brc20-pg-store.ts @@ -0,0 +1,477 @@ +import { PgSqlClient, logger } from '@hirosystems/api-toolkit'; +import { PgStore } from './pg-store'; +import { + DbInscriptionIndexPaging, + DbPaginatedResult, + DbBrc20Token, + BRC20_EVENTS_COLUMNS, + DbBrc20Balance, + DbBrc20Holder, + DbBrc20Supply, + BRC20_DEPLOYS_COLUMNS, + BRC20_TRANSFERS_COLUMNS, + DbBrc20Deploy, + DbBrc20DeployInsert, + DbBrc20EventInsert, + DbBrc20Transfer, + DbInscriptionInsert, + DbLocationInsert, +} from './types'; +import BigNumber from 'bignumber.js'; +import { brc20FromInscription, Brc20Deploy, Brc20Mint, Brc20Transfer } from './helpers'; + +export class Brc20PgStore { + // TODO: Move this to the api-toolkit so we can have pg submodules. + private readonly parent: PgStore; + private get sql(): PgSqlClient { + return this.parent.sql; + } + + constructor(db: PgStore) { + this.parent = db; + } + + async getTokens( + args: { ticker?: string[] } & DbInscriptionIndexPaging + ): Promise> { + const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; + const results = await this.sql<(DbBrc20Token & { total: number })[]>` + SELECT + d.id, i.genesis_id, i.number, d.block_height, d.tx_id, d.address, d.ticker, d.max, d.limit, + d.decimals, COUNT(*) OVER() as total + FROM brc20_deploys AS d + INNER JOIN inscriptions AS i ON i.id = d.inscription_id + ${lowerTickers ? this.sql`WHERE LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} + OFFSET ${args.offset} + LIMIT ${args.limit} + `; + return { + total: results[0]?.total ?? 0, + results: results ?? [], + }; + } + + /** + * Returns an address balance for a BRC-20 token. 
+ * @param address - Owner address + * @param ticker - BRC-20 tickers + * @returns `DbBrc20Balance` + */ + async getBalances( + args: { + address: string; + ticker?: string[]; + } & DbInscriptionIndexPaging + ): Promise> { + const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; + const results = await this.sql<(DbBrc20Balance & { total: number })[]>` + SELECT + d.ticker, + SUM(b.avail_balance) AS avail_balance, + SUM(b.trans_balance) AS trans_balance, + SUM(b.avail_balance + b.trans_balance) AS total_balance, + COUNT(*) OVER() as total + FROM brc20_balances AS b + INNER JOIN brc20_deploys AS d ON d.id = b.brc20_deploy_id + WHERE + b.address = ${args.address} + ${lowerTickers ? this.sql`AND LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} + GROUP BY d.ticker + LIMIT ${args.limit} + OFFSET ${args.offset} + `; + return { + total: results[0]?.total ?? 0, + results: results ?? [], + }; + } + + async getHistory(args: { ticker: string } & DbInscriptionIndexPaging): Promise { + const results = await this.sql` + WITH events AS ( + SELECT ${this.sql(BRC20_EVENTS_COLUMNS)} + FROM brc20_events AS e + INNER JOIN brc20_deploys AS d ON d.id = e.brc20_deploy_id + INNER JOIN inscriptions AS i ON i.id = e.inscription_id + WHERE LOWER(d.ticker) = LOWER(${args.ticker}) + ORDER BY i.number DESC + LIMIT ${args.limit} + OFFSET ${args.offset} + ) + SELECT * + FROM events + INNER JOIN + `; + } + + async getTokenSupply(args: { ticker: string }): Promise { + return await this.parent.sqlTransaction(async sql => { + const deploy = await this.getDeploy(args); + if (!deploy) { + return; + } + const minted = await sql<{ total: string }[]>` + SELECT SUM(avail_balance + trans_balance) AS total + FROM brc20_balances + WHERE brc20_deploy_id = ${deploy.id} + GROUP BY brc20_deploy_id + `; + const holders = await sql<{ count: string }[]>` + WITH historical_holders AS ( + SELECT SUM(avail_balance + trans_balance) AS balance + FROM brc20_balances + WHERE brc20_deploy_id = ${deploy.id} + GROUP BY address + ) + SELECT COUNT(*) AS count + FROM historical_holders + WHERE balance > 0 + `; + const supply = await sql<{ max: string }[]>` + SELECT max FROM brc20_deploys WHERE id = ${deploy.id} + `; + return { + max_supply: supply[0].max, + minted_supply: minted[0].total, + holders: holders[0].count, + }; + }); + } + + async getTokenHolders( + args: { + ticker: string; + } & DbInscriptionIndexPaging + ): Promise | undefined> { + return await this.parent.sqlTransaction(async sql => { + const deploy = await this.getDeploy(args); + if (!deploy) { + return; + } + const results = await this.sql<(DbBrc20Holder & { total: number })[]>` + SELECT + address, SUM(avail_balance + trans_balance) AS total_balance, COUNT(*) OVER() AS total + FROM brc20_balances + WHERE brc20_deploy_id = ${deploy.id} + GROUP BY address + ORDER BY total_balance DESC + LIMIT ${args.limit} + OFFSET ${args.offset} + `; + return { + total: results[0]?.total ?? 0, + results: results ?? [], + }; + }); + } + + async insertOperationGenesis(args: { + inscription_id: number; + inscription: DbInscriptionInsert; + location: DbLocationInsert; + }): Promise { + // Is this a BRC-20 operation? Is it being inscribed to a valid address? 
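    // For reference, a minimal deploy payload (hypothetical values) that
    // brc20FromInscription() accepts and that the switch below routes to insertDeploy():
    //   { "p": "brc-20", "op": "deploy", "tick": "PEPE", "max": "21000000", "lim": "250", "dec": "18" }
    // Mint and transfer payloads carry `amt` instead of `max`/`lim`/`dec`.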
+ const brc20 = brc20FromInscription(args.inscription); + if (brc20) { + if (args.location.address) { + switch (brc20.op) { + case 'deploy': + await this.insertDeploy({ + deploy: brc20, + inscription_id: args.inscription_id, + location: args.location, + }); + break; + case 'mint': + await this.insertMint({ + mint: brc20, + inscription_id: args.inscription_id, + location: args.location, + }); + break; + case 'transfer': + await this.insertTransfer({ + transfer: brc20, + inscription_id: args.inscription_id, + location: args.location, + }); + break; + } + } else { + logger.debug( + { block_height: args.location.block_height, tick: brc20.tick }, + `PgStore [BRC-20] ignoring operation spent as fee` + ); + } + } + } + + async insertOperationTransfer(args: { + inscription_id: number; + location: DbLocationInsert; + }): Promise { + // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by + // this address that hasn't been sent to another address before. Use `LIMIT 3` as a quick way + // of checking if we have just inserted the first transfer for this inscription (genesis + + // transfer). + await this.parent.sqlWriteTransaction(async sql => { + const brc20Transfer = await sql` + SELECT ${sql(BRC20_TRANSFERS_COLUMNS.map(c => `t.${c}`))} + FROM locations AS l + INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id + WHERE l.inscription_id = ${args.inscription_id} + LIMIT 3 + `; + if (brc20Transfer.count === 2) { + // This is the first time this BRC-20 transfer is being used. Apply the balance change. + await this.applyBalanceTransfer({ + transfer: brc20Transfer[0], + location: args.location, + }); + } else { + logger.debug( + { genesis_id: args.location.genesis_id, block_height: args.location.block_height }, + `PgStore [BRC-20] ignoring balance change for transfer that was already used` + ); + } + }); + } + + private async insertDeploy(args: { + deploy: Brc20Deploy; + inscription_id: number; + location: DbLocationInsert; + }): Promise { + await this.parent.sqlWriteTransaction(async sql => { + const address = args.location.address; + if (!address) { + logger.debug( + `PgStore [BRC-20] ignoring deploy with null address for ${args.deploy.tick} at block ${args.location.block_height}` + ); + return; + } + const deploy: DbBrc20DeployInsert = { + inscription_id: args.inscription_id, + block_height: args.location.block_height, + tx_id: args.location.tx_id, + address: address, + ticker: args.deploy.tick, + max: args.deploy.max, + limit: args.deploy.lim ?? null, + decimals: args.deploy.dec ?? 
'18', + }; + const insertion = await sql<{ id: string }[]>` + INSERT INTO brc20_deploys ${sql(deploy)} + ON CONFLICT (LOWER(ticker)) DO NOTHING + RETURNING id + `; + if (insertion.count > 0) { + // Add to history + const event: DbBrc20EventInsert = { + inscription_id: args.inscription_id, + brc20_deploy_id: insertion[0].id, + deploy_id: insertion[0].id, + mint_id: null, + transfer_id: null, + }; + await sql` + INSERT INTO brc20_events ${sql(event)} + `; + logger.info( + `PgStore [BRC-20] inserted deploy for ${args.deploy.tick} at block ${args.location.block_height}` + ); + } else { + logger.debug( + `PgStore [BRC-20] ignoring duplicate deploy for ${args.deploy.tick} at block ${args.location.block_height}` + ); + } + }); + } + + private async getDeploy(args: { ticker: string }): Promise { + const deploy = await this.sql` + SELECT ${this.sql(BRC20_DEPLOYS_COLUMNS)} + FROM brc20_deploys + WHERE LOWER(ticker) = LOWER(${args.ticker}) + `; + if (deploy.count) return deploy[0]; + } + + private async insertMint(args: { + mint: Brc20Mint; + inscription_id: number; + location: DbLocationInsert; + }): Promise { + await this.parent.sqlWriteTransaction(async sql => { + // Is the token deployed? + const token = await this.getDeploy({ ticker: args.mint.tick }); + if (!token) { + logger.debug( + `PgStore [BRC-20] ignoring mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` + ); + return; + } + + // Is the mint amount within the allowed token limits? + if (token.limit && BigNumber(args.mint.amt).isGreaterThan(token.limit)) { + logger.debug( + `PgStore [BRC-20] ignoring mint for ${args.mint.tick} that exceeds mint limit of ${token.limit} at block ${args.location.block_height}` + ); + return; + } + // Is the number of decimals correct? + if ( + args.mint.amt.includes('.') && + args.mint.amt.split('.')[1].length > parseInt(token.decimals) + ) { + logger.debug( + `PgStore [BRC-20] ignoring mint for ${args.mint.tick} because amount ${args.mint.amt} exceeds token decimals at block ${args.location.block_height}` + ); + return; + } + // Does the mint amount exceed remaining supply? 
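      // Worked example (hypothetical numbers): with max = 21000000 and 20999900 already
      // minted, availSupply = 100; a mint of amt = 250 is clamped to mintAmt = 100, so the
      // brc20_mints row keeps the requested 250 while the balance credit below is only 100.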
+ const mintedSupply = await sql<{ minted: string }[]>` + SELECT COALESCE(SUM(amount), 0) AS minted FROM brc20_mints WHERE brc20_deploy_id = ${token.id} + `; + const minted = new BigNumber(mintedSupply[0].minted); + const availSupply = new BigNumber(token.max).minus(minted); + if (availSupply.isLessThanOrEqualTo(0)) { + logger.debug( + `PgStore [BRC-20] ignoring mint for ${args.mint.tick} because token has been completely minted at block ${args.location.block_height}` + ); + return; + } + const mintAmt = BigNumber.min(availSupply, args.mint.amt); + + const mint = { + inscription_id: args.inscription_id, + brc20_deploy_id: token.id, + block_height: args.location.block_height, + tx_id: args.location.tx_id, + address: args.location.address, + amount: args.mint.amt, // Original requested amount + }; + await sql`INSERT INTO brc20_mints ${sql(mint)}`; + logger.info( + `PgStore [BRC-20] inserted mint for ${args.mint.tick} (${args.mint.amt}) at block ${args.location.block_height}` + ); + + // Insert balance change for minting address + const balance = { + inscription_id: args.inscription_id, + brc20_deploy_id: token.id, + block_height: args.location.block_height, + address: args.location.address, + avail_balance: mintAmt, // Real minted balance + trans_balance: 0, + }; + await sql` + INSERT INTO brc20_balances ${sql(balance)} + `; + }); + } + + private async insertTransfer(args: { + transfer: Brc20Transfer; + inscription_id: number; + location: DbLocationInsert; + }): Promise { + await this.parent.sqlWriteTransaction(async sql => { + // Is the destination a valid address? + if (!args.location.address) { + logger.debug( + `PgStore [BRC-20] ignoring transfer spent as fee for ${args.transfer.tick} at block ${args.location.block_height}` + ); + return; + } + // Is the token deployed? + const token = await this.getDeploy({ ticker: args.transfer.tick }); + if (!token) { + logger.debug( + `PgStore [BRC-20] ignoring transfer for non-deployed token ${args.transfer.tick} at block ${args.location.block_height}` + ); + return; + } + // Get balance for this address and this token + const balanceResult = await this.getBalances({ + address: args.location.address, + ticker: [args.transfer.tick], + limit: 1, + offset: 0, + }); + // Do we have enough available balance to do this transfer? + const transAmt = new BigNumber(args.transfer.amt); + const available = new BigNumber(balanceResult.results[0]?.avail_balance ?? 
0); + if (transAmt.gt(available)) { + logger.debug( + `PgStore [BRC-20] ignoring transfer for token ${args.transfer.tick} due to unavailable balance at block ${args.location.block_height}` + ); + return; + } + + const transfer = { + inscription_id: args.inscription_id, + brc20_deploy_id: token.id, + block_height: args.location.block_height, + tx_id: args.location.tx_id, + from_address: args.location.address, + to_address: null, // We don't know the receiver address yet + amount: args.transfer.amt, + }; + await sql`INSERT INTO brc20_transfers ${sql(transfer)}`; + logger.info( + `PgStore [BRC-20] inserted transfer for ${args.transfer.tick} (${args.transfer.amt}) at block ${args.location.block_height}` + ); + + // Insert balance change for minting address + const values = { + inscription_id: args.inscription_id, + brc20_deploy_id: token.id, + block_height: args.location.block_height, + address: args.location.address, + avail_balance: transAmt.negated(), + trans_balance: transAmt, + }; + await sql` + INSERT INTO brc20_balances ${sql(values)} + `; + }); + } + + private async applyBalanceTransfer(args: { + transfer: DbBrc20Transfer; + location: DbLocationInsert; + }): Promise { + await this.parent.sqlWriteTransaction(async sql => { + // Reflect balance transfer + const amount = new BigNumber(args.transfer.amount); + const changes = [ + { + inscription_id: args.transfer.inscription_id, + brc20_deploy_id: args.transfer.brc20_deploy_id, + block_height: args.location.block_height, + address: args.transfer.from_address, + avail_balance: 0, + trans_balance: amount.negated(), + }, + { + inscription_id: args.transfer.inscription_id, + brc20_deploy_id: args.transfer.brc20_deploy_id, + block_height: args.location.block_height, + address: args.location.address, + avail_balance: amount, + trans_balance: 0, + }, + ]; + await sql` + INSERT INTO brc20_balances ${sql(changes)} + `; + // Keep the new valid owner of the transfer inscription + await sql` + UPDATE brc20_transfers + SET to_address = ${args.location.address} + WHERE id = ${args.transfer.id} + `; + }); + } +} diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 178b36e8..b3c281e9 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -3,25 +3,8 @@ import { Order, OrderBy } from '../api/schemas'; import { isProdEnv, isTestEnv, normalizedHexString, parseSatPoint } from '../api/util/helpers'; import { OrdinalSatoshi, SatoshiRarity } from '../api/util/ordinal-satoshi'; import { ENV } from '../env'; +import { getIndexResultCountType } from './helpers'; import { - Brc20Deploy, - Brc20Mint, - Brc20Transfer, - brc20FromInscription, - getIndexResultCountType, -} from './helpers'; -import { - BRC20_DEPLOYS_COLUMNS, - BRC20_EVENTS_COLUMNS, - BRC20_TRANSFERS_COLUMNS, - DbBrc20Balance, - DbBrc20Deploy, - DbBrc20DeployInsert, - DbBrc20EventInsert, - DbBrc20Holder, - DbBrc20Supply, - DbBrc20Token, - DbBrc20Transfer, DbFullyLocatedInscriptionResult, DbInscriptionContent, DbInscriptionCountPerBlock, @@ -38,15 +21,23 @@ import { DbPaginatedResult, LOCATIONS_COLUMNS, } from './types'; -import { BasePgStore, connectPostgres, logger, runMigrations } from '@hirosystems/api-toolkit'; +import { + BasePgStore, + PgSqlClient, + connectPostgres, + logger, + runMigrations, +} from '@hirosystems/api-toolkit'; import * as path from 'path'; -import BigNumber from 'bignumber.js'; +import { Brc20PgStore } from './brc20-pg-store'; export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); type InscriptionIdentifier = { genesis_id: string } | { number: number }; 
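// A minimal consumer-side sketch (illustrative only) of the layout after this refactor:
// BRC-20 reads now hang off the new `brc20` sub-store instead of PgStore itself.
//
//   const db = await PgStore.connect({ skipMigrations: false });
//   const tokens = await db.brc20.getTokens({ limit: 20, offset: 0 });
//   const supply = await db.brc20.getTokenSupply({ ticker: 'PEPE' });
//   const holders = await db.brc20.getTokenHolders({ ticker: 'PEPE', limit: 20, offset: 0 });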
export class PgStore extends BasePgStore { + readonly brc20: Brc20PgStore; + static async connect(opts?: { skipMigrations: boolean }): Promise { const pgConfig = { host: ENV.PGHOST, @@ -70,6 +61,11 @@ export class PgStore extends BasePgStore { return new PgStore(sql); } + constructor(sql: PgSqlClient) { + super(sql); + this.brc20 = new Brc20PgStore(this); + } + /** * Inserts inscription genesis and transfers from Chainhook events. Also handles rollbacks from * chain re-orgs and materialized view refreshes. @@ -589,140 +585,6 @@ export class PgStore extends BasePgStore { } ${this.sql(viewName)}`; } - async getBrc20Tokens( - args: { ticker?: string[] } & DbInscriptionIndexPaging - ): Promise> { - const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; - const results = await this.sql<(DbBrc20Token & { total: number })[]>` - SELECT - d.id, i.genesis_id, i.number, d.block_height, d.tx_id, d.address, d.ticker, d.max, d.limit, - d.decimals, COUNT(*) OVER() as total - FROM brc20_deploys AS d - INNER JOIN inscriptions AS i ON i.id = d.inscription_id - ${lowerTickers ? this.sql`WHERE LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} - OFFSET ${args.offset} - LIMIT ${args.limit} - `; - return { - total: results[0]?.total ?? 0, - results: results ?? [], - }; - } - - /** - * Returns an address balance for a BRC-20 token. - * @param address - Owner address - * @param ticker - BRC-20 tickers - * @returns `DbBrc20Balance` - */ - async getBrc20Balances( - args: { - address: string; - ticker?: string[]; - } & DbInscriptionIndexPaging - ): Promise> { - const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; - const results = await this.sql<(DbBrc20Balance & { total: number })[]>` - SELECT - d.ticker, - SUM(b.avail_balance) AS avail_balance, - SUM(b.trans_balance) AS trans_balance, - SUM(b.avail_balance + b.trans_balance) AS total_balance, - COUNT(*) OVER() as total - FROM brc20_balances AS b - INNER JOIN brc20_deploys AS d ON d.id = b.brc20_deploy_id - WHERE - b.address = ${args.address} - ${lowerTickers ? this.sql`AND LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} - GROUP BY d.ticker - LIMIT ${args.limit} - OFFSET ${args.offset} - `; - return { - total: results[0]?.total ?? 0, - results: results ?? 
[], - }; - } - - async getBrc20History(args: { ticker: string } & DbInscriptionIndexPaging): Promise { - const results = await this.sql` - WITH events AS ( - SELECT ${this.sql(BRC20_EVENTS_COLUMNS)} - FROM brc20_events AS e - INNER JOIN brc20_deploys AS d ON d.id = e.brc20_deploy_id - INNER JOIN inscriptions AS i ON i.id = e.inscription_id - WHERE LOWER(d.ticker) = LOWER(${args.ticker}) - ORDER BY i.number DESC - LIMIT ${args.limit} - OFFSET ${args.offset} - ) - SELECT * - FROM events - INNER JOIN - `; - } - - async getBrc20TokenSupply(args: { ticker: string }): Promise { - return await this.sqlTransaction(async sql => { - const deploy = await this.getBrc20Deploy(args); - if (!deploy) { - return; - } - const minted = await sql<{ total: string }[]>` - SELECT SUM(avail_balance + trans_balance) AS total - FROM brc20_balances - WHERE brc20_deploy_id = ${deploy.id} - GROUP BY brc20_deploy_id - `; - const holders = await sql<{ count: string }[]>` - WITH historical_holders AS ( - SELECT SUM(avail_balance + trans_balance) AS balance - FROM brc20_balances - WHERE brc20_deploy_id = ${deploy.id} - GROUP BY address - ) - SELECT COUNT(*) AS count - FROM historical_holders - WHERE balance > 0 - `; - const supply = await sql<{ max: string }[]>` - SELECT max FROM brc20_deploys WHERE id = ${deploy.id} - `; - return { - max_supply: supply[0].max, - minted_supply: minted[0].total, - holders: holders[0].count, - }; - }); - } - - async getBrc20TokenHolders( - args: { - ticker: string; - } & DbInscriptionIndexPaging - ): Promise | undefined> { - return await this.sqlTransaction(async sql => { - const deploy = await this.getBrc20Deploy(args); - if (!deploy) { - return; - } - const results = await this.sql<(DbBrc20Holder & { total: number })[]>` - SELECT - address, SUM(avail_balance + trans_balance) AS total_balance, COUNT(*) OVER() AS total - FROM brc20_balances - WHERE brc20_deploy_id = ${deploy.id} - GROUP BY address - ORDER BY total_balance DESC - LIMIT ${args.limit} - OFFSET ${args.offset} - `; - return { - total: results[0]?.total ?? 0, - results: results ?? [], - }; - }); - } - private async insertInscription(args: { inscription: DbInscriptionInsert; location: DbLocationInsert; @@ -786,7 +648,7 @@ export class PgStore extends BasePgStore { address: args.location.address, }); // Insert BRC-20 op genesis (if any). - await this.insertBrc20OperationGenesis({ + await this.brc20.insertOperationGenesis({ inscription_id, inscription: args.inscription, location: args.location, @@ -870,7 +732,7 @@ export class PgStore extends BasePgStore { }); // Insert BRC-20 balance transfers (if any). - await this.insertBrc20OperationTransfer({ inscription_id, location: args.location }); + await this.brc20.insertOperationTransfer({ inscription_id, location: args.location }); } logger.info( `PgStore${upsert.count > 0 ? ' upsert ' : ' '}transfer (${ @@ -994,314 +856,4 @@ export class PgStore extends BasePgStore { `; }); } - - private async insertBrc20OperationGenesis(args: { - inscription_id: number; - inscription: DbInscriptionInsert; - location: DbLocationInsert; - }): Promise { - // Is this a BRC-20 operation? Is it being inscribed to a valid address? 
- const brc20 = brc20FromInscription(args.inscription); - if (brc20) { - if (args.location.address) { - switch (brc20.op) { - case 'deploy': - await this.insertBrc20Deploy({ - deploy: brc20, - inscription_id: args.inscription_id, - location: args.location, - }); - break; - case 'mint': - await this.insertBrc20Mint({ - mint: brc20, - inscription_id: args.inscription_id, - location: args.location, - }); - break; - case 'transfer': - await this.insertBrc20Transfer({ - transfer: brc20, - inscription_id: args.inscription_id, - location: args.location, - }); - break; - } - } else { - logger.debug( - { block_height: args.location.block_height, tick: brc20.tick }, - `PgStore [BRC-20] ignoring operation spent as fee` - ); - } - } - } - - private async insertBrc20OperationTransfer(args: { - inscription_id: number; - location: DbLocationInsert; - }): Promise { - // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by - // this address that hasn't been sent to another address before. Use `LIMIT 3` as a quick way - // of checking if we have just inserted the first transfer for this inscription (genesis + - // transfer). - await this.sqlWriteTransaction(async sql => { - const brc20Transfer = await sql` - SELECT ${sql(BRC20_TRANSFERS_COLUMNS.map(c => `t.${c}`))} - FROM locations AS l - INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id - WHERE l.inscription_id = ${args.inscription_id} - LIMIT 3 - `; - if (brc20Transfer.count === 2) { - // This is the first time this BRC-20 transfer is being used. Apply the balance change. - await this.applyBrc20BalanceTransfer({ - transfer: brc20Transfer[0], - location: args.location, - }); - } else { - logger.debug( - { genesis_id: args.location.genesis_id, block_height: args.location.block_height }, - `PgStore [BRC-20] ignoring balance change for transfer that was already used` - ); - } - }); - } - - private async insertBrc20Deploy(args: { - deploy: Brc20Deploy; - inscription_id: number; - location: DbLocationInsert; - }): Promise { - await this.sqlWriteTransaction(async sql => { - const address = args.location.address; - if (!address) { - logger.debug( - `PgStore [BRC-20] ignoring deploy with null address for ${args.deploy.tick} at block ${args.location.block_height}` - ); - return; - } - const deploy: DbBrc20DeployInsert = { - inscription_id: args.inscription_id, - block_height: args.location.block_height, - tx_id: args.location.tx_id, - address: address, - ticker: args.deploy.tick, - max: args.deploy.max, - limit: args.deploy.lim ?? null, - decimals: args.deploy.dec ?? 
'18', - }; - const insertion = await sql<{ id: string }[]>` - INSERT INTO brc20_deploys ${sql(deploy)} - ON CONFLICT (LOWER(ticker)) DO NOTHING - RETURNING id - `; - if (insertion.count > 0) { - // Add to history - const event: DbBrc20EventInsert = { - inscription_id: args.inscription_id, - brc20_deploy_id: insertion[0].id, - deploy_id: insertion[0].id, - mint_id: null, - transfer_id: null, - }; - await sql` - INSERT INTO brc20_events ${sql(event)} - `; - logger.info( - `PgStore [BRC-20] inserted deploy for ${args.deploy.tick} at block ${args.location.block_height}` - ); - } else { - logger.debug( - `PgStore [BRC-20] ignoring duplicate deploy for ${args.deploy.tick} at block ${args.location.block_height}` - ); - } - }); - } - - private async getBrc20Deploy(args: { ticker: string }): Promise { - const deploy = await this.sql` - SELECT ${this.sql(BRC20_DEPLOYS_COLUMNS)} - FROM brc20_deploys - WHERE LOWER(ticker) = LOWER(${args.ticker}) - `; - if (deploy.count) return deploy[0]; - } - - private async insertBrc20Mint(args: { - mint: Brc20Mint; - inscription_id: number; - location: DbLocationInsert; - }): Promise { - await this.sqlWriteTransaction(async sql => { - // Is the token deployed? - const token = await this.getBrc20Deploy({ ticker: args.mint.tick }); - if (!token) { - logger.debug( - `PgStore [BRC-20] ignoring mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` - ); - return; - } - - // Is the mint amount within the allowed token limits? - if (token.limit && BigNumber(args.mint.amt).isGreaterThan(token.limit)) { - logger.debug( - `PgStore [BRC-20] ignoring mint for ${args.mint.tick} that exceeds mint limit of ${token.limit} at block ${args.location.block_height}` - ); - return; - } - // Is the number of decimals correct? - if ( - args.mint.amt.includes('.') && - args.mint.amt.split('.')[1].length > parseInt(token.decimals) - ) { - logger.debug( - `PgStore [BRC-20] ignoring mint for ${args.mint.tick} because amount ${args.mint.amt} exceeds token decimals at block ${args.location.block_height}` - ); - return; - } - // Does the mint amount exceed remaining supply? 
- const mintedSupply = await sql<{ minted: string }[]>` - SELECT COALESCE(SUM(amount), 0) AS minted FROM brc20_mints WHERE brc20_deploy_id = ${token.id} - `; - const minted = new BigNumber(mintedSupply[0].minted); - const availSupply = new BigNumber(token.max).minus(minted); - if (availSupply.isLessThanOrEqualTo(0)) { - logger.debug( - `PgStore [BRC-20] ignoring mint for ${args.mint.tick} because token has been completely minted at block ${args.location.block_height}` - ); - return; - } - const mintAmt = BigNumber.min(availSupply, args.mint.amt); - - const mint = { - inscription_id: args.inscription_id, - brc20_deploy_id: token.id, - block_height: args.location.block_height, - tx_id: args.location.tx_id, - address: args.location.address, - amount: args.mint.amt, // Original requested amount - }; - await sql`INSERT INTO brc20_mints ${sql(mint)}`; - logger.info( - `PgStore [BRC-20] inserted mint for ${args.mint.tick} (${args.mint.amt}) at block ${args.location.block_height}` - ); - - // Insert balance change for minting address - const balance = { - inscription_id: args.inscription_id, - brc20_deploy_id: token.id, - block_height: args.location.block_height, - address: args.location.address, - avail_balance: mintAmt, // Real minted balance - trans_balance: 0, - }; - await sql` - INSERT INTO brc20_balances ${sql(balance)} - `; - }); - } - - private async insertBrc20Transfer(args: { - transfer: Brc20Transfer; - inscription_id: number; - location: DbLocationInsert; - }): Promise { - await this.sqlWriteTransaction(async sql => { - // Is the destination a valid address? - if (!args.location.address) { - logger.debug( - `PgStore [BRC-20] ignoring transfer spent as fee for ${args.transfer.tick} at block ${args.location.block_height}` - ); - return; - } - // Is the token deployed? - const token = await this.getBrc20Deploy({ ticker: args.transfer.tick }); - if (!token) { - logger.debug( - `PgStore [BRC-20] ignoring transfer for non-deployed token ${args.transfer.tick} at block ${args.location.block_height}` - ); - return; - } - // Get balance for this address and this token - const balanceResult = await this.getBrc20Balances({ - address: args.location.address, - ticker: [args.transfer.tick], - limit: 1, - offset: 0, - }); - // Do we have enough available balance to do this transfer? - const transAmt = new BigNumber(args.transfer.amt); - const available = new BigNumber(balanceResult.results[0]?.avail_balance ?? 
0); - if (transAmt.gt(available)) { - logger.debug( - `PgStore [BRC-20] ignoring transfer for token ${args.transfer.tick} due to unavailable balance at block ${args.location.block_height}` - ); - return; - } - - const transfer = { - inscription_id: args.inscription_id, - brc20_deploy_id: token.id, - block_height: args.location.block_height, - tx_id: args.location.tx_id, - from_address: args.location.address, - to_address: null, // We don't know the receiver address yet - amount: args.transfer.amt, - }; - await sql`INSERT INTO brc20_transfers ${sql(transfer)}`; - logger.info( - `PgStore [BRC-20] inserted transfer for ${args.transfer.tick} (${args.transfer.amt}) at block ${args.location.block_height}` - ); - - // Insert balance change for minting address - const values = { - inscription_id: args.inscription_id, - brc20_deploy_id: token.id, - block_height: args.location.block_height, - address: args.location.address, - avail_balance: transAmt.negated(), - trans_balance: transAmt, - }; - await sql` - INSERT INTO brc20_balances ${sql(values)} - `; - }); - } - - private async applyBrc20BalanceTransfer(args: { - transfer: DbBrc20Transfer; - location: DbLocationInsert; - }): Promise { - await this.sqlWriteTransaction(async sql => { - // Reflect balance transfer - const amount = new BigNumber(args.transfer.amount); - const changes = [ - { - inscription_id: args.transfer.inscription_id, - brc20_deploy_id: args.transfer.brc20_deploy_id, - block_height: args.location.block_height, - address: args.transfer.from_address, - avail_balance: 0, - trans_balance: amount.negated(), - }, - { - inscription_id: args.transfer.inscription_id, - brc20_deploy_id: args.transfer.brc20_deploy_id, - block_height: args.location.block_height, - address: args.location.address, - avail_balance: amount, - trans_balance: 0, - }, - ]; - await sql` - INSERT INTO brc20_balances ${sql(changes)} - `; - // Keep the new valid owner of the transfer inscription - await sql` - UPDATE brc20_transfers - SET to_address = ${args.location.address} - WHERE id = ${args.transfer.id} - `; - }); - } } From 354ddd0559a32a2aba1d407a2c7486348eb91d1c Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 20 Jul 2023 13:56:17 -0600 Subject: [PATCH 27/56] fix: add indexes for fks --- migrations/1684174644336_brc20-deploys.ts | 1 + migrations/1684175792528_brc20-mints.ts | 2 ++ migrations/1684175795592_brc20-transfers.ts | 2 ++ migrations/1684175810998_brc20-balances.ts | 2 ++ 4 files changed, 7 insertions(+) diff --git a/migrations/1684174644336_brc20-deploys.ts b/migrations/1684174644336_brc20-deploys.ts index 6532ad51..3604fa03 100644 --- a/migrations/1684174644336_brc20-deploys.ts +++ b/migrations/1684174644336_brc20-deploys.ts @@ -46,6 +46,7 @@ export function up(pgm: MigrationBuilder): void { 'brc20_deploys_inscription_id_fk', 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' ); + pgm.createIndex('brc20_deploys', ['inscription_id']); pgm.createIndex('brc20_deploys', 'LOWER(ticker)', { unique: true }); pgm.createIndex('brc20_deploys', ['block_height']); pgm.createIndex('brc20_deploys', ['address']); diff --git a/migrations/1684175792528_brc20-mints.ts b/migrations/1684175792528_brc20-mints.ts index 794cadc3..9435bf73 100644 --- a/migrations/1684175792528_brc20-mints.ts +++ b/migrations/1684175792528_brc20-mints.ts @@ -44,6 +44,8 @@ export function up(pgm: MigrationBuilder): void { 'brc20_mints_brc20_deploy_id_fk', 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' ); + 
pgm.createIndex('brc20_mints', ['inscription_id']); + pgm.createIndex('brc20_mints', ['brc20_deploy_id']); pgm.createIndex('brc20_mints', ['block_height']); pgm.createIndex('brc20_mints', ['address']); } diff --git a/migrations/1684175795592_brc20-transfers.ts b/migrations/1684175795592_brc20-transfers.ts index 3ad5e0ae..30f08071 100644 --- a/migrations/1684175795592_brc20-transfers.ts +++ b/migrations/1684175795592_brc20-transfers.ts @@ -47,6 +47,8 @@ export function up(pgm: MigrationBuilder): void { 'brc20_transfers_brc20_deploy_id_fk', 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' ); + pgm.createIndex('brc20_transfers', ['inscription_id']); + pgm.createIndex('brc20_transfers', ['brc20_deploy_id']); pgm.createIndex('brc20_transfers', ['block_height']); pgm.createIndex('brc20_transfers', ['from_address']); pgm.createIndex('brc20_transfers', ['to_address']); diff --git a/migrations/1684175810998_brc20-balances.ts b/migrations/1684175810998_brc20-balances.ts index 2dafc556..de8bc0a3 100644 --- a/migrations/1684175810998_brc20-balances.ts +++ b/migrations/1684175810998_brc20-balances.ts @@ -44,5 +44,7 @@ export function up(pgm: MigrationBuilder): void { 'brc20_balances_brc20_deploy_id_fk', 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' ); + pgm.createIndex('brc20_balances', ['inscription_id']); + pgm.createIndex('brc20_balances', ['brc20_deploy_id']); pgm.createIndex('brc20_balances', ['address']); } From 8cc7f8adcb9d70cd511b09583dd45f9dc770cd92 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 20 Jul 2023 13:56:34 -0600 Subject: [PATCH 28/56] fix: remove old json schemas --- src/pg/helpers.ts | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index 2d7be698..5a99d8db 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -8,16 +8,6 @@ import { } from './types'; import BigNumber from 'bignumber.js'; -const OpJsonSchema = Type.Object( - { - p: Type.String(), - op: Type.String(), - }, - { additionalProperties: true } -); -const OpJsonC = TypeCompiler.Compile(OpJsonSchema); -export type OpJson = Static; - const Brc20TickerSchema = Type.String({ minLength: 1 }); const Brc20NumberSchema = Type.RegEx(/^((\d+)|(\d*\.?\d+))$/); From 3db302c2d8c0b95615655e6ee703f2e12590edc1 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 20 Jul 2023 14:56:23 -0600 Subject: [PATCH 29/56] refactor: move brc-20 files to its own folder --- migrations/1684175810998_brc20-balances.ts | 10 +- src/api/util/helpers.ts | 5 +- src/pg/{ => brc20}/brc20-pg-store.ts | 102 +++++++------ src/pg/brc20/helpers.ts | 97 +++++++++++++ src/pg/brc20/types.ts | 104 ++++++++++++++ src/pg/helpers.ts | 102 +------------ src/pg/pg-store.ts | 22 +-- src/pg/types.ts | 105 -------------- tests/brc20.test.ts | 157 ++++++++++++++++++++- tests/helpers.ts | 2 +- 10 files changed, 429 insertions(+), 277 deletions(-) rename src/pg/{ => brc20}/brc20-pg-store.ts (90%) create mode 100644 src/pg/brc20/helpers.ts create mode 100644 src/pg/brc20/types.ts diff --git a/migrations/1684175810998_brc20-balances.ts b/migrations/1684175810998_brc20-balances.ts index de8bc0a3..6e8d1b94 100644 --- a/migrations/1684175810998_brc20-balances.ts +++ b/migrations/1684175810998_brc20-balances.ts @@ -13,11 +13,11 @@ export function up(pgm: MigrationBuilder): void { type: 'bigint', notNull: true, }, - brc20_deploy_id: { + location_id: { type: 'bigint', notNull: true, }, - block_height: { + brc20_deploy_id: { type: 'bigint', notNull: true, }, @@ 
-39,12 +39,18 @@ export function up(pgm: MigrationBuilder): void { 'brc20_balances_inscription_id_fk', 'FOREIGN KEY(inscription_id) REFERENCES inscriptions(id) ON DELETE CASCADE' ); + pgm.createConstraint( + 'brc20_balances', + 'brc20_balances_location_id_fk', + 'FOREIGN KEY(location_id) REFERENCES locations(id) ON DELETE CASCADE' + ); pgm.createConstraint( 'brc20_balances', 'brc20_balances_brc20_deploy_id_fk', 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' ); pgm.createIndex('brc20_balances', ['inscription_id']); + pgm.createIndex('brc20_balances', ['location_id']); pgm.createIndex('brc20_balances', ['brc20_deploy_id']); pgm.createIndex('brc20_balances', ['address']); } diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index 59ec9e68..32f07682 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -1,8 +1,5 @@ +import { DbBrc20Token, DbBrc20Supply, DbBrc20Balance, DbBrc20Holder } from '../../pg/brc20/types'; import { - DbBrc20Balance, - DbBrc20Holder, - DbBrc20Supply, - DbBrc20Token, DbFullyLocatedInscriptionResult, DbInscriptionLocationChange, DbLocation, diff --git a/src/pg/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts similarity index 90% rename from src/pg/brc20-pg-store.ts rename to src/pg/brc20/brc20-pg-store.ts index bf59b8b9..b39c98a9 100644 --- a/src/pg/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -1,23 +1,25 @@ import { PgSqlClient, logger } from '@hirosystems/api-toolkit'; -import { PgStore } from './pg-store'; +import { PgStore } from '../pg-store'; import { DbInscriptionIndexPaging, DbPaginatedResult, + DbInscriptionInsert, + DbLocationInsert, +} from '../types'; +import BigNumber from 'bignumber.js'; +import { DbBrc20Token, - BRC20_EVENTS_COLUMNS, DbBrc20Balance, - DbBrc20Holder, + BRC20_EVENTS_COLUMNS, DbBrc20Supply, - BRC20_DEPLOYS_COLUMNS, + DbBrc20Holder, + DbBrc20Transfer, BRC20_TRANSFERS_COLUMNS, - DbBrc20Deploy, DbBrc20DeployInsert, DbBrc20EventInsert, - DbBrc20Transfer, - DbInscriptionInsert, - DbLocationInsert, + DbBrc20Deploy, + BRC20_DEPLOYS_COLUMNS, } from './types'; -import BigNumber from 'bignumber.js'; import { brc20FromInscription, Brc20Deploy, Brc20Mint, Brc20Transfer } from './helpers'; export class Brc20PgStore { @@ -165,8 +167,9 @@ export class Brc20PgStore { }); } - async insertOperationGenesis(args: { + async insertOperation(args: { inscription_id: number; + location_id: number; inscription: DbInscriptionInsert; location: DbLocationInsert; }): Promise { @@ -186,6 +189,7 @@ export class Brc20PgStore { await this.insertMint({ mint: brc20, inscription_id: args.inscription_id, + location_id: args.location_id, location: args.location, }); break; @@ -193,6 +197,7 @@ export class Brc20PgStore { await this.insertTransfer({ transfer: brc20, inscription_id: args.inscription_id, + location_id: args.location_id, location: args.location, }); break; @@ -208,6 +213,7 @@ export class Brc20PgStore { async insertOperationTransfer(args: { inscription_id: number; + location_id: number; location: DbLocationInsert; }): Promise { // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by @@ -223,11 +229,36 @@ export class Brc20PgStore { LIMIT 3 `; if (brc20Transfer.count === 2) { + const transfer = brc20Transfer[0]; // This is the first time this BRC-20 transfer is being used. Apply the balance change. 
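        // e.g. (values taken from the tests later in this patch): a 9000 PEPE transfer writes
        // two balance rows,
        //   { address: from, avail_balance: 0,     trans_balance: -9000 }
        //   { address: to,   avail_balance: +9000, trans_balance: 0 }
        // releasing the sender's transferrable balance and crediting the receiver.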
- await this.applyBalanceTransfer({ - transfer: brc20Transfer[0], - location: args.location, - }); + const amount = new BigNumber(transfer.amount); + const changes = [ + { + inscription_id: transfer.inscription_id, + location_id: args.location_id, + brc20_deploy_id: transfer.brc20_deploy_id, + address: transfer.from_address, + avail_balance: 0, + trans_balance: amount.negated(), + }, + { + inscription_id: transfer.inscription_id, + location_id: args.location_id, + brc20_deploy_id: transfer.brc20_deploy_id, + address: args.location.address, + avail_balance: amount, + trans_balance: 0, + }, + ]; + await sql` + INSERT INTO brc20_balances ${sql(changes)} + `; + // Keep the new valid owner of the transfer inscription + await sql` + UPDATE brc20_transfers + SET to_address = ${args.location.address} + WHERE id = ${transfer.id} + `; } else { logger.debug( { genesis_id: args.location.genesis_id, block_height: args.location.block_height }, @@ -300,6 +331,7 @@ export class Brc20PgStore { private async insertMint(args: { mint: Brc20Mint; inscription_id: number; + location_id: number; location: DbLocationInsert; }): Promise { await this.parent.sqlWriteTransaction(async sql => { @@ -359,8 +391,8 @@ export class Brc20PgStore { // Insert balance change for minting address const balance = { inscription_id: args.inscription_id, + location_id: args.location_id, brc20_deploy_id: token.id, - block_height: args.location.block_height, address: args.location.address, avail_balance: mintAmt, // Real minted balance trans_balance: 0, @@ -374,6 +406,7 @@ export class Brc20PgStore { private async insertTransfer(args: { transfer: Brc20Transfer; inscription_id: number; + location_id: number; location: DbLocationInsert; }): Promise { await this.parent.sqlWriteTransaction(async sql => { @@ -426,8 +459,8 @@ export class Brc20PgStore { // Insert balance change for minting address const values = { inscription_id: args.inscription_id, + location_id: args.location_id, brc20_deploy_id: token.id, - block_height: args.location.block_height, address: args.location.address, avail_balance: transAmt.negated(), trans_balance: transAmt, @@ -437,41 +470,4 @@ export class Brc20PgStore { `; }); } - - private async applyBalanceTransfer(args: { - transfer: DbBrc20Transfer; - location: DbLocationInsert; - }): Promise { - await this.parent.sqlWriteTransaction(async sql => { - // Reflect balance transfer - const amount = new BigNumber(args.transfer.amount); - const changes = [ - { - inscription_id: args.transfer.inscription_id, - brc20_deploy_id: args.transfer.brc20_deploy_id, - block_height: args.location.block_height, - address: args.transfer.from_address, - avail_balance: 0, - trans_balance: amount.negated(), - }, - { - inscription_id: args.transfer.inscription_id, - brc20_deploy_id: args.transfer.brc20_deploy_id, - block_height: args.location.block_height, - address: args.location.address, - avail_balance: amount, - trans_balance: 0, - }, - ]; - await sql` - INSERT INTO brc20_balances ${sql(changes)} - `; - // Keep the new valid owner of the transfer inscription - await sql` - UPDATE brc20_transfers - SET to_address = ${args.location.address} - WHERE id = ${args.transfer.id} - `; - }); - } } diff --git a/src/pg/brc20/helpers.ts b/src/pg/brc20/helpers.ts new file mode 100644 index 00000000..ad7ea895 --- /dev/null +++ b/src/pg/brc20/helpers.ts @@ -0,0 +1,97 @@ +import { Static, Type } from '@fastify/type-provider-typebox'; +import { TypeCompiler } from '@sinclair/typebox/compiler'; +import BigNumber from 'bignumber.js'; +import { hexToBuffer 
} from '../../api/util/helpers'; +import { DbInscriptionInsert } from '../types'; + +const Brc20TickerSchema = Type.String({ minLength: 1 }); +const Brc20NumberSchema = Type.RegEx(/^((\d+)|(\d*\.?\d+))$/); + +const Brc20DeploySchema = Type.Object( + { + p: Type.Literal('brc-20'), + op: Type.Literal('deploy'), + tick: Brc20TickerSchema, + max: Brc20NumberSchema, + lim: Type.Optional(Brc20NumberSchema), + dec: Type.Optional(Type.RegEx(/^\d+$/)), + }, + { additionalProperties: true } +); +export type Brc20Deploy = Static; + +const Brc20MintSchema = Type.Object( + { + p: Type.Literal('brc-20'), + op: Type.Literal('mint'), + tick: Brc20TickerSchema, + amt: Brc20NumberSchema, + }, + { additionalProperties: true } +); +export type Brc20Mint = Static; + +const Brc20TransferSchema = Type.Object( + { + p: Type.Literal('brc-20'), + op: Type.Literal('transfer'), + tick: Brc20TickerSchema, + amt: Brc20NumberSchema, + }, + { additionalProperties: true } +); +export type Brc20Transfer = Static; + +const Brc20Schema = Type.Union([Brc20DeploySchema, Brc20MintSchema, Brc20TransferSchema]); +const Brc20C = TypeCompiler.Compile(Brc20Schema); +export type Brc20 = Static; + +export function brc20FromInscription(inscription: DbInscriptionInsert): Brc20 | undefined { + if ( + inscription.mime_type.startsWith('text/plain') || + inscription.mime_type.startsWith('application/json') + ) { + try { + const buf = + typeof inscription.content === 'string' + ? hexToBuffer(inscription.content) + : inscription.content; + const json = JSON.parse(buf.toString('utf-8')); + if (Brc20C.Check(json)) { + // Check ticker byte length + if (Buffer.from(json.tick).length > 4) { + return; + } + // Check numeric values. + const uint64_max = BigNumber('18446744073709551615'); + if (json.op === 'deploy') { + const max = BigNumber(json.max); + if (max.isNaN() || max.isZero() || max.isGreaterThan(uint64_max)) { + return; + } + if (json.lim) { + const lim = BigNumber(json.lim); + if (lim.isNaN() || lim.isZero() || lim.isGreaterThan(uint64_max)) { + return; + } + } + if (json.dec) { + // `dec` can have a value of 0 but must be no more than 18. + const dec = BigNumber(json.dec); + if (dec.isNaN() || dec.isGreaterThan(18)) { + return; + } + } + } else { + const amt = BigNumber(json.amt); + if (amt.isNaN() || amt.isZero() || amt.isGreaterThan(uint64_max)) { + return; + } + } + return json; + } + } catch (error) { + // Not a BRC-20 inscription. 
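      // JSON.parse throws for anything that isn't valid JSON; such content simply falls
      // through here and the function returns undefined, i.e. "not a BRC-20 operation".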
+ } + } +} diff --git a/src/pg/brc20/types.ts b/src/pg/brc20/types.ts new file mode 100644 index 00000000..ec0e2d7b --- /dev/null +++ b/src/pg/brc20/types.ts @@ -0,0 +1,104 @@ +export type DbBrc20DeployInsert = { + inscription_id: number; + block_height: number; + tx_id: string; + address: string; + ticker: string; + max: string; + decimals: string; + limit: string | null; +}; + +export type DbBrc20Deploy = { + id: string; + inscription_id: number; + block_height: number; + tx_id: string; + address: string; + ticker: string; + max: string; + decimals: string; + limit?: string; +}; + +export type DbBrc20Transfer = { + id: string; + inscription_id: number; + brc20_deploy_id: number; + block_height: number; + tx_id: string; + from_address: string; + to_address?: string; + amount: string; +}; + +export type DbBrc20Token = { + id: string; + genesis_id: string; + number: string; + block_height: string; + tx_id: string; + address: string; + ticker: string; + max: string; + limit?: string; + decimals: number; +}; + +export type DbBrc20Supply = { + max_supply: string; + minted_supply: string; + holders: string; +}; + +export type DbBrc20Holder = { + address: string; + total_balance: string; +}; + +export type DbBrc20Balance = { + ticker: string; + avail_balance: string; + trans_balance: string; + total_balance: string; +}; + +export type DbBrc20EventInsert = { + inscription_id: number; + brc20_deploy_id: string; + deploy_id: string | null; + mint_id: string | null; + transfer_id: string | null; +}; + +export const BRC20_DEPLOYS_COLUMNS = [ + 'id', + 'inscription_id', + 'block_height', + 'tx_id', + 'address', + 'ticker', + 'max', + 'decimals', + 'limit', +]; + +export const BRC20_TRANSFERS_COLUMNS = [ + 'id', + 'inscription_id', + 'brc20_deploy_id', + 'block_height', + 'tx_id', + 'from_address', + 'to_address', + 'amount', +]; + +export const BRC20_EVENTS_COLUMNS = [ + 'id', + 'inscription_id', + 'brc20_deploy_id', + 'deploy_id', + 'mint_id', + 'transfer_id', +]; diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index 5a99d8db..c4b9e99c 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -1,104 +1,4 @@ -import { Static, Type } from '@sinclair/typebox'; -import { TypeCompiler } from '@sinclair/typebox/compiler'; -import { hexToBuffer } from '../api/util/helpers'; -import { - DbInscriptionIndexFilters, - DbInscriptionIndexResultCountType, - DbInscriptionInsert, -} from './types'; -import BigNumber from 'bignumber.js'; - -const Brc20TickerSchema = Type.String({ minLength: 1 }); -const Brc20NumberSchema = Type.RegEx(/^((\d+)|(\d*\.?\d+))$/); - -const Brc20DeploySchema = Type.Object( - { - p: Type.Literal('brc-20'), - op: Type.Literal('deploy'), - tick: Brc20TickerSchema, - max: Brc20NumberSchema, - lim: Type.Optional(Brc20NumberSchema), - dec: Type.Optional(Type.RegEx(/^\d+$/)), - }, - { additionalProperties: true } -); -export type Brc20Deploy = Static; - -const Brc20MintSchema = Type.Object( - { - p: Type.Literal('brc-20'), - op: Type.Literal('mint'), - tick: Brc20TickerSchema, - amt: Brc20NumberSchema, - }, - { additionalProperties: true } -); -export type Brc20Mint = Static; - -const Brc20TransferSchema = Type.Object( - { - p: Type.Literal('brc-20'), - op: Type.Literal('transfer'), - tick: Brc20TickerSchema, - amt: Brc20NumberSchema, - }, - { additionalProperties: true } -); -export type Brc20Transfer = Static; - -const Brc20Schema = Type.Union([Brc20DeploySchema, Brc20MintSchema, Brc20TransferSchema]); -const Brc20C = TypeCompiler.Compile(Brc20Schema); -export type Brc20 = Static; - -export 
function brc20FromInscription(inscription: DbInscriptionInsert): Brc20 | undefined { - if ( - inscription.mime_type.startsWith('text/plain') || - inscription.mime_type.startsWith('application/json') - ) { - try { - const buf = - typeof inscription.content === 'string' - ? hexToBuffer(inscription.content) - : inscription.content; - const json = JSON.parse(buf.toString('utf-8')); - if (Brc20C.Check(json)) { - // Check ticker byte length - if (Buffer.from(json.tick).length > 4) { - return; - } - // Check numeric values. - const uint64_max = BigNumber('18446744073709551615'); - if (json.op === 'deploy') { - const max = BigNumber(json.max); - if (max.isNaN() || max.isZero() || max.isGreaterThan(uint64_max)) { - return; - } - if (json.lim) { - const lim = BigNumber(json.lim); - if (lim.isNaN() || lim.isZero() || lim.isGreaterThan(uint64_max)) { - return; - } - } - if (json.dec) { - // `dec` can have a value of 0 but must be no more than 18. - const dec = BigNumber(json.dec); - if (dec.isNaN() || dec.isGreaterThan(18)) { - return; - } - } - } else { - const amt = BigNumber(json.amt); - if (amt.isNaN() || amt.isZero() || amt.isGreaterThan(uint64_max)) { - return; - } - } - return json; - } - } catch (error) { - // Not a BRC-20 inscription. - } - } -} +import { DbInscriptionIndexFilters, DbInscriptionIndexResultCountType } from './types'; /** * Returns which inscription count is required based on filters sent to the index endpoint. diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index b3c281e9..f99f5251 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -29,7 +29,7 @@ import { runMigrations, } from '@hirosystems/api-toolkit'; import * as path from 'path'; -import { Brc20PgStore } from './brc20-pg-store'; +import { Brc20PgStore } from './brc20/brc20-pg-store'; export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); @@ -639,6 +639,12 @@ export class PgStore extends BasePgStore { timestamp = EXCLUDED.timestamp RETURNING id `; + await this.brc20.insertOperation({ + inscription_id, + location_id: locationRes[0].id, + inscription: args.inscription, + location: args.location, + }); await this.updateInscriptionLocationPointers({ inscription_id, genesis_id: args.inscription.genesis_id, @@ -647,12 +653,6 @@ export class PgStore extends BasePgStore { tx_index: args.location.tx_index, address: args.location.address, }); - // Insert BRC-20 op genesis (if any). - await this.brc20.insertOperationGenesis({ - inscription_id, - inscription: args.inscription, - location: args.location, - }); logger.info( `PgStore${upsert.count > 0 ? ' upsert ' : ' '}reveal #${args.inscription.number} (${ args.location.genesis_id @@ -730,9 +730,11 @@ export class PgStore extends BasePgStore { tx_index: args.location.tx_index, address: args.location.address, }); - - // Insert BRC-20 balance transfers (if any). - await this.brc20.insertOperationTransfer({ inscription_id, location: args.location }); + await this.brc20.insertOperationTransfer({ + inscription_id, + location_id: locationRes[0].id, + location: args.location, + }); } logger.info( `PgStore${upsert.count > 0 ? 
' upsert ' : ' '}transfer (${ diff --git a/src/pg/types.ts b/src/pg/types.ts index 49f059dd..d181844a 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -211,111 +211,6 @@ export enum DbInscriptionIndexResultCountType { intractable, } -export type DbBrc20DeployInsert = { - inscription_id: number; - block_height: number; - tx_id: string; - address: string; - ticker: string; - max: string; - decimals: string; - limit: string | null; -}; - -export type DbBrc20Deploy = { - id: string; - inscription_id: number; - block_height: number; - tx_id: string; - address: string; - ticker: string; - max: string; - decimals: string; - limit?: string; -}; - -export type DbBrc20Transfer = { - id: string; - inscription_id: number; - brc20_deploy_id: number; - block_height: number; - tx_id: string; - from_address: string; - to_address?: string; - amount: string; -}; - -export type DbBrc20Token = { - id: string; - genesis_id: string; - number: string; - block_height: string; - tx_id: string; - address: string; - ticker: string; - max: string; - limit?: string; - decimals: number; -}; - -export type DbBrc20Supply = { - max_supply: string; - minted_supply: string; - holders: string; -}; - -export type DbBrc20Holder = { - address: string; - total_balance: string; -}; - -export type DbBrc20Balance = { - ticker: string; - avail_balance: string; - trans_balance: string; - total_balance: string; -}; - -export type DbBrc20EventInsert = { - inscription_id: number; - brc20_deploy_id: string; - deploy_id: string | null; - mint_id: string | null; - transfer_id: string | null; -}; - -export const BRC20_DEPLOYS_COLUMNS = [ - 'id', - 'inscription_id', - 'block_height', - 'tx_id', - 'address', - 'ticker', - 'max', - 'decimals', - 'limit', -]; - -export const BRC20_TRANSFERS_COLUMNS = [ - 'id', - 'inscription_id', - 'brc20_deploy_id', - 'block_height', - 'tx_id', - 'from_address', - 'to_address', - 'amount', -]; - -export const BRC20_EVENTS_COLUMNS = [ - 'id', - 'inscription_id', - 'brc20_deploy_id', - 'deploy_id', - 'mint_id', - 'transfer_id', -]; - export type DbInscriptionCountPerBlockFilters = { from_block_height?: number; to_block_height?: number; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 26dabb73..475e0dab 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1,9 +1,9 @@ import { cycleMigrations } from '@hirosystems/api-toolkit'; import { buildApiServer } from '../src/api/init'; -import { brc20FromInscription } from '../src/pg/helpers'; import { MIGRATIONS_DIR, PgStore } from '../src/pg/pg-store'; import { DbInscriptionInsert } from '../src/pg/types'; import { TestChainhookPayloadBuilder, TestFastifyServer, brc20Reveal } from './helpers'; +import { brc20FromInscription } from '../src/pg/brc20/helpers'; describe('BRC-20', () => { let db: PgStore; @@ -1522,5 +1522,160 @@ describe('BRC-20', () => { }, ]); }); + + test('balance transfer gap fill applied correctly', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; + await deployAndMintPEPE(address); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775640, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', + }) + .transaction({ + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '9000', + }, + number: 7, + tx_id: 
'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + address: address, + }) + ) + .build() + ); + + // Make the first seen transfer + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775651, + hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', + }) + .transaction({ + hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', + }) + .inscriptionTransferred({ + inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + updated_address: address2, + satpoint_pre_transfer: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', + satpoint_post_transfer: + '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', + post_transfer_output_value: null, + tx_index: 0, + }) + .build() + ); + const response1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address}`, + }); + expect(response1.statusCode).toBe(200); + const json1 = response1.json(); + expect(json1.total).toBe(1); + expect(json1.results).toStrictEqual([ + { + available_balance: '1000', + overall_balance: '1000', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address2}`, + }); + expect(response2.statusCode).toBe(200); + const json2 = response2.json(); + expect(json2.total).toBe(1); + expect(json2.results).toStrictEqual([ + { + available_balance: '9000', + overall_balance: '9000', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + + // Oops, turns out there was a gap fill with another transfer first + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775645, + hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', + }) + .transaction({ + hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', + }) + .inscriptionTransferred({ + inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + updated_address: 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz', + satpoint_pre_transfer: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', + satpoint_post_transfer: + '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', + post_transfer_output_value: null, + tx_index: 0, + }) + .build() + ); + const response1b = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address}`, + }); + expect(response1b.statusCode).toBe(200); + const json1b = response1b.json(); + expect(json1b.total).toBe(1); + expect(json1b.results).toStrictEqual([ + { + available_balance: '1000', + overall_balance: '1000', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + const response2b = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address2}`, + }); + expect(response2b.statusCode).toBe(200); + const json2b = response2b.json(); + expect(json2b.total).toBe(1); + expect(json2b.results).toStrictEqual([ + { + available_balance: '0', + overall_balance: '0', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + // This address is the one that should have the balance. 
+ const response3 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address2}`, + }); + expect(response3.statusCode).toBe(200); + const json3 = response3.json(); + expect(json3.total).toBe(1); + expect(json3.results).toStrictEqual([ + { + available_balance: '9000', + overall_balance: '9000', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + }); }); }); diff --git a/tests/helpers.ts b/tests/helpers.ts index bba789f3..5aba1f0b 100644 --- a/tests/helpers.ts +++ b/tests/helpers.ts @@ -1,7 +1,6 @@ import { FastifyBaseLogger, FastifyInstance } from 'fastify'; import { IncomingMessage, Server, ServerResponse } from 'http'; import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; -import { Brc20 } from '../src/pg/helpers'; import { BitcoinCursedInscriptionRevealed, BitcoinEvent, @@ -10,6 +9,7 @@ import { BitcoinTransaction, Payload, } from '@hirosystems/chainhook-client'; +import { Brc20 } from '../src/pg/brc20/helpers'; export type TestFastifyServer = FastifyInstance< Server, From 026c2755483efbc8b54753a9a1bf315a6a833d88 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Mon, 24 Jul 2023 10:51:27 -0600 Subject: [PATCH 30/56] fix: allow gap fills for transfers --- migrations/1684175810998_brc20-balances.ts | 11 ++++-- src/pg/brc20/brc20-pg-store.ts | 39 ++++++++++++++-------- src/pg/brc20/types.ts | 19 +++++++++++ tests/brc20.test.ts | 21 ++++-------- 4 files changed, 61 insertions(+), 29 deletions(-) diff --git a/migrations/1684175810998_brc20-balances.ts b/migrations/1684175810998_brc20-balances.ts index 6e8d1b94..4f918dff 100644 --- a/migrations/1684175810998_brc20-balances.ts +++ b/migrations/1684175810998_brc20-balances.ts @@ -23,7 +23,6 @@ export function up(pgm: MigrationBuilder): void { }, address: { type: 'text', - notNull: true, }, avail_balance: { type: 'numeric', @@ -33,6 +32,10 @@ export function up(pgm: MigrationBuilder): void { type: 'numeric', notNull: true, }, + type: { + type: 'smallint', + notNull: true, + }, }); pgm.createConstraint( 'brc20_balances', @@ -49,7 +52,11 @@ export function up(pgm: MigrationBuilder): void { 'brc20_balances_brc20_deploy_id_fk', 'FOREIGN KEY(brc20_deploy_id) REFERENCES brc20_deploys(id) ON DELETE CASCADE' ); - pgm.createIndex('brc20_balances', ['inscription_id']); + pgm.createConstraint( + 'brc20_balances', + 'brc20_balances_inscription_id_type_unique', + 'UNIQUE(inscription_id, type)' + ); pgm.createIndex('brc20_balances', ['location_id']); pgm.createIndex('brc20_balances', ['brc20_deploy_id']); pgm.createIndex('brc20_balances', ['address']); diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index b39c98a9..b9b701fb 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -19,6 +19,8 @@ import { DbBrc20EventInsert, DbBrc20Deploy, BRC20_DEPLOYS_COLUMNS, + DbBrc20BalanceInsert, + DbBrc20BalanceTypeId, } from './types'; import { brc20FromInscription, Brc20Deploy, Brc20Mint, Brc20Transfer } from './helpers'; @@ -226,32 +228,41 @@ export class Brc20PgStore { FROM locations AS l INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id WHERE l.inscription_id = ${args.inscription_id} + AND l.block_height <= ${args.location.block_height} LIMIT 3 `; if (brc20Transfer.count === 2) { const transfer = brc20Transfer[0]; // This is the first time this BRC-20 transfer is being used. Apply the balance change. 
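        // Note: `LIMIT 3` plus the `count === 2` check above is a cheap "first spend" test. A
        // transfer inscription that is still spendable has exactly two location rows at or below
        // this block height: its genesis reveal and this transfer. A third row means the
        // transfer output was already moved and must be ignored.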
const amount = new BigNumber(transfer.amount); - const changes = [ + const changes: DbBrc20BalanceInsert[] = [ { inscription_id: transfer.inscription_id, location_id: args.location_id, brc20_deploy_id: transfer.brc20_deploy_id, address: transfer.from_address, - avail_balance: 0, - trans_balance: amount.negated(), + avail_balance: '0', + trans_balance: amount.negated().toString(), + type: DbBrc20BalanceTypeId.transferFrom, }, { inscription_id: transfer.inscription_id, location_id: args.location_id, brc20_deploy_id: transfer.brc20_deploy_id, address: args.location.address, - avail_balance: amount, - trans_balance: 0, + avail_balance: amount.toString(), + trans_balance: '0', + type: DbBrc20BalanceTypeId.transferTo, }, ]; await sql` INSERT INTO brc20_balances ${sql(changes)} + ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO UPDATE SET + location_id = EXCLUDED.location_id, + brc20_deploy_id = EXCLUDED.brc20_deploy_id, + address = EXCLUDED.address, + avail_balance = EXCLUDED.avail_balance, + trans_balance = EXCLUDED.trans_balance `; // Keep the new valid owner of the transfer inscription await sql` @@ -389,13 +400,14 @@ export class Brc20PgStore { ); // Insert balance change for minting address - const balance = { + const balance: DbBrc20BalanceInsert = { inscription_id: args.inscription_id, location_id: args.location_id, - brc20_deploy_id: token.id, + brc20_deploy_id: parseInt(token.id), address: args.location.address, - avail_balance: mintAmt, // Real minted balance - trans_balance: 0, + avail_balance: mintAmt.toString(), + trans_balance: '0', + type: DbBrc20BalanceTypeId.mint, }; await sql` INSERT INTO brc20_balances ${sql(balance)} @@ -457,13 +469,14 @@ export class Brc20PgStore { ); // Insert balance change for minting address - const values = { + const values: DbBrc20BalanceInsert = { inscription_id: args.inscription_id, location_id: args.location_id, - brc20_deploy_id: token.id, + brc20_deploy_id: parseInt(token.id), address: args.location.address, - avail_balance: transAmt.negated(), - trans_balance: transAmt, + avail_balance: transAmt.negated().toString(), + trans_balance: transAmt.toString(), + type: DbBrc20BalanceTypeId.transferIntent, }; await sql` INSERT INTO brc20_balances ${sql(values)} diff --git a/src/pg/brc20/types.ts b/src/pg/brc20/types.ts index ec0e2d7b..7c304759 100644 --- a/src/pg/brc20/types.ts +++ b/src/pg/brc20/types.ts @@ -1,3 +1,5 @@ +import { PgNumeric } from '@hirosystems/api-toolkit'; + export type DbBrc20DeployInsert = { inscription_id: number; block_height: number; @@ -63,6 +65,23 @@ export type DbBrc20Balance = { total_balance: string; }; +export enum DbBrc20BalanceTypeId { + mint = 0, + transferIntent = 1, + transferFrom = 2, + transferTo = 3, +} + +export type DbBrc20BalanceInsert = { + inscription_id: number; + location_id: number; + brc20_deploy_id: number; + address: string | null; + avail_balance: PgNumeric; + trans_balance: PgNumeric; + type: DbBrc20BalanceTypeId; +}; + export type DbBrc20EventInsert = { inscription_id: number; brc20_deploy_id: string; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 475e0dab..e116c2bc 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -940,7 +940,7 @@ describe('BRC-20', () => { tick: 'PEPE', amt: '1000', }, - number: 6, + number: 7, tx_id: '7e09bda2cba34bca648cca6d79a074940d39b6137150d3a3edcf80c0e01419a5', address: address, }) @@ -1562,15 +1562,15 @@ describe('BRC-20', () => { hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', }) .transaction({ - hash: 
'7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', + hash: 'ce32d47452a4dfae6510fd283e1cec587c5cac217dec09ac4b01541adc86cd34', }) .inscriptionTransferred({ inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', updated_address: address2, satpoint_pre_transfer: - 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', - satpoint_post_transfer: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', + satpoint_post_transfer: + 'ce32d47452a4dfae6510fd283e1cec587c5cac217dec09ac4b01541adc86cd34:0:0', post_transfer_output_value: null, tx_index: 0, }) @@ -1645,25 +1645,18 @@ describe('BRC-20', () => { transferrable_balance: '0', }, ]); + // No movements at all for this address. const response2b = await fastify.inject({ method: 'GET', url: `/ordinals/brc-20/balances/${address2}`, }); expect(response2b.statusCode).toBe(200); const json2b = response2b.json(); - expect(json2b.total).toBe(1); - expect(json2b.results).toStrictEqual([ - { - available_balance: '0', - overall_balance: '0', - ticker: 'PEPE', - transferrable_balance: '0', - }, - ]); + expect(json2b.total).toBe(0); // This address is the one that should have the balance. const response3 = await fastify.inject({ method: 'GET', - url: `/ordinals/brc-20/balances/${address2}`, + url: `/ordinals/brc-20/balances/bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz`, }); expect(response3.statusCode).toBe(200); const json3 = response3.json(); From 2a4700c5ca851b799fba534ff8060004f7ca2f5d Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 16 Aug 2023 13:20:23 -0600 Subject: [PATCH 31/56] fix: only consider blessed inscriptions --- src/pg/brc20/brc20-pg-store.ts | 3 +++ src/pg/pg-store.ts | 5 +++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index b9b701fb..c050daf7 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -175,6 +175,7 @@ export class Brc20PgStore { inscription: DbInscriptionInsert; location: DbLocationInsert; }): Promise { + if (args.inscription.number < 0) return; // No cursed inscriptions apply. // Is this a BRC-20 operation? Is it being inscribed to a valid address? const brc20 = brc20FromInscription(args.inscription); if (brc20) { @@ -215,9 +216,11 @@ export class Brc20PgStore { async insertOperationTransfer(args: { inscription_id: number; + inscription_number: number; location_id: number; location: DbLocationInsert; }): Promise { + if (args.inscription_number < 0) return; // No cursed inscriptions apply. // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by // this address that hasn't been sent to another address before. Use `LIMIT 3` as a quick way // of checking if we have just inserted the first transfer for this inscription (genesis + diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 07e37530..25a8c61f 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -664,8 +664,8 @@ export class PgStore extends BasePgStore { private async insertLocation(args: { location: DbLocationInsert }): Promise { await this.sqlWriteTransaction(async sql => { // Does the inscription exist? Warn if it doesn't. 
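      // The genesis lookup below now also selects the inscription `number` so it can be passed
      // down to the BRC-20 transfer handler, which ignores cursed (negative-numbered)
      // inscriptions entirely.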
- const genesis = await sql<{ id: number }[]>` - SELECT id FROM inscriptions WHERE genesis_id = ${args.location.genesis_id} + const genesis = await sql<{ id: number; number: number }[]>` + SELECT id, number FROM inscriptions WHERE genesis_id = ${args.location.genesis_id} `; if (genesis.count === 0) { logger.warn( @@ -729,6 +729,7 @@ export class PgStore extends BasePgStore { }); await this.brc20.insertOperationTransfer({ inscription_id, + inscription_number: genesis[0].number, location_id: locationRes[0].id, location: args.location, }); From ced5cb3306bd0e242503a86f8c94911c2d57161f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Wed, 16 Aug 2023 15:09:53 -0600 Subject: [PATCH 32/56] feat: brc-20 balance at block (#186) --- src/api/routes/brc20.ts | 3 +++ src/pg/brc20/brc20-pg-store.ts | 5 +++++ tests/brc20.test.ts | 22 ++++++++++++++++++++++ 3 files changed, 30 insertions(+) diff --git a/src/api/routes/brc20.ts b/src/api/routes/brc20.ts index d2cfc5aa..ed6e3a7c 100644 --- a/src/api/routes/brc20.ts +++ b/src/api/routes/brc20.ts @@ -4,6 +4,7 @@ import { FastifyPluginCallback } from 'fastify'; import { Server } from 'http'; import { AddressParam, + BlockHeightParam, Brc20BalanceResponseSchema, Brc20HolderResponseSchema, Brc20TickerParam, @@ -165,6 +166,7 @@ export const Brc20Routes: FastifyPluginCallback< }), querystring: Type.Object({ ticker: Type.Optional(Brc20TickersParam), + block_height: Type.Optional(BlockHeightParam), // Pagination offset: Type.Optional(OffsetParam), limit: Type.Optional(LimitParam), @@ -182,6 +184,7 @@ export const Brc20Routes: FastifyPluginCallback< offset, address: request.params.address, ticker: request.query.ticker, + block_height: request.query.block_height ? parseInt(request.query.block_height) : undefined, }); await reply.send({ limit, diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index c050daf7..83bb8c8a 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -65,6 +65,7 @@ export class Brc20PgStore { args: { address: string; ticker?: string[]; + block_height?: number; } & DbInscriptionIndexPaging ): Promise> { const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; @@ -77,9 +78,13 @@ export class Brc20PgStore { COUNT(*) OVER() as total FROM brc20_balances AS b INNER JOIN brc20_deploys AS d ON d.id = b.brc20_deploy_id + ${ + args.block_height ? this.sql`INNER JOIN locations AS l ON l.id = b.location_id` : this.sql`` + } WHERE b.address = ${args.address} ${lowerTickers ? this.sql`AND LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} + ${args.block_height ? 
this.sql`AND l.block_height <= ${args.block_height}` : this.sql``} GROUP BY d.ticker LIMIT ${args.limit} OFFSET ${args.offset} diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index e116c2bc..3d74c363 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1218,6 +1218,14 @@ describe('BRC-20', () => { transferrable_balance: '2000', }, ]); + + // Balance at previous block + const response2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address}?block_height=775618`, + }); + const json2 = response2.json(); + expect(json2.results[0].available_balance).toBe('10000'); }); test('cannot transfer more than available balance', async () => { @@ -1411,6 +1419,20 @@ describe('BRC-20', () => { transferrable_balance: '0', }, ]); + + // Balance at previous block + const prevBlock1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address}?block_height=775618`, + }); + const prevBlockJson1 = prevBlock1.json(); + expect(prevBlockJson1.results[0].available_balance).toBe('10000'); + const prevBlock2 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address2}?block_height=775618`, + }); + const prevBlockJson2 = prevBlock2.json(); + expect(prevBlockJson2.results[0]).toBeUndefined(); }); test('cannot spend valid transfer twice', async () => { From 6da2ba9841a17a1066841bccc4f60da4cd4ee68e Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 17 Aug 2023 09:02:46 -0600 Subject: [PATCH 33/56] build: release for brc-20 branch --- .releaserc | 52 +++++++++++++++++++++++++++++++++++----------------- 1 file changed, 35 insertions(+), 17 deletions(-) diff --git a/.releaserc b/.releaserc index 7f30438e..55c945bd 100644 --- a/.releaserc +++ b/.releaserc @@ -1,19 +1,37 @@ { - "plugins": [ - [ - "@semantic-release/commit-analyzer", - { - "preset": "conventionalcommits" - } - ], - [ - "@semantic-release/release-notes-generator", - { - "preset": "conventionalcommits" - } - ], - "@semantic-release/github", - "@semantic-release/changelog", - "@semantic-release/git" - ] + "branches": [ + "+([0-9])?(.{+([0-9]),x}).x", + "master", + "next", + "next-major", + { + "name": "beta", + "prerelease": true + }, + { + "name": "alpha", + "prerelease": true + }, + { + "name": "brc-20", + "prerelease": true + } + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits" + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits" + } + ], + "@semantic-release/github", + "@semantic-release/changelog", + "@semantic-release/git" + ] } From 9047cd352b59803de7d6581c7ee03a1e1128eec2 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Thu, 17 Aug 2023 15:07:13 +0000 Subject: [PATCH 34/56] chore(release): 1.0.0-brc-20.1 [skip ci] MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## [1.0.0-brc-20.1](https://github.com/hirosystems/ordinals-api/compare/v0.4.15...v1.0.0-brc-20.1) (2023-08-17) ### ⚠ BREAKING CHANGES * optimize transfer replay capability (#129) ### Features * add inscription number sort option ([#168](https://github.com/hirosystems/ordinals-api/issues/168)) ([9f4cdbc](https://github.com/hirosystems/ordinals-api/commit/9f4cdbc96f2efa4610e771df74b11951803cb8a6)) * add stats endpoint for inscription counts ([#70](https://github.com/hirosystems/ordinals-api/issues/70)) ([ac18e62](https://github.com/hirosystems/ordinals-api/commit/ac18e621ed7e8ea2fc5a5e536d59a152c3a1f345)) * brc-20 balance at block 
([#186](https://github.com/hirosystems/ordinals-api/issues/186)) ([ced5cb3](https://github.com/hirosystems/ordinals-api/commit/ced5cb3306bd0e242503a86f8c94911c2d57161f)) * detect and tag recursive inscriptions ([#167](https://github.com/hirosystems/ordinals-api/issues/167)) ([fb36285](https://github.com/hirosystems/ordinals-api/commit/fb362857c2c3cf4c098f6604b49d77efa6f95d8b)) * first balance endpoint ([f9c6654](https://github.com/hirosystems/ordinals-api/commit/f9c66540b9d173d2981bc2af5ee13fd082dc5547)) * first balance transfers ([dd8ec07](https://github.com/hirosystems/ordinals-api/commit/dd8ec07d366e6bf15e74b528077c8fa1836958e9)) * holders endpoint ([a01f77e](https://github.com/hirosystems/ordinals-api/commit/a01f77ef6c9c03576a07a7cdc14d0279afc44cbb)) * mint within supply ([c8e5820](https://github.com/hirosystems/ordinals-api/commit/c8e582055956c9381d14d5ec1bae5a70c0a4d4a8)) * mints with balance changes ([32e90f7](https://github.com/hirosystems/ordinals-api/commit/32e90f73696aa403417869f0c71fa76da115048e)) * optimize transfer replay capability ([#129](https://github.com/hirosystems/ordinals-api/issues/129)) ([97874cc](https://github.com/hirosystems/ordinals-api/commit/97874cc1461d4e321d5143c70d68927ace62eec5)) * start storing token deploys ([bf4c7f6](https://github.com/hirosystems/ordinals-api/commit/bf4c7f6f27903f18d30ddb7fc2b1a779cc991114)) * token details ([5d35d5b](https://github.com/hirosystems/ordinals-api/commit/5d35d5b0eefb46eeac91ead52f4909279e39404d)) * token info endpoint ([8fad6b9](https://github.com/hirosystems/ordinals-api/commit/8fad6b96c0fffc302a3e61922677bdfb56b74b85)) * tokens endpoint as paginated index ([ae2049b](https://github.com/hirosystems/ordinals-api/commit/ae2049baf04950d810aa997bc0f31b585aaf3391)) ### Bug Fixes * add address column to genesis and current ([d71e1d4](https://github.com/hirosystems/ordinals-api/commit/d71e1d49dece39df1c19c0bb35a43129ef1a31e9)) * add indexes for fks ([354ddd0](https://github.com/hirosystems/ordinals-api/commit/354ddd0559a32a2aba1d407a2c7486348eb91d1c)) * add secondary sorting by inscription number ([#177](https://github.com/hirosystems/ordinals-api/issues/177)) ([99959df](https://github.com/hirosystems/ordinals-api/commit/99959dfe6ec3de9288ce47bd8ef4d72535c19468)) * allow gap fills for transfers ([026c275](https://github.com/hirosystems/ordinals-api/commit/026c2755483efbc8b54753a9a1bf315a6a833d88)) * allow multiple transfers of an inscription in one block ([#132](https://github.com/hirosystems/ordinals-api/issues/132)) ([bc545f0](https://github.com/hirosystems/ordinals-api/commit/bc545f0c1d06ea54ceb5d6ba30a9031d04c7e01e)) * auto predicate registration option ([e1ed7c7](https://github.com/hirosystems/ordinals-api/commit/e1ed7c773dfba99f0b098debb3d865da46d8d10e)) * balances and rollbacks ([61b4139](https://github.com/hirosystems/ordinals-api/commit/61b413955f6ce1428a6a3b1c6b023ae4464c111d)) * balances/:address ([687c2e4](https://github.com/hirosystems/ordinals-api/commit/687c2e43cc5782a2521c3442c0d7fcfe90943b67)) * build beta image ([13f2c13](https://github.com/hirosystems/ordinals-api/commit/13f2c13384a00f9bfd58b7ddd88a49e7abbbe588)) * build event server using chainhook client library ([#105](https://github.com/hirosystems/ordinals-api/issues/105)) ([ab4c795](https://github.com/hirosystems/ordinals-api/commit/ab4c795d1621078950e4defa3330ae597f46d6ac)) * chainhook client upgrades ([9a96492](https://github.com/hirosystems/ordinals-api/commit/9a9649251dd449d6784aa4f6cd448c6f1b6cb687)) * consider `tx_index` in transfers by block 
endpoint ([#178](https://github.com/hirosystems/ordinals-api/issues/178)) ([ed517d6](https://github.com/hirosystems/ordinals-api/commit/ed517d6eb01b2a780ef0fb89fc5a65582d5e575e)) * introduce materialized view to count address inscriptions ([#147](https://github.com/hirosystems/ordinals-api/issues/147)) ([09a95d5](https://github.com/hirosystems/ordinals-api/commit/09a95d55276be8b52ea19c90d0e7fa8bca73cfc7)) * invalid decimal count ([aa15b0e](https://github.com/hirosystems/ordinals-api/commit/aa15b0e4843435cacfa12856b881566ba0c2f3a3)) * make etag calculation sensitive to inscription location gap fills and upserts ([#156](https://github.com/hirosystems/ordinals-api/issues/156)) ([5648c9e](https://github.com/hirosystems/ordinals-api/commit/5648c9ea72ee09df4a224937a08f662e78d06edd)) * only consider blessed inscriptions ([2a4700c](https://github.com/hirosystems/ordinals-api/commit/2a4700c5ca851b799fba534ff8060004f7ca2f5d)) * optimize COUNT calculations via the use of count tables ([#175](https://github.com/hirosystems/ordinals-api/issues/175)) ([31498bd](https://github.com/hirosystems/ordinals-api/commit/31498bdb57203bd6c28eccac4446a9d169a3fe18)) * refresh views in parallel ([#154](https://github.com/hirosystems/ordinals-api/issues/154)) ([a7674a9](https://github.com/hirosystems/ordinals-api/commit/a7674a92efcb580b67c3510a2bf09ffb752e2ef0)) * remove old json content tables ([0732048](https://github.com/hirosystems/ordinals-api/commit/07320489889b85c881ab49a4ce10d0d21a750114)) * remove old json schemas ([8cc7f8a](https://github.com/hirosystems/ordinals-api/commit/8cc7f8adcb9d70cd511b09583dd45f9dc770cd92)) * remove unused json functions ([#165](https://github.com/hirosystems/ordinals-api/issues/165)) ([3eb0e24](https://github.com/hirosystems/ordinals-api/commit/3eb0e248a98913b8e4c56949e8ebd174a3f1faae)) * rename location pointer tables ([b84d27e](https://github.com/hirosystems/ordinals-api/commit/b84d27e3624737e59c949906cafa8d76a329c0a0)) * rollback location pointers ([#174](https://github.com/hirosystems/ordinals-api/issues/174)) ([3c9d7f0](https://github.com/hirosystems/ordinals-api/commit/3c9d7f07d14aed2bad1c07d69f7170d74a85d575)) * save tx_index on locations to support transfers on same block ([#145](https://github.com/hirosystems/ordinals-api/issues/145)) ([30a9635](https://github.com/hirosystems/ordinals-api/commit/30a96358c2b7c4c40f908e116478e3ddd83d8857)) * skip db migrations during readonly mode ([d5157f0](https://github.com/hirosystems/ordinals-api/commit/d5157f02646ceb6c58b73575d8ff4afc8833b97e)) * tick must be 4 bytes or less ([f6fd0a6](https://github.com/hirosystems/ordinals-api/commit/f6fd0a656d6520f90eda4d6610c04a077fa70354)) * transfers only usable once ([542ec34](https://github.com/hirosystems/ordinals-api/commit/542ec34292d7535d01f62832b270e11b80b59da4)) * upgrade chainhook client ([cbbb951](https://github.com/hirosystems/ordinals-api/commit/cbbb9512734f83c27da91b57fd5825b22c510c33)) * upgrade chainhook client to 1.3.3 ([ee66f93](https://github.com/hirosystems/ordinals-api/commit/ee66f93a1d06c786c2eb7ce415df28c7fa8d0032)) * warn correctly on missing prev locations ([879bf55](https://github.com/hirosystems/ordinals-api/commit/879bf55b0fc7efd830c5cc0e1d742818177e8344)) --- CHANGELOG.md | 58 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index db5a8f47..9b83c929 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,61 @@ +## 
[1.0.0-brc-20.1](https://github.com/hirosystems/ordinals-api/compare/v0.4.15...v1.0.0-brc-20.1) (2023-08-17) + + +### ⚠ BREAKING CHANGES + +* optimize transfer replay capability (#129) + +### Features + +* add inscription number sort option ([#168](https://github.com/hirosystems/ordinals-api/issues/168)) ([9f4cdbc](https://github.com/hirosystems/ordinals-api/commit/9f4cdbc96f2efa4610e771df74b11951803cb8a6)) +* add stats endpoint for inscription counts ([#70](https://github.com/hirosystems/ordinals-api/issues/70)) ([ac18e62](https://github.com/hirosystems/ordinals-api/commit/ac18e621ed7e8ea2fc5a5e536d59a152c3a1f345)) +* brc-20 balance at block ([#186](https://github.com/hirosystems/ordinals-api/issues/186)) ([ced5cb3](https://github.com/hirosystems/ordinals-api/commit/ced5cb3306bd0e242503a86f8c94911c2d57161f)) +* detect and tag recursive inscriptions ([#167](https://github.com/hirosystems/ordinals-api/issues/167)) ([fb36285](https://github.com/hirosystems/ordinals-api/commit/fb362857c2c3cf4c098f6604b49d77efa6f95d8b)) +* first balance endpoint ([f9c6654](https://github.com/hirosystems/ordinals-api/commit/f9c66540b9d173d2981bc2af5ee13fd082dc5547)) +* first balance transfers ([dd8ec07](https://github.com/hirosystems/ordinals-api/commit/dd8ec07d366e6bf15e74b528077c8fa1836958e9)) +* holders endpoint ([a01f77e](https://github.com/hirosystems/ordinals-api/commit/a01f77ef6c9c03576a07a7cdc14d0279afc44cbb)) +* mint within supply ([c8e5820](https://github.com/hirosystems/ordinals-api/commit/c8e582055956c9381d14d5ec1bae5a70c0a4d4a8)) +* mints with balance changes ([32e90f7](https://github.com/hirosystems/ordinals-api/commit/32e90f73696aa403417869f0c71fa76da115048e)) +* optimize transfer replay capability ([#129](https://github.com/hirosystems/ordinals-api/issues/129)) ([97874cc](https://github.com/hirosystems/ordinals-api/commit/97874cc1461d4e321d5143c70d68927ace62eec5)) +* start storing token deploys ([bf4c7f6](https://github.com/hirosystems/ordinals-api/commit/bf4c7f6f27903f18d30ddb7fc2b1a779cc991114)) +* token details ([5d35d5b](https://github.com/hirosystems/ordinals-api/commit/5d35d5b0eefb46eeac91ead52f4909279e39404d)) +* token info endpoint ([8fad6b9](https://github.com/hirosystems/ordinals-api/commit/8fad6b96c0fffc302a3e61922677bdfb56b74b85)) +* tokens endpoint as paginated index ([ae2049b](https://github.com/hirosystems/ordinals-api/commit/ae2049baf04950d810aa997bc0f31b585aaf3391)) + + +### Bug Fixes + +* add address column to genesis and current ([d71e1d4](https://github.com/hirosystems/ordinals-api/commit/d71e1d49dece39df1c19c0bb35a43129ef1a31e9)) +* add indexes for fks ([354ddd0](https://github.com/hirosystems/ordinals-api/commit/354ddd0559a32a2aba1d407a2c7486348eb91d1c)) +* add secondary sorting by inscription number ([#177](https://github.com/hirosystems/ordinals-api/issues/177)) ([99959df](https://github.com/hirosystems/ordinals-api/commit/99959dfe6ec3de9288ce47bd8ef4d72535c19468)) +* allow gap fills for transfers ([026c275](https://github.com/hirosystems/ordinals-api/commit/026c2755483efbc8b54753a9a1bf315a6a833d88)) +* allow multiple transfers of an inscription in one block ([#132](https://github.com/hirosystems/ordinals-api/issues/132)) ([bc545f0](https://github.com/hirosystems/ordinals-api/commit/bc545f0c1d06ea54ceb5d6ba30a9031d04c7e01e)) +* auto predicate registration option ([e1ed7c7](https://github.com/hirosystems/ordinals-api/commit/e1ed7c773dfba99f0b098debb3d865da46d8d10e)) +* balances and rollbacks 
([61b4139](https://github.com/hirosystems/ordinals-api/commit/61b413955f6ce1428a6a3b1c6b023ae4464c111d)) +* balances/:address ([687c2e4](https://github.com/hirosystems/ordinals-api/commit/687c2e43cc5782a2521c3442c0d7fcfe90943b67)) +* build beta image ([13f2c13](https://github.com/hirosystems/ordinals-api/commit/13f2c13384a00f9bfd58b7ddd88a49e7abbbe588)) +* build event server using chainhook client library ([#105](https://github.com/hirosystems/ordinals-api/issues/105)) ([ab4c795](https://github.com/hirosystems/ordinals-api/commit/ab4c795d1621078950e4defa3330ae597f46d6ac)) +* chainhook client upgrades ([9a96492](https://github.com/hirosystems/ordinals-api/commit/9a9649251dd449d6784aa4f6cd448c6f1b6cb687)) +* consider `tx_index` in transfers by block endpoint ([#178](https://github.com/hirosystems/ordinals-api/issues/178)) ([ed517d6](https://github.com/hirosystems/ordinals-api/commit/ed517d6eb01b2a780ef0fb89fc5a65582d5e575e)) +* introduce materialized view to count address inscriptions ([#147](https://github.com/hirosystems/ordinals-api/issues/147)) ([09a95d5](https://github.com/hirosystems/ordinals-api/commit/09a95d55276be8b52ea19c90d0e7fa8bca73cfc7)) +* invalid decimal count ([aa15b0e](https://github.com/hirosystems/ordinals-api/commit/aa15b0e4843435cacfa12856b881566ba0c2f3a3)) +* make etag calculation sensitive to inscription location gap fills and upserts ([#156](https://github.com/hirosystems/ordinals-api/issues/156)) ([5648c9e](https://github.com/hirosystems/ordinals-api/commit/5648c9ea72ee09df4a224937a08f662e78d06edd)) +* only consider blessed inscriptions ([2a4700c](https://github.com/hirosystems/ordinals-api/commit/2a4700c5ca851b799fba534ff8060004f7ca2f5d)) +* optimize COUNT calculations via the use of count tables ([#175](https://github.com/hirosystems/ordinals-api/issues/175)) ([31498bd](https://github.com/hirosystems/ordinals-api/commit/31498bdb57203bd6c28eccac4446a9d169a3fe18)) +* refresh views in parallel ([#154](https://github.com/hirosystems/ordinals-api/issues/154)) ([a7674a9](https://github.com/hirosystems/ordinals-api/commit/a7674a92efcb580b67c3510a2bf09ffb752e2ef0)) +* remove old json content tables ([0732048](https://github.com/hirosystems/ordinals-api/commit/07320489889b85c881ab49a4ce10d0d21a750114)) +* remove old json schemas ([8cc7f8a](https://github.com/hirosystems/ordinals-api/commit/8cc7f8adcb9d70cd511b09583dd45f9dc770cd92)) +* remove unused json functions ([#165](https://github.com/hirosystems/ordinals-api/issues/165)) ([3eb0e24](https://github.com/hirosystems/ordinals-api/commit/3eb0e248a98913b8e4c56949e8ebd174a3f1faae)) +* rename location pointer tables ([b84d27e](https://github.com/hirosystems/ordinals-api/commit/b84d27e3624737e59c949906cafa8d76a329c0a0)) +* rollback location pointers ([#174](https://github.com/hirosystems/ordinals-api/issues/174)) ([3c9d7f0](https://github.com/hirosystems/ordinals-api/commit/3c9d7f07d14aed2bad1c07d69f7170d74a85d575)) +* save tx_index on locations to support transfers on same block ([#145](https://github.com/hirosystems/ordinals-api/issues/145)) ([30a9635](https://github.com/hirosystems/ordinals-api/commit/30a96358c2b7c4c40f908e116478e3ddd83d8857)) +* skip db migrations during readonly mode ([d5157f0](https://github.com/hirosystems/ordinals-api/commit/d5157f02646ceb6c58b73575d8ff4afc8833b97e)) +* tick must be 4 bytes or less ([f6fd0a6](https://github.com/hirosystems/ordinals-api/commit/f6fd0a656d6520f90eda4d6610c04a077fa70354)) +* transfers only usable once 
([542ec34](https://github.com/hirosystems/ordinals-api/commit/542ec34292d7535d01f62832b270e11b80b59da4)) +* upgrade chainhook client ([cbbb951](https://github.com/hirosystems/ordinals-api/commit/cbbb9512734f83c27da91b57fd5825b22c510c33)) +* upgrade chainhook client to 1.3.3 ([ee66f93](https://github.com/hirosystems/ordinals-api/commit/ee66f93a1d06c786c2eb7ce415df28c7fa8d0032)) +* warn correctly on missing prev locations ([879bf55](https://github.com/hirosystems/ordinals-api/commit/879bf55b0fc7efd830c5cc0e1d742818177e8344)) + ## [1.0.0-beta.15](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-beta.14...v1.0.0-beta.15) (2023-08-04) From c1939cee3a40df5f285d3bb749c8b29cea4bb271 Mon Sep 17 00:00:00 2001 From: janniks <6362150+janniks@users.noreply.github.com> Date: Wed, 23 Aug 2023 17:28:29 +0200 Subject: [PATCH 35/56] feat: add more brc20 features (#183) * refactor: update nullish access * feat: add deploy_timestamp to brc-20 token endpoints * feat: add minted_supply to brc-20 token endpoints * refactor: improve scan performance with generated lower ticker * feat: add prefix filtering to brc20 deploy tickers * test: add token endpoint test --------- Co-authored-by: janniks --- migrations/1692132685000_brc20-supply-view.ts | 17 ++ ...692188000000_brc20-deploys-ticker-index.ts | 15 ++ src/api/schemas.ts | 2 + src/api/util/helpers.ts | 2 + src/pg/brc20/brc20-pg-store.ts | 92 ++++++----- src/pg/brc20/types.ts | 2 + src/pg/helpers.ts | 22 +++ src/pg/pg-store.ts | 25 +-- tests/brc20.test.ts | 153 +++++++++++++++++- 9 files changed, 276 insertions(+), 54 deletions(-) create mode 100644 migrations/1692132685000_brc20-supply-view.ts create mode 100644 migrations/1692188000000_brc20-deploys-ticker-index.ts diff --git a/migrations/1692132685000_brc20-supply-view.ts b/migrations/1692132685000_brc20-supply-view.ts new file mode 100644 index 00000000..8ae5cb2f --- /dev/null +++ b/migrations/1692132685000_brc20-supply-view.ts @@ -0,0 +1,17 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createMaterializedView( + 'brc20_supplies', + { data: true }, + ` + SELECT brc20_deploy_id, SUM(amount) as minted_supply, MAX(block_height) as block_height + FROM brc20_mints + GROUP BY brc20_deploy_id + ` + ); + pgm.createIndex('brc20_supplies', ['brc20_deploy_id'], { unique: true }); +} diff --git a/migrations/1692188000000_brc20-deploys-ticker-index.ts b/migrations/1692188000000_brc20-deploys-ticker-index.ts new file mode 100644 index 00000000..4fd40203 --- /dev/null +++ b/migrations/1692188000000_brc20-deploys-ticker-index.ts @@ -0,0 +1,15 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.addColumns('brc20_deploys', { + ticker_lower: { + type: 'text', + notNull: true, + expressionGenerated: '(LOWER(ticker))', + }, + }); + pgm.createIndex('brc20_deploys', ['ticker_lower']); +} diff --git a/src/api/schemas.ts b/src/api/schemas.ts index ef83a9ea..99114e1b 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -388,6 +388,8 @@ export const Brc20TokenResponseSchema = Type.Object( max_supply: Type.String({ examples: ['21000000'] }), mint_limit: Nullable(Type.String({ examples: ['100000'] 
})), decimals: Type.Integer({ examples: [18] }), + deploy_timestamp: Type.Integer({ examples: [1677733170000] }), + minted_supply: Type.String({ examples: ['1000000'] }), }, { title: 'BRC-20 Token Response' } ); diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index c7ed0880..a27e7659 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -116,6 +116,8 @@ export function parseBrc20Tokens(items: DbBrc20Token[]): Brc20TokenResponse[] { max_supply: i.max, mint_limit: i.limit ?? null, decimals: i.decimals, + deploy_timestamp: i.deploy_timestamp.valueOf(), + minted_supply: i.minted_supply, })); } diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index 83bb8c8a..f88aa98b 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -1,28 +1,30 @@ import { PgSqlClient, logger } from '@hirosystems/api-toolkit'; +import BigNumber from 'bignumber.js'; +import * as postgres from 'postgres'; +import { throwOnFirstRejected } from '../helpers'; import { PgStore } from '../pg-store'; import { DbInscriptionIndexPaging, - DbPaginatedResult, DbInscriptionInsert, DbLocationInsert, + DbPaginatedResult, } from '../types'; -import BigNumber from 'bignumber.js'; +import { Brc20Deploy, Brc20Mint, Brc20Transfer, brc20FromInscription } from './helpers'; import { - DbBrc20Token, - DbBrc20Balance, + BRC20_DEPLOYS_COLUMNS, BRC20_EVENTS_COLUMNS, - DbBrc20Supply, - DbBrc20Holder, - DbBrc20Transfer, BRC20_TRANSFERS_COLUMNS, - DbBrc20DeployInsert, - DbBrc20EventInsert, - DbBrc20Deploy, - BRC20_DEPLOYS_COLUMNS, + DbBrc20Balance, DbBrc20BalanceInsert, DbBrc20BalanceTypeId, + DbBrc20Deploy, + DbBrc20DeployInsert, + DbBrc20EventInsert, + DbBrc20Holder, + DbBrc20Supply, + DbBrc20Token, + DbBrc20Transfer, } from './types'; -import { brc20FromInscription, Brc20Deploy, Brc20Mint, Brc20Transfer } from './helpers'; export class Brc20PgStore { // TODO: Move this to the api-toolkit so we can have pg submodules. @@ -35,17 +37,27 @@ export class Brc20PgStore { this.parent = db; } + sqlOr(partials: postgres.PendingQuery[] | undefined) { + return partials?.reduce((acc, curr) => this.sql`${acc} OR ${curr}`); + } + async getTokens( args: { ticker?: string[] } & DbInscriptionIndexPaging ): Promise> { - const lowerTickers = args.ticker ? args.ticker.map(t => t.toLowerCase()) : undefined; + const tickerPrefixCondition = this.sqlOr( + args.ticker?.map(t => this.sql`d.ticker_lower LIKE LOWER(${t}) || '%'`) + ); + const results = await this.sql<(DbBrc20Token & { total: number })[]>` SELECT d.id, i.genesis_id, i.number, d.block_height, d.tx_id, d.address, d.ticker, d.max, d.limit, - d.decimals, COUNT(*) OVER() as total + d.decimals, l.timestamp as deploy_timestamp, COALESCE(s.minted_supply, 0) as minted_supply, COUNT(*) OVER() as total FROM brc20_deploys AS d INNER JOIN inscriptions AS i ON i.id = d.inscription_id - ${lowerTickers ? this.sql`WHERE LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} + INNER JOIN genesis_locations AS g ON g.inscription_id = d.inscription_id + INNER JOIN locations AS l ON l.id = g.location_id + LEFT JOIN brc20_supplies AS s ON d.id = s.brc20_deploy_id + ${tickerPrefixCondition ? this.sql`WHERE ${tickerPrefixCondition}` : this.sql``} OFFSET ${args.offset} LIMIT ${args.limit} `; @@ -68,7 +80,10 @@ export class Brc20PgStore { block_height?: number; } & DbInscriptionIndexPaging ): Promise> { - const lowerTickers = args.ticker ? 
args.ticker.map(t => t.toLowerCase()) : undefined; + const tickerPrefixConditions = this.sqlOr( + args.ticker?.map(t => this.sql`d.ticker_lower LIKE LOWER(${t}) || '%'`) + ); + const results = await this.sql<(DbBrc20Balance & { total: number })[]>` SELECT d.ticker, @@ -83,8 +98,8 @@ export class Brc20PgStore { } WHERE b.address = ${args.address} - ${lowerTickers ? this.sql`AND LOWER(d.ticker) IN ${this.sql(lowerTickers)}` : this.sql``} ${args.block_height ? this.sql`AND l.block_height <= ${args.block_height}` : this.sql``} + ${tickerPrefixConditions ? this.sql`AND (${tickerPrefixConditions})` : this.sql``} GROUP BY d.ticker LIMIT ${args.limit} OFFSET ${args.offset} @@ -102,7 +117,7 @@ export class Brc20PgStore { FROM brc20_events AS e INNER JOIN brc20_deploys AS d ON d.id = e.brc20_deploy_id INNER JOIN inscriptions AS i ON i.id = e.inscription_id - WHERE LOWER(d.ticker) = LOWER(${args.ticker}) + WHERE d.ticker_lower = LOWER(${args.ticker}) ORDER BY i.number DESC LIMIT ${args.limit} OFFSET ${args.offset} @@ -111,38 +126,35 @@ export class Brc20PgStore { FROM events INNER JOIN `; + // todo: use event history } async getTokenSupply(args: { ticker: string }): Promise { return await this.parent.sqlTransaction(async sql => { const deploy = await this.getDeploy(args); - if (!deploy) { - return; - } - const minted = await sql<{ total: string }[]>` - SELECT SUM(avail_balance + trans_balance) AS total - FROM brc20_balances + if (!deploy) return; + + const supplyPromise = sql<{ max: string }[]>` + SELECT max FROM brc20_deploys WHERE id = ${deploy.id} + `; + const mintedPromise = sql<{ minted_supply: string }[]>` + SELECT minted_supply + FROM brc20_supplies WHERE brc20_deploy_id = ${deploy.id} - GROUP BY brc20_deploy_id `; - const holders = await sql<{ count: string }[]>` - WITH historical_holders AS ( - SELECT SUM(avail_balance + trans_balance) AS balance - FROM brc20_balances - WHERE brc20_deploy_id = ${deploy.id} - GROUP BY address - ) + const holdersPromise = sql<{ count: string }[]>` SELECT COUNT(*) AS count - FROM historical_holders - WHERE balance > 0 - `; - const supply = await sql<{ max: string }[]>` - SELECT max FROM brc20_deploys WHERE id = ${deploy.id} + FROM brc20_balances + WHERE brc20_deploy_id = ${deploy.id} + GROUP BY address + HAVING SUM(avail_balance + trans_balance) > 0 `; + const settles = await Promise.allSettled([supplyPromise, holdersPromise, mintedPromise]); + const [supply, holders, minted] = throwOnFirstRejected(settles); return { max_supply: supply[0].max, - minted_supply: minted[0].total, - holders: holders[0].count, + minted_supply: minted[0]?.minted_supply ?? '0', + holders: holders[0]?.count ?? 
'0', }; }); } @@ -234,7 +246,7 @@ export class Brc20PgStore { const brc20Transfer = await sql` SELECT ${sql(BRC20_TRANSFERS_COLUMNS.map(c => `t.${c}`))} FROM locations AS l - INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id + INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id WHERE l.inscription_id = ${args.inscription_id} AND l.block_height <= ${args.location.block_height} LIMIT 3 diff --git a/src/pg/brc20/types.ts b/src/pg/brc20/types.ts index 7c304759..1d6497f5 100644 --- a/src/pg/brc20/types.ts +++ b/src/pg/brc20/types.ts @@ -45,6 +45,8 @@ export type DbBrc20Token = { max: string; limit?: string; decimals: number; + deploy_timestamp: number; + minted_supply: string; }; export type DbBrc20Supply = { diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index b0d967c2..2f812e9f 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -15,3 +15,25 @@ export function getInscriptionRecursion(content: PgBytea): string[] { } return result; } + +/** + * Returns the values from settled Promise results. + * Throws if any Promise is rejected. + * This can be used with Promise.allSettled to get the values from all promises, + * instead of Promise.all which will swallow following unhandled rejections. + * @param settles - Array of `Promise.allSettled()` results + * @returns Array of Promise result values + */ +export function throwOnFirstRejected(settles: { + [K in keyof T]: PromiseSettledResult; +}): T { + const values: T = [] as any; + for (const promise of settles) { + if (promise.status === 'rejected') throw promise.reason; + + // Note: Pushing to result `values` array is required for type inference + // Compared to e.g. `settles.map(s => s.value)` + values.push(promise.value); + } + return values; +} diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 25a8c61f..cbcfe480 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -1,8 +1,20 @@ +import { + BasePgStore, + PgSqlClient, + connectPostgres, + logger, + runMigrations, +} from '@hirosystems/api-toolkit'; import { BitcoinEvent, Payload } from '@hirosystems/chainhook-client'; +import * as path from 'path'; +import * as postgres from 'postgres'; import { Order, OrderBy } from '../api/schemas'; import { isProdEnv, isTestEnv, normalizedHexString, parseSatPoint } from '../api/util/helpers'; import { OrdinalSatoshi } from '../api/util/ordinal-satoshi'; import { ENV } from '../env'; +import { Brc20PgStore } from './brc20/brc20-pg-store'; +import { CountsPgStore } from './counts/counts-pg-store'; +import { getIndexResultCountType } from './counts/helpers'; import { getInscriptionRecursion } from './helpers'; import { DbFullyLocatedInscriptionResult, @@ -23,18 +35,6 @@ import { INSCRIPTIONS_COLUMNS, LOCATIONS_COLUMNS, } from './types'; -import { - BasePgStore, - PgSqlClient, - connectPostgres, - logger, - runMigrations, -} from '@hirosystems/api-toolkit'; -import * as path from 'path'; -import { Brc20PgStore } from './brc20/brc20-pg-store'; -import { CountsPgStore } from './counts/counts-pg-store'; -import { getIndexResultCountType } from './counts/helpers'; -import * as postgres from 'postgres'; export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); @@ -218,6 +218,7 @@ export class PgStore extends BasePgStore { // we can respond to the chainhook node with a `200` HTTP code as soon as possible. 
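      // The `brc20_supplies` materialized view introduced by this change is refreshed here
      // alongside the inscription count normalization; outside of tests the refresh is not
      // awaited so it never delays that response.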
const viewRefresh = Promise.allSettled([ this.normalizeInscriptionCount({ min_block_height: updatedBlockHeightMin }), + this.refreshMaterializedView('brc20_supplies'), ]); // Only wait for these on tests. if (isTestEnv) await viewRefresh; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 3d74c363..6c86de80 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1,9 +1,9 @@ import { cycleMigrations } from '@hirosystems/api-toolkit'; import { buildApiServer } from '../src/api/init'; +import { brc20FromInscription } from '../src/pg/brc20/helpers'; import { MIGRATIONS_DIR, PgStore } from '../src/pg/pg-store'; import { DbInscriptionInsert } from '../src/pg/types'; -import { TestChainhookPayloadBuilder, TestFastifyServer, brc20Reveal } from './helpers'; -import { brc20FromInscription } from '../src/pg/brc20/helpers'; +import { TestChainhookPayloadBuilder, TestFastifyServer, brc20Reveal, randomHash } from './helpers'; describe('BRC-20', () => { let db: PgStore; @@ -403,6 +403,7 @@ describe('BRC-20', () => { .block({ height: 775617, hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d', + timestamp: 1677811111, }) .transaction({ hash: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', @@ -440,6 +441,8 @@ describe('BRC-20', () => { max_supply: '21000000', ticker: 'PEPE', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + deploy_timestamp: 1677811111000, + minted_supply: '0', }, ]); }); @@ -513,6 +516,8 @@ describe('BRC-20', () => { number: 5, ticker: 'PEPE', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + deploy_timestamp: 1677803510000, + minted_supply: '0', }, ]); }); @@ -586,6 +591,8 @@ describe('BRC-20', () => { number: 5, ticker: 'PEPE', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + deploy_timestamp: 1677803510000, + minted_supply: '0', }, ]); const response2 = await fastify.inject({ @@ -606,6 +613,8 @@ describe('BRC-20', () => { number: 5, ticker: 'PEPE', tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + deploy_timestamp: 1677803510000, + minted_supply: '0', }, ]); }); @@ -723,6 +732,19 @@ describe('BRC-20', () => { transferrable_balance: '0', }, ]); + + const response3 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?ticker=PEPE`, + }); + expect(response3.statusCode).toBe(200); + const responseJson3 = response3.json(); + expect(responseJson3.total).toBe(1); + expect(responseJson3.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ ticker: 'PEPE', minted_supply: '350000' }), + ]) + ); }); test('rollback mints deduct balance correctly', async () => { @@ -1693,4 +1715,131 @@ describe('BRC-20', () => { ]); }); }); + + describe('routes', () => { + test('token endpoint', async () => { + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: 775617 }) + .transaction({ hash: randomHash() }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens/PEPE`, + }); + expect(response.statusCode).toBe(200); + }); + + test('filter tickers by ticker prefix', async () => { + await db.updateInscriptions( + new 
TestChainhookPayloadBuilder() + .apply() + .block({ height: 775617 }) + .transaction({ hash: randomHash() }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEPE', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: 775618 }) + .transaction({ hash: randomHash() }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'PEER', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: 775619 }) + .transaction({ hash: randomHash() }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'ABCD', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ height: 775619 }) + .transaction({ hash: randomHash() }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'deploy', + tick: 'DCBA', + max: '21000000', + }, + number: 5, + tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + }) + ) + .build() + ); + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/tokens?ticker=PE&ticker=AB`, + }); + expect(response.statusCode).toBe(200); + const responseJson = response.json(); + expect(responseJson.total).toBe(3); + expect(responseJson.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ ticker: 'PEPE' }), + expect.objectContaining({ ticker: 'PEER' }), + expect.objectContaining({ ticker: 'ABCD' }), + ]) + ); + }); + }); }); From 38073341163bc1c5cea44b66ad3da505f2ce4273 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Wed, 23 Aug 2023 17:53:16 -0600 Subject: [PATCH 36/56] fix: optimize inscription and brc-20 inserts (#189) * chore: draft optims * chore: more optimizations * fix: reveals and transfers in same batch * fix: inscriptions tests pass * fix: optimize pointer logic * fix: cache updated_at writes * feat: scan block concept * fix: individual items, start transfers * fix: all tests pass * fix: env var brc20 processing * feat: add admin rpc server to control brc20 scans * style: comments --- src/admin-rpc/init.ts | 52 ++++ src/env.ts | 5 + src/index.ts | 11 + src/pg/brc20/brc20-pg-store.ts | 509 +++++++++++++------------------ src/pg/brc20/helpers.ts | 76 ++--- src/pg/brc20/types.ts | 48 ++- src/pg/counts/counts-pg-store.ts | 86 +++--- src/pg/pg-store.ts | 385 ++++++++++------------- src/pg/types.ts | 7 + tests/brc20.test.ts | 197 +++--------- 10 files changed, 612 insertions(+), 764 deletions(-) create mode 100644 src/admin-rpc/init.ts diff --git a/src/admin-rpc/init.ts b/src/admin-rpc/init.ts new file mode 100644 index 00000000..f7154991 --- /dev/null +++ b/src/admin-rpc/init.ts @@ -0,0 +1,52 @@ +import Fastify, { FastifyPluginCallback } from 'fastify'; 
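// Admin-only RPC server, listening separately on ADMIN_RPC_PORT; its /ordinals/admin/brc-20/scan
// endpoint triggers a BRC-20 scan over a given block range.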
+import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; +import { PgStore } from '../pg/pg-store'; +import { Server } from 'http'; +import { Type } from '@sinclair/typebox'; +import { PINO_LOGGER_CONFIG, logger } from '@hirosystems/api-toolkit'; + +export const AdminApi: FastifyPluginCallback, Server, TypeBoxTypeProvider> = ( + fastify, + options, + done +) => { + fastify.post( + '/brc-20/scan', + { + schema: { + description: 'Scan for BRC-20 operations within a block range', + querystring: Type.Object({ + start_block: Type.Integer(), + end_block: Type.Integer(), + }), + }, + }, + async (request, reply) => { + const startBlock = request.query.start_block; + const endBlock = request.query.end_block; + logger.info( + `AdminRPC scanning for BRC-20 operations from block ${startBlock} to block ${endBlock}` + ); + // TODO: Provide a way to stop this scan without restarting. + fastify.db.brc20 + .scanBlocks(startBlock, endBlock) + .then(() => logger.info(`AdminRPC finished scanning for BRC-20 operations`)) + .catch(error => logger.error(error, `AdminRPC failed to scan for BRC-20`)); + await reply.code(200).send(); + } + ); + + done(); +}; + +export async function buildAdminRpcServer(args: { db: PgStore }) { + const fastify = Fastify({ + trustProxy: true, + logger: PINO_LOGGER_CONFIG, + }).withTypeProvider(); + + fastify.decorate('db', args.db); + await fastify.register(AdminApi, { prefix: '/ordinals/admin' }); + + return fastify; +} diff --git a/src/env.ts b/src/env.ts index d1c4457d..7d54fdca 100644 --- a/src/env.ts +++ b/src/env.ts @@ -18,6 +18,8 @@ const schema = Type.Object({ API_HOST: Type.String({ default: '0.0.0.0' }), /** Port in which to serve the API */ API_PORT: Type.Number({ default: 3000, minimum: 0, maximum: 65535 }), + /** Port in which to serve the Admin RPC interface */ + ADMIN_RPC_PORT: Type.Number({ default: 3001, minimum: 0, maximum: 65535 }), /** Port in which to receive chainhook events */ EVENT_PORT: Type.Number({ default: 3099, minimum: 0, maximum: 65535 }), /** Event server body limit (bytes) */ @@ -49,6 +51,9 @@ const schema = Type.Object({ PG_CONNECTION_POOL_MAX: Type.Number({ default: 10 }), PG_IDLE_TIMEOUT: Type.Number({ default: 30 }), PG_MAX_LIFETIME: Type.Number({ default: 60 }), + + /** Enables BRC-20 processing in write mode APIs */ + BRC20_BLOCK_SCAN_ENABLED: Type.Boolean({ default: true }), }); type Env = Static; diff --git a/src/index.ts b/src/index.ts index 47bda7b3..7bf58ec3 100644 --- a/src/index.ts +++ b/src/index.ts @@ -5,6 +5,7 @@ import { startChainhookServer } from './chainhook/server'; import { ENV } from './env'; import { ApiMetrics } from './metrics/metrics'; import { PgStore } from './pg/pg-store'; +import { buildAdminRpcServer } from './admin-rpc/init'; async function initBackgroundServices(db: PgStore) { logger.info('Initializing background services...'); @@ -16,6 +17,16 @@ async function initBackgroundServices(db: PgStore) { await server.close(); }, }); + + const adminRpcServer = await buildAdminRpcServer({ db }); + registerShutdownConfig({ + name: 'Admin RPC Server', + forceKillable: false, + handler: async () => { + await adminRpcServer.close(); + }, + }); + await adminRpcServer.listen({ host: ENV.API_HOST, port: ENV.ADMIN_RPC_PORT }); } async function initApiService(db: PgStore) { diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index f88aa98b..c1beef0c 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -5,26 +5,27 @@ import { throwOnFirstRejected } from '../helpers'; 
import { PgStore } from '../pg-store'; import { DbInscriptionIndexPaging, - DbInscriptionInsert, - DbLocationInsert, DbPaginatedResult, + LOCATIONS_COLUMNS, + DbLocation, } from '../types'; -import { Brc20Deploy, Brc20Mint, Brc20Transfer, brc20FromInscription } from './helpers'; import { - BRC20_DEPLOYS_COLUMNS, - BRC20_EVENTS_COLUMNS, - BRC20_TRANSFERS_COLUMNS, + DbBrc20Token, DbBrc20Balance, + DbBrc20Supply, + DbBrc20Holder, + DbBrc20Transfer, + BRC20_TRANSFERS_COLUMNS, + DbBrc20Deploy, + BRC20_DEPLOYS_COLUMNS, DbBrc20BalanceInsert, DbBrc20BalanceTypeId, - DbBrc20Deploy, + DbBrc20ScannedInscription, + DbBrc20MintInsert, DbBrc20DeployInsert, - DbBrc20EventInsert, - DbBrc20Holder, - DbBrc20Supply, - DbBrc20Token, - DbBrc20Transfer, } from './types'; +import { Brc20Deploy, Brc20Mint, Brc20Transfer, brc20FromInscriptionContent } from './helpers'; +import { hexToBuffer } from '../../api/util/helpers'; export class Brc20PgStore { // TODO: Move this to the api-toolkit so we can have pg submodules. @@ -41,6 +42,69 @@ export class Brc20PgStore { return partials?.reduce((acc, curr) => this.sql`${acc} OR ${curr}`); } + /** + * Perform a scan of all inscriptions stored in the DB divided by block in order to look for + * BRC-20 operations. + * @param startBlock - Start at block height + * @param endBlock - End at block height + */ + async scanBlocks(startBlock?: number, endBlock?: number): Promise { + const range = await this.parent.sql<{ min: number; max: number }[]>` + SELECT + ${startBlock ? this.parent.sql`${startBlock}` : this.parent.sql`MIN(block_height)`} AS min, + ${endBlock ? this.parent.sql`${endBlock}` : this.parent.sql`MAX(block_height)`} AS max + FROM locations + `; + for (let blockHeight = range[0].min; blockHeight <= range[0].max; blockHeight++) { + await this.parent.sqlWriteTransaction(async sql => { + const block = await sql` + SELECT + i.content, + ( + CASE EXISTS(SELECT location_id FROM genesis_locations WHERE location_id = l.id) + WHEN TRUE THEN TRUE + ELSE FALSE + END + ) AS genesis, + ${sql(LOCATIONS_COLUMNS.map(c => `l.${c}`))} + FROM locations AS l + INNER JOIN inscriptions AS i ON l.inscription_id = i.id + WHERE l.block_height = ${blockHeight} + AND i.number >= 0 + AND i.mime_type IN ('application/json', 'text/plain') + ORDER BY tx_index ASC + `; + await this.insertOperations(block); + }); + } + } + + async insertOperations(writes: DbBrc20ScannedInscription[]): Promise { + if (writes.length === 0) return; + for (const write of writes) { + if (write.genesis) { + if (write.address === null) continue; + const brc20 = brc20FromInscriptionContent(hexToBuffer(write.content)); + if (brc20) { + switch (brc20.op) { + case 'deploy': + await this.insertDeploy({ op: brc20, location: write }); + break; + case 'mint': + await this.insertMint({ op: brc20, location: write }); + break; + case 'transfer': + await this.insertTransfer({ op: brc20, location: write }); + break; + } + } + } + if (!write.genesis) { + await this.applyTransfer(write); + } + } + } + async getTokens( args: { ticker?: string[] } & DbInscriptionIndexPaging ): Promise> { @@ -67,12 +131,6 @@ export class Brc20PgStore { }; } - /** - * Returns an address balance for a BRC-20 token. 
- * @param address - Owner address - * @param ticker - BRC-20 tickers - * @returns `DbBrc20Balance` - */ async getBalances( args: { address: string; @@ -110,25 +168,6 @@ export class Brc20PgStore { }; } - async getHistory(args: { ticker: string } & DbInscriptionIndexPaging): Promise { - const results = await this.sql` - WITH events AS ( - SELECT ${this.sql(BRC20_EVENTS_COLUMNS)} - FROM brc20_events AS e - INNER JOIN brc20_deploys AS d ON d.id = e.brc20_deploy_id - INNER JOIN inscriptions AS i ON i.id = e.inscription_id - WHERE d.ticker_lower = LOWER(${args.ticker}) - ORDER BY i.number DESC - LIMIT ${args.limit} - OFFSET ${args.offset} - ) - SELECT * - FROM events - INNER JOIN - `; - // todo: use event history - } - async getTokenSupply(args: { ticker: string }): Promise { return await this.parent.sqlTransaction(async sql => { const deploy = await this.getDeploy(args); @@ -186,321 +225,195 @@ export class Brc20PgStore { }); } - async insertOperation(args: { - inscription_id: number; - location_id: number; - inscription: DbInscriptionInsert; - location: DbLocationInsert; - }): Promise { - if (args.inscription.number < 0) return; // No cursed inscriptions apply. - // Is this a BRC-20 operation? Is it being inscribed to a valid address? - const brc20 = brc20FromInscription(args.inscription); - if (brc20) { - if (args.location.address) { - switch (brc20.op) { - case 'deploy': - await this.insertDeploy({ - deploy: brc20, - inscription_id: args.inscription_id, - location: args.location, - }); - break; - case 'mint': - await this.insertMint({ - mint: brc20, - inscription_id: args.inscription_id, - location_id: args.location_id, - location: args.location, - }); - break; - case 'transfer': - await this.insertTransfer({ - transfer: brc20, - inscription_id: args.inscription_id, - location_id: args.location_id, - location: args.location, - }); - break; - } - } else { - logger.debug( - { block_height: args.location.block_height, tick: brc20.tick }, - `PgStore [BRC-20] ignoring operation spent as fee` - ); - } - } - } - - async insertOperationTransfer(args: { - inscription_id: number; - inscription_number: number; - location_id: number; - location: DbLocationInsert; - }): Promise { - if (args.inscription_number < 0) return; // No cursed inscriptions apply. - // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by - // this address that hasn't been sent to another address before. Use `LIMIT 3` as a quick way - // of checking if we have just inserted the first transfer for this inscription (genesis + - // transfer). + async applyTransfer(args: DbBrc20ScannedInscription): Promise { await this.parent.sqlWriteTransaction(async sql => { + // Is this a BRC-20 balance transfer? Check if we have a valid transfer inscription emitted by + // this address that hasn't been sent to another address before. Use `LIMIT 3` as a quick way + // of checking if we have just inserted the first transfer for this inscription (genesis + + // transfer). 
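Put differently, the statement below is what turns a pending transfer into a settled one. A minimal sketch, not part of the patch, of the two balance rows written when a 100-token transfer inscription is sent from a sender to a receiver (addresses are made up; the real inserts also carry `inscription_id`, `location_id` and `brc20_deploy_id`):

const appliedTransferRows = [
  // transferFrom entry: the sender's transferrable balance is debited by the amount.
  { address: 'bc1q...sender', avail_balance: '0', trans_balance: '-100' },
  // transferTo entry: the receiver's available balance is credited; the matching
  // brc20_transfers row also gets its to_address updated to the receiver.
  { address: 'bc1q...receiver', avail_balance: '100', trans_balance: '0' },
];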
const brc20Transfer = await sql` SELECT ${sql(BRC20_TRANSFERS_COLUMNS.map(c => `t.${c}`))} FROM locations AS l INNER JOIN brc20_transfers AS t ON t.inscription_id = l.inscription_id WHERE l.inscription_id = ${args.inscription_id} - AND l.block_height <= ${args.location.block_height} + AND (l.block_height < ${args.block_height} + OR (l.block_height = ${args.block_height} AND l.tx_index < ${args.tx_index})) LIMIT 3 `; - if (brc20Transfer.count === 2) { - const transfer = brc20Transfer[0]; - // This is the first time this BRC-20 transfer is being used. Apply the balance change. - const amount = new BigNumber(transfer.amount); - const changes: DbBrc20BalanceInsert[] = [ - { - inscription_id: transfer.inscription_id, - location_id: args.location_id, - brc20_deploy_id: transfer.brc20_deploy_id, - address: transfer.from_address, - avail_balance: '0', - trans_balance: amount.negated().toString(), - type: DbBrc20BalanceTypeId.transferFrom, - }, - { - inscription_id: transfer.inscription_id, - location_id: args.location_id, - brc20_deploy_id: transfer.brc20_deploy_id, - address: args.location.address, - avail_balance: amount.toString(), - trans_balance: '0', - type: DbBrc20BalanceTypeId.transferTo, - }, - ]; - await sql` - INSERT INTO brc20_balances ${sql(changes)} - ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO UPDATE SET - location_id = EXCLUDED.location_id, - brc20_deploy_id = EXCLUDED.brc20_deploy_id, - address = EXCLUDED.address, - avail_balance = EXCLUDED.avail_balance, - trans_balance = EXCLUDED.trans_balance - `; - // Keep the new valid owner of the transfer inscription - await sql` + if (brc20Transfer.count > 2) return; + const transfer = brc20Transfer[0]; + const amount = new BigNumber(transfer.amount); + const changes: DbBrc20BalanceInsert[] = [ + { + inscription_id: transfer.inscription_id, + location_id: args.id, + brc20_deploy_id: transfer.brc20_deploy_id, + address: transfer.from_address, + avail_balance: '0', + trans_balance: amount.negated().toString(), + type: DbBrc20BalanceTypeId.transferFrom, + }, + { + inscription_id: transfer.inscription_id, + location_id: args.id, + brc20_deploy_id: transfer.brc20_deploy_id, + address: args.address, + avail_balance: amount.toString(), + trans_balance: '0', + type: DbBrc20BalanceTypeId.transferTo, + }, + ]; + await sql` + WITH updated_transfer AS ( UPDATE brc20_transfers - SET to_address = ${args.location.address} + SET to_address = ${args.address} WHERE id = ${transfer.id} - `; - } else { - logger.debug( - { genesis_id: args.location.genesis_id, block_height: args.location.block_height }, - `PgStore [BRC-20] ignoring balance change for transfer that was already used` - ); - } + ) + INSERT INTO brc20_balances ${sql(changes)} + ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING + `; }); } - private async insertDeploy(args: { - deploy: Brc20Deploy; - inscription_id: number; - location: DbLocationInsert; - }): Promise { - await this.parent.sqlWriteTransaction(async sql => { - const address = args.location.address; - if (!address) { - logger.debug( - `PgStore [BRC-20] ignoring deploy with null address for ${args.deploy.tick} at block ${args.location.block_height}` - ); - return; - } - const deploy: DbBrc20DeployInsert = { - inscription_id: args.inscription_id, - block_height: args.location.block_height, - tx_id: args.location.tx_id, - address: address, - ticker: args.deploy.tick, - max: args.deploy.max, - limit: args.deploy.lim ?? null, - decimals: args.deploy.dec ?? 
'18', - }; - const insertion = await sql<{ id: string }[]>` - INSERT INTO brc20_deploys ${sql(deploy)} - ON CONFLICT (LOWER(ticker)) DO NOTHING - RETURNING id - `; - if (insertion.count > 0) { - // Add to history - const event: DbBrc20EventInsert = { - inscription_id: args.inscription_id, - brc20_deploy_id: insertion[0].id, - deploy_id: insertion[0].id, - mint_id: null, - transfer_id: null, - }; - await sql` - INSERT INTO brc20_events ${sql(event)} - `; - logger.info( - `PgStore [BRC-20] inserted deploy for ${args.deploy.tick} at block ${args.location.block_height}` - ); - } else { - logger.debug( - `PgStore [BRC-20] ignoring duplicate deploy for ${args.deploy.tick} at block ${args.location.block_height}` - ); - } - }); + private async insertDeploy(deploy: { op: Brc20Deploy; location: DbLocation }): Promise { + if (!deploy.location.inscription_id || !deploy.location.address) return; + const insert: DbBrc20DeployInsert = { + inscription_id: deploy.location.inscription_id, + block_height: deploy.location.block_height, + tx_id: deploy.location.tx_id, + address: deploy.location.address, + ticker: deploy.op.tick, + max: deploy.op.max, + limit: deploy.op.lim ?? null, + decimals: deploy.op.dec ?? '18', + }; + const tickers = await this.parent.sql<{ ticker: string; address: string }[]>` + INSERT INTO brc20_deploys ${this.parent.sql(insert)} + ON CONFLICT (LOWER(ticker)) DO NOTHING + `; + if (tickers.count) + logger.info( + `Brc20PgStore deploy ${deploy.op.tick} by ${deploy.location.address} at block ${deploy.location.block_height}` + ); } private async getDeploy(args: { ticker: string }): Promise { const deploy = await this.sql` SELECT ${this.sql(BRC20_DEPLOYS_COLUMNS)} FROM brc20_deploys - WHERE LOWER(ticker) = LOWER(${args.ticker}) + WHERE ticker_lower = LOWER(${args.ticker}) `; if (deploy.count) return deploy[0]; } - private async insertMint(args: { - mint: Brc20Mint; - inscription_id: number; - location_id: number; - location: DbLocationInsert; - }): Promise { + private async insertMint(mint: { op: Brc20Mint; location: DbLocation }): Promise { await this.parent.sqlWriteTransaction(async sql => { - // Is the token deployed? - const token = await this.getDeploy({ ticker: args.mint.tick }); - if (!token) { - logger.debug( - `PgStore [BRC-20] ignoring mint for non-deployed token ${args.mint.tick} at block ${args.location.block_height}` - ); - return; - } + if (!mint.location.inscription_id || !mint.location.address) return; + const tokenRes = await sql< + { id: string; decimals: string; limit: string; max: string; minted_supply: string }[] + >` + SELECT + d.id, d.decimals, d.limit, d.max, + COALESCE(SUM(amount), 0) AS minted_supply + FROM brc20_deploys AS d + LEFT JOIN brc20_mints AS m ON m.brc20_deploy_id = d.id + WHERE d.ticker_lower = LOWER(${mint.op.tick}) + GROUP BY d.id + `; + if (tokenRes.count === 0) return; + const token = tokenRes[0]; // Is the mint amount within the allowed token limits? - if (token.limit && BigNumber(args.mint.amt).isGreaterThan(token.limit)) { - logger.debug( - `PgStore [BRC-20] ignoring mint for ${args.mint.tick} that exceeds mint limit of ${token.limit} at block ${args.location.block_height}` - ); - return; - } + if (token.limit && BigNumber(mint.op.amt).isGreaterThan(token.limit)) return; // Is the number of decimals correct? 
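As a worked example of the checks that follow (a sketch, not part of the patch, with illustrative numbers): a token deployed with `max = 21000000`, `limit = 1000` and `decimals = 2`, of which `20999990` has already been minted, accepts a mint for `25` but only credits the remaining supply, while the `brc20_mints` row still records the requested amount.

import BigNumber from 'bignumber.js';

const max = new BigNumber('21000000');
const minted = new BigNumber('20999990');
const requested = new BigNumber('25'); // passes the limit (<= 1000) and decimals checks
const availSupply = max.minus(minted); // 10 tokens left to mint
const credited = BigNumber.min(availSupply, requested); // 10 is credited to avail_balance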
- if ( - args.mint.amt.includes('.') && - args.mint.amt.split('.')[1].length > parseInt(token.decimals) - ) { - logger.debug( - `PgStore [BRC-20] ignoring mint for ${args.mint.tick} because amount ${args.mint.amt} exceeds token decimals at block ${args.location.block_height}` - ); + if (mint.op.amt.includes('.') && mint.op.amt.split('.')[1].length > parseInt(token.decimals)) return; - } // Does the mint amount exceed remaining supply? - const mintedSupply = await sql<{ minted: string }[]>` - SELECT COALESCE(SUM(amount), 0) AS minted FROM brc20_mints WHERE brc20_deploy_id = ${token.id} - `; - const minted = new BigNumber(mintedSupply[0].minted); + const minted = new BigNumber(token.minted_supply); const availSupply = new BigNumber(token.max).minus(minted); - if (availSupply.isLessThanOrEqualTo(0)) { - logger.debug( - `PgStore [BRC-20] ignoring mint for ${args.mint.tick} because token has been completely minted at block ${args.location.block_height}` - ); - return; - } - const mintAmt = BigNumber.min(availSupply, args.mint.amt); + if (availSupply.isLessThanOrEqualTo(0)) return; + const mintAmt = BigNumber.min(availSupply, mint.op.amt); - const mint = { - inscription_id: args.inscription_id, + const mintInsert: DbBrc20MintInsert = { + inscription_id: mint.location.inscription_id, brc20_deploy_id: token.id, - block_height: args.location.block_height, - tx_id: args.location.tx_id, - address: args.location.address, - amount: args.mint.amt, // Original requested amount + block_height: mint.location.block_height, + tx_id: mint.location.tx_id, + address: mint.location.address, + amount: mint.op.amt, // Original requested amount }; - await sql`INSERT INTO brc20_mints ${sql(mint)}`; - logger.info( - `PgStore [BRC-20] inserted mint for ${args.mint.tick} (${args.mint.amt}) at block ${args.location.block_height}` - ); - - // Insert balance change for minting address - const balance: DbBrc20BalanceInsert = { - inscription_id: args.inscription_id, - location_id: args.location_id, - brc20_deploy_id: parseInt(token.id), - address: args.location.address, + const balanceInsert: DbBrc20BalanceInsert = { + inscription_id: mint.location.inscription_id, + location_id: mint.location.id, + brc20_deploy_id: token.id, + address: mint.location.address, avail_balance: mintAmt.toString(), trans_balance: '0', type: DbBrc20BalanceTypeId.mint, }; + await sql` - INSERT INTO brc20_balances ${sql(balance)} + WITH mint_insert AS ( + INSERT INTO brc20_mints ${sql(mintInsert)} + ) + INSERT INTO brc20_balances ${sql(balanceInsert)} + ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING `; + logger.info( + `Brc20PgStore mint ${mint.op.tick} (${mint.op.amt}) by ${mint.location.address} at block ${mint.location.block_height}` + ); }); } - private async insertTransfer(args: { - transfer: Brc20Transfer; - inscription_id: number; - location_id: number; - location: DbLocationInsert; + private async insertTransfer(transfer: { + op: Brc20Transfer; + location: DbLocation; }): Promise { await this.parent.sqlWriteTransaction(async sql => { - // Is the destination a valid address? - if (!args.location.address) { - logger.debug( - `PgStore [BRC-20] ignoring transfer spent as fee for ${args.transfer.tick} at block ${args.location.block_height}` - ); - return; - } - // Is the token deployed? 
- const token = await this.getDeploy({ ticker: args.transfer.tick }); - if (!token) { - logger.debug( - `PgStore [BRC-20] ignoring transfer for non-deployed token ${args.transfer.tick} at block ${args.location.block_height}` - ); - return; - } - // Get balance for this address and this token - const balanceResult = await this.getBalances({ - address: args.location.address, - ticker: [args.transfer.tick], - limit: 1, - offset: 0, - }); + if (!transfer.location.inscription_id || !transfer.location.address) return; + const balanceRes = await sql<{ brc20_deploy_id: string; avail_balance: string }[]>` + SELECT b.brc20_deploy_id, COALESCE(SUM(b.avail_balance), 0) AS avail_balance + FROM brc20_balances AS b + INNER JOIN brc20_deploys AS d ON b.brc20_deploy_id = d.id + WHERE d.ticker_lower = LOWER(${transfer.op.tick}) + AND b.address = ${transfer.location.address} + GROUP BY b.brc20_deploy_id + `; + if (balanceRes.count === 0) return; + // Do we have enough available balance to do this transfer? - const transAmt = new BigNumber(args.transfer.amt); - const available = new BigNumber(balanceResult.results[0]?.avail_balance ?? 0); - if (transAmt.gt(available)) { - logger.debug( - `PgStore [BRC-20] ignoring transfer for token ${args.transfer.tick} due to unavailable balance at block ${args.location.block_height}` - ); - return; - } + const transAmt = new BigNumber(transfer.op.amt); + const available = new BigNumber(balanceRes[0].avail_balance); + if (transAmt.gt(available)) return; - const transfer = { - inscription_id: args.inscription_id, - brc20_deploy_id: token.id, - block_height: args.location.block_height, - tx_id: args.location.tx_id, - from_address: args.location.address, + const transferInsert = { + inscription_id: transfer.location.inscription_id, + brc20_deploy_id: balanceRes[0].brc20_deploy_id, + block_height: transfer.location.block_height, + tx_id: transfer.location.tx_id, + from_address: transfer.location.address, to_address: null, // We don't know the receiver address yet - amount: args.transfer.amt, + amount: transfer.op.amt, }; - await sql`INSERT INTO brc20_transfers ${sql(transfer)}`; - logger.info( - `PgStore [BRC-20] inserted transfer for ${args.transfer.tick} (${args.transfer.amt}) at block ${args.location.block_height}` - ); - - // Insert balance change for minting address - const values: DbBrc20BalanceInsert = { - inscription_id: args.inscription_id, - location_id: args.location_id, - brc20_deploy_id: parseInt(token.id), - address: args.location.address, + const balanceInsert: DbBrc20BalanceInsert = { + inscription_id: transfer.location.inscription_id, + location_id: transfer.location.id, + brc20_deploy_id: balanceRes[0].brc20_deploy_id, + address: transfer.location.address, avail_balance: transAmt.negated().toString(), trans_balance: transAmt.toString(), type: DbBrc20BalanceTypeId.transferIntent, }; await sql` - INSERT INTO brc20_balances ${sql(values)} + WITH transfer_insert AS ( + INSERT INTO brc20_transfers ${sql(transferInsert)} + ) + INSERT INTO brc20_balances ${sql(balanceInsert)} + ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING `; + logger.info( + `Brc20PgStore transfer ${transfer.op.tick} (${transfer.op.amt}) by ${transfer.location.address} at block ${transfer.location.block_height}` + ); }); } } diff --git a/src/pg/brc20/helpers.ts b/src/pg/brc20/helpers.ts index ad7ea895..3c5329f5 100644 --- a/src/pg/brc20/helpers.ts +++ b/src/pg/brc20/helpers.ts @@ -47,51 +47,53 @@ const Brc20C = TypeCompiler.Compile(Brc20Schema); export type Brc20 = 
Static; export function brc20FromInscription(inscription: DbInscriptionInsert): Brc20 | undefined { - if ( - inscription.mime_type.startsWith('text/plain') || - inscription.mime_type.startsWith('application/json') - ) { - try { - const buf = - typeof inscription.content === 'string' - ? hexToBuffer(inscription.content) - : inscription.content; - const json = JSON.parse(buf.toString('utf-8')); - if (Brc20C.Check(json)) { - // Check ticker byte length - if (Buffer.from(json.tick).length > 4) { + if (inscription.number < 0) return; + if (inscription.mime_type !== 'text/plain' && inscription.mime_type !== 'application/json') + return; + const buf = + typeof inscription.content === 'string' + ? hexToBuffer(inscription.content) + : inscription.content; + return brc20FromInscriptionContent(buf); +} + +export function brc20FromInscriptionContent(content: Buffer): Brc20 | undefined { + try { + const json = JSON.parse(content.toString('utf-8')); + if (Brc20C.Check(json)) { + // Check ticker byte length + if (Buffer.from(json.tick).length > 4) { + return; + } + // Check numeric values. + const uint64_max = BigNumber('18446744073709551615'); + if (json.op === 'deploy') { + const max = BigNumber(json.max); + if (max.isNaN() || max.isZero() || max.isGreaterThan(uint64_max)) { return; } - // Check numeric values. - const uint64_max = BigNumber('18446744073709551615'); - if (json.op === 'deploy') { - const max = BigNumber(json.max); - if (max.isNaN() || max.isZero() || max.isGreaterThan(uint64_max)) { + if (json.lim) { + const lim = BigNumber(json.lim); + if (lim.isNaN() || lim.isZero() || lim.isGreaterThan(uint64_max)) { return; } - if (json.lim) { - const lim = BigNumber(json.lim); - if (lim.isNaN() || lim.isZero() || lim.isGreaterThan(uint64_max)) { - return; - } - } - if (json.dec) { - // `dec` can have a value of 0 but must be no more than 18. - const dec = BigNumber(json.dec); - if (dec.isNaN() || dec.isGreaterThan(18)) { - return; - } - } - } else { - const amt = BigNumber(json.amt); - if (amt.isNaN() || amt.isZero() || amt.isGreaterThan(uint64_max)) { + } + if (json.dec) { + // `dec` can have a value of 0 but must be no more than 18. + const dec = BigNumber(json.dec); + if (dec.isNaN() || dec.isGreaterThan(18)) { return; } } - return json; + } else { + const amt = BigNumber(json.amt); + if (amt.isNaN() || amt.isZero() || amt.isGreaterThan(uint64_max)) { + return; + } } - } catch (error) { - // Not a BRC-20 inscription. + return json; } + } catch (error) { + // Not a BRC-20 inscription. 
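A few illustrative inputs for the validation above, assuming the `brc20FromInscriptionContent` export is in scope (a sketch, not part of the patch; the tickers and amounts are made up):

const deploy = brc20FromInscriptionContent(
  Buffer.from('{"p":"brc-20","op":"deploy","tick":"pepe","max":"21000000"}', 'utf-8')
); // parsed Brc20 deploy object

const badTick = brc20FromInscriptionContent(
  Buffer.from('{"p":"brc-20","op":"deploy","tick":"pepes","max":"21000000"}', 'utf-8')
); // undefined: ticker is longer than 4 bytes

const zeroMint = brc20FromInscriptionContent(
  Buffer.from('{"p":"brc-20","op":"mint","tick":"pepe","amt":"0"}', 'utf-8')
); // undefined: zero or non-numeric amounts are rejected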
} } diff --git a/src/pg/brc20/types.ts b/src/pg/brc20/types.ts index 1d6497f5..c7e86d74 100644 --- a/src/pg/brc20/types.ts +++ b/src/pg/brc20/types.ts @@ -1,8 +1,14 @@ import { PgNumeric } from '@hirosystems/api-toolkit'; +import { DbLocation } from '../types'; + +export type DbBrc20ScannedInscription = DbLocation & { + genesis: boolean; + content: string; +}; export type DbBrc20DeployInsert = { - inscription_id: number; - block_height: number; + inscription_id: string; + block_height: string; tx_id: string; address: string; ticker: string; @@ -11,6 +17,15 @@ export type DbBrc20DeployInsert = { limit: string | null; }; +export type DbBrc20MintInsert = { + inscription_id: string; + brc20_deploy_id: string; + block_height: string; + tx_id: string; + address: string; + amount: string; +}; + export type DbBrc20Deploy = { id: string; inscription_id: number; @@ -25,9 +40,9 @@ export type DbBrc20Deploy = { export type DbBrc20Transfer = { id: string; - inscription_id: number; - brc20_deploy_id: number; - block_height: number; + inscription_id: string; + brc20_deploy_id: string; + block_height: string; tx_id: string; from_address: string; to_address?: string; @@ -75,23 +90,15 @@ export enum DbBrc20BalanceTypeId { } export type DbBrc20BalanceInsert = { - inscription_id: number; - location_id: number; - brc20_deploy_id: number; + inscription_id: PgNumeric; + location_id: PgNumeric; + brc20_deploy_id: PgNumeric; address: string | null; avail_balance: PgNumeric; trans_balance: PgNumeric; type: DbBrc20BalanceTypeId; }; -export type DbBrc20EventInsert = { - inscription_id: number; - brc20_deploy_id: string; - deploy_id: string | null; - mint_id: string | null; - transfer_id: string | null; -}; - export const BRC20_DEPLOYS_COLUMNS = [ 'id', 'inscription_id', @@ -114,12 +121,3 @@ export const BRC20_TRANSFERS_COLUMNS = [ 'to_address', 'amount', ]; - -export const BRC20_EVENTS_COLUMNS = [ - 'id', - 'inscription_id', - 'brc20_deploy_id', - 'deploy_id', - 'mint_id', - 'transfer_id', -]; diff --git a/src/pg/counts/counts-pg-store.ts b/src/pg/counts/counts-pg-store.ts index 1d4397bc..14b388f6 100644 --- a/src/pg/counts/counts-pg-store.ts +++ b/src/pg/counts/counts-pg-store.ts @@ -63,21 +63,35 @@ export class CountsPgStore { } } - async applyInscription(args: { inscription: DbInscriptionInsert }): Promise { + async applyInscriptions(writes: DbInscriptionInsert[]): Promise { + if (writes.length === 0) return; await this.parent.sqlWriteTransaction(async sql => { + const mimeType = new Map(); + const rarity = new Map(); + const type = new Map(); + for (const i of writes) { + const t = i.number < 0 ? 'cursed' : 'blessed'; + mimeType.set(i.mime_type, { + mime_type: i.mime_type, + count: mimeType.get(i.mime_type)?.count ?? 0 + 1, + }); + rarity.set(i.sat_rarity, { + sat_rarity: i.sat_rarity, + count: rarity.get(i.sat_rarity)?.count ?? 0 + 1, + }); + type.set(t, { type: t, count: type.get(t)?.count ?? 
0 + 1 }); + } await sql` - INSERT INTO counts_by_mime_type ${sql({ mime_type: args.inscription.mime_type })} - ON CONFLICT (mime_type) DO UPDATE SET count = counts_by_mime_type.count + 1 + INSERT INTO counts_by_mime_type ${sql([...mimeType.values()])} + ON CONFLICT (mime_type) DO UPDATE SET count = counts_by_mime_type.count + EXCLUDED.count `; await sql` - INSERT INTO counts_by_sat_rarity ${sql({ sat_rarity: args.inscription.sat_rarity })} - ON CONFLICT (sat_rarity) DO UPDATE SET count = counts_by_sat_rarity.count + 1 + INSERT INTO counts_by_sat_rarity ${sql([...rarity.values()])} + ON CONFLICT (sat_rarity) DO UPDATE SET count = counts_by_sat_rarity.count + EXCLUDED.count `; await sql` - INSERT INTO counts_by_type ${sql({ - type: args.inscription.number < 0 ? DbInscriptionType.cursed : DbInscriptionType.blessed, - })} - ON CONFLICT (type) DO UPDATE SET count = counts_by_type.count + 1 + INSERT INTO counts_by_type ${sql([...type.values()])} + ON CONFLICT (type) DO UPDATE SET count = counts_by_type.count + EXCLUDED.count `; }); } @@ -105,41 +119,37 @@ export class CountsPgStore { }); } - async applyGenesisLocation(args: { - old?: DbLocationPointer; - new: DbLocationPointer; - }): Promise { + async applyLocations( + writes: { old_address: string | null; new_address: string | null }[], + genesis: boolean = true + ): Promise { + if (writes.length === 0) return; await this.parent.sqlWriteTransaction(async sql => { - if (args.old && args.old.address) { - await sql` - UPDATE counts_by_genesis_address SET count = count - 1 WHERE address = ${args.old.address} - `; - } - if (args.new.address) { - await sql` - INSERT INTO counts_by_genesis_address ${sql({ address: args.new.address })} - ON CONFLICT (address) DO UPDATE SET count = counts_by_genesis_address.count + 1 - `; + const table = genesis ? sql`counts_by_genesis_address` : sql`counts_by_address`; + const oldAddr = new Map(); + const newAddr = new Map(); + for (const i of writes) { + if (i.old_address) + oldAddr.set(i.old_address, { + address: i.old_address, + count: oldAddr.get(i.old_address)?.count ?? 0 + 1, + }); + if (i.new_address) + newAddr.set(i.new_address, { + address: i.new_address, + count: newAddr.get(i.new_address)?.count ?? 
0 + 1, + }); } - }); - } - - async applyCurrentLocation(args: { - old?: DbLocationPointer; - new: DbLocationPointer; - }): Promise { - await this.parent.sqlWriteTransaction(async sql => { - if (args.old && args.old.address) { + if (oldAddr.size) await sql` - UPDATE counts_by_address SET count = count - 1 WHERE address = ${args.old.address} + INSERT INTO ${table} ${sql([...oldAddr.values()])} + ON CONFLICT (address) DO UPDATE SET count = ${table}.count - EXCLUDED.count `; - } - if (args.new.address) { + if (newAddr.size) await sql` - INSERT INTO counts_by_address ${sql({ address: args.new.address })} - ON CONFLICT (address) DO UPDATE SET count = counts_by_address.count + 1 + INSERT INTO ${table} ${sql([...newAddr.values()])} + ON CONFLICT (address) DO UPDATE SET count = ${table}.count + EXCLUDED.count `; - } }); } diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index cbcfe480..bce21d8f 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -28,10 +28,10 @@ import { DbInscriptionInsert, DbInscriptionLocationChange, DbLocation, - DbLocationInsert, DbLocationPointer, DbLocationPointerInsert, DbPaginatedResult, + DbRevealInsert, INSCRIPTIONS_COLUMNS, LOCATIONS_COLUMNS, } from './types'; @@ -82,6 +82,7 @@ export class PgStore extends BasePgStore { let updatedBlockHeightMin = Infinity; await this.sqlWriteTransaction(async sql => { for (const rollbackEvent of payload.rollback) { + // TODO: Optimize rollbacks just as we optimized applys. const event = rollbackEvent as BitcoinEvent; const block_height = event.block_identifier.index; for (const tx of event.transactions) { @@ -112,6 +113,7 @@ export class PgStore extends BasePgStore { const event = applyEvent as BitcoinEvent; const block_height = event.block_identifier.index; const block_hash = normalizedHexString(event.block_identifier.hash); + const writes: DbRevealInsert[] = []; for (const tx of event.transactions) { const tx_id = normalizedHexString(tx.transaction_identifier.hash); for (const operation of tx.metadata.ordinal_operations) { @@ -119,7 +121,8 @@ export class PgStore extends BasePgStore { const reveal = operation.inscription_revealed; const satoshi = new OrdinalSatoshi(reveal.ordinal_number); const satpoint = parseSatPoint(reveal.satpoint_post_inscription); - await this.insertInscription({ + const recursive_refs = getInscriptionRecursion(reveal.content_bytes); + writes.push({ inscription: { genesis_id: reveal.inscription_id, mime_type: reveal.content_type.split(';')[0], @@ -132,6 +135,7 @@ export class PgStore extends BasePgStore { sat_ordinal: reveal.ordinal_number.toString(), sat_rarity: satoshi.rarity, sat_coinbase_height: satoshi.blockHeight, + recursive: recursive_refs.length > 0, }, location: { block_hash, @@ -147,13 +151,15 @@ export class PgStore extends BasePgStore { value: reveal.inscription_output_value.toString(), timestamp: event.timestamp, }, + recursive_refs, }); } if (operation.cursed_inscription_revealed) { const reveal = operation.cursed_inscription_revealed; const satoshi = new OrdinalSatoshi(reveal.ordinal_number); const satpoint = parseSatPoint(reveal.satpoint_post_inscription); - await this.insertInscription({ + const recursive_refs = getInscriptionRecursion(reveal.content_bytes); + writes.push({ inscription: { genesis_id: reveal.inscription_id, mime_type: reveal.content_type.split(';')[0], @@ -166,6 +172,7 @@ export class PgStore extends BasePgStore { sat_ordinal: reveal.ordinal_number.toString(), sat_rarity: satoshi.rarity, sat_coinbase_height: satoshi.blockHeight, + recursive: recursive_refs.length > 
0, }, location: { block_hash, @@ -181,13 +188,14 @@ export class PgStore extends BasePgStore { value: reveal.inscription_output_value.toString(), timestamp: event.timestamp, }, + recursive_refs, }); } if (operation.inscription_transferred) { const transfer = operation.inscription_transferred; const satpoint = parseSatPoint(transfer.satpoint_post_transfer); const prevSatpoint = parseSatPoint(transfer.satpoint_pre_transfer); - await this.insertLocation({ + writes.push({ location: { block_hash, block_height, @@ -208,7 +216,10 @@ export class PgStore extends BasePgStore { } } } + await this.insertInscriptions(writes); updatedBlockHeightMin = Math.min(updatedBlockHeightMin, event.block_identifier.index); + if (ENV.BRC20_BLOCK_SCAN_ENABLED) + await this.brc20.scanBlocks(event.block_identifier.index, event.block_identifier.index); } }); await this.refreshMaterializedView('chain_tip'); @@ -588,125 +599,36 @@ export class PgStore extends BasePgStore { return query[0]; } - private async insertInscription(args: { - inscription: DbInscriptionInsert; - location: DbLocationInsert; - }): Promise { - let inscription_id: number | undefined; + private async insertInscriptions(writes: DbRevealInsert[]): Promise { + if (writes.length === 0) return; await this.sqlWriteTransaction(async sql => { - const upsert = await sql<{ id: number }[]>` - SELECT id FROM inscriptions WHERE number = ${args.inscription.number} - `; - const recursion = getInscriptionRecursion(args.inscription.content); - const data = { - ...args.inscription, - recursive: recursion.length > 0, - }; - const inscription = await sql<{ id: number }[]>` - INSERT INTO inscriptions ${sql(data)} - ON CONFLICT ON CONSTRAINT inscriptions_number_unique DO UPDATE SET - genesis_id = EXCLUDED.genesis_id, - mime_type = EXCLUDED.mime_type, - content_type = EXCLUDED.content_type, - content_length = EXCLUDED.content_length, - content = EXCLUDED.content, - fee = EXCLUDED.fee, - sat_ordinal = EXCLUDED.sat_ordinal, - sat_rarity = EXCLUDED.sat_rarity, - sat_coinbase_height = EXCLUDED.sat_coinbase_height, - updated_at = NOW() - RETURNING id - `; - inscription_id = inscription[0].id; - const location = { - ...args.location, - inscription_id, - timestamp: sql`to_timestamp(${args.location.timestamp})`, - }; - const locationRes = await sql<{ id: number }[]>` - INSERT INTO locations ${sql(location)} - ON CONFLICT ON CONSTRAINT locations_output_offset_unique DO UPDATE SET - inscription_id = EXCLUDED.inscription_id, - genesis_id = EXCLUDED.genesis_id, - block_height = EXCLUDED.block_height, - block_hash = EXCLUDED.block_hash, - tx_id = EXCLUDED.tx_id, - tx_index = EXCLUDED.tx_index, - address = EXCLUDED.address, - value = EXCLUDED.value, - timestamp = EXCLUDED.timestamp - RETURNING id - `; - await this.brc20.insertOperation({ - inscription_id, - location_id: locationRes[0].id, - inscription: args.inscription, - location: args.location, - }); - await this.updateInscriptionLocationPointers({ - inscription_id, - genesis_id: args.inscription.genesis_id, - location_id: locationRes[0].id, - block_height: args.location.block_height, - tx_index: args.location.tx_index, - address: args.location.address, - }); - await this.updateInscriptionRecursion({ inscription_id, ref_genesis_ids: recursion }); - await this.counts.applyInscription({ inscription: args.inscription }); - logger.info( - `PgStore${upsert.count > 0 ? 
' upsert ' : ' '}reveal #${args.inscription.number} (${ - args.location.genesis_id - }) at block ${args.location.block_height}` - ); - }); - return inscription_id; - } - - private async insertLocation(args: { location: DbLocationInsert }): Promise { - await this.sqlWriteTransaction(async sql => { - // Does the inscription exist? Warn if it doesn't. - const genesis = await sql<{ id: number; number: number }[]>` - SELECT id, number FROM inscriptions WHERE genesis_id = ${args.location.genesis_id} - `; - if (genesis.count === 0) { - logger.warn( - `PgStore inserting transfer for missing inscription (${args.location.genesis_id}) at block ${args.location.block_height}` - ); - } - const inscription_id = genesis.count > 0 ? genesis[0].id : null; - // Do we have the location from `prev_output`? Warn if we don't. - if (args.location.prev_output) { - const prev = await sql` - SELECT id FROM locations - WHERE genesis_id = ${args.location.genesis_id} AND output = ${args.location.prev_output} + const inscriptions: DbInscriptionInsert[] = []; + const transferGenesisIds = new Set(); + for (const r of writes) + if (r.inscription) inscriptions.push(r.inscription); + else transferGenesisIds.add(r.location.genesis_id); + if (inscriptions.length) + await sql` + INSERT INTO inscriptions ${sql(inscriptions)} + ON CONFLICT ON CONSTRAINT inscriptions_number_unique DO UPDATE SET + genesis_id = EXCLUDED.genesis_id, + mime_type = EXCLUDED.mime_type, + content_type = EXCLUDED.content_type, + content_length = EXCLUDED.content_length, + content = EXCLUDED.content, + fee = EXCLUDED.fee, + sat_ordinal = EXCLUDED.sat_ordinal, + sat_rarity = EXCLUDED.sat_rarity, + sat_coinbase_height = EXCLUDED.sat_coinbase_height, + updated_at = NOW() `; - if (prev.count === 0) { - logger.warn( - `PgStore inserting transfer (${args.location.genesis_id}) superceding a missing prev_output ${args.location.prev_output} at block ${args.location.block_height}` - ); - } - } - const upsert = await sql` - SELECT id FROM locations - WHERE output = ${args.location.output} AND "offset" = ${args.location.offset} - `; - const location = { - inscription_id, - genesis_id: args.location.genesis_id, - block_height: args.location.block_height, - block_hash: args.location.block_hash, - tx_id: args.location.tx_id, - tx_index: args.location.tx_index, - address: args.location.address, - output: args.location.output, - offset: args.location.offset, - prev_output: args.location.prev_output, - prev_offset: args.location.prev_offset, - value: args.location.value, - timestamp: this.sql`to_timestamp(${args.location.timestamp})`, - }; - const locationRes = await sql<{ id: number }[]>` - INSERT INTO locations ${sql(location)} + const locationData = writes.map(i => ({ + ...i.location, + inscription_id: sql`(SELECT id FROM inscriptions WHERE genesis_id = ${i.location.genesis_id})`, + timestamp: sql`TO_TIMESTAMP(${i.location.timestamp})`, + })); + const locations = await sql` + INSERT INTO locations ${sql(locationData)} ON CONFLICT ON CONSTRAINT locations_output_offset_unique DO UPDATE SET inscription_id = EXCLUDED.inscription_id, genesis_id = EXCLUDED.genesis_id, @@ -717,29 +639,24 @@ export class PgStore extends BasePgStore { address = EXCLUDED.address, value = EXCLUDED.value, timestamp = EXCLUDED.timestamp - RETURNING id + RETURNING inscription_id, id AS location_id, block_height, tx_index, address `; - if (inscription_id) { - await this.updateInscriptionLocationPointers({ - inscription_id, - genesis_id: args.location.genesis_id, - location_id: locationRes[0].id, - 
block_height: args.location.block_height, - tx_index: args.location.tx_index, - address: args.location.address, - }); - await this.brc20.insertOperationTransfer({ - inscription_id, - inscription_number: genesis[0].number, - location_id: locationRes[0].id, - location: args.location, - }); + if (transferGenesisIds.size) + await sql` + UPDATE inscriptions + SET updated_at = NOW() + WHERE genesis_id IN ${sql([...transferGenesisIds])} + `; + await this.updateInscriptionLocationPointers(locations); + await this.updateInscriptionRecursions(writes); + await this.backfillOrphanLocations(); + await this.counts.applyInscriptions(inscriptions); + for (const reveal of writes) { + const action = reveal.inscription ? `reveal #${reveal.inscription.number}` : `transfer`; + logger.info( + `PgStore ${action} (${reveal.location.genesis_id}) at block ${reveal.location.block_height}` + ); } - logger.info( - `PgStore${upsert.count > 0 ? ' upsert ' : ' '}transfer (${ - args.location.genesis_id - }) at block ${args.location.block_height}` - ); }); } @@ -816,71 +733,101 @@ export class PgStore extends BasePgStore { } private async updateInscriptionLocationPointers( - args: DbLocationPointerInsert & { genesis_id: string } + pointers: DbLocationPointerInsert[] ): Promise { - await this.sqlWriteTransaction(async sql => { - // Update genesis and current location pointers for this inscription. - const pointer: DbLocationPointerInsert = { - inscription_id: args.inscription_id, - location_id: args.location_id, - block_height: args.block_height, - tx_index: args.tx_index, - address: args.address, - }; - - const genesis = await sql` - SELECT * FROM genesis_locations WHERE inscription_id = ${args.inscription_id} - `; - const genesisRes = await sql` - INSERT INTO genesis_locations ${sql(pointer)} - ON CONFLICT (inscription_id) DO UPDATE SET - location_id = EXCLUDED.location_id, - block_height = EXCLUDED.block_height, - tx_index = EXCLUDED.tx_index, - address = EXCLUDED.address - WHERE - EXCLUDED.block_height < genesis_locations.block_height OR - (EXCLUDED.block_height = genesis_locations.block_height AND - EXCLUDED.tx_index < genesis_locations.tx_index) - `; - // Affect genesis counts only if we have an update. - if (genesisRes.count > 0) { - await this.counts.applyGenesisLocation({ old: genesis[0], new: pointer }); + if (pointers.length === 0) return; + + // Filters pointer args so we enter only one new pointer per inscription. + const distinctPointers = ( + cond: (a: DbLocationPointerInsert, b: DbLocationPointerInsert) => boolean + ): DbLocationPointerInsert[] => { + const out = new Map(); + for (const ptr of pointers) { + if (ptr.inscription_id === null) continue; + const current = out.get(ptr.inscription_id); + out.set(ptr.inscription_id, current ? (cond(current, ptr) ? current : ptr) : ptr); } + return [...out.values()]; + }; - const current = await sql` - SELECT * FROM current_locations WHERE inscription_id = ${args.inscription_id} - `; - const currentRes = await sql` - INSERT INTO current_locations ${sql(pointer)} - ON CONFLICT (inscription_id) DO UPDATE SET - location_id = EXCLUDED.location_id, - block_height = EXCLUDED.block_height, - tx_index = EXCLUDED.tx_index, - address = EXCLUDED.address - WHERE - EXCLUDED.block_height > current_locations.block_height OR - (EXCLUDED.block_height = current_locations.block_height AND - EXCLUDED.tx_index > current_locations.tx_index) - `; - // Affect current location counts only if we have an update. 
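The rewritten pointer update below reduces each batch to at most one row per inscription: the earliest location (lowest block height, then lowest tx index) wins for `genesis_locations`, and the latest wins for `current_locations`. A minimal sketch of that ordering rule, not part of the patch and with simplified numeric types:

type PointerLike = { block_height: number; tx_index: number };

// True when `a` comes before `b` in chain order.
const isEarlier = (a: PointerLike, b: PointerLike): boolean =>
  a.block_height < b.block_height ||
  (a.block_height === b.block_height && a.tx_index < b.tx_index);

// genesis_locations keeps the candidate for which isEarlier(candidate, other) holds;
// current_locations keeps the opposite one (the latest seen for that inscription).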
- if (currentRes.count > 0) { - await this.counts.applyCurrentLocation({ old: current[0], new: pointer }); + await this.sqlWriteTransaction(async sql => { + const distinctIds = [ + ...new Set(pointers.map(i => i.inscription_id).filter(v => v !== null)), + ]; + const genesisPtrs = distinctPointers( + (a, b) => + a.block_height < b.block_height || + (a.block_height === b.block_height && a.tx_index < b.tx_index) + ); + if (genesisPtrs.length) { + const genesis = await sql<{ old_address: string | null; new_address: string | null }[]>` + WITH old_pointers AS ( + SELECT inscription_id, address + FROM genesis_locations + WHERE inscription_id IN ${sql(distinctIds)} + ), + new_pointers AS ( + INSERT INTO genesis_locations ${sql(genesisPtrs)} + ON CONFLICT (inscription_id) DO UPDATE SET + location_id = EXCLUDED.location_id, + block_height = EXCLUDED.block_height, + tx_index = EXCLUDED.tx_index, + address = EXCLUDED.address + WHERE + EXCLUDED.block_height < genesis_locations.block_height OR + (EXCLUDED.block_height = genesis_locations.block_height AND + EXCLUDED.tx_index < genesis_locations.tx_index) + RETURNING inscription_id, address + ) + SELECT n.address AS new_address, o.address AS old_address + FROM new_pointers AS n + LEFT JOIN old_pointers AS o USING (inscription_id) + `; + await this.counts.applyLocations(genesis, true); } - // Backfill orphan locations for this inscription, if any. - await sql` - UPDATE locations - SET inscription_id = ${args.inscription_id} - WHERE genesis_id = ${args.genesis_id} AND inscription_id IS NULL - `; - // Update the inscription's `updated_at` timestamp for caching purposes. - await sql` - UPDATE inscriptions SET updated_at = NOW() WHERE genesis_id = ${args.genesis_id} - `; + const currentPtrs = distinctPointers( + (a, b) => + a.block_height > b.block_height || + (a.block_height === b.block_height && a.tx_index > b.tx_index) + ); + if (currentPtrs.length) { + const current = await sql<{ old_address: string | null; new_address: string | null }[]>` + WITH old_pointers AS ( + SELECT inscription_id, address + FROM current_locations + WHERE inscription_id IN ${sql(distinctIds)} + ), + new_pointers AS ( + INSERT INTO current_locations ${sql(currentPtrs)} + ON CONFLICT (inscription_id) DO UPDATE SET + location_id = EXCLUDED.location_id, + block_height = EXCLUDED.block_height, + tx_index = EXCLUDED.tx_index, + address = EXCLUDED.address + WHERE + EXCLUDED.block_height > current_locations.block_height OR + (EXCLUDED.block_height = current_locations.block_height AND + EXCLUDED.tx_index > current_locations.tx_index) + RETURNING inscription_id, address + ) + SELECT n.address AS new_address, o.address AS old_address + FROM new_pointers AS n + LEFT JOIN old_pointers AS o USING (inscription_id) + `; + await this.counts.applyLocations(current, false); + } }); } + private async backfillOrphanLocations(): Promise { + await this.sql` + UPDATE locations AS l + SET inscription_id = (SELECT id FROM inscriptions WHERE genesis_id = l.genesis_id) + WHERE l.inscription_id IS NULL + `; + } + private async recalculateCurrentLocationPointerFromLocationRollBack(args: { location: DbLocation; }): Promise { @@ -912,24 +859,26 @@ export class PgStore extends BasePgStore { }); } - private async updateInscriptionRecursion(args: { - inscription_id: number; - ref_genesis_ids: string[]; - }): Promise { - await this.sqlWriteTransaction(async sql => { - const validated = await sql<{ id: string }[]>` - SELECT id FROM inscriptions WHERE genesis_id IN ${this.sql(args.ref_genesis_ids)} - `; - if 
(validated.count > 0) { - const values = validated.map(i => ({ - inscription_id: args.inscription_id, - ref_inscription_id: i.id, - })); - await this.sql` - INSERT INTO inscription_recursions ${sql(values)} - ON CONFLICT ON CONSTRAINT inscriptions_inscription_id_ref_inscription_id_unique DO NOTHING - `; - } - }); + private async updateInscriptionRecursions(reveals: DbRevealInsert[]): Promise { + if (reveals.length === 0) return; + // TODO: Gap fills may make us miss some recursion refs because they will not appear in this + // query. + const inserts: { + inscription_id: postgres.PendingQuery; + ref_inscription_id: postgres.PendingQuery; + }[] = []; + for (const i of reveals) + if (i.inscription && i.recursive_refs?.length) + for (const r of i.recursive_refs) + inserts.push({ + inscription_id: this + .sql`(SELECT id FROM inscriptions WHERE genesis_id = ${i.inscription?.genesis_id})`, + ref_inscription_id: this.sql`(SELECT id FROM inscriptions WHERE genesis_id = ${r})`, + }); + if (inserts.length === 0) return; + await this.sql` + INSERT INTO inscription_recursions ${this.sql(inserts)} + ON CONFLICT ON CONSTRAINT inscriptions_inscription_id_ref_inscription_id_unique DO NOTHING + `; } } diff --git a/src/pg/types.ts b/src/pg/types.ts index c35c9cae..13aa26f8 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -138,6 +138,13 @@ export type DbInscriptionInsert = { sat_ordinal: PgNumeric; sat_rarity: string; sat_coinbase_height: number; + recursive: boolean; +}; + +export type DbRevealInsert = { + inscription?: DbInscriptionInsert; + recursive_refs?: string[]; + location: DbLocationInsert; }; export type DbInscription = { diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 6c86de80..506d0b65 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -35,6 +35,7 @@ describe('BRC-20', () => { sat_ordinal: '2000000', sat_rarity: 'common', sat_coinbase_height: 110, + recursive: false, }; return insert; }; @@ -61,6 +62,7 @@ describe('BRC-20', () => { sat_ordinal: '2000000', sat_rarity: 'common', sat_coinbase_height: 110, + recursive: false, }; expect(brc20FromInscription(insert)).toBeUndefined(); insert.content_type = 'application/json'; @@ -88,6 +90,7 @@ describe('BRC-20', () => { sat_ordinal: '2000000', sat_rarity: 'common', sat_coinbase_height: 110, + recursive: false, }; expect(brc20FromInscription(insert)).toBeUndefined(); }); @@ -1250,6 +1253,52 @@ describe('BRC-20', () => { expect(json2.results[0].available_balance).toBe('10000'); }); + test('transfer ignored if token not found', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await deployAndMintPEPE(address); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775619, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', + }) + .transaction({ + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'transfer', + tick: 'TEST', // Not found + amt: '2000', + }, + number: 7, + tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + address: address, + }) + ) + .build() + ); + + const response = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address}`, + }); + expect(response.statusCode).toBe(200); + const json = response.json(); + expect(json.total).toBe(1); + expect(json.results).toStrictEqual([ + { + available_balance: '10000', + overall_balance: '10000', + ticker: 
'PEPE', + transferrable_balance: '0', + }, + ]); + }); + test('cannot transfer more than available balance', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; await deployAndMintPEPE(address); @@ -1566,154 +1615,6 @@ describe('BRC-20', () => { }, ]); }); - - test('balance transfer gap fill applied correctly', async () => { - const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; - const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; - await deployAndMintPEPE(address); - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: 775640, - hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', - }) - .transaction({ - hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - }) - .inscriptionRevealed( - brc20Reveal({ - json: { - p: 'brc-20', - op: 'transfer', - tick: 'PEPE', - amt: '9000', - }, - number: 7, - tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', - address: address, - }) - ) - .build() - ); - - // Make the first seen transfer - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: 775651, - hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', - }) - .transaction({ - hash: 'ce32d47452a4dfae6510fd283e1cec587c5cac217dec09ac4b01541adc86cd34', - }) - .inscriptionTransferred({ - inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', - updated_address: address2, - satpoint_pre_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - satpoint_post_transfer: - 'ce32d47452a4dfae6510fd283e1cec587c5cac217dec09ac4b01541adc86cd34:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) - .build() - ); - const response1 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response1.statusCode).toBe(200); - const json1 = response1.json(); - expect(json1.total).toBe(1); - expect(json1.results).toStrictEqual([ - { - available_balance: '1000', - overall_balance: '1000', - ticker: 'PEPE', - transferrable_balance: '0', - }, - ]); - const response2 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address2}`, - }); - expect(response2.statusCode).toBe(200); - const json2 = response2.json(); - expect(json2.total).toBe(1); - expect(json2.results).toStrictEqual([ - { - available_balance: '9000', - overall_balance: '9000', - ticker: 'PEPE', - transferrable_balance: '0', - }, - ]); - - // Oops, turns out there was a gap fill with another transfer first - await db.updateInscriptions( - new TestChainhookPayloadBuilder() - .apply() - .block({ - height: 775645, - hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', - }) - .transaction({ - hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', - }) - .inscriptionTransferred({ - inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', - updated_address: 'bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz', - satpoint_pre_transfer: - 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', - satpoint_post_transfer: - '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', - post_transfer_output_value: null, - tx_index: 0, - }) - .build() - ); - const response1b = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address}`, - }); - expect(response1b.statusCode).toBe(200); - 
const json1b = response1b.json(); - expect(json1b.total).toBe(1); - expect(json1b.results).toStrictEqual([ - { - available_balance: '1000', - overall_balance: '1000', - ticker: 'PEPE', - transferrable_balance: '0', - }, - ]); - // No movements at all for this address. - const response2b = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/${address2}`, - }); - expect(response2b.statusCode).toBe(200); - const json2b = response2b.json(); - expect(json2b.total).toBe(0); - // This address is the one that should have the balance. - const response3 = await fastify.inject({ - method: 'GET', - url: `/ordinals/brc-20/balances/bc1q6uwuet65rm6xvlz7ztw2gvdmmay5uaycu03mqz`, - }); - expect(response3.statusCode).toBe(200); - const json3 = response3.json(); - expect(json3.total).toBe(1); - expect(json3.results).toStrictEqual([ - { - available_balance: '9000', - overall_balance: '9000', - ticker: 'PEPE', - transferrable_balance: '0', - }, - ]); - }); }); describe('routes', () => { From 66aa72104c1834ca9f60a30ce45485f95c1f97cc Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Thu, 24 Aug 2023 00:07:52 +0000 Subject: [PATCH 37/56] chore(release): 1.0.0-brc-20.2 [skip ci] ## [1.0.0-brc-20.2](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.1...v1.0.0-brc-20.2) (2023-08-24) ### Features * add more brc20 features ([#183](https://github.com/hirosystems/ordinals-api/issues/183)) ([c1939ce](https://github.com/hirosystems/ordinals-api/commit/c1939cee3a40df5f285d3bb749c8b29cea4bb271)) ### Bug Fixes * optimize inscription and brc-20 inserts ([#189](https://github.com/hirosystems/ordinals-api/issues/189)) ([3807334](https://github.com/hirosystems/ordinals-api/commit/38073341163bc1c5cea44b66ad3da505f2ce4273)) --- CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9b83c929..fed9470f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,15 @@ +## [1.0.0-brc-20.2](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.1...v1.0.0-brc-20.2) (2023-08-24) + + +### Features + +* add more brc20 features ([#183](https://github.com/hirosystems/ordinals-api/issues/183)) ([c1939ce](https://github.com/hirosystems/ordinals-api/commit/c1939cee3a40df5f285d3bb749c8b29cea4bb271)) + + +### Bug Fixes + +* optimize inscription and brc-20 inserts ([#189](https://github.com/hirosystems/ordinals-api/issues/189)) ([3807334](https://github.com/hirosystems/ordinals-api/commit/38073341163bc1c5cea44b66ad3da505f2ce4273)) + ## [1.0.0-brc-20.1](https://github.com/hirosystems/ordinals-api/compare/v0.4.15...v1.0.0-brc-20.1) (2023-08-17) From e23012a926eef80e1d467ef28f618fae989426fd Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 23 Aug 2023 23:04:19 -0600 Subject: [PATCH 38/56] fix: sending transfer as fee returns amt to sender --- src/admin-rpc/init.ts | 2 + src/pg/brc20/brc20-pg-store.ts | 16 +++----- src/pg/brc20/types.ts | 12 +++++- tests/brc20.test.ts | 68 ++++++++++++++++++++++++++++++++++ 4 files changed, 87 insertions(+), 11 deletions(-) diff --git a/src/admin-rpc/init.ts b/src/admin-rpc/init.ts index f7154991..2fbc0c4e 100644 --- a/src/admin-rpc/init.ts +++ b/src/admin-rpc/init.ts @@ -16,6 +16,8 @@ export const AdminApi: FastifyPluginCallback, Server, TypeB schema: { description: 'Scan for BRC-20 operations within a block range', querystring: Type.Object({ + // TIP: The first BRC-20 token was deployed at height `779832`. This should be a good + // place to start. 
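+          // e.g. POST /ordinals/admin/brc-20/scan?start_block=779832&end_block=780000
+          // (the end_block value above is only an example)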
start_block: Type.Integer(), end_block: Type.Integer(), }), diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index c1beef0c..bde3bcac 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -23,6 +23,7 @@ import { DbBrc20ScannedInscription, DbBrc20MintInsert, DbBrc20DeployInsert, + DbBrc20TransferInsert, } from './types'; import { Brc20Deploy, Brc20Mint, Brc20Transfer, brc20FromInscriptionContent } from './helpers'; import { hexToBuffer } from '../../api/util/helpers'; @@ -60,12 +61,7 @@ export class Brc20PgStore { const block = await sql` SELECT i.content, - ( - CASE EXISTS(SELECT location_id FROM genesis_locations WHERE location_id = l.id) - WHEN TRUE THEN TRUE - ELSE FALSE - END - ) AS genesis, + EXISTS(SELECT location_id FROM genesis_locations WHERE location_id = l.id) AS genesis, ${sql(LOCATIONS_COLUMNS.map(c => `l.${c}`))} FROM locations AS l INNER JOIN inscriptions AS i ON l.inscription_id = i.id @@ -98,8 +94,7 @@ export class Brc20PgStore { break; } } - } - if (!write.genesis) { + } else { await this.applyTransfer(write); } } @@ -257,7 +252,8 @@ export class Brc20PgStore { inscription_id: transfer.inscription_id, location_id: args.id, brc20_deploy_id: transfer.brc20_deploy_id, - address: args.address, + // If a transfer is sent as fee, its amount must be returned to sender. + address: args.address ?? transfer.from_address, avail_balance: amount.toString(), trans_balance: '0', type: DbBrc20BalanceTypeId.transferTo, @@ -386,7 +382,7 @@ export class Brc20PgStore { const available = new BigNumber(balanceRes[0].avail_balance); if (transAmt.gt(available)) return; - const transferInsert = { + const transferInsert: DbBrc20TransferInsert = { inscription_id: transfer.location.inscription_id, brc20_deploy_id: balanceRes[0].brc20_deploy_id, block_height: transfer.location.block_height, diff --git a/src/pg/brc20/types.ts b/src/pg/brc20/types.ts index c7e86d74..9ff859a3 100644 --- a/src/pg/brc20/types.ts +++ b/src/pg/brc20/types.ts @@ -38,6 +38,16 @@ export type DbBrc20Deploy = { limit?: string; }; +export type DbBrc20TransferInsert = { + inscription_id: string; + brc20_deploy_id: string; + block_height: string; + tx_id: string; + from_address: string; + to_address: string | null; + amount: string; +}; + export type DbBrc20Transfer = { id: string; inscription_id: string; @@ -93,7 +103,7 @@ export type DbBrc20BalanceInsert = { inscription_id: PgNumeric; location_id: PgNumeric; brc20_deploy_id: PgNumeric; - address: string | null; + address: string; avail_balance: PgNumeric; trans_balance: PgNumeric; type: DbBrc20BalanceTypeId; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 506d0b65..93dea504 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1506,6 +1506,74 @@ describe('BRC-20', () => { expect(prevBlockJson2.results[0]).toBeUndefined(); }); + test('sending transfer as fee returns amount to sender', async () => { + const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; + await deployAndMintPEPE(address); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775619, + hash: '00000000000000000002b14f0c5dde0b2fc74d022e860696bd64f1f652756674', + }) + .transaction({ + hash: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', + }) + .inscriptionRevealed( + brc20Reveal({ + json: { + p: 'brc-20', + op: 'transfer', + tick: 'PEPE', + amt: '9000', + }, + number: 7, + tx_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a', 
+ address: address, + }) + ) + .build() + ); + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 775620, + hash: '00000000000000000003feae13d107f0f2c4fb4dd08fb2a8b1ab553512e77f03', + }) + .transaction({ + hash: '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac', + }) + .inscriptionTransferred({ + inscription_id: 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47ai0', + updated_address: null, // Sent as fee + satpoint_pre_transfer: + 'eee52b22397ea4a4aefe6a39931315e93a157091f5a994216c0aa9c8c6fef47a:0:0', + satpoint_post_transfer: + '7edaa48337a94da327b6262830505f116775a32db5ad4ad46e87ecea33f21bac:0:0', + post_transfer_output_value: null, + tx_index: 0, + }) + .build() + ); + + const response1 = await fastify.inject({ + method: 'GET', + url: `/ordinals/brc-20/balances/${address}`, + }); + expect(response1.statusCode).toBe(200); + const json1 = response1.json(); + expect(json1.total).toBe(1); + expect(json1.results).toStrictEqual([ + { + available_balance: '10000', + overall_balance: '10000', + ticker: 'PEPE', + transferrable_balance: '0', + }, + ]); + }); + test('cannot spend valid transfer twice', async () => { const address = 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td'; const address2 = '3QNjwPDRafjBm9XxJpshgk3ksMJh3TFxTU'; From b428bb48ef2b2a3eda6f3366fbd7fd488d090d4d Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 23 Aug 2023 23:32:33 -0600 Subject: [PATCH 39/56] fix: add unique indexes for mints and transfers --- ...1692853050488_brc20-mint-transfer-unique.ts | 18 ++++++++++++++++++ src/pg/brc20/brc20-pg-store.ts | 2 ++ 2 files changed, 20 insertions(+) create mode 100644 migrations/1692853050488_brc20-mint-transfer-unique.ts diff --git a/migrations/1692853050488_brc20-mint-transfer-unique.ts b/migrations/1692853050488_brc20-mint-transfer-unique.ts new file mode 100644 index 00000000..2ad987e1 --- /dev/null +++ b/migrations/1692853050488_brc20-mint-transfer-unique.ts @@ -0,0 +1,18 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.dropIndex('brc20_transfers', ['inscription_id']); + pgm.createIndex('brc20_transfers', ['inscription_id'], { unique: true }); + pgm.dropIndex('brc20_mints', ['inscription_id']); + pgm.createIndex('brc20_mints', ['inscription_id'], { unique: true }); +} + +export function down(pgm: MigrationBuilder): void { + pgm.dropIndex('brc20_transfers', ['inscription_id'], { unique: true }); + pgm.createIndex('brc20_transfers', ['inscription_id']); + pgm.dropIndex('brc20_mints', ['inscription_id'], { unique: true }); + pgm.createIndex('brc20_mints', ['inscription_id']); +} diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index bde3bcac..a2423497 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -351,6 +351,7 @@ export class Brc20PgStore { await sql` WITH mint_insert AS ( INSERT INTO brc20_mints ${sql(mintInsert)} + ON CONFLICT (inscription_id) DO NOTHING ) INSERT INTO brc20_balances ${sql(balanceInsert)} ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING @@ -403,6 +404,7 @@ export class Brc20PgStore { await sql` WITH transfer_insert AS ( INSERT INTO brc20_transfers ${sql(transferInsert)} + ON CONFLICT (inscription_id) DO NOTHING ) INSERT INTO brc20_balances 
${sql(balanceInsert)} ON CONFLICT ON CONSTRAINT brc20_balances_inscription_id_type_unique DO NOTHING From eb6e7fd465d69bb65aef9467df994895068b113a Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Thu, 24 Aug 2023 05:38:28 +0000 Subject: [PATCH 40/56] chore(release): 1.0.0-brc-20.3 [skip ci] ## [1.0.0-brc-20.3](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.2...v1.0.0-brc-20.3) (2023-08-24) ### Bug Fixes * add unique indexes for mints and transfers ([b428bb4](https://github.com/hirosystems/ordinals-api/commit/b428bb48ef2b2a3eda6f3366fbd7fd488d090d4d)) * sending transfer as fee returns amt to sender ([e23012a](https://github.com/hirosystems/ordinals-api/commit/e23012a926eef80e1d467ef28f618fae989426fd)) --- CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed9470f..f080c8c1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,11 @@ +## [1.0.0-brc-20.3](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.2...v1.0.0-brc-20.3) (2023-08-24) + + +### Bug Fixes + +* add unique indexes for mints and transfers ([b428bb4](https://github.com/hirosystems/ordinals-api/commit/b428bb48ef2b2a3eda6f3366fbd7fd488d090d4d)) +* sending transfer as fee returns amt to sender ([e23012a](https://github.com/hirosystems/ordinals-api/commit/e23012a926eef80e1d467ef28f618fae989426fd)) + ## [1.0.0-brc-20.2](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.1...v1.0.0-brc-20.2) (2023-08-24) From 0e673a7ab327e98b85b3f6289fc7addadee6b1d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Thu, 24 Aug 2023 14:18:41 -0600 Subject: [PATCH 41/56] fix: upgrade api-toolkit (#190) * fix: upgrade api-toolkit * fix: isProdEnv helper --- package-lock.json | 48 +++++++++++++++++++++++++++----- package.json | 2 +- src/api/init.ts | 3 +- src/api/util/helpers.ts | 8 ------ src/env.ts | 1 + src/index.ts | 3 +- src/pg/counts/counts-pg-store.ts | 23 ++++----------- src/pg/pg-store.ts | 13 ++++----- 8 files changed, 56 insertions(+), 45 deletions(-) diff --git a/package-lock.json b/package-lock.json index d34bbea7..c8e9d38f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,7 +14,7 @@ "@fastify/multipart": "^7.1.0", "@fastify/swagger": "^8.3.1", "@fastify/type-provider-typebox": "^3.2.0", - "@hirosystems/api-toolkit": "^1.0.0", + "@hirosystems/api-toolkit": "^1.1.0", "@hirosystems/chainhook-client": "^1.3.1", "@types/node": "^18.13.0", "bignumber.js": "^9.1.1", @@ -49,6 +49,40 @@ "typescript": "^4.7.4" } }, + "../api-toolkit": { + "name": "@hirosystems/api-toolkit", + "version": "1.1.0", + "extraneous": true, + "license": "Apache 2.0", + "dependencies": { + "node-pg-migrate": "^6.2.2", + "pino": "^8.11.0", + "postgres": "^3.3.4" + }, + "bin": { + "api-toolkit-git-info": "bin/api-toolkit-git-info.js" + }, + "devDependencies": { + "@commitlint/cli": "^17.5.0", + "@commitlint/config-conventional": "^17.4.4", + "@stacks/eslint-config": "^1.2.0", + "@types/jest": "^29.5.0", + "@typescript-eslint/eslint-plugin": "^5.56.0", + "@typescript-eslint/parser": "^5.56.0", + "babel-jest": "^29.5.0", + "copyfiles": "^2.4.1", + "eslint": "^8.36.0", + "eslint-plugin-prettier": "^4.2.1", + "eslint-plugin-tsdoc": "^0.2.17", + "husky": "^8.0.3", + "jest": "^29.5.0", + "prettier": "^2.8.6", + "rimraf": "^4.4.1", + "ts-jest": "^29.0.5", + "ts-node": "^10.9.1", + "typescript": "^5.0.2" + } + }, "../chainhook/components/client/typescript": { "name": "@hirosystems/chainhook-client", "version": "1.3.0", @@ -1153,9 +1187,9 
@@ } }, "node_modules/@hirosystems/api-toolkit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@hirosystems/api-toolkit/-/api-toolkit-1.0.0.tgz", - "integrity": "sha512-/e/oI3COpx92dK+U7TQVFxaTzoWbx5LhA6IWhqh5Imx2BHTQeP0PrOQy6lEfK/FcrkTTrfbPrNYKrJ34otwyDA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@hirosystems/api-toolkit/-/api-toolkit-1.1.0.tgz", + "integrity": "sha512-yjlNNRJ4LP+oGrK1ECmZDLutfOwLPu58wbF4eet1Dr9kr36vUGuDEuT6VZKKet7zj7RsUdWW8/SRfUDp+0bppQ==", "dependencies": { "node-pg-migrate": "^6.2.2", "pino": "^8.11.0", @@ -13571,9 +13605,9 @@ "requires": {} }, "@hirosystems/api-toolkit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@hirosystems/api-toolkit/-/api-toolkit-1.0.0.tgz", - "integrity": "sha512-/e/oI3COpx92dK+U7TQVFxaTzoWbx5LhA6IWhqh5Imx2BHTQeP0PrOQy6lEfK/FcrkTTrfbPrNYKrJ34otwyDA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@hirosystems/api-toolkit/-/api-toolkit-1.1.0.tgz", + "integrity": "sha512-yjlNNRJ4LP+oGrK1ECmZDLutfOwLPu58wbF4eet1Dr9kr36vUGuDEuT6VZKKet7zj7RsUdWW8/SRfUDp+0bppQ==", "requires": { "node-pg-migrate": "^6.2.2", "pino": "^8.11.0", diff --git a/package.json b/package.json index 2f331681..27b0daa9 100644 --- a/package.json +++ b/package.json @@ -50,7 +50,7 @@ "@fastify/multipart": "^7.1.0", "@fastify/swagger": "^8.3.1", "@fastify/type-provider-typebox": "^3.2.0", - "@hirosystems/api-toolkit": "^1.0.0", + "@hirosystems/api-toolkit": "^1.1.0", "@hirosystems/chainhook-client": "^1.3.1", "@types/node": "^18.13.0", "bignumber.js": "^9.1.1", diff --git a/src/api/init.ts b/src/api/init.ts index a1609d30..8846cb55 100644 --- a/src/api/init.ts +++ b/src/api/init.ts @@ -8,8 +8,7 @@ import { InscriptionsRoutes } from './routes/inscriptions'; import { SatRoutes } from './routes/sats'; import { StatsRoutes } from './routes/stats'; import { StatusRoutes } from './routes/status'; -import { isProdEnv } from './util/helpers'; -import { PINO_LOGGER_CONFIG } from '@hirosystems/api-toolkit'; +import { PINO_LOGGER_CONFIG, isProdEnv } from '@hirosystems/api-toolkit'; import { Brc20Routes } from './routes/brc20'; export const Api: FastifyPluginAsync< diff --git a/src/api/util/helpers.ts b/src/api/util/helpers.ts index a27e7659..fe391967 100644 --- a/src/api/util/helpers.ts +++ b/src/api/util/helpers.ts @@ -16,14 +16,6 @@ import { InscriptionResponseType, } from '../schemas'; -export const isDevEnv = process.env.NODE_ENV === 'development'; -export const isTestEnv = process.env.NODE_ENV === 'test'; -export const isProdEnv = - process.env.NODE_ENV === 'production' || - process.env.NODE_ENV === 'prod' || - !process.env.NODE_ENV || - (!isTestEnv && !isDevEnv); - export const DEFAULT_API_LIMIT = 20; export function parseDbInscriptions( diff --git a/src/env.ts b/src/env.ts index 7d54fdca..9f83eed3 100644 --- a/src/env.ts +++ b/src/env.ts @@ -51,6 +51,7 @@ const schema = Type.Object({ PG_CONNECTION_POOL_MAX: Type.Number({ default: 10 }), PG_IDLE_TIMEOUT: Type.Number({ default: 30 }), PG_MAX_LIFETIME: Type.Number({ default: 60 }), + PG_STATEMENT_TIMEOUT: Type.Number({ default: 60_000 }), /** Enables BRC-20 processing in write mode APIs */ BRC20_BLOCK_SCAN_ENABLED: Type.Boolean({ default: true }), diff --git a/src/index.ts b/src/index.ts index 7bf58ec3..3add9360 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,6 +1,5 @@ -import { logger, registerShutdownConfig } from '@hirosystems/api-toolkit'; +import { isProdEnv, logger, registerShutdownConfig } from '@hirosystems/api-toolkit'; import { buildApiServer, 
buildPromServer } from './api/init'; -import { isProdEnv } from './api/util/helpers'; import { startChainhookServer } from './chainhook/server'; import { ENV } from './env'; import { ApiMetrics } from './metrics/metrics'; diff --git a/src/pg/counts/counts-pg-store.ts b/src/pg/counts/counts-pg-store.ts index 14b388f6..74d13583 100644 --- a/src/pg/counts/counts-pg-store.ts +++ b/src/pg/counts/counts-pg-store.ts @@ -1,5 +1,4 @@ -import { PgSqlClient } from '@hirosystems/api-toolkit'; -import { PgStore } from '../pg-store'; +import { BasePgStoreModule } from '@hirosystems/api-toolkit'; import { SatoshiRarity } from '../../api/util/ordinal-satoshi'; import { DbInscription, @@ -14,17 +13,7 @@ import { DbInscriptionIndexResultCountType } from './types'; * This class affects all the different tables that track inscription counts according to different * parameters (sat rarity, mime type, cursed, blessed, current owner, etc.) */ -export class CountsPgStore { - // TODO: Move this to the api-toolkit so we can have pg submodules. - private readonly parent: PgStore; - private get sql(): PgSqlClient { - return this.parent.sql; - } - - constructor(db: PgStore) { - this.parent = db; - } - +export class CountsPgStore extends BasePgStoreModule { async fromResults( countType: DbInscriptionIndexResultCountType, filters?: DbInscriptionIndexFilters @@ -65,7 +54,7 @@ export class CountsPgStore { async applyInscriptions(writes: DbInscriptionInsert[]): Promise { if (writes.length === 0) return; - await this.parent.sqlWriteTransaction(async sql => { + await this.sqlWriteTransaction(async sql => { const mimeType = new Map(); const rarity = new Map(); const type = new Map(); @@ -97,7 +86,7 @@ export class CountsPgStore { } async rollBackInscription(args: { inscription: DbInscription }): Promise { - await this.parent.sqlWriteTransaction(async sql => { + await this.sqlWriteTransaction(async sql => { await sql` UPDATE counts_by_mime_type SET count = count - 1 WHERE mime_type = ${args.inscription.mime_type} `; @@ -124,7 +113,7 @@ export class CountsPgStore { genesis: boolean = true ): Promise { if (writes.length === 0) return; - await this.parent.sqlWriteTransaction(async sql => { + await this.sqlWriteTransaction(async sql => { const table = genesis ? 
sql`counts_by_genesis_address` : sql`counts_by_address`; const oldAddr = new Map(); const newAddr = new Map(); @@ -157,7 +146,7 @@ export class CountsPgStore { curr: DbLocationPointer; prev: DbLocationPointer; }): Promise { - await this.parent.sqlWriteTransaction(async sql => { + await this.sqlWriteTransaction(async sql => { if (args.curr.address) { await sql` UPDATE counts_by_address SET count = count - 1 WHERE address = ${args.curr.address} diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index bce21d8f..71eba98f 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -1,7 +1,9 @@ import { BasePgStore, + PgConnectionVars, PgSqlClient, connectPostgres, + isTestEnv, logger, runMigrations, } from '@hirosystems/api-toolkit'; @@ -9,7 +11,7 @@ import { BitcoinEvent, Payload } from '@hirosystems/chainhook-client'; import * as path from 'path'; import * as postgres from 'postgres'; import { Order, OrderBy } from '../api/schemas'; -import { isProdEnv, isTestEnv, normalizedHexString, parseSatPoint } from '../api/util/helpers'; +import { normalizedHexString, parseSatPoint } from '../api/util/helpers'; import { OrdinalSatoshi } from '../api/util/ordinal-satoshi'; import { ENV } from '../env'; import { Brc20PgStore } from './brc20/brc20-pg-store'; @@ -45,7 +47,7 @@ export class PgStore extends BasePgStore { readonly counts: CountsPgStore; static async connect(opts?: { skipMigrations: boolean }): Promise { - const pgConfig = { + const pgConfig: PgConnectionVars = { host: ENV.PGHOST, port: ENV.PGPORT, user: ENV.PGUSER, @@ -59,6 +61,7 @@ export class PgStore extends BasePgStore { poolMax: ENV.PG_CONNECTION_POOL_MAX, idleTimeout: ENV.PG_IDLE_TIMEOUT, maxLifetime: ENV.PG_MAX_LIFETIME, + statementTimeout: ENV.PG_STATEMENT_TIMEOUT, }, }); if (opts?.skipMigrations !== true) { @@ -570,12 +573,6 @@ export class PgStore extends BasePgStore { `; // roughly 35 days of blocks, assuming 10 minute block times on a full database } - async refreshMaterializedView(viewName: string) { - await this.sql`REFRESH MATERIALIZED VIEW ${ - isProdEnv ? this.sql`CONCURRENTLY` : this.sql`` - } ${this.sql(viewName)}`; - } - private async getInscription(args: { genesis_id: string }): Promise { const query = await this.sql` SELECT ${this.sql(INSCRIPTIONS_COLUMNS)} From 7d6705a82f2acec5531b53d2d352f7fa04bf1c51 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 25 Aug 2023 08:39:48 -0600 Subject: [PATCH 42/56] fix: refresh supplies view only if BRC-20 is enabled --- src/pg/pg-store.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 71eba98f..a00b2cbc 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -230,10 +230,9 @@ export class PgStore extends BasePgStore { if (payload.chainhook.is_streaming_blocks) { // We'll issue materialized view refreshes in parallel. We will not wait for them to finish so // we can respond to the chainhook node with a `200` HTTP code as soon as possible. - const viewRefresh = Promise.allSettled([ - this.normalizeInscriptionCount({ min_block_height: updatedBlockHeightMin }), - this.refreshMaterializedView('brc20_supplies'), - ]); + const views = [this.normalizeInscriptionCount({ min_block_height: updatedBlockHeightMin })]; + if (ENV.BRC20_BLOCK_SCAN_ENABLED) views.push(this.refreshMaterializedView('brc20_supplies')); + const viewRefresh = Promise.allSettled(views); // Only wait for these on tests. 
if (isTestEnv) await viewRefresh; } From 33d70a4abb68c609702c01853583deee9ebe5b3c Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Fri, 25 Aug 2023 14:45:16 +0000 Subject: [PATCH 43/56] chore(release): 1.0.0-brc-20.4 [skip ci] ## [1.0.0-brc-20.4](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.3...v1.0.0-brc-20.4) (2023-08-25) ### Bug Fixes * refresh supplies view only if BRC-20 is enabled ([7d6705a](https://github.com/hirosystems/ordinals-api/commit/7d6705a82f2acec5531b53d2d352f7fa04bf1c51)) * upgrade api-toolkit ([#190](https://github.com/hirosystems/ordinals-api/issues/190)) ([0e673a7](https://github.com/hirosystems/ordinals-api/commit/0e673a7ab327e98b85b3f6289fc7addadee6b1d2)) --- CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f080c8c1..5a92287f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,11 @@ +## [1.0.0-brc-20.4](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.3...v1.0.0-brc-20.4) (2023-08-25) + + +### Bug Fixes + +* refresh supplies view only if BRC-20 is enabled ([7d6705a](https://github.com/hirosystems/ordinals-api/commit/7d6705a82f2acec5531b53d2d352f7fa04bf1c51)) +* upgrade api-toolkit ([#190](https://github.com/hirosystems/ordinals-api/issues/190)) ([0e673a7](https://github.com/hirosystems/ordinals-api/commit/0e673a7ab327e98b85b3f6289fc7addadee6b1d2)) + ## [1.0.0-brc-20.3](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.2...v1.0.0-brc-20.3) (2023-08-24) From 9a9c5de7ff76557e2728b726363d68833d05a689 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 25 Aug 2023 10:35:13 -0600 Subject: [PATCH 44/56] fix: change uniqueness constraint in locations table --- migrations/1692980393413_locations-unique.ts | 20 ++++++++++++++++++++ src/pg/pg-store.ts | 7 +++---- tests/brc20.test.ts | 14 +++++++------- 3 files changed, 30 insertions(+), 11 deletions(-) create mode 100644 migrations/1692980393413_locations-unique.ts diff --git a/migrations/1692980393413_locations-unique.ts b/migrations/1692980393413_locations-unique.ts new file mode 100644 index 00000000..704d87b5 --- /dev/null +++ b/migrations/1692980393413_locations-unique.ts @@ -0,0 +1,20 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.dropConstraint('locations', 'locations_output_offset_unique'); + pgm.createIndex('locations', ['output', 'offset']); + pgm.createConstraint( + 'locations', + 'locations_inscription_id_block_height_tx_index_unique', + 'UNIQUE(inscription_id, block_height, tx_index)' + ); +} + +export function down(pgm: MigrationBuilder): void { + pgm.dropConstraint('locations', 'locations_inscription_id_block_height_tx_index_unique'); + pgm.dropIndex('locations', ['output', 'offset']); + pgm.createConstraint('locations', 'locations_output_offset_unique', 'UNIQUE(output, "offset")'); +} diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index a00b2cbc..8c6beb25 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -625,15 +625,14 @@ export class PgStore extends BasePgStore { })); const locations = await sql` INSERT INTO locations ${sql(locationData)} - ON CONFLICT ON CONSTRAINT locations_output_offset_unique DO UPDATE SET - inscription_id = EXCLUDED.inscription_id, + ON CONFLICT ON CONSTRAINT locations_inscription_id_block_height_tx_index_unique DO UPDATE SET 
genesis_id = EXCLUDED.genesis_id, - block_height = EXCLUDED.block_height, block_hash = EXCLUDED.block_hash, tx_id = EXCLUDED.tx_id, - tx_index = EXCLUDED.tx_index, address = EXCLUDED.address, value = EXCLUDED.value, + output = EXCLUDED.output, + "offset" = EXCLUDED.offset, timestamp = EXCLUDED.timestamp RETURNING inscription_id, id AS location_id, block_height, tx_index, address `; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 93dea504..86dbda5d 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -1748,8 +1748,8 @@ describe('BRC-20', () => { tick: 'PEER', max: '21000000', }, - number: 5, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + number: 6, + tx_id: 'ff2aaeff6889211369305e6367180fc58a12c57fc784b68fa6a6741943db3863', address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', }) ) @@ -1768,8 +1768,8 @@ describe('BRC-20', () => { tick: 'ABCD', max: '21000000', }, - number: 5, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + number: 7, + tx_id: '7ab47806eb20100417a0450244a034bd49062e1809713a0b1d31a61b07b9e434', address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', }) ) @@ -1778,7 +1778,7 @@ describe('BRC-20', () => { await db.updateInscriptions( new TestChainhookPayloadBuilder() .apply() - .block({ height: 775619 }) + .block({ height: 775620 }) .transaction({ hash: randomHash() }) .inscriptionRevealed( brc20Reveal({ @@ -1788,8 +1788,8 @@ describe('BRC-20', () => { tick: 'DCBA', max: '21000000', }, - number: 5, - tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', + number: 8, + tx_id: '0b3b160765698aa809bcb948d1fd2d939dc372340439e4bf840d42a07bf41975', address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', }) ) From 78c38005b59662a28f517edb712fc2762e385271 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Fri, 25 Aug 2023 16:38:24 +0000 Subject: [PATCH 45/56] chore(release): 1.0.0-brc-20.5 [skip ci] ## [1.0.0-brc-20.5](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.4...v1.0.0-brc-20.5) (2023-08-25) ### Bug Fixes * change uniqueness constraint in locations table ([9a9c5de](https://github.com/hirosystems/ordinals-api/commit/9a9c5de7ff76557e2728b726363d68833d05a689)) --- CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5a92287f..06dc1823 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,10 @@ +## [1.0.0-brc-20.5](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.4...v1.0.0-brc-20.5) (2023-08-25) + + +### Bug Fixes + +* change uniqueness constraint in locations table ([9a9c5de](https://github.com/hirosystems/ordinals-api/commit/9a9c5de7ff76557e2728b726363d68833d05a689)) + ## [1.0.0-brc-20.4](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.3...v1.0.0-brc-20.4) (2023-08-25) From bef5f23891b0a041bc27f54e507fead928306c95 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Sat, 26 Aug 2023 17:04:59 -0600 Subject: [PATCH 46/56] fix: place a cap on max insertion size --- src/pg/helpers.ts | 12 ++++++++++++ src/pg/pg-store.ts | 5 +++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/pg/helpers.ts b/src/pg/helpers.ts index 2f812e9f..86cc1988 100644 --- a/src/pg/helpers.ts +++ b/src/pg/helpers.ts @@ -37,3 +37,15 @@ export function throwOnFirstRejected(settles: { } return values; } + +/** + * Divides array into equal chunks + * @param arr - Array + * @param chunkSize - Chunk size + * @returns Array of 
arrays + */ +export function chunkArray(arr: T[], chunkSize: number): T[][] { + const result: T[][] = []; + for (let i = 0; i < arr.length; i += chunkSize) result.push(arr.slice(i, i + chunkSize)); + return result; +} diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 8c6beb25..bc1680c7 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -17,7 +17,7 @@ import { ENV } from '../env'; import { Brc20PgStore } from './brc20/brc20-pg-store'; import { CountsPgStore } from './counts/counts-pg-store'; import { getIndexResultCountType } from './counts/helpers'; -import { getInscriptionRecursion } from './helpers'; +import { chunkArray, getInscriptionRecursion } from './helpers'; import { DbFullyLocatedInscriptionResult, DbInscription, @@ -219,7 +219,8 @@ export class PgStore extends BasePgStore { } } } - await this.insertInscriptions(writes); + for (const writeChunk of chunkArray(writes, 5000)) + await this.insertInscriptions(writeChunk); updatedBlockHeightMin = Math.min(updatedBlockHeightMin, event.block_identifier.index); if (ENV.BRC20_BLOCK_SCAN_ENABLED) await this.brc20.scanBlocks(event.block_identifier.index, event.block_identifier.index); From 04f266604f3438ce3fffd3a76df266d010f4d5d7 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Sat, 26 Aug 2023 23:08:14 +0000 Subject: [PATCH 47/56] chore(release): 1.0.0-brc-20.6 [skip ci] ## [1.0.0-brc-20.6](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.5...v1.0.0-brc-20.6) (2023-08-26) ### Bug Fixes * place a cap on max insertion size ([bef5f23](https://github.com/hirosystems/ordinals-api/commit/bef5f23891b0a041bc27f54e507fead928306c95)) --- CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 06dc1823..22799543 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,10 @@ +## [1.0.0-brc-20.6](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.5...v1.0.0-brc-20.6) (2023-08-26) + + +### Bug Fixes + +* place a cap on max insertion size ([bef5f23](https://github.com/hirosystems/ordinals-api/commit/bef5f23891b0a041bc27f54e507fead928306c95)) + ## [1.0.0-brc-20.5](https://github.com/hirosystems/ordinals-api/compare/v1.0.0-brc-20.4...v1.0.0-brc-20.5) (2023-08-25) From 71ce1a52760c2e101c255e7d691653c180cc73c8 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Sun, 27 Aug 2023 19:50:06 -0600 Subject: [PATCH 48/56] fix: guard against empty recursion refs --- src/pg/pg-store.ts | 38 +++++++++++++++++++------------------- tests/inscriptions.test.ts | 32 ++++++++++++++++++++++++++++++-- 2 files changed, 49 insertions(+), 21 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index bc1680c7..6759972c 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -219,6 +219,8 @@ export class PgStore extends BasePgStore { } } } + // Divide insertion array into chunks of 5000 in order to avoid the postgres limit of 65534 + // query params. for (const writeChunk of chunkArray(writes, 5000)) await this.insertInscriptions(writeChunk); updatedBlockHeightMin = Math.min(updatedBlockHeightMin, event.block_identifier.index); @@ -857,24 +859,22 @@ export class PgStore extends BasePgStore { private async updateInscriptionRecursions(reveals: DbRevealInsert[]): Promise { if (reveals.length === 0) return; - // TODO: Gap fills may make us miss some recursion refs because they will not appear in this - // query. 
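As an aside, the chunking introduced above exists because a single Postgres statement accepts at most 65,534 bind parameters, so large reveal batches have to be split before they are turned into one bulk INSERT. A minimal sketch of how the chunkArray helper behaves, using an illustrative row shape, row count, and import path (all of these are assumptions made only for the example):

// Path assumes the snippet sits next to src/pg/helpers.ts.
import { chunkArray } from './helpers';

// With roughly a dozen bound columns per inscription row, a 5,000-row chunk stays
// comfortably below the 65,534-parameter ceiling.
const rows = Array.from({ length: 12_345 }, (_, i) => ({ id: i }));
const chunks = chunkArray(rows, 5_000);
console.log(chunks.map(c => c.length)); // [5000, 5000, 2345]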
- const inserts: { - inscription_id: postgres.PendingQuery; - ref_inscription_id: postgres.PendingQuery; - }[] = []; - for (const i of reveals) - if (i.inscription && i.recursive_refs?.length) - for (const r of i.recursive_refs) - inserts.push({ - inscription_id: this - .sql`(SELECT id FROM inscriptions WHERE genesis_id = ${i.inscription?.genesis_id})`, - ref_inscription_id: this.sql`(SELECT id FROM inscriptions WHERE genesis_id = ${r})`, - }); - if (inserts.length === 0) return; - await this.sql` - INSERT INTO inscription_recursions ${this.sql(inserts)} - ON CONFLICT ON CONSTRAINT inscriptions_inscription_id_ref_inscription_id_unique DO NOTHING - `; + await this.sqlWriteTransaction(async sql => { + // TODO: Gap fills may make us miss some recursion refs because they will not appear in this + // query. + for (const i of reveals) + if (i.inscription && i.recursive_refs?.length) + await sql` + WITH from_i AS ( + SELECT id FROM inscriptions WHERE genesis_id = ${i.inscription.genesis_id} + ), + to_i AS ( + SELECT id FROM inscriptions WHERE genesis_id IN ${sql(i.recursive_refs)} + ) + INSERT INTO inscription_recursions (inscription_id, ref_inscription_id) + (SELECT from_i.id, to_i.id FROM from_i, to_i WHERE to_i IS NOT NULL) + ON CONFLICT ON CONSTRAINT inscriptions_inscription_id_ref_inscription_id_unique DO NOTHING + `; + }); } } diff --git a/tests/inscriptions.test.ts b/tests/inscriptions.test.ts index 93e767bb..919a3928 100644 --- a/tests/inscriptions.test.ts +++ b/tests/inscriptions.test.ts @@ -160,6 +160,27 @@ describe('/inscriptions', () => { inscription_input_index: 0, transfers_pre_inscription: 0, }) + .transaction({ + hash: '0xf351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421', + }) + .inscriptionRevealed({ + content_bytes: '0x48656C6C6F', + content_type: 'image/png', + content_length: 5, + inscription_number: 189, + inscription_fee: 2805, + inscription_id: 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0', + inscription_output_value: 10000, + inscriber_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td', + ordinal_number: 257418248345364, + ordinal_block_height: 51483, + ordinal_offset: 0, + satpoint_post_inscription: + 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421:0:0', + inscription_input_index: 0, + transfers_pre_inscription: 0, + tx_index: 0, + }) .build() ); await db.updateInscriptions( @@ -174,8 +195,12 @@ describe('/inscriptions', () => { hash: '0x38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', }) .inscriptionRevealed({ + // Include inscription not in DB content_bytes: `0x${Buffer.from( - 'Hello /content/9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0' + `Hello + /content/9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0 + /content/f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0 + /content/b4b27b9a15f928b95a8ce4b418946553b7b313a345254cd9b23d79489175fa5ai0` ).toString('hex')}`, content_type: 'image/png', content_length: 5, @@ -219,7 +244,10 @@ describe('/inscriptions', () => { genesis_tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc', curse_type: null, recursive: true, - recursion_refs: ['9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0'], + recursion_refs: [ + '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0', + 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0', + ], }; // By inscription id From ab2f7bfc475f2399f72647aeac23b4b9a62d0905 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Mon, 28 Aug 2023 09:17:01 -0600 Subject: [PATCH 49/56] fix: optimize inscription backfill indexes (#197) --- ...1693234572099_locations-remove-duplicate-index.ts | 12 ++++++++++++ ...3234845450_locations-null-inscription-id-index.ts | 8 ++++++++ 2 files changed, 20 insertions(+) create mode 100644 migrations/1693234572099_locations-remove-duplicate-index.ts create mode 100644 migrations/1693234845450_locations-null-inscription-id-index.ts diff --git a/migrations/1693234572099_locations-remove-duplicate-index.ts b/migrations/1693234572099_locations-remove-duplicate-index.ts new file mode 100644 index 00000000..e8103544 --- /dev/null +++ b/migrations/1693234572099_locations-remove-duplicate-index.ts @@ -0,0 +1,12 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.dropIndex('locations', ['inscription_id'], { ifExists: true }); +} + +export function down(pgm: MigrationBuilder): void { + pgm.createIndex('locations', ['inscription_id'], { ifNotExists: true }); +} diff --git a/migrations/1693234845450_locations-null-inscription-id-index.ts b/migrations/1693234845450_locations-null-inscription-id-index.ts new file mode 100644 index 00000000..c522d1c3 --- /dev/null +++ b/migrations/1693234845450_locations-null-inscription-id-index.ts @@ -0,0 +1,8 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createIndex('locations', ['inscription_id'], { where: 'inscription_id IS NULL' }); +} From 63571eeb5459165a90cc6b45d35f5f50adb7df79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Mon, 28 Aug 2023 12:31:17 -0600 Subject: [PATCH 50/56] fix: add recursion backfill and temporary skip (#198) --- .../1693235147508_recursion-backfills.ts | 51 ++++++++++++++++ src/pg/pg-store.ts | 59 +++++++++++-------- tests/inscriptions.test.ts | 43 ++++++++++++++ 3 files changed, 128 insertions(+), 25 deletions(-) create mode 100644 migrations/1693235147508_recursion-backfills.ts diff --git a/migrations/1693235147508_recursion-backfills.ts b/migrations/1693235147508_recursion-backfills.ts new file mode 100644 index 00000000..d4fea843 --- /dev/null +++ b/migrations/1693235147508_recursion-backfills.ts @@ -0,0 +1,51 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.addColumn('inscription_recursions', { + ref_inscription_genesis_id: { + type: 'text', + }, + }); + pgm.sql(` + UPDATE inscription_recursions AS ir + SET ref_inscription_genesis_id = ( + SELECT genesis_id FROM inscriptions WHERE id = ir.ref_inscription_id + ) + `); + pgm.alterColumn('inscription_recursions', 'ref_inscription_genesis_id', { notNull: true }); + pgm.alterColumn('inscription_recursions', 'ref_inscription_id', { allowNull: true }); + + pgm.createIndex('inscription_recursions', ['ref_inscription_genesis_id']); + pgm.createIndex('inscription_recursions', ['ref_inscription_id'], { + where: 'ref_inscription_id IS NULL', + name: 
'inscription_recursions_ref_inscription_id_null_index', + }); + pgm.dropConstraint( + 'inscription_recursions', + 'inscriptions_inscription_id_ref_inscription_id_unique' + ); + pgm.createConstraint( + 'inscription_recursions', + 'inscription_recursions_unique', + 'UNIQUE(inscription_id, ref_inscription_genesis_id)' + ); +} + +export function down(pgm: MigrationBuilder): void { + pgm.dropConstraint('inscription_recursions', 'inscription_recursions_unique'); + pgm.dropIndex('inscription_recursions', ['ref_inscription_genesis_id']); + pgm.dropColumn('inscription_recursions', 'ref_inscription_genesis_id'); + pgm.dropIndex('inscription_recursions', ['ref_inscription_id'], { + name: 'inscription_recursions_ref_inscription_id_null_index', + }); + pgm.sql(`DELETE FROM inscription_recursions WHERE ref_inscription_id IS NULL`); + pgm.alterColumn('inscription_recursions', 'ref_inscription_id', { notNull: true }); + pgm.createConstraint( + 'inscription_recursions', + 'inscriptions_inscription_id_ref_inscription_id_unique', + 'UNIQUE(inscription_id, ref_inscription_id)' + ); +} diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 6759972c..6b1c44b7 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -645,10 +645,13 @@ export class PgStore extends BasePgStore { SET updated_at = NOW() WHERE genesis_id IN ${sql([...transferGenesisIds])} `; - await this.updateInscriptionLocationPointers(locations); await this.updateInscriptionRecursions(writes); - await this.backfillOrphanLocations(); - await this.counts.applyInscriptions(inscriptions); + if (ENV.BRC20_BLOCK_SCAN_ENABLED) { + // TODO: Temporary + await this.backfillOrphanLocations(); + await this.updateInscriptionLocationPointers(locations); + await this.counts.applyInscriptions(inscriptions); + } for (const reveal of writes) { const action = reveal.inscription ? `reveal #${reveal.inscription.number}` : `transfer`; logger.info( @@ -819,11 +822,18 @@ export class PgStore extends BasePgStore { } private async backfillOrphanLocations(): Promise { - await this.sql` - UPDATE locations AS l - SET inscription_id = (SELECT id FROM inscriptions WHERE genesis_id = l.genesis_id) - WHERE l.inscription_id IS NULL - `; + await this.sqlWriteTransaction(async sql => { + await sql` + UPDATE locations AS l + SET inscription_id = (SELECT id FROM inscriptions WHERE genesis_id = l.genesis_id) + WHERE l.inscription_id IS NULL + `; + await sql` + UPDATE inscription_recursions AS l + SET ref_inscription_id = (SELECT id FROM inscriptions WHERE genesis_id = l.ref_inscription_genesis_id) + WHERE l.ref_inscription_id IS NULL + `; + }); } private async recalculateCurrentLocationPointerFromLocationRollBack(args: { @@ -859,22 +869,21 @@ export class PgStore extends BasePgStore { private async updateInscriptionRecursions(reveals: DbRevealInsert[]): Promise { if (reveals.length === 0) return; - await this.sqlWriteTransaction(async sql => { - // TODO: Gap fills may make us miss some recursion refs because they will not appear in this - // query. 
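The rewritten updateInscriptionRecursions that follows fills the two id columns with inline subqueries, relying on postgres.js accepting query fragments as values inside its bulk-insert helper, and records ref_inscription_genesis_id so a recursion row can be written before the referenced inscription has been indexed and backfilled later. A minimal standalone sketch of that pattern; the connection string and genesis ids are placeholders, not values from the patch:

import postgres from 'postgres';

const sql = postgres('postgres://localhost:5432/ordinals'); // assumed local database
const rows = [
  {
    inscription_id: sql`(SELECT id FROM inscriptions WHERE genesis_id = ${'GENESIS_A'})`,
    ref_inscription_id: sql`(SELECT id FROM inscriptions WHERE genesis_id = ${'GENESIS_B'})`,
    ref_inscription_genesis_id: 'GENESIS_B',
  },
];
// Run inside an async context; the unique constraint absorbs repeated writes.
await sql`
  INSERT INTO inscription_recursions ${sql(rows)}
  ON CONFLICT ON CONSTRAINT inscription_recursions_unique DO NOTHING
`;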
- for (const i of reveals) - if (i.inscription && i.recursive_refs?.length) - await sql` - WITH from_i AS ( - SELECT id FROM inscriptions WHERE genesis_id = ${i.inscription.genesis_id} - ), - to_i AS ( - SELECT id FROM inscriptions WHERE genesis_id IN ${sql(i.recursive_refs)} - ) - INSERT INTO inscription_recursions (inscription_id, ref_inscription_id) - (SELECT from_i.id, to_i.id FROM from_i, to_i WHERE to_i IS NOT NULL) - ON CONFLICT ON CONSTRAINT inscriptions_inscription_id_ref_inscription_id_unique DO NOTHING - `; - }); + const inserts = []; + for (const i of reveals) + if (i.inscription && i.recursive_refs?.length) + for (const ref of i.recursive_refs) + inserts.push({ + inscription_id: this + .sql`(SELECT id FROM inscriptions WHERE genesis_id = ${i.inscription.genesis_id})`, + ref_inscription_id: this.sql`(SELECT id FROM inscriptions WHERE genesis_id = ${ref})`, + ref_inscription_genesis_id: ref, + }); + if (inserts.length === 0) return; + await this.sql` + INSERT INTO inscription_recursions ${this.sql(inserts)} + ON CONFLICT ON CONSTRAINT inscription_recursions_unique DO UPDATE SET + ref_inscription_id = EXCLUDED.ref_inscription_id + `; } } diff --git a/tests/inscriptions.test.ts b/tests/inscriptions.test.ts index 919a3928..61624951 100644 --- a/tests/inscriptions.test.ts +++ b/tests/inscriptions.test.ts @@ -265,6 +265,49 @@ describe('/inscriptions', () => { }); expect(response2.statusCode).toBe(200); expect(response2.json()).toStrictEqual(expected); + + // Backfill new inscription + await db.updateInscriptions( + new TestChainhookPayloadBuilder() + .apply() + .block({ + height: 778600, + hash: '000000000000000000043b10697970720b44c79f6ca2dd604cc83cc015e0c459', + timestamp: 1676913207, + }) + .transaction({ + hash: 'b4b27b9a15f928b95a8ce4b418946553b7b313a345254cd9b23d79489175fa5a', + }) + .inscriptionRevealed({ + content_bytes: `0x${Buffer.from('World').toString('hex')}`, + content_type: 'text/plain;charset=utf-8', + content_length: 5, + inscription_number: 200, + inscription_fee: 705, + inscription_id: 'b4b27b9a15f928b95a8ce4b418946553b7b313a345254cd9b23d79489175fa5ai0', + inscription_output_value: 10000, + inscriber_address: 'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj', + ordinal_number: 257418248345364, + ordinal_block_height: 650000, + ordinal_offset: 0, + satpoint_post_inscription: + 'b4b27b9a15f928b95a8ce4b418946553b7b313a345254cd9b23d79489175fa5a:0:0', + tx_index: 0, + inscription_input_index: 0, + transfers_pre_inscription: 0, + }) + .build() + ); + const response3 = await fastify.inject({ + method: 'GET', + url: '/ordinals/v1/inscriptions/38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0', + }); + expect(response3.statusCode).toBe(200); + expect(response3.json().recursion_refs).toStrictEqual([ + '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0', + 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0', + 'b4b27b9a15f928b95a8ce4b418946553b7b313a345254cd9b23d79489175fa5ai0', + ]); }); test('shows inscription with null genesis address', async () => { From 8f973a30862f5aa869709dcaeec434cbe43903c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Mon, 28 Aug 2023 15:12:23 -0600 Subject: [PATCH 51/56] fix: dont update cache timestamp during ingestion (#200) --- src/pg/pg-store.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 6b1c44b7..cb5d9ae1 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -639,15 
+639,15 @@ export class PgStore extends BasePgStore { timestamp = EXCLUDED.timestamp RETURNING inscription_id, id AS location_id, block_height, tx_index, address `; - if (transferGenesisIds.size) - await sql` - UPDATE inscriptions - SET updated_at = NOW() - WHERE genesis_id IN ${sql([...transferGenesisIds])} - `; await this.updateInscriptionRecursions(writes); if (ENV.BRC20_BLOCK_SCAN_ENABLED) { // TODO: Temporary + if (transferGenesisIds.size) + await sql` + UPDATE inscriptions + SET updated_at = NOW() + WHERE genesis_id IN ${sql([...transferGenesisIds])} + `; await this.backfillOrphanLocations(); await this.updateInscriptionLocationPointers(locations); await this.counts.applyInscriptions(inscriptions); From 9c8508b1e5ef7b21e99c4174b72c4b98b7990d23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Mon, 28 Aug 2023 15:20:11 -0600 Subject: [PATCH 52/56] fix: do not insert repeated recursions (#199) * fix: do not insert repeated recursions * chore: remove custom launch --- src/pg/pg-store.ts | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index cb5d9ae1..32ff5f89 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -871,19 +871,20 @@ export class PgStore extends BasePgStore { if (reveals.length === 0) return; const inserts = []; for (const i of reveals) - if (i.inscription && i.recursive_refs?.length) - for (const ref of i.recursive_refs) + if (i.inscription && i.recursive_refs?.length) { + const refSet = new Set(i.recursive_refs); + for (const ref of refSet) inserts.push({ inscription_id: this .sql`(SELECT id FROM inscriptions WHERE genesis_id = ${i.inscription.genesis_id})`, ref_inscription_id: this.sql`(SELECT id FROM inscriptions WHERE genesis_id = ${ref})`, ref_inscription_genesis_id: ref, }); + } if (inserts.length === 0) return; await this.sql` INSERT INTO inscription_recursions ${this.sql(inserts)} - ON CONFLICT ON CONSTRAINT inscription_recursions_unique DO UPDATE SET - ref_inscription_id = EXCLUDED.ref_inscription_id + ON CONFLICT ON CONSTRAINT inscription_recursions_unique DO NOTHING `; } } From 98d69f92a3c272abb07bbefe619b02d44f127dee Mon Sep 17 00:00:00 2001 From: Dean Chi <21262275+deantchi@users.noreply.github.com> Date: Mon, 28 Aug 2023 14:45:25 -0700 Subject: [PATCH 53/56] chore: lock commit-analyzer to 9.0.2; rm dup plugins in releaserc file --- .github/workflows/ci.yml | 1 + .releaserc | 16 ++-------------- 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6925ca45..7181f2ff 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -131,6 +131,7 @@ jobs: @semantic-release/changelog @semantic-release/git conventional-changelog-conventionalcommits + @semantic-release/commit-analyzer@v9.0.2 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 diff --git a/.releaserc b/.releaserc index 55c945bd..c316270d 100644 --- a/.releaserc +++ b/.releaserc @@ -19,19 +19,7 @@ ], "plugins": [ [ - "@semantic-release/commit-analyzer", - { - "preset": "conventionalcommits" - } - ], - [ - "@semantic-release/release-notes-generator", - { - "preset": "conventionalcommits" - } - ], - "@semantic-release/github", - "@semantic-release/changelog", - "@semantic-release/git" + "@semantic-release/release-notes-generator" + ] ] } From a88086463143bb84f00732417d2d2d268bdd5c09 Mon Sep 17 00:00:00 2001 From: Dean Chi <21262275+deantchi@users.noreply.github.com> Date: Mon, 28 Aug 2023 14:54:55 -0700 Subject: [PATCH 
54/56] chore: revert releaserc file --- .releaserc | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/.releaserc b/.releaserc index c316270d..55c945bd 100644 --- a/.releaserc +++ b/.releaserc @@ -19,7 +19,19 @@ ], "plugins": [ [ - "@semantic-release/release-notes-generator" - ] + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits" + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits" + } + ], + "@semantic-release/github", + "@semantic-release/changelog", + "@semantic-release/git" ] } From 4ebc10652a345353ec58afc8a09893564e28d94e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20C=C3=A1rdenas?= Date: Tue, 29 Aug 2023 09:20:26 -0600 Subject: [PATCH 55/56] fix: split recursion insertion into chunks (#201) --- src/pg/pg-store.ts | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 32ff5f89..5b0cd487 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -41,6 +41,7 @@ import { export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); type InscriptionIdentifier = { genesis_id: string } | { number: number }; +type PgQueryFragment = postgres.PendingQuery; // TODO: Move to api-toolkit export class PgStore extends BasePgStore { readonly brc20: Brc20PgStore; @@ -869,7 +870,11 @@ export class PgStore extends BasePgStore { private async updateInscriptionRecursions(reveals: DbRevealInsert[]): Promise { if (reveals.length === 0) return; - const inserts = []; + const inserts: { + inscription_id: PgQueryFragment; + ref_inscription_id: PgQueryFragment; + ref_inscription_genesis_id: string; + }[] = []; for (const i of reveals) if (i.inscription && i.recursive_refs?.length) { const refSet = new Set(i.recursive_refs); @@ -882,9 +887,12 @@ export class PgStore extends BasePgStore { }); } if (inserts.length === 0) return; - await this.sql` - INSERT INTO inscription_recursions ${this.sql(inserts)} - ON CONFLICT ON CONSTRAINT inscription_recursions_unique DO NOTHING - `; + await this.sqlWriteTransaction(async sql => { + for (const chunk of chunkArray(inserts, 500)) + await sql` + INSERT INTO inscription_recursions ${sql(chunk)} + ON CONFLICT ON CONSTRAINT inscription_recursions_unique DO NOTHING + `; + }); } } From 2ade47f19ec1845cd217ea2e759fab3bde2b6145 Mon Sep 17 00:00:00 2001 From: janniks Date: Wed, 30 Aug 2023 03:01:19 +0200 Subject: [PATCH 56/56] fix: add brc20 stats view --- migrations/1693353005000_brc20-stats-view.ts | 34 ++++++++++++++++++++ src/pg/brc20/brc20-pg-store.ts | 7 ++-- src/pg/pg-store.ts | 5 ++- tests/brc20.test.ts | 9 ++++++ tests/stats.test.ts | 2 -- 5 files changed, 52 insertions(+), 5 deletions(-) create mode 100644 migrations/1693353005000_brc20-stats-view.ts diff --git a/migrations/1693353005000_brc20-stats-view.ts b/migrations/1693353005000_brc20-stats-view.ts new file mode 100644 index 00000000..f8ed14a2 --- /dev/null +++ b/migrations/1693353005000_brc20-stats-view.ts @@ -0,0 +1,34 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createMaterializedView( + 'brc20_stats', + { data: true }, + ` + WITH balances AS ( + SELECT brc20_deploy_id, address, SUM(avail_balance + trans_balance) AS balance + FROM brc20_balances + GROUP BY brc20_deploy_id, address + ), holders AS ( + SELECT 
brc20_deploy_id, COUNT(*) AS count + FROM balances + WHERE balance > 0 + GROUP BY brc20_deploy_id + ), transactions AS ( + SELECT brc20_deploy_id, COUNT(*) AS count + FROM brc20_events + GROUP BY brc20_deploy_id + ) + SELECT + brc20_deploy_id, + t.count AS tx_count, + h.count AS holder_count + FROM transactions AS t + LEFT JOIN holders AS h USING (brc20_deploy_id) + ` + ); + pgm.createIndex('brc20_stats', ['brc20_deploy_id'], { unique: true }); +} diff --git a/src/pg/brc20/brc20-pg-store.ts b/src/pg/brc20/brc20-pg-store.ts index a2423497..2ff34810 100644 --- a/src/pg/brc20/brc20-pg-store.ts +++ b/src/pg/brc20/brc20-pg-store.ts @@ -110,12 +110,15 @@ export class Brc20PgStore { const results = await this.sql<(DbBrc20Token & { total: number })[]>` SELECT d.id, i.genesis_id, i.number, d.block_height, d.tx_id, d.address, d.ticker, d.max, d.limit, - d.decimals, l.timestamp as deploy_timestamp, COALESCE(s.minted_supply, 0) as minted_supply, COUNT(*) OVER() as total + d.decimals, l.timestamp as deploy_timestamp, COALESCE(st.tx_count, 0) AS tx_count, + COALESCE(st.holder_count, 0) AS holder_count, COALESCE(s.minted_supply, 0) AS minted_supply, + COUNT(*) OVER() as total FROM brc20_deploys AS d INNER JOIN inscriptions AS i ON i.id = d.inscription_id INNER JOIN genesis_locations AS g ON g.inscription_id = d.inscription_id INNER JOIN locations AS l ON l.id = g.location_id - LEFT JOIN brc20_supplies AS s ON d.id = s.brc20_deploy_id + LEFT JOIN brc20_stats AS st ON st.brc20_deploy_id = d.id + LEFT JOIN brc20_supplies AS s ON s.brc20_deploy_id = d.id ${tickerPrefixCondition ? this.sql`WHERE ${tickerPrefixCondition}` : this.sql``} OFFSET ${args.offset} LIMIT ${args.limit} diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 5b0cd487..99b7234d 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -235,7 +235,10 @@ export class PgStore extends BasePgStore { // We'll issue materialized view refreshes in parallel. We will not wait for them to finish so // we can respond to the chainhook node with a `200` HTTP code as soon as possible. const views = [this.normalizeInscriptionCount({ min_block_height: updatedBlockHeightMin })]; - if (ENV.BRC20_BLOCK_SCAN_ENABLED) views.push(this.refreshMaterializedView('brc20_supplies')); + if (ENV.BRC20_BLOCK_SCAN_ENABLED) { + views.push(this.refreshMaterializedView('brc20_supplies')); + views.push(this.refreshMaterializedView('brc20_stats')); + } const viewRefresh = Promise.allSettled(views); // Only wait for these on tests. 
if (isTestEnv) await viewRefresh; diff --git a/tests/brc20.test.ts b/tests/brc20.test.ts index 86dbda5d..151a2e49 100644 --- a/tests/brc20.test.ts +++ b/tests/brc20.test.ts @@ -5,6 +5,8 @@ import { MIGRATIONS_DIR, PgStore } from '../src/pg/pg-store'; import { DbInscriptionInsert } from '../src/pg/types'; import { TestChainhookPayloadBuilder, TestFastifyServer, brc20Reveal, randomHash } from './helpers'; +jest.setTimeout(100_000); + describe('BRC-20', () => { let db: PgStore; let fastify: TestFastifyServer; @@ -1712,6 +1714,13 @@ describe('BRC-20', () => { url: `/ordinals/brc-20/tokens/PEPE`, }); expect(response.statusCode).toBe(200); + const responseJson = response.json(); + expect(responseJson.total).toBe(1); + expect(responseJson.results).toEqual( + expect.arrayContaining([ + expect.objectContaining({ ticker: 'PEPE', holder_count: 0, tx_count: 0 }), + ]) + ); }); test('filter tickers by ticker prefix', async () => { diff --git a/tests/stats.test.ts b/tests/stats.test.ts index 1d4bc4cf..90097703 100644 --- a/tests/stats.test.ts +++ b/tests/stats.test.ts @@ -3,8 +3,6 @@ import { buildApiServer } from '../src/api/init'; import { MIGRATIONS_DIR, PgStore } from '../src/pg/pg-store'; import { TestChainhookPayloadBuilder, TestFastifyServer, randomHash } from './helpers'; -jest.setTimeout(100_000_000); - describe('/stats', () => { let db: PgStore; let fastify: TestFastifyServer;
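With the brc20_stats materialized view in place, the token listing query shown above returns holder_count and tx_count alongside minted_supply. A small usage sketch against a locally running API; the origin and port are assumptions, while the URL shape and response fields come from the tests:

// Node 18+ ships a global fetch; run inside an async context.
const res = await fetch('http://localhost:3000/ordinals/brc-20/tokens/PEPE');
const { total, results } = await res.json();
console.log(total, results[0]?.ticker, results[0]?.holder_count, results[0]?.tx_count);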