diff --git a/README.md b/README.md
index c9852d14..b19c3261 100644
--- a/README.md
+++ b/README.md
@@ -41,7 +41,7 @@ npm run docker:
 - postgres
 - graphql-engine
 - crawler
-- phragmen
+- phragmen (temporarily disabled)
 
 ## Updating containers
@@ -60,3 +60,134 @@ The crawler is able to detect and fill the gaps in postgres database by harvesti
 ## Phragmen
 
 This container includes an offline-phragmen binary. It is a forked modification of [Kianenigma](https://github.com/kianenigma/offline-phragmen) repository.
+
+## Hasura demo
+
+The crawler needs to wait for your substrate-node container to get synced before starting to collect data. You can use an already synced external RPC for instant testing by changing the environment variable WS_PROVIDER_URL in the `docker-compose.yml` file:
+
+```yaml
+crawler:
+  image: polkastats-backend:latest
+  build:
+    context: ../../
+    dockerfile: ./docker/polkastats-backend/backend/Dockerfile
+  depends_on:
+    - "postgres"
+    - "substrate-node"
+  restart: on-failure
+  environment:
+    - NODE_ENV=production
+    - WS_PROVIDER_URL=wss://kusama-rpc.polkadot.io # Change this line
+```
+
+Just uncomment the first one, comment out the second, and rebuild the containers:
+
+```
+npm run docker:clean
+npm run docker
+```
+
+Then browse to http://localhost:8082
+
+Click on "Data" in the top menu
+
+![](images/hasura-data.png)
+
+Then add all tables to the tracking process
+
+![](images/hasura-track.png)
+
+From now on, Hasura will collect and track all the changes in the database.
+
+To check it out and see its power, you can start a new subscription or just run an example query such as this one:
+
+### Query examples (static)
+
+- Block query example:
+```graphql
+query {
+  block {
+    block_hash
+    block_author
+    block_number
+    block_author_name
+    current_era
+    current_index
+    new_accounts
+    session_length
+    session_per_era
+    session_progress
+  }
+}
+```
+
+- Rewards query example:
+```graphql
+query {
+  rewards {
+    era_index
+    era_rewards
+    stash_id
+    timestamp
+  }
+}
+```
+
+- Validator by number of nominators example:
+```graphql
+query {
+  validator_num_nominators {
+    block_number
+    nominators
+    timestamp
+  }
+}
+```
+
+- Account query example:
+```graphql
+query {
+  account {
+    account_id
+    balances
+    identity
+  }
+}
+```
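A quick way to exercise these static queries from Node is to POST them to Hasura's GraphQL endpoint with axios, which is already a dependency of this repo. A minimal sketch, assuming Hasura's default `/v1/graphql` path on the port exposed above and no admin secret configured:

```javascript
// Run one of the static queries above against the local Hasura instance.
const axios = require('axios');

const query = `
  query {
    block {
      block_number
      block_hash
    }
  }
`;

axios
  .post('http://localhost:8082/v1/graphql', { query })
  .then(res => console.log(JSON.stringify(res.data.data.block, null, 2)))
  .catch(err => console.error(err.message));
```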
+### Subscription examples (dynamic)
+
+- Block subscription example:
+```graphql
+subscription {
+  block {
+    block_number
+    block_hash
+    current_era
+    current_index
+  }
+}
+```
+
+- Validator active subscription example:
+```graphql
+subscription MySubscription {
+  validator_active {
+    account_id
+    active
+    block_number
+    session_index
+    timestamp
+  }
+}
+```
+
+- Account subscription example:
+```graphql
+subscription MySubscription {
+  account {
+    account_id
+    balances
+  }
+}
+```
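Subscriptions travel over a WebSocket to the same endpoint. A hedged sketch using the `ws` package (installed separately with `npm install ws`), assuming this Hasura version speaks the subscriptions-transport-ws protocol on the default `/v1/graphql` path; message shapes may differ on newer releases:

```javascript
// Stream new blocks from Hasura over a WebSocket subscription.
const WebSocket = require('ws');

const ws = new WebSocket('ws://localhost:8082/v1/graphql', 'graphql-ws');

ws.on('open', () => {
  // Handshake, then start the subscription from the README above.
  ws.send(JSON.stringify({ type: 'connection_init', payload: {} }));
  ws.send(JSON.stringify({
    id: '1',
    type: 'start',
    payload: { query: 'subscription { block { block_number block_hash } }' },
  }));
});

ws.on('message', raw => {
  const msg = JSON.parse(raw);
  if (msg.type === 'data') console.log(msg.payload.data.block);
});
```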
diff --git a/backend.config.js b/backend.config.js
index be452c07..54cf9b68 100644
--- a/backend.config.js
+++ b/backend.config.js
@@ -1,4 +1,3 @@
-// Also wss://kusama-rpc.polkadot.io
 const DEFAULT_WS_PROVIDER_URL = 'ws://substrate-node:9944';
 
 module.exports = {
@@ -35,7 +34,7 @@ module.exports = {
     enabled: true,
     module: require('./lib/crawlers/activeAccounts.js'),
     config: {
-      pollingTime: 1 * 60 * 1000,
+      pollingTime: 10 * 60 * 1000,
     },
   },
@@ -45,7 +44,7 @@ module.exports = {
   },
 
   {
-    enabled: true,
+    enabled: false,
     module: require('./lib/crawlers/phragmen.js'),
     config: {
       wsProviderUrl: process.env.WS_PROVIDER_URL || DEFAULT_WS_PROVIDER_URL,
diff --git a/docker/polkastats-backend/backend/Dockerfile b/docker/polkastats-backend/backend/Dockerfile
index 3131283b..5a58d12c 100644
--- a/docker/polkastats-backend/backend/Dockerfile
+++ b/docker/polkastats-backend/backend/Dockerfile
@@ -1,21 +1,24 @@
-FROM rust AS builder
-
-RUN mkdir -p /app; \
-    wget https://github.com/bigomby/offline-phragmen/archive/master.zip; \
-    unzip master.zip -d /app/
-
-WORKDIR /app/offline-phragmen-master
-
-RUN cargo build --release
+#FROM rust AS builder
+#
+#RUN mkdir -p /app; \
+#    wget https://github.com/bigomby/offline-phragmen/archive/master.zip; \
+#    unzip master.zip -d /app/
+#
+#WORKDIR /app/offline-phragmen-master
+#
+#RUN cargo build --release
 
 FROM node
 
 WORKDIR /usr/app/polkastats-backend-v3
 
-COPY --from=builder /app/offline-phragmen-master/target/release/offline-phragmen /usr/app/polkastats-backend-v3
+#COPY --from=builder /app/offline-phragmen-master/target/release/offline-phragmen /usr/app/polkastats-backend-v3
+
+RUN wget https://github.com/Bigomby/offline-phragmen/releases/download/0.1.0/offline-phragmen; \
+    chmod +x offline-phragmen
 
 COPY . /usr/app/polkastats-backend-v3
 
 RUN npm install
 
-CMD ["npm", "start"]
\ No newline at end of file
+CMD ["npm", "start"]
diff --git a/docker/polkastats-backend/docker-compose.yml b/docker/polkastats-backend/docker-compose.yml
index 432bb304..2f8faf2e 100644
--- a/docker/polkastats-backend/docker-compose.yml
+++ b/docker/polkastats-backend/docker-compose.yml
@@ -63,6 +63,7 @@ services:
     restart: on-failure
     environment:
       - NODE_ENV=production
+      - WS_PROVIDER_URL=ws://substrate-node:9944
 #
 # Persisten volumes
 #
diff --git a/docker/polkastats-backend/sql/polkastats.sql b/docker/polkastats-backend/sql/polkastats.sql
index 8ffd62f9..f695bcb4 100644
--- a/docker/polkastats-backend/sql/polkastats.sql
+++ b/docker/polkastats-backend/sql/polkastats.sql
@@ -119,8 +119,6 @@ CREATE TABLE IF NOT EXISTS validator_active (
 
 CREATE TABLE IF NOT EXISTS account (
   account_id VARCHAR(100) NOT NULL,
-  account_index VARCHAR(100) NOT NULL,
-  nickname VARCHAR(100) NOT NULL,
   identity TEXT NOT NULL,
   balances TEXT NOT NULL,
   timestamp BIGINT NOT NULL,
diff --git a/docker/polkastats-backend/substrate-client/Dockerfile b/docker/polkastats-backend/substrate-client/Dockerfile
index eb658066..73992e93 100644
--- a/docker/polkastats-backend/substrate-client/Dockerfile
+++ b/docker/polkastats-backend/substrate-client/Dockerfile
@@ -2,7 +2,7 @@ FROM phusion/baseimage:0.11
 LABEL maintainer "@ColmenaLabs_svq"
 LABEL description="Small image with the Substrate binary."
 
-ARG VERSION=v0.7.22
+ARG VERSION=v0.7.27
 
 RUN apt-get update && apt-get install wget curl jq -y
diff --git a/images/hasura-data.png b/images/hasura-data.png
new file mode 100644
index 00000000..88ae16d4
Binary files /dev/null and b/images/hasura-data.png differ
diff --git a/images/hasura-track.png b/images/hasura-track.png
new file mode 100644
index 00000000..9cecf2ea
Binary files /dev/null and b/images/hasura-track.png differ
diff --git a/lib/crawlers/activeAccounts.js b/lib/crawlers/activeAccounts.js
index fb5e0621..2f34204f 100644
--- a/lib/crawlers/activeAccounts.js
+++ b/lib/crawlers/activeAccounts.js
@@ -4,56 +4,45 @@ module.exports = {
     console.log(`[PolkaStats backend v3] - \x1b[32mStarting active accounts crawler...\x1b[0m`);
 
     // Fetch active accounts
-    const accounts = await api.derive.accounts.indexes();
+    const accountKeys = await api.query.system.account.keys()
+    const accounts = accountKeys.map(key => key.args[0].toHuman());
 
-    let accountsInfo = [];
+    console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mProcessing ${accounts.length} active accounts\x1b[0m`);
 
-    for (var key in accounts ) {
-      let accountId = key;
-      let accountIndex = accounts[key]
-      let accountInfo = await api.derive.accounts.info(accountId);
-      let identity = accountInfo.identity.display ? JSON.stringify(accountInfo.identity) : '';
-      let nickname = accountInfo.nickname ? accountInfo.nickname : '';
-      let balances = await api.derive.balances.all(accountId);
-      accountsInfo[accountId] = {
-        accountId,
-        accountIndex,
-        identity,
-        nickname,
-        balances
-      }
-      console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mProcessing account ${accountId}\x1b[0m`);
-    }
+    await accounts.forEach(async accountId => {
+
+      // console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mProcessing account ${accountId}\x1b[0m`);
+      const accountInfo = await api.derive.accounts.info(accountId);
+      const identity = accountInfo.identity.display ? JSON.stringify(accountInfo.identity) : ``;
+      const balances = await api.derive.balances.all(accountId);
+      const block = await api.rpc.chain.getBlock();
+      const blockNumber = block.block.header.number.toNumber();
+
+      let sql = `SELECT account_id FROM account WHERE account_id = '${accountId}'`;
+      let res = await pool.query(sql);
 
-    // Main loop
-    for (var key in accountsInfo ) {
-      if (accountsInfo.hasOwnProperty(key)) {
-        // console.log(key + " -> " + accounts[key]);
-        let sql = `SELECT account_id FROM account WHERE account_id = '${key}'`;
-        let res = await pool.query(sql);
-        const sqlBlockHeight = `SELECT block_number FROM block ORDER BY timestamp desc LIMIT 1`;
-        const resBlockHeight = await pool.query(sqlBlockHeight);
-        if (res.rows.length > 0) {
-          const timestamp = new Date().getTime();
-          sql = `UPDATE account SET account_index = '${accountsInfo[key].accountIndex}', nickname = '${accountsInfo[key].nickname}', identity = '${accountsInfo[key].identity}', balances = '${JSON.stringify(accountsInfo[key].balances)}', timestamp = '${timestamp}', block_height = '${resBlockHeight.rows[0].block_number}' WHERE account_id = '${key}'`;
-          try {
-            console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mUpdating account ${accountsInfo[key].accountIndex} [${key}]\x1b[0m`);
-            await pool.query(sql);
-          } catch (error) {
-            console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError updating account ${key}\x1b[0m`);
-          }
-        } else {
-          const timestamp = new Date().getTime();
-          sql = `INSERT INTO account (account_id, account_index, nickname, identity, balances, timestamp, block_height) VALUES ('${key}', '${accountsInfo[key].accountIndex}', '${accountsInfo[key].nickname}', '${accountsInfo[key].idenity}', '${JSON.stringify(accountsInfo[key].balances)}', '${timestamp}', '${resBlockHeight.rows[0].block_number}');`;
-          try {
-            console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mAdding account ${accountsInfo[key].accountIndex} [${key}]\x1b[0m`);
-            await pool.query(sql);
-          } catch (error) {
-            console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError adding new account ${key}\x1b[0m`);
-          }
-        }
+      if (res.rows.length > 0) {
+        const timestamp = new Date().getTime();
+        sql = `UPDATE account SET identity = '${identity}', balances = '${JSON.stringify(balances)}', timestamp = '${timestamp}', block_height = '${blockNumber}' WHERE account_id = '${accountId}'`;
+        try {
+          // console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mUpdating account ${accountId}\x1b[0m`);
+          await pool.query(sql);
+        } catch (error) {
+          console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError updating account ${accountId}\x1b[0m`);
+          console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError: ${error}\x1b[0m`);
+        }
+      } else {
+        const timestamp = new Date().getTime();
+        sql = `INSERT INTO account (account_id, identity, balances, timestamp, block_height) VALUES ('${accountId}', '${identity}', '${JSON.stringify(balances)}', '${timestamp}', '${blockNumber}');`;
+        try {
+          // console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mAdding account ${accountId}\x1b[0m`);
+          await pool.query(sql);
+        } catch (error) {
+          console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError adding new account ${accountId}\x1b[0m`);
+          console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError: ${error}\x1b[0m`);
+        }
       }
-    }
+    });
 
     setTimeout(
       () => module.exports.start(api, pool, config),
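One caveat with the new loop: `forEach(async ...)` kicks off every account lookup at once, and the leading `await` has no effect because `forEach` returns `undefined`, so the crawler can reschedule itself before the database writes finish. A sequential sketch with `for...of`, reusing the same names as the diff above:

```javascript
// Processes accounts one at a time; `api`, `pool` and `accounts` are the
// same objects as in lib/crawlers/activeAccounts.js above.
for (const accountId of accounts) {
  const accountInfo = await api.derive.accounts.info(accountId);
  const identity = accountInfo.identity.display ? JSON.stringify(accountInfo.identity) : '';
  const balances = await api.derive.balances.all(accountId);
  // ...same SELECT / UPDATE / INSERT logic as above...
}
```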
diff --git a/lib/crawlers/blockHarvester.js b/lib/crawlers/blockHarvester.js
index 206b2b63..0cf1c6f3 100644
--- a/lib/crawlers/blockHarvester.js
+++ b/lib/crawlers/blockHarvester.js
@@ -1,4 +1,5 @@
 // @ts-check
+const {BigNumber} = require('bignumber.js');
 const { shortHash } = require('../utils.js');
 
 module.exports = {
@@ -119,113 +120,129 @@ module.exports = {
         }
       });
       // Get session info for the block
-      const currentIndex = await api.query.session.currentIndex.at(blockHash);
-      const currentSlot = await api.query.babe.currentSlot.at(blockHash);
-      const epochIndex = await api.query.babe.epochIndex.at(blockHash);
-      const genesisSlot = await api.query.babe.genesisSlot.at(blockHash);
-      const currentEraStartSessionIndex = await api.query.staking.currentEraStartSessionIndex.at(blockHash);
-      const currentEra = await api.query.staking.currentEra.at(blockHash);
-      const validatorCount = await api.query.staking.validatorCount.at(blockHash);
-      const epochDuration = api.consts.babe.epochDuration;
-      const sessionsPerEra = api.consts.staking.sessionsPerEra;
-      const eraLength = epochDuration.mul(sessionsPerEra);
-      const epochStartSlot = epochIndex.mul(epochDuration).add(genesisSlot);
-      const sessionProgress = currentSlot.sub(epochStartSlot);
-      const eraProgress = currentIndex.sub(currentEraStartSessionIndex).mul(epochDuration).add(sessionProgress);
-
-      // Get block author
-      const blockAuthor = extendedHeader.author;
-
-      // Get block author identity display name
-      const blockAuthorIdentity = await api.derive.accounts.info(blockAuthor);
-      const blockAuthorName = blockAuthorIdentity.identity.display || ``;
-
-      // Get runtime spec name and version
-      const runtimeVersion = await api.rpc.state.getRuntimeVersion(blockHash);
+      const currentIndex = new BigNumber(await api.query.session.currentIndex.at(blockHash));
+      const currentSlot = new BigNumber(await api.query.babe.currentSlot.at(blockHash));
+      const epochIndex = new BigNumber(await api.query.babe.epochIndex.at(blockHash));
+      const genesisSlot = new BigNumber(await api.query.babe.genesisSlot.at(blockHash));
+      const currentEra = new BigNumber(await api.query.staking.currentEra.at(blockHash));
+
+      // This only works for the last HISTORY_DEPTH eras (api.query.staking.historyDepth)
+      const erasStartSessionIndex = await api.query.staking.erasStartSessionIndex(currentEra.toString());
+      const currentEraStartSessionIndex = new BigNumber(erasStartSessionIndex);
+
+      if (currentEraStartSessionIndex) {
 
-      // We can't get the timestamp for old blocks so we put the harvest timestap
-      const timestamp = new Date().getTime();
+        const validatorCount = await api.query.staking.validatorCount.at(blockHash);
 
-      // Total events
-      const totalEvents = blockEvents.length || 0;
+        const epochDuration = new BigNumber(api.consts.babe.epochDuration);
+        const sessionsPerEra = new BigNumber(api.consts.staking.sessionsPerEra);
+        const eraLength = epochDuration.multipliedBy(sessionsPerEra);
+        const epochStartSlot = epochIndex.multipliedBy(epochDuration).plus(genesisSlot);
+        const sessionProgress = currentSlot.minus(epochStartSlot);
 
-      // Find number of balance transfers in this block
-      const numTransfers =
-        blockEvents
-          .filter( record => (record.event.section === `balances` && record.event.method === `Transfer`))
-          .length || 0;
+        // console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mEra progress calculation, currentIndex is ${currentIndex.toString()}}, currentEraStartSessionIndex is ${currentEraStartSessionIndex.toString()}}, epochDuration is ${epochDuration.toString()}}, sessionProgress is ${sessionProgress.toString()}\x1b[0m`);
-      // Find number of new accounts in this block
-      const newAccounts =
-        blockEvents
-          .filter( record => (record.event.section === `balances` && record.event.method === `Endowed`))
-          .length || 0;
+        const eraProgress = new BigNumber(currentIndex)
+          .minus(currentEraStartSessionIndex)
+          .multipliedBy(epochDuration)
+          .plus(sessionProgress)
+
+        // Get block author
+        const blockAuthor = extendedHeader.author;
+
+        // Get block author identity display name
+        const blockAuthorIdentity = await api.derive.accounts.info(blockAuthor);
+        const blockAuthorName = blockAuthorIdentity.identity.display || ``;
+
+        // Get runtime spec name and version
+        const runtimeVersion = await api.rpc.state.getRuntimeVersion(blockHash);
+
+        // We can't get the timestamp for old blocks so we use the harvest timestamp
+        const timestamp = new Date().getTime();
+
+        // Total events
+        const totalEvents = blockEvents.length || 0;
 
-      // Delete before insert to avoid duplicate key errors (issue #48)
-      sqlDelete = `DELETE FROM block WHERE block_number = '${endBlock}';`;
-      try {
-        await pool.query(sqlDelete);
-      } catch (error) {
-        console.log(`[PolkaStats backend v3] - Block harvester - \x1b[31mError deleting events for block #${endBlock}: ${error}, sql: ${sqlDelete}\x1b[0m`);
-      }
+        // Find number of balance transfers in this block
+        const numTransfers =
+          blockEvents
+            .filter( record => (record.event.section === `balances` && record.event.method === `Transfer`))
+            .length || 0;
 
-      const sqlInsert =
-        `INSERT INTO block (
-          block_number,
-          block_author,
-          block_author_name,
-          block_hash,
-          parent_hash,
-          extrinsics_root,
-          state_root,
-          current_era,
-          current_index,
-          era_length,
-          era_progress,
-          is_epoch,
-          session_length,
-          session_per_era,
-          session_progress,
-          validator_count,
-          spec_name,
-          spec_version,
-          total_events,
-          num_transfers,
-          new_accounts,
-          timestamp
-        ) VALUES (
-          '${endBlock}',
-          '${blockAuthor}',
-          '${blockAuthorName}',
-          '${blockHash}',
-          '${parentHash}',
-          '${extrinsicsRoot}',
-          '${stateRoot}',
-          '${currentEra}',
-          '${currentIndex}',
-          '${eraLength}',
-          '${eraProgress}',
-          'true',
-          '${epochDuration}',
-          '${sessionsPerEra}',
-          '${sessionProgress}',
-          '${validatorCount}',
-          '${runtimeVersion.specName}',
-          '${runtimeVersion.specVersion}',
-          '${totalEvents}',
-          '${numTransfers}',
-          '${newAccounts}',
-          '${timestamp}'
-        )`;
-      try {
-        await pool.query(sqlInsert);
-        const endTime = new Date().getTime();
-        console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mAdded block #${endBlock} (${shortHash(blockHash.toString())}) in ${((endTime - startTime) / 1000).toFixed(3)}s\x1b[0m`);
-      } catch (error) {
-        console.log(`[PolkaStats backend v3] - Block harvester - \x1b[31mError adding block #${endBlock}: ${error.error}\x1b[0m`);
+        // Find number of new accounts in this block
+        const newAccounts =
+          blockEvents
+            .filter( record => (record.event.section === `balances` && record.event.method === `Endowed`))
+            .length || 0;
+
+        // Delete before insert to avoid duplicate key errors (issue #48)
+        sqlDelete = `DELETE FROM block WHERE block_number = '${endBlock}';`;
+        try {
+          await pool.query(sqlDelete);
+        } catch (error) {
+          console.log(`[PolkaStats backend v3] - Block harvester - \x1b[31mError deleting events for block #${endBlock}: ${error}, sql: ${sqlDelete}\x1b[0m`);
+        }
+
+        const sqlInsert =
+          `INSERT INTO block (
+            block_number,
+            block_author,
+            block_author_name,
+            block_hash,
+            parent_hash,
+            extrinsics_root,
+            state_root,
+            current_era,
+            current_index,
+            era_length,
+            era_progress,
+            is_epoch,
+            session_length,
+            session_per_era,
+            session_progress,
+            validator_count,
+            spec_name,
+            spec_version,
+            total_events,
+            num_transfers,
+            new_accounts,
+            timestamp
+          ) VALUES (
+            '${endBlock}',
+            '${blockAuthor}',
+            '${blockAuthorName}',
+            '${blockHash}',
+            '${parentHash}',
+            '${extrinsicsRoot}',
+            '${stateRoot}',
+            '${currentEra}',
+            '${currentIndex}',
+            '${eraLength}',
+            '${eraProgress}',
+            'true',
+            '${epochDuration}',
+            '${sessionsPerEra}',
+            '${sessionProgress}',
+            '${validatorCount}',
+            '${runtimeVersion.specName}',
+            '${runtimeVersion.specVersion}',
+            '${totalEvents}',
+            '${numTransfers}',
+            '${newAccounts}',
+            '${timestamp}'
+          )`;
+        try {
+          await pool.query(sqlInsert);
+          const endTime = new Date().getTime();
+          console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mAdded block #${endBlock} (${shortHash(blockHash.toString())}) in ${((endTime - startTime) / 1000).toFixed(3)}s\x1b[0m`);
+        } catch (error) {
+          console.log(`[PolkaStats backend v3] - Block harvester - \x1b[31mError adding block #${endBlock}: ${error.error}\x1b[0m`);
+        }
+        endBlock--;
+        addedBlocks++;
+      } else {
+        console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mEnd of history depth reached, stopping!\x1b[0m`);
       }
-      endBlock--;
-      addedBlocks++;
     }
   }
 }
\ No newline at end of file
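The harvester deletes each block row before inserting it to dodge duplicate-key errors (issue #48). The same effect can be had atomically with a Postgres upsert; a sketch, assuming `block_number` carries the unique constraint that caused those errors (column list shortened for brevity):

```javascript
// Hypothetical upsert variant of the block insert above.
const sqlUpsert = `
  INSERT INTO block (block_number, block_hash, timestamp)
  VALUES ('${endBlock}', '${blockHash}', '${timestamp}')
  ON CONFLICT (block_number) DO UPDATE
    SET block_hash = EXCLUDED.block_hash,
        timestamp  = EXCLUDED.timestamp;`;
await pool.query(sqlUpsert);
```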
diff --git a/lib/crawlers/blockListener.js b/lib/crawlers/blockListener.js
index aeddb4f9..4c217045 100644
--- a/lib/crawlers/blockListener.js
+++ b/lib/crawlers/blockListener.js
@@ -132,36 +132,50 @@
       try {
         await pool.query(sqlInsert);
       } catch (error) {
-        console.log(`[PolkaStats backend v3] - Block listener - \x1b[31mError: ${error}\x1b[0m`);
+        if (`${error}`.indexOf(`duplicate key value violates unique constraint`) !== -1) {
+          console.log(`[PolkaStats backend v3] - Block listener - \x1b[33mBlock #${blockNumber} already added!\x1b[0m`);
+        } else {
+          console.log(`[PolkaStats backend v3] - Block listener - \x1b[31mError adding block #${blockNumber}: ${error}, sql: ${sqlInsert}\x1b[0m`);
+        }
       }
 
       // Loop through the Vec<EventRecord>
-      blockEvents.forEach( async (record, index) => {
+      await blockEvents.forEach( async (record, index) => {
         // Extract the phase and event
         const { event, phase } = record;
+
+        const sqlSelect = `SELECT FROM event WHERE block_number = '${blockNumber}' AND event_index = '${index}';`;
+        const res = await pool.query(sqlSelect);
+
+        if (res.rows.length === 0) {
 
-        const sqlInsert =
-          `INSERT INTO event (
-            block_number,
-            event_index,
-            section,
-            method,
-            phase,
-            data
-          ) VALUES (
-            '${blockNumber}',
-            '${index}',
-            '${event.section}',
-            '${event.method}',
-            '${phase.toString()}',
-            '${JSON.stringify(event.data)}'
-          );`;
-        try {
-          await pool.query(sqlInsert);
-          console.log(`[PolkaStats backend v3] - Block listener - \x1b[32m=> Adding event #${blockNumber}-${index} ${event.section} => ${event.method}\x1b[0m`);
-
-        } catch (error) {
-          console.log(`[PolkaStats backend v3] - Block listener - \x1b[31mError adding event #${blockNumber}-${index}: ${error}, sql: ${sqlInsert}\x1b[0m`);
+          const sqlInsert =
+            `INSERT INTO event (
+              block_number,
+              event_index,
+              section,
+              method,
+              phase,
+              data
+            ) VALUES (
+              '${blockNumber}',
+              '${index}',
+              '${event.section}',
+              '${event.method}',
+              '${phase.toString()}',
+              '${JSON.stringify(event.data)}'
+            );`;
+          try {
+            await pool.query(sqlInsert);
+            console.log(`[PolkaStats backend v3] - Block listener - \x1b[32m=> Adding event #${blockNumber}-${index} ${event.section} => ${event.method}\x1b[0m`);
+
+          } catch (error) {
+            if (`${error}`.indexOf(`duplicate key value violates unique constraint`) !== -1) {
+              console.log(`[PolkaStats backend v3] - Block listener - \x1b[33m=> Event #${blockNumber}-${index} already added!\x1b[0m`);
+            } else {
+              console.log(`[PolkaStats backend v3] - Block listener - \x1b[31m=> Error adding event #${blockNumber}-${index}: ${error}, sql: ${sqlInsert}\x1b[0m`);
+            }
+          }
+        }
       });
     }
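Matching on the stringified error is brittle (the message text can vary with Postgres locale), and node-postgres also surfaces the SQLSTATE code on the error object; `23505` is the stable code for `unique_violation`. A sketch of the same duplicate check keyed on it:

```javascript
// Same duplicate-key handling as above, but using the Postgres SQLSTATE code
// that the pg driver exposes as error.code.
try {
  await pool.query(sqlInsert);
} catch (error) {
  if (error.code === '23505') {
    console.log(`Block #${blockNumber} already added!`);
  } else {
    console.log(`Error adding block #${blockNumber}: ${error}`);
  }
}
```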
diff --git a/lib/crawlers/staking.js b/lib/crawlers/staking.js
index 8621fba9..1ca4987b 100644
--- a/lib/crawlers/staking.js
+++ b/lib/crawlers/staking.js
@@ -1,88 +1,72 @@
 const {BigNumber} = require('bignumber.js');
 // @ts-check
-let crawlerIsRunning = false;
 
 module.exports = {
   start: async function (api, pool, _config) {
     console.log(`[PolkaStats backend v3] - \x1b[32mStarting staking crawler...\x1b[0m`);
+
+    let currentDBSessionIndex;
+
+    // Get last session index stored in DB
+    const sqlSelect = `SELECT session_index FROM validator_staking ORDER BY session_index DESC LIMIT 1`;
+    const res = await pool.query(sqlSelect);
+    if (res.rows.length > 0) {
+      currentDBSessionIndex = parseInt(res.rows[0]["session_index"]);
+      console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mLast session index stored in DB is #${currentDBSessionIndex}\x1b[0m`);
+    } else {
+      console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mFirst execution, no session index found in DB!\x1b[0m`);
+
+      const sessionInfo = await api.derive.session.info();
+      const currentEraIndex = sessionInfo.activeEra.toNumber();
+      const currentSessionIndex = sessionInfo.currentIndex.toNumber();
+      currentDBSessionIndex = currentSessionIndex;
+
+      const block = await api.rpc.chain.getBlock();
+      const blockNumber = block.block.header.number.toNumber();
+      await module.exports.storeStakingInfo(api, pool, blockNumber, sessionInfo, currentEraIndex);
+    }
 
     // Subscribe to new blocks
     await api.rpc.chain.subscribeNewHeads(async (header) => {
-      let currentDBIndex;
-
-      // Get block number
       const blockNumber = header.number.toNumber();
       // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mNew block #${blockNumber}\x1b[0m`);
-
-      // Get last index stored in DB
-      const sqlSelect = `SELECT session_index FROM validator_staking ORDER BY session_index DESC LIMIT 1`;
-      const res = await pool.query(sqlSelect);
-      if (res.rows.length > 0) {
-        currentDBIndex = parseInt(res.rows[0]["session_index"]);
-        // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mLast session index stored in DB is #${currentDBIndex}\x1b[0m`);
-      } else {
-        currentDBIndex = 0;
-        if (!crawlerIsRunning) {
-          console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mFirst execution, no session index found in DB!\x1b[0m`);
-        }
-      }
-
-      // Get current session info
+
       const sessionInfo = await api.derive.session.info();
-
-      // Retrieve the active era
-      let activeEra = await api.query.staking.activeEra();
-      activeEra = JSON.parse(JSON.stringify(activeEra));
-      const currentEraIndex = activeEra.index;
-
-      if (sessionInfo.currentIndex > currentDBIndex) {
-        if (!crawlerIsRunning) {
-          await module.exports.storeStakingInfo(api, pool, blockNumber, sessionInfo, currentEraIndex);
-        }
+      const currentEraIndex = sessionInfo.activeEra.toNumber();
+      const currentSessionIndex = sessionInfo.currentIndex.toNumber();
+
+      if (currentSessionIndex > currentDBSessionIndex) {
+        currentDBSessionIndex = currentSessionIndex;
+        await module.exports.storeStakingInfo(api, pool, blockNumber, sessionInfo, currentEraIndex);
       }
     });
   },
 
   storeStakingInfo: async function (api, pool, blockNumber, sessionInfo, currentEraIndex) {
-    crawlerIsRunning = true;
-
     const currentIndex = sessionInfo.currentIndex.toNumber();
-    // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mCurrent session index is #${currentIndex}\x1b[0m`);
-    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mStoring validators staking info for at block #${blockNumber} (session #${currentIndex})\x1b[0m`);
-
+    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mStoring validators staking info for session #${currentIndex} (block #${blockNumber})\x1b[0m`);
+
     //
-    // Get active validators, imOnline data, current elected and current era points earned
+    // Get all stash addresses, active validators, imOnline data, current elected and current era points earned
     //
-    const [validators, imOnline, exposures, erasRewardPoints] = await Promise.all([
+    const [allStashAddresses, validatorAddresses, imOnline, erasRewardPoints] = await Promise.all([
+      api.derive.staking.stashes(),
       api.query.session.validators(),
       api.derive.imOnline.receivedHeartbeats(),
-      api.query.staking.erasStakers.entries(currentEraIndex),
       api.query.staking.erasRewardPoints(currentEraIndex)
     ]);
 
-    //
-    // Get all validator addresses in the last era
-    //
-    const eraExposures = exposures.map(([key, exposure]) => {
-      return {
-        accountId: key.args[1].toHuman(),
-        exposure: JSON.parse(JSON.stringify(exposure))
-      }
-    });
-
-    //
-    // Get all validator addresses in the last era
-    //
-    const eraValidatorList = eraExposures.map(exposure => {
-      return exposure.accountId;
-    });
+    // Fetch intention validator addresses for current session.
+    const intentionAddresses = allStashAddresses.filter(address => !validatorAddresses.includes(address));
 
     //
     // Map validator authorityId to staking info object
     //
     const validatorStaking = await Promise.all(
-      validators.map(authorityId => api.derive.staking.account(authorityId))
+      validatorAddresses.map(authorityId => api.derive.staking.account(authorityId))
     );
 
     //
@@ -103,21 +87,17 @@
     }, imOnline);
 
     //
-    // Add current elected and earned era points to validator object
+    // Add earned era points to validator object
     //
     for(let i = 0; i < validatorStaking.length; i++) {
       let validator = validatorStaking[i];
-      if (Number.isInteger(eraValidatorList.indexOf(validator.accountId))) { // TODO: refactor duplicity with lines 171...
-        validator.currentElected = true;
-      } else {
-        validator.currentElected = false;
-      }
-      if (erasRewardPoints.individual[eraValidatorList.indexOf(validator.accountId)]) { // TODO: refactor
-        validator.erasRewardPoints = erasRewardPoints.individual[eraValidatorList.indexOf(validator.accountId)]; // TODO: refactor
+      if (Object.keys(erasRewardPoints.individual).includes(validator.accountId.toString())) {
+        validator.erasRewardPoints = erasRewardPoints.individual[validator.accountId];
       }
     }
 
     if (validatorStaking) {
+      console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mInserting staking data in DB\x1b[0m`);
       let sqlInsert = `INSERT INTO validator_staking (block_number, session_index, json, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${JSON.stringify(validatorStaking)}', extract(epoch from now()));`;
       try {
         await pool.query(sqlInsert);
@@ -130,73 +110,37 @@
     //
     // Populate graph data tables
     //
-
+    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mPopulating validator_bonded, validator_selfbonded, validator_num_nominators and validator_active tables\x1b[0m`);
     validatorStaking.forEach(async validator => {
       // populate validator_bonded table
-      console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_bonded table\x1b[0m`);
       let sql = `INSERT INTO validator_bonded (block_number, session_index, account_id, amount, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${BigNumber(validator.exposure.total).toString(10)}', extract(epoch from now()));`;
       await pool.query(sql);
 
       // populate validator_selfbonded table
-      console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_selfbonded table\x1b[0m`);
       sql = `INSERT INTO validator_selfbonded (block_number, session_index, account_id, amount, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${BigNumber(validator.exposure.own).toString(10)}', extract(epoch from now()));`;
       await pool.query(sql);
 
       // populate validator_num_nominators table
-      console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_num_nominators table\x1b[0m`);
       sql = `INSERT INTO validator_num_nominators (block_number, session_index, account_id, nominators, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${validator.exposure.others.length}', extract(epoch from now()));`;
       await pool.query(sql);
 
       // populate validator_active table
-      console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_active table\x1b[0m`);
       sql = `INSERT INTO validator_active (block_number, session_index, account_id, active, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', 'true', extract(epoch from now()));`;
       await pool.query(sql);
     })
-
-    //
-    // Populate validator_era_points table
-    //
-    // We need to get earned era points at the last block of the previous session
-    //
-
-    // TODO: Replace queries, check https://github.com/Colm3na/polkastats-backend-v3/pull/63#issuecomment-598613801
-
-    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_era_points table\x1b[0m`);
-
-    const lastSessionBlockNumber = blockNumber - sessionInfo.sessionProgress - 1;
-    // const lastSessionEraPoints = await api.query.staking.currentEraPointsEarned.at(lastSessionBlockHash); // I guess this is the same than eraRewardsPoints?
-    // const lastSessionElected = await api.query.staking.currentElected.at(lastSessionBlockHash); // And this like the elected validators from exposures
-    const eraEraPoints = await api.query.staking.erasRewardPoints(currentEraIndex); // TODO: avoid duplicates
-    const eraEraPointsList = eraEraPoints.individual.toHuman();
-
-    validatorStaking.forEach(async validator => {
-      const validatorAccountId = validator.accountId.toHuman();
-      if (eraEraPointsList[validatorAccountId]) {
-        const validatorEraPoints = parseInt(eraEraPointsList[validatorAccountId])
-        let sqlInsert = `INSERT INTO validator_era_points (block_number, session_index, account_id, era_points, timestamp) VALUES ('${lastSessionBlockNumber}', '${currentIndex}', '${validatorAccountId}', '${validatorEraPoints}', extract(epoch from now()));`;
-        try {
-          const res = await pool.query(sqlInsert);
-          // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mResponse from Database is ${JSON.stringify(res)}]`)
-        } catch (error) {
-          console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[31mERROR: ${JSON.stringify(error)}\x1b[0m`);
-        }
-      }
-    })
 
     //
     // Fetch intention validators
     //
-    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mStoring intentions staking info at block #${blockNumber} (session #${currentIndex})\x1b[0m`);
-    const intentionValidators = await api.query.staking.validators();
-    const intentions = intentionValidators[0];
+    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mStoring intentions staking info for session #${currentIndex} (block #${blockNumber})\x1b[0m`);
 
     //
     // Map validator authorityId to staking info object
     //
     const intentionStaking = await Promise.all(
-      intentions.map(authorityId => api.derive.staking.account(authorityId))
+      intentionAddresses.map(authorityId => api.derive.staking.account(authorityId))
     );
 
     //
@@ -212,24 +156,21 @@
       }
     }
 
-    //
     // Populate intention_staking table
     //
-
-    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating intention_staking table\x1b[0m`);
+    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mPopulating intention_staking table\x1b[0m`);
 
     if (intentionStaking) {
+      console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mInserting staking data in DB\x1b[0m`);
       let sqlInsert = `INSERT INTO intention_staking (block_number, session_index, json, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${JSON.stringify(intentionStaking)}', extract(epoch from now()));`;
       try {
-        const res = await pool.query(sqlInsert);
+        await pool.query(sqlInsert);
         // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mResponse from Database is ${JSON.stringify(res)}]`)
       } catch (error) {
        console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[31mERROR: ${JSON.stringify(error)}\x1b[0m`);
       }
     }
-
-    crawlerIsRunning = false;
   }
 }
\ No newline at end of file
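The era-points lookup above still indexes the `individual` codec map directly; depending on the @polkadot/api version it can be safer to convert the map to a plain object first, the way the removed validator_era_points code did with `.toHuman()`. A sketch under that assumption:

```javascript
// Assumption: .toHuman() renders the individual BTreeMap as
// { stashAddress: points }, as the removed validator_era_points code relied on.
const individual = erasRewardPoints.individual.toHuman();
const points = individual[validator.accountId.toHuman()];
if (points !== undefined) {
  validator.erasRewardPoints = parseInt(points);
}
```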
"@polkadot/api-derive": "1.6.2", + "@babel/runtime": "^7.8.7", + "@polkadot/api-derive": "1.8.0-beta.2", "@polkadot/keyring": "^2.6.2", - "@polkadot/metadata": "1.6.2", - "@polkadot/rpc-core": "1.6.2", - "@polkadot/rpc-provider": "1.6.2", - "@polkadot/types": "1.6.2", + "@polkadot/metadata": "1.8.0-beta.2", + "@polkadot/rpc-core": "1.8.0-beta.2", + "@polkadot/rpc-provider": "1.8.0-beta.2", + "@polkadot/types": "1.8.0-beta.2", "@polkadot/util": "^2.6.2", "@polkadot/util-crypto": "^2.6.2", "bn.js": "^5.1.1", @@ -32,15 +32,15 @@ } }, "@polkadot/api-derive": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/api-derive/-/api-derive-1.6.2.tgz", - "integrity": "sha512-x0LA+Jh1GV9yjMYflE0G7PN87Bglm6fu3KepJdxg4FLja4ruYygvOKlprlEynOqW7bEEKba01cB2t7B3ksPzFw==", - "requires": { - "@babel/runtime": "^7.8.4", - "@polkadot/api": "1.6.2", - "@polkadot/rpc-core": "1.6.2", - "@polkadot/rpc-provider": "1.6.2", - "@polkadot/types": "1.6.2", + "version": "1.8.0-beta.2", + "resolved": "https://registry.npmjs.org/@polkadot/api-derive/-/api-derive-1.8.0-beta.2.tgz", + "integrity": "sha512-ldba01uBSdtFK13mWfRsw3kbu24KOrLvABAxbsl4dy93ZJKuozfGzomo1GJpeZ6m3QoetAgulHdiHzE/3Ct5Gg==", + "requires": { + "@babel/runtime": "^7.8.7", + "@polkadot/api": "1.8.0-beta.2", + "@polkadot/rpc-core": "1.8.0-beta.2", + "@polkadot/rpc-provider": "1.8.0-beta.2", + "@polkadot/types": "1.8.0-beta.2", "@polkadot/util": "^2.6.2", "@polkadot/util-crypto": "^2.6.2", "bn.js": "^5.1.1", @@ -49,12 +49,12 @@ } }, "@polkadot/jsonrpc": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/jsonrpc/-/jsonrpc-1.6.2.tgz", - "integrity": "sha512-hYzyT0QsUzLBb0JUtzAfp0XPKuSJnd/RnpOpVf5yZ9lk1Z+sqpe9z1LsutAPWZtUhFNtUhUfxDOSrGWu28XKhg==", + "version": "1.8.0-beta.2", + "resolved": "https://registry.npmjs.org/@polkadot/jsonrpc/-/jsonrpc-1.8.0-beta.2.tgz", + "integrity": "sha512-le+vLFyM7Ajd21rSM9Y/9MH3ZmKdHI7uXK0fWYz1ibxRXKY0HtQzY3V6twbq3Y6eOV8vlAmNv2XimsIJKdyvQw==", "requires": { - "@babel/runtime": "^7.8.4", - "@polkadot/types": "1.6.2", + "@babel/runtime": "^7.8.7", + "@polkadot/types": "1.8.0-beta.2", "@polkadot/util": "^2.6.2" } }, @@ -69,41 +69,41 @@ } }, "@polkadot/metadata": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/metadata/-/metadata-1.6.2.tgz", - "integrity": "sha512-iFolXhI83E8s1Ac2RWV0ws2NL41phZSL2roscSFWsi4wUITg8h71o7W6Gp5olWeXvENraCga2Uei4+1AvvFsQg==", + "version": "1.8.0-beta.2", + "resolved": "https://registry.npmjs.org/@polkadot/metadata/-/metadata-1.8.0-beta.2.tgz", + "integrity": "sha512-fIvoAqiVGyh5VzUXSLYyfItM3gNkkSLYH1O6YGZ+lsUJsDGHd+1krgtXw02a74KLxUin+nGRYCiZtTyIYHFsTg==", "requires": { - "@babel/runtime": "^7.8.4", - "@polkadot/types": "1.6.2", + "@babel/runtime": "^7.8.7", + "@polkadot/types": "1.8.0-beta.2", "@polkadot/util": "^2.6.2", "@polkadot/util-crypto": "^2.6.2", "bn.js": "^5.1.1" } }, "@polkadot/rpc-core": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/rpc-core/-/rpc-core-1.6.2.tgz", - "integrity": "sha512-GZeXwx3wzpS4TPEE9SjqaawmMGmB8lrvOysS1F8trRBQ278H6xt/oGS9PiATZ0Tkv4pdGPMZvStQFYaIK31Rcg==", - "requires": { - "@babel/runtime": "^7.8.4", - "@polkadot/jsonrpc": "1.6.2", - "@polkadot/metadata": "1.6.2", - "@polkadot/rpc-provider": "1.6.2", - "@polkadot/types": "1.6.2", + "version": "1.8.0-beta.2", + "resolved": "https://registry.npmjs.org/@polkadot/rpc-core/-/rpc-core-1.8.0-beta.2.tgz", + "integrity": "sha512-Pa/+utd+Bwwsh2fx5HWNKdD7eGL3jZkmhr2HQ9y2UPW4MUcoNVhvSgaXEBVE7wbu3IgjJo0gwP6bIFnALOwmSA==", + "requires": 
+        "@babel/runtime": "^7.8.7",
+        "@polkadot/jsonrpc": "1.8.0-beta.2",
+        "@polkadot/metadata": "1.8.0-beta.2",
+        "@polkadot/rpc-provider": "1.8.0-beta.2",
+        "@polkadot/types": "1.8.0-beta.2",
         "@polkadot/util": "^2.6.2",
         "memoizee": "^0.4.14",
         "rxjs": "^6.5.4"
       }
     },
     "@polkadot/rpc-provider": {
-      "version": "1.6.2",
-      "resolved": "https://registry.npmjs.org/@polkadot/rpc-provider/-/rpc-provider-1.6.2.tgz",
-      "integrity": "sha512-PJF7124SAOiTLI+ySFkX6BxgPzU8cWrlitvSKHW/ClQHvmF0A8qR2qILiPKwY9rMYbgHbLzJqOyt3JrVPuCVxA==",
-      "requires": {
-        "@babel/runtime": "^7.8.4",
-        "@polkadot/jsonrpc": "1.6.2",
-        "@polkadot/metadata": "1.6.2",
-        "@polkadot/types": "1.6.2",
+      "version": "1.8.0-beta.2",
+      "resolved": "https://registry.npmjs.org/@polkadot/rpc-provider/-/rpc-provider-1.8.0-beta.2.tgz",
+      "integrity": "sha512-n896AhmIO2mPW2NulYEjkTNw7G49igGGxrRVjdb+P0f+PTtxeRpeiKd9WmqzlCXS5mcijidKMr9I/Ou6vSg32A==",
+      "requires": {
+        "@babel/runtime": "^7.8.7",
+        "@polkadot/jsonrpc": "1.8.0-beta.2",
+        "@polkadot/metadata": "1.8.0-beta.2",
+        "@polkadot/types": "1.8.0-beta.2",
         "@polkadot/util": "^2.6.2",
         "@polkadot/util-crypto": "^2.6.2",
         "bn.js": "^5.1.1",
@@ -113,12 +113,12 @@
       }
     },
     "@polkadot/types": {
-      "version": "1.6.2",
-      "resolved": "https://registry.npmjs.org/@polkadot/types/-/types-1.6.2.tgz",
-      "integrity": "sha512-mo1eKJPcllsaOe+h7kIvjklJXH8c0AqLM9bDGhJtM3O3+aAM99xDItHp02uaoEywCUbc9XqI4+/mH/Lu0fTT3g==",
+      "version": "1.8.0-beta.2",
+      "resolved": "https://registry.npmjs.org/@polkadot/types/-/types-1.8.0-beta.2.tgz",
+      "integrity": "sha512-ALtCUTOFWrWT8FwqWAvqMXCiJjTeUtzproqsvq4ST0PHsX3qsgS+l7P4u27X0xb739EnE7CJg9nZjJitGtOoPQ==",
       "requires": {
-        "@babel/runtime": "^7.8.4",
-        "@polkadot/metadata": "1.6.2",
+        "@babel/runtime": "^7.8.7",
+        "@polkadot/metadata": "1.8.0-beta.2",
         "@polkadot/util": "^2.6.2",
         "@polkadot/util-crypto": "^2.6.2",
         "@types/bn.js": "^4.11.6",
@@ -179,9 +179,9 @@
       "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ=="
     },
     "@types/node": {
-      "version": "13.9.1",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-13.9.1.tgz",
-      "integrity": "sha512-E6M6N0blf/jiZx8Q3nb0vNaswQeEyn0XlupO+xN6DtJ6r6IT4nXrTry7zhIfYvFCl3/8Cu6WIysmUBKiqV0bqQ=="
+      "version": "13.9.2",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-13.9.2.tgz",
+      "integrity": "sha512-bnoqK579sAYrQbp73wwglccjJ4sfRdKU7WNEZ5FW4K2U6Kc0/eZ5kvXG0JKsEKFB50zrFmfFt52/cvBbZa7eXg=="
     },
     "ansi-styles": {
       "version": "4.2.1",
diff --git a/package.json b/package.json
index db5a88ea..0a28f6f4 100644
--- a/package.json
+++ b/package.json
@@ -31,7 +31,7 @@
   },
   "homepage": "https://github.com/Colm3na/polkastats-backend-v3#readme",
   "dependencies": {
-    "@polkadot/api": "^1.6.2",
+    "@polkadot/api": "^v1.8.0-beta.2",
     "axios": "^0.19.2",
     "bignumber.js": "^9.0.0",
     "pg": "^7.18.1"