diff --git a/.github/workflows/on-tag.yml b/.github/workflows/on-tag.yml index a1093a261..32d4b085b 100644 --- a/.github/workflows/on-tag.yml +++ b/.github/workflows/on-tag.yml @@ -68,24 +68,24 @@ jobs: run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin - name: Checkout project - uses: actions/checkout@629c2de402a417ea7690ca6ce3f33229e27606a5 # v2 + uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0 - name: Init repo for Dockerization run: docker/init.sh "$TAG" - name: Set up QEMU - uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # v1 + uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # v2.1.0 id: qemu - name: Setup Docker buildx action - uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # v1 + uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # v2.2.1 id: buildx - name: Available platforms run: echo ${{ steps.buildx.outputs.platforms }} - name: Cache Docker layers - uses: actions/cache@661fd3eb7f2f20d8c7c84bc2b0509efd7a826628 # v2 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # v3.0.11 id: cache with: path: /tmp/.buildx-cache diff --git a/.gitignore b/.gitignore index 687e9e8cb..b41b0db08 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,5 @@ data docker-compose.yml backend/mempool-config.json *.swp +frontend/src/resources/config.template.js +frontend/src/resources/config.js diff --git a/backend/mempool-config.sample.json b/backend/mempool-config.sample.json index 86d226154..fe5f2e213 100644 --- a/backend/mempool-config.sample.json +++ b/backend/mempool-config.sample.json @@ -2,6 +2,7 @@ "MEMPOOL": { "NETWORK": "mainnet", "BACKEND": "electrum", + "ENABLED": true, "HTTP_PORT": 8999, "SPAWN_CLUSTER_PROCS": 0, "API_URL_PREFIX": "/api/v1/", @@ -23,7 +24,8 @@ "STDOUT_LOG_MIN_PRIORITY": "debug", "AUTOMATIC_BLOCK_REINDEXING": false, "POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json", - "POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master" + "POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master", + "ADVANCED_TRANSACTION_SELECTION": false }, "CORE_RPC": { "HOST": "127.0.0.1", diff --git a/backend/src/__fixtures__/mempool-config.template.json b/backend/src/__fixtures__/mempool-config.template.json index a42426249..d54365cda 100644 --- a/backend/src/__fixtures__/mempool-config.template.json +++ b/backend/src/__fixtures__/mempool-config.template.json @@ -1,7 +1,9 @@ { "MEMPOOL": { + "ENABLED": true, "NETWORK": "__MEMPOOL_NETWORK__", "BACKEND": "__MEMPOOL_BACKEND__", + "ENABLED": true, "BLOCKS_SUMMARIES_INDEXING": true, "HTTP_PORT": 1, "SPAWN_CLUSTER_PROCS": 2, @@ -23,7 +25,8 @@ "STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__", "INDEXING_BLOCKS_AMOUNT": 14, "POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__", - "POOLS_JSON_URL": "__POOLS_JSON_URL__" + "POOLS_JSON_URL": "__POOLS_JSON_URL__", + "ADVANCED_TRANSACTION_SELECTION": "__ADVANCED_TRANSACTION_SELECTION__" }, "CORE_RPC": { "HOST": "__CORE_RPC_HOST__", diff --git a/backend/src/__tests__/config.test.ts b/backend/src/__tests__/config.test.ts index 7314fde6f..9bb06c58a 100644 --- a/backend/src/__tests__/config.test.ts +++ b/backend/src/__tests__/config.test.ts @@ -13,6 +13,7 @@ describe('Mempool Backend Config', () => { const config = jest.requireActual('../config').default; expect(config.MEMPOOL).toStrictEqual({ + ENABLED: true, 
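The new `ENABLED` and `ADVANCED_TRANSACTION_SELECTION` flags default to `true` and `false` respectively, as the sample config, fixture, and test above show. A minimal sketch (not the project's actual config loader) of how a partial config file would be merged over those defaults:

```ts
// Minimal sketch: merging a partial config file over defaults for the two new
// MEMPOOL flags added in this diff. The loader shown here is illustrative only.
interface MempoolFlags {
  ENABLED: boolean;                         // run the mempool/blocks update loop
  ADVANCED_TRANSACTION_SELECTION: boolean;  // use the getBlockTemplate-style selection worker
}

const defaults: MempoolFlags = {
  ENABLED: true,
  ADVANCED_TRANSACTION_SELECTION: false,
};

function mergeFlags(fromFile: Partial<MempoolFlags>): MempoolFlags {
  return { ...defaults, ...fromFile };
}

// e.g. a deployment that only serves indexed data and skips mempool processing
console.log(mergeFlags({ ENABLED: false }));
// -> { ENABLED: false, ADVANCED_TRANSACTION_SELECTION: false }
```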
NETWORK: 'mainnet', BACKEND: 'none', BLOCKS_SUMMARIES_INDEXING: false, @@ -36,7 +37,8 @@ describe('Mempool Backend Config', () => { USER_AGENT: 'mempool', STDOUT_LOG_MIN_PRIORITY: 'debug', POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master', - POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json' + POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json', + ADVANCED_TRANSACTION_SELECTION: false, }); expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true }); diff --git a/backend/src/api/audit.ts b/backend/src/api/audit.ts index 77a6e7459..1cbfe7a84 100644 --- a/backend/src/api/audit.ts +++ b/backend/src/api/audit.ts @@ -1,5 +1,10 @@ -import logger from '../logger'; -import { BlockExtended, TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces'; +import config from '../config'; +import bitcoinApi from './bitcoin/bitcoin-api-factory'; +import { Common } from './common'; +import { TransactionExtended, MempoolBlockWithTransactions, AuditScore } from '../mempool.interfaces'; +import blocksRepository from '../repositories/BlocksRepository'; +import blocksAuditsRepository from '../repositories/BlocksAuditsRepository'; +import blocks from '../api/blocks'; const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners @@ -44,8 +49,6 @@ class Audit { displacedWeight += (4000 - transactions[0].weight); - logger.warn(`${fresh.length} fresh, ${Object.keys(isCensored).length} possibly censored, ${displacedWeight} displaced weight`); - // we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs // these displaced transactions should occupy the first N weight units of the next projected block let displacedWeightRemaining = displacedWeight; @@ -73,6 +76,7 @@ class Audit { // mark unexpected transactions in the mined block as 'added' let overflowWeight = 0; + let totalWeight = 0; for (const tx of transactions) { if (inTemplate[tx.txid]) { matches.push(tx.txid); @@ -82,11 +86,13 @@ class Audit { } overflowWeight += tx.weight; } + totalWeight += tx.weight; } // transactions missing from near the end of our template are probably not being censored - let overflowWeightRemaining = overflowWeight; - let lastOverflowRate = 1.00; + let overflowWeightRemaining = overflowWeight - (config.MEMPOOL.BLOCK_WEIGHT_UNITS - totalWeight); + let maxOverflowRate = 0; + let rateThreshold = 0; index = projectedBlocks[0].transactionIds.length - 1; while (index >= 0) { const txid = projectedBlocks[0].transactionIds[index]; @@ -94,8 +100,11 @@ class Audit { if (isCensored[txid]) { delete isCensored[txid]; } - lastOverflowRate = mempool[txid].effectiveFeePerVsize; - } else if (Math.floor(mempool[txid].effectiveFeePerVsize * 100) <= Math.ceil(lastOverflowRate * 100)) { // tolerance of 0.01 sat/vb + if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) { + maxOverflowRate = mempool[txid].effectiveFeePerVsize; + rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005; + } + } else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding if (isCensored[txid]) { delete isCensored[txid]; } @@ -113,6 +122,45 @@ class Audit { score }; } + + public async $getBlockAuditScores(fromHeight?: number, limit: number = 15): Promise { + let currentHeight = fromHeight !== undefined ? 
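For reference, a standalone sketch of the overflow tolerance rule introduced in `auditBlock` above, assuming fee rates are expressed in sat/vB:

```ts
// A transaction is not flagged as censored if its effective fee rate is at or
// below the highest rate seen among the block's own "overflow" transactions,
// rounded up to the next 0.01 sat/vB plus half a unit to absorb float noise.
function rateThresholdFor(maxOverflowRate: number): number {
  return Math.ceil(maxOverflowRate * 100) / 100 + 0.005;
}

function withinTolerance(effectiveFeePerVsize: number, maxOverflowRate: number): boolean {
  return effectiveFeePerVsize <= rateThresholdFor(maxOverflowRate);
}

console.log(rateThresholdFor(12.004));        // ~12.015
console.log(withinTolerance(12.01, 12.004));  // true  -> treated as an ordinary miss
console.log(withinTolerance(12.02, 12.004));  // false -> stays a censorship candidate
```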
fromHeight : await blocksRepository.$mostRecentBlockHeight(); + const returnScores: AuditScore[] = []; + + if (currentHeight < 0) { + return returnScores; + } + + for (let i = 0; i < limit && currentHeight >= 0; i++) { + const block = blocks.getBlocks().find((b) => b.height === currentHeight); + if (block?.extras?.matchRate != null) { + returnScores.push({ + hash: block.id, + matchRate: block.extras.matchRate + }); + } else { + let currentHash; + if (!currentHash && Common.indexingEnabled()) { + const dbBlock = await blocksRepository.$getBlockByHeight(currentHeight); + if (dbBlock && dbBlock['id']) { + currentHash = dbBlock['id']; + } + } + if (!currentHash) { + currentHash = await bitcoinApi.$getBlockHash(currentHeight); + } + if (currentHash) { + const auditScore = await blocksAuditsRepository.$getBlockAuditScore(currentHash); + returnScores.push({ + hash: currentHash, + matchRate: auditScore?.matchRate + }); + } + } + currentHeight--; + } + return returnScores; + } } export default new Audit(); \ No newline at end of file diff --git a/backend/src/api/blocks.ts b/backend/src/api/blocks.ts index f536ce3d5..562f49de1 100644 --- a/backend/src/api/blocks.ts +++ b/backend/src/api/blocks.ts @@ -34,6 +34,7 @@ class Blocks { private lastDifficultyAdjustmentTime = 0; private previousDifficultyRetarget = 0; private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = []; + private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise)[] = []; constructor() { } @@ -57,6 +58,10 @@ class Blocks { this.newBlockCallbacks.push(fn); } + public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise) { + this.newAsyncBlockCallbacks.push(fn); + } + /** * Return the list of transaction for a block * @param blockHash @@ -130,7 +135,7 @@ class Blocks { const stripped = block.tx.map((tx) => { return { txid: tx.txid, - vsize: tx.vsize, + vsize: tx.weight / 4, fee: tx.fee ? Math.round(tx.fee * 100000000) : 0, value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? 
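The stripped block summary above now derives `vsize` from `weight / 4` instead of the node-reported value. A small illustration of how that differs from the consensus definition, which rounds up (BIP141):

```ts
// Consensus vsize rounds up to a whole vbyte; the stripped summary keeps the
// fractional weight / 4 value, which is what the change above uses.
function consensusVsize(weight: number): number {
  return Math.ceil(weight / 4);
}

function summaryVsize(weight: number): number {
  return weight / 4;
}

console.log(consensusVsize(561)); // 141
console.log(summaryVsize(561));   // 140.25
```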
vout.value : 0), 0) * 100000000) }; @@ -195,9 +200,9 @@ class Blocks { }; } - const auditSummary = await BlocksAuditsRepository.$getShortBlockAudit(block.id); - if (auditSummary) { - blockExtended.extras.matchRate = auditSummary.matchRate; + const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id); + if (auditScore != null) { + blockExtended.extras.matchRate = auditScore.matchRate; } } @@ -444,6 +449,9 @@ class Blocks { const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions); const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock); + // start async callbacks + const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions)); + if (Common.indexingEnabled()) { if (!fastForwarded) { const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1); @@ -514,6 +522,9 @@ class Blocks { if (!memPool.hasPriority()) { diskCache.$saveCacheToDisk(); } + + // wait for pending async callbacks to finish + await Promise.all(callbackPromises); } } diff --git a/backend/src/api/database-migration.ts b/backend/src/api/database-migration.ts index ec201de0c..6dbfab723 100644 --- a/backend/src/api/database-migration.ts +++ b/backend/src/api/database-migration.ts @@ -4,8 +4,8 @@ import logger from '../logger'; import { Common } from './common'; class DatabaseMigration { - private static currentVersion = 41; - private queryTimeout = 120000; + private static currentVersion = 44; + private queryTimeout = 900_000; private statisticsAddedIndexed = false; private uniqueLogs: string[] = []; @@ -352,6 +352,19 @@ class DatabaseMigration { if (databaseSchemaVersion < 41 && isBitcoin === true) { await this.$executeQuery('UPDATE channels SET closing_reason = NULL WHERE closing_reason = 1'); } + + if (databaseSchemaVersion < 42 && isBitcoin === true) { + await this.$executeQuery('ALTER TABLE `channels` ADD closing_resolved tinyint(1) DEFAULT 0'); + } + + if (databaseSchemaVersion < 43 && isBitcoin === true) { + await this.$executeQuery(this.getCreateLNNodeRecordsTableQuery(), await this.$checkIfTableExists('nodes_records')); + } + + if (databaseSchemaVersion < 44 && isBitcoin === true) { + await this.$executeQuery('TRUNCATE TABLE `blocks_audits`'); + await this.$executeQuery('UPDATE blocks_summaries SET template = NULL'); + } } /** @@ -787,6 +800,19 @@ class DatabaseMigration { ) ENGINE=InnoDB DEFAULT CHARSET=utf8;`; } + private getCreateLNNodeRecordsTableQuery(): string { + return `CREATE TABLE IF NOT EXISTS nodes_records ( + public_key varchar(66) NOT NULL, + type int(10) unsigned NOT NULL, + payload blob NOT NULL, + UNIQUE KEY public_key_type (public_key, type), + INDEX (public_key), + FOREIGN KEY (public_key) + REFERENCES nodes (public_key) + ON DELETE CASCADE + ) ENGINE=InnoDB DEFAULT CHARSET=utf8;`; + } + public async $truncateIndexedData(tables: string[]) { const allowedTables = ['blocks', 'hashrates', 'prices']; diff --git a/backend/src/api/explorer/channels.api.ts b/backend/src/api/explorer/channels.api.ts index a52b0f28f..787bbe521 100644 --- a/backend/src/api/explorer/channels.api.ts +++ b/backend/src/api/explorer/channels.api.ts @@ -117,6 +117,17 @@ class ChannelsApi { } } + public async $getUnresolvedClosedChannels(): Promise { + try { + const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`; + const [rows]: any = await DB.query(query); + return rows; + } catch (e) { + logger.err('$getUnresolvedClosedChannels 
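An illustrative sketch (not the project's `DatabaseMigration` class) of the version-gated migration pattern used for schema steps 42 through 44 above:

```ts
// Each step is guarded by the stored schema version, so a node upgrading from
// any older version replays only the steps it is missing.
type Step = { version: number; bitcoinOnly: boolean; run: () => Promise<void> };

async function runMigrations(current: number, isBitcoin: boolean, steps: Step[]): Promise<number> {
  for (const step of steps.sort((a, b) => a.version - b.version)) {
    if (current < step.version && (!step.bitcoinOnly || isBitcoin)) {
      await step.run();
      current = step.version; // a real run would persist this in the state table
    }
  }
  return current;
}

// e.g. replaying versions 42-44 from this diff on a node still at schema 41
runMigrations(41, true, [
  { version: 42, bitcoinOnly: true, run: async () => console.log('add channels.closing_resolved') },
  { version: 43, bitcoinOnly: true, run: async () => console.log('create nodes_records') },
  { version: 44, bitcoinOnly: true, run: async () => console.log('reset blocks_audits and summary templates') },
]).then((v) => console.log('schema now at', v));
```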
error: ' + (e instanceof Error ? e.message : e)); + throw e; + } + } + public async $getChannelsWithoutCreatedDate(): Promise { try { const query = `SELECT * FROM channels WHERE created IS NULL`; diff --git a/backend/src/api/explorer/nodes.api.ts b/backend/src/api/explorer/nodes.api.ts index d8dceab19..f9677c193 100644 --- a/backend/src/api/explorer/nodes.api.ts +++ b/backend/src/api/explorer/nodes.api.ts @@ -105,6 +105,18 @@ class NodesApi { node.closed_channel_count = rows[0].closed_channel_count; } + // Custom records + query = ` + SELECT type, payload + FROM nodes_records + WHERE public_key = ? + `; + [rows] = await DB.query(query, [public_key]); + node.custom_records = {}; + for (const record of rows) { + node.custom_records[record.type] = Buffer.from(record.payload, 'binary').toString('hex'); + } + return node; } catch (e) { logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`); @@ -512,7 +524,37 @@ class NodesApi { public async $getNodesPerISP(ISPId: string) { try { - const query = ` + let query = ` + SELECT channels.node1_public_key AS node1PublicKey, isp1.id as isp1ID, + channels.node2_public_key AS node2PublicKey, isp2.id as isp2ID + FROM channels + JOIN nodes node1 ON node1.public_key = channels.node1_public_key + JOIN nodes node2 ON node2.public_key = channels.node2_public_key + JOIN geo_names isp1 ON isp1.id = node1.as_number + JOIN geo_names isp2 ON isp2.id = node2.as_number + WHERE channels.status = 1 AND (node1.as_number IN (?) OR node2.as_number IN (?)) + ORDER BY short_id DESC + `; + + const IPSIds = ISPId.split(','); + const [rows]: any = await DB.query(query, [IPSIds, IPSIds]); + const nodes = {}; + + const intISPIds: number[] = []; + for (const ispId of IPSIds) { + intISPIds.push(parseInt(ispId, 10)); + } + + for (const channel of rows) { + if (intISPIds.includes(channel.isp1ID)) { + nodes[channel.node1PublicKey] = true; + } + if (intISPIds.includes(channel.isp2ID)) { + nodes[channel.node2PublicKey] = true; + } + } + + query = ` SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels, nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at, geo_names_city.names as city, geo_names_country.names as country, @@ -523,17 +565,18 @@ class NodesApi { LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city' LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code' LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division' - WHERE nodes.as_number IN (?) + WHERE nodes.public_key IN (?) ORDER BY capacity DESC `; - const [rows]: any = await DB.query(query, [ISPId.split(',')]); - for (let i = 0; i < rows.length; ++i) { - rows[i].country = JSON.parse(rows[i].country); - rows[i].city = JSON.parse(rows[i].city); - rows[i].subdivision = JSON.parse(rows[i].subdivision); + const [rows2]: any = await DB.query(query, [Object.keys(nodes)]); + for (let i = 0; i < rows2.length; ++i) { + rows2[i].country = JSON.parse(rows2[i].country); + rows2[i].city = JSON.parse(rows2[i].city); + rows2[i].subdivision = JSON.parse(rows2[i].subdivision); } - return rows; + return rows2; + } catch (e) { logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? 
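A sketch of the custom-records shaping performed in the node query above: rows of `(type, payload)` become a `{ type: hexString }` map. The helper name is hypothetical, and it assumes the driver hands back the payload blob as either a Buffer or a binary string:

```ts
// Payloads are treated as raw bytes; 'binary' (latin1) round-trips each byte
// when the driver returns a string instead of a Buffer.
function toCustomRecords(rows: { type: number; payload: Buffer | string }[]): { [type: number]: string } {
  const records: { [type: number]: string } = {};
  for (const row of rows) {
    const bytes = Buffer.isBuffer(row.payload) ? row.payload : Buffer.from(row.payload, 'binary');
    records[row.type] = bytes.toString('hex');
  }
  return records;
}

console.log(toCustomRecords([{ type: 1, payload: Buffer.from([0x00, 0x2a, 0xff]) }]));
// -> { '1': '002aff' }
```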
e.message : e}`); throw e; diff --git a/backend/src/api/lightning/clightning/clightning-convert.ts b/backend/src/api/lightning/clightning/clightning-convert.ts index 9b3c62f04..92ae1f0a7 100644 --- a/backend/src/api/lightning/clightning/clightning-convert.ts +++ b/backend/src/api/lightning/clightning/clightning-convert.ts @@ -7,6 +7,15 @@ import { Common } from '../../common'; * Convert a clightning "listnode" entry to a lnd node entry */ export function convertNode(clNode: any): ILightningApi.Node { + let custom_records: { [type: number]: string } | undefined = undefined; + if (clNode.option_will_fund) { + try { + custom_records = { '1': Buffer.from(clNode.option_will_fund.compact_lease || '', 'hex').toString('base64') }; + } catch (e) { + logger.err(`Cannot decode option_will_fund compact_lease for ${clNode.nodeid}). Reason: ` + (e instanceof Error ? e.message : e)); + custom_records = undefined; + } + } return { alias: clNode.alias ?? '', color: `#${clNode.color ?? ''}`, @@ -23,6 +32,7 @@ export function convertNode(clNode: any): ILightningApi.Node { }; }) ?? [], last_update: clNode?.last_timestamp ?? 0, + custom_records }; } diff --git a/backend/src/api/lightning/lightning-api.interface.ts b/backend/src/api/lightning/lightning-api.interface.ts index 1a5e2793f..6e3ea0de3 100644 --- a/backend/src/api/lightning/lightning-api.interface.ts +++ b/backend/src/api/lightning/lightning-api.interface.ts @@ -49,6 +49,7 @@ export namespace ILightningApi { }[]; color: string; features: { [key: number]: Feature }; + custom_records?: { [type: number]: string }; } export interface Info { diff --git a/backend/src/api/mempool-blocks.ts b/backend/src/api/mempool-blocks.ts index d0c2a4f63..334362458 100644 --- a/backend/src/api/mempool-blocks.ts +++ b/backend/src/api/mempool-blocks.ts @@ -1,12 +1,17 @@ import logger from '../logger'; -import { MempoolBlock, TransactionExtended, AuditTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces'; +import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces'; import { Common } from './common'; import config from '../config'; -import { PairingHeap } from '../utils/pairing-heap'; +import { StaticPool } from 'node-worker-threads-pool'; +import path from 'path'; class MempoolBlocks { private mempoolBlocks: MempoolBlockWithTransactions[] = []; private mempoolBlockDeltas: MempoolBlockDelta[] = []; + private makeTemplatesPool = new StaticPool({ + size: 1, + task: path.resolve(__dirname, './tx-selection-worker.js'), + }); constructor() {} @@ -72,16 +77,15 @@ class MempoolBlocks { const time = end - start; logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds'); - const { blocks, deltas } = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks); + const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks); + const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks); this.mempoolBlocks = blocks; this.mempoolBlockDeltas = deltas; } - private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): - { blocks: MempoolBlockWithTransactions[], deltas: MempoolBlockDelta[] } { + private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] { const mempoolBlocks: MempoolBlockWithTransactions[] = []; - const mempoolBlockDeltas: 
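The c-lightning converter above stores `option_will_fund.compact_lease` (a hex string) as a base64 payload in `custom_records`. A minimal Buffer-based sketch of that encoding and its inverse; the sample lease value is made up:

```ts
function hexToBase64(hex: string): string {
  return Buffer.from(hex, 'hex').toString('base64');
}

function base64ToHex(b64: string): string {
  return Buffer.from(b64, 'base64').toString('hex');
}

const compactLease = '029a00640064000000644c4b40';    // example hex lease blob
const payload = hexToBase64(compactLease);
console.log(payload);                                  // base64 form stored in custom_records
console.log(base64ToHex(payload) === compactLease);   // true, lossless round trip
```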
MempoolBlockDelta[] = []; let blockWeight = 0; let blockSize = 0; let transactions: TransactionExtended[] = []; @@ -102,7 +106,11 @@ class MempoolBlocks { mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length)); } - // Calculate change from previous block states + return mempoolBlocks; + } + + private calculateMempoolDeltas(prevBlocks: MempoolBlockWithTransactions[], mempoolBlocks: MempoolBlockWithTransactions[]): MempoolBlockDelta[] { + const mempoolBlockDeltas: MempoolBlockDelta[] = []; for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) { let added: TransactionStripped[] = []; let removed: string[] = []; @@ -135,284 +143,25 @@ class MempoolBlocks { removed }); } - - return { - blocks: mempoolBlocks, - deltas: mempoolBlockDeltas - }; + return mempoolBlockDeltas; } - /* - * Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core - * (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp) - * - * blockLimit: number of blocks to build in total. - * weightLimit: maximum weight of transactions to consider using the selection algorithm. - * if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate - * condenseRest: whether to ignore excess transactions or append them to the final block. - */ - public makeBlockTemplates(mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): MempoolBlockWithTransactions[] { - const start = Date.now(); - const auditPool: { [txid: string]: AuditTransaction } = {}; - const mempoolArray: AuditTransaction[] = []; - const restOfArray: TransactionExtended[] = []; - - let weight = 0; - const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity; - // grab the top feerate txs up to maxWeight - Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => { - weight += tx.weight; - if (weight >= maxWeight) { - restOfArray.push(tx); - return; - } - // initializing everything up front helps V8 optimize property access later - auditPool[tx.txid] = { - txid: tx.txid, - fee: tx.fee, - size: tx.size, - weight: tx.weight, - feePerVsize: tx.feePerVsize, - vin: tx.vin, - relativesSet: false, - ancestorMap: new Map(), - children: new Set(), - ancestorFee: 0, - ancestorWeight: 0, - score: 0, - used: false, - modified: false, - modifiedNode: null, - } - mempoolArray.push(auditPool[tx.txid]); - }) + public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): Promise { + const { mempool, blocks } = await this.makeTemplatesPool.exec({ mempool: newMempool, blockLimit, weightLimit, condenseRest }); + const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks); - // Build relatives graph & calculate ancestor scores - for (const tx of mempoolArray) { - if (!tx.relativesSet) { - this.setRelatives(tx, auditPool); - } - } - - // Sort by descending ancestor score - mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0)); - - // Build blocks by greedily choosing the highest feerate package - // (i.e. 
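A simplified sketch of the delta computation factored out into `calculateMempoolDeltas` above, working on plain txid arrays rather than full stripped transactions:

```ts
// For each projected block, report which txids appeared and which disappeared
// compared to the previous run, so websocket clients can patch their view
// instead of reloading whole blocks.
interface Delta { index: number; added: string[]; removed: string[]; }

function diffProjectedBlocks(prev: string[][], next: string[][]): Delta[] {
  const deltas: Delta[] = [];
  for (let i = 0; i < Math.max(prev.length, next.length); i++) {
    const before = new Set(prev[i] ?? []);
    const after = new Set(next[i] ?? []);
    deltas.push({
      index: i,
      added: Array.from(after).filter((txid) => !before.has(txid)),
      removed: Array.from(before).filter((txid) => !after.has(txid)),
    });
  }
  return deltas;
}

console.log(diffProjectedBlocks([['a', 'b']], [['b', 'c'], ['d']]));
// -> [ { index: 0, added: ['c'], removed: ['a'] }, { index: 1, added: ['d'], removed: [] } ]
```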
the package rooted in the transaction with the best ancestor score) - const blocks: MempoolBlockWithTransactions[] = []; - let blockWeight = 4000; - let blockSize = 0; - let transactions: AuditTransaction[] = []; - const modified: PairingHeap = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0)); - let overflow: AuditTransaction[] = []; - let failures = 0; - let top = 0; - while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) { - // skip invalid transactions - while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) { - top++; - } - - // Select best next package - let nextTx; - const nextPoolTx = mempoolArray[top]; - const nextModifiedTx = modified.peek(); - if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) { - nextTx = nextPoolTx; - top++; - } else { - modified.pop(); - if (nextModifiedTx) { - nextTx = nextModifiedTx; - nextTx.modifiedNode = undefined; - } - } - - if (nextTx && !nextTx?.used) { - // Check if the package fits into this block - if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) { - blockWeight += nextTx.ancestorWeight; - const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values()); - // sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count) - const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx]; - const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4); - sortedTxSet.forEach((ancestor, i, arr) => { - const mempoolTx = mempool[ancestor.txid]; - if (ancestor && !ancestor?.used) { - ancestor.used = true; - // update original copy of this tx with effective fee rate & relatives data - mempoolTx.effectiveFeePerVsize = effectiveFeeRate; - mempoolTx.ancestors = (Array.from(ancestor.ancestorMap?.values()) as AuditTransaction[]).map((a) => { - return { - txid: a.txid, - fee: a.fee, - weight: a.weight, - } - }) - if (i < arr.length - 1) { - mempoolTx.bestDescendant = { - txid: arr[arr.length - 1].txid, - fee: arr[arr.length - 1].fee, - weight: arr[arr.length - 1].weight, - }; - } - transactions.push(ancestor); - blockSize += ancestor.size; - } - }); - - // remove these as valid package ancestors for any descendants remaining in the mempool - if (sortedTxSet.length) { - sortedTxSet.forEach(tx => { - this.updateDescendants(tx, auditPool, modified); - }); - } - - failures = 0; - } else { - // hold this package in an overflow list while we check for smaller options - overflow.push(nextTx); - failures++; - } - } - - // this block is full - const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000); - if (exceededPackageTries && (!condenseRest || blocks.length < blockLimit - 1)) { - // construct this block - if (transactions.length) { - blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length)); - } - // reset for the next block - transactions = []; - blockSize = 0; - blockWeight = 4000; - - // 'overflow' packages didn't fit in this block, but are valid candidates for the next - for (const overflowTx of overflow.reverse()) { - if (overflowTx.modified) { - overflowTx.modifiedNode = modified.add(overflowTx); - } else { - top--; - mempoolArray[top] = overflowTx; - } - } - overflow = []; - } - } - if (condenseRest) { - // pack any leftover transactions into the last block - for (const 
tx of overflow) { - if (!tx || tx?.used) { - continue; - } - blockWeight += tx.weight; - blockSize += tx.size; - transactions.push(tx); - tx.used = true; - } - const blockTransactions = transactions.map(t => mempool[t.txid]) - restOfArray.forEach(tx => { - blockWeight += tx.weight; - blockSize += tx.size; - blockTransactions.push(tx); - }); - if (blockTransactions.length) { - blocks.push(this.dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length)); - } - transactions = []; - } else if (transactions.length) { - blocks.push(this.dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length)); - } - - const end = Date.now(); - const time = end - start; - logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds'); - - return blocks; - } - - // traverse in-mempool ancestors - // recursion unavoidable, but should be limited to depth < 25 by mempool policy - public setRelatives( - tx: AuditTransaction, - mempool: { [txid: string]: AuditTransaction }, - ): void { - for (const parent of tx.vin) { - const parentTx = mempool[parent.txid]; - if (parentTx && !tx.ancestorMap!.has(parent.txid)) { - tx.ancestorMap.set(parent.txid, parentTx); - parentTx.children.add(tx); - // visit each node only once - if (!parentTx.relativesSet) { - this.setRelatives(parentTx, mempool); - } - parentTx.ancestorMap.forEach((ancestor) => { - tx.ancestorMap.set(ancestor.txid, ancestor); - }); - } - }; - tx.ancestorFee = tx.fee || 0; - tx.ancestorWeight = tx.weight || 0; - tx.ancestorMap.forEach((ancestor) => { - tx.ancestorFee += ancestor.fee; - tx.ancestorWeight += ancestor.weight; - }); - tx.score = tx.ancestorFee / (tx.ancestorWeight || 1); - tx.relativesSet = true; - } - - // iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score - // avoids recursion to limit call stack depth - private updateDescendants( - rootTx: AuditTransaction, - mempool: { [txid: string]: AuditTransaction }, - modified: PairingHeap, - ): void { - const descendantSet: Set = new Set(); - // stack of nodes left to visit - const descendants: AuditTransaction[] = []; - let descendantTx; - let ancestorIndex; - let tmpScore; - rootTx.children.forEach(childTx => { - if (!descendantSet.has(childTx)) { - descendants.push(childTx); - descendantSet.add(childTx); + // copy CPFP info across to main thread's mempool + Object.keys(newMempool).forEach((txid) => { + if (newMempool[txid] && mempool[txid]) { + newMempool[txid].effectiveFeePerVsize = mempool[txid].effectiveFeePerVsize; + newMempool[txid].ancestors = mempool[txid].ancestors; + newMempool[txid].bestDescendant = mempool[txid].bestDescendant; + newMempool[txid].cpfpChecked = mempool[txid].cpfpChecked; } }); - while (descendants.length) { - descendantTx = descendants.pop(); - if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) { - // remove tx as ancestor - descendantTx.ancestorMap.delete(rootTx.txid); - descendantTx.ancestorFee -= rootTx.fee; - descendantTx.ancestorWeight -= rootTx.weight; - tmpScore = descendantTx.score; - descendantTx.score = descendantTx.ancestorFee / descendantTx.ancestorWeight; - if (!descendantTx.modifiedNode) { - descendantTx.modified = true; - descendantTx.modifiedNode = modified.add(descendantTx); - } else { - // rebalance modified heap if score has changed - if (descendantTx.score < tmpScore) { - modified.decreasePriority(descendantTx.modifiedNode); - } else if (descendantTx.score > tmpScore) { - 
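Because the worker receives a structured-clone copy of the mempool, the new async `makeBlockTemplates` above copies the computed CPFP fields back onto the main thread's objects. A standalone sketch of that merge step; the interface mirrors only the fields copied in the diff:

```ts
interface CpfpInfo {
  effectiveFeePerVsize: number;
  ancestors: { txid: string; fee: number; weight: number }[];
  bestDescendant: { txid: string; fee: number; weight: number } | null;
  cpfpChecked: boolean;
}

// Mutates the main thread's mempool entries with the values the worker computed.
function mergeCpfpResults(
  main: { [txid: string]: CpfpInfo },
  fromWorker: { [txid: string]: CpfpInfo },
): void {
  for (const txid of Object.keys(main)) {
    const updated = fromWorker[txid];
    if (updated) {
      main[txid].effectiveFeePerVsize = updated.effectiveFeePerVsize;
      main[txid].ancestors = updated.ancestors;
      main[txid].bestDescendant = updated.bestDescendant;
      main[txid].cpfpChecked = updated.cpfpChecked;
    }
  }
}
```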
modified.increasePriority(descendantTx.modifiedNode); - } - } - - // add this node's children to the stack - descendantTx.children.forEach(childTx => { - // visit each node only once - if (!descendantSet.has(childTx)) { - descendants.push(childTx); - descendantSet.add(childTx); - } - }); - } - } + this.mempoolBlocks = blocks; + this.mempoolBlockDeltas = deltas; } private dataToMempoolBlocks(transactions: TransactionExtended[], diff --git a/backend/src/api/mempool.ts b/backend/src/api/mempool.ts index 43aea6059..584ddf816 100644 --- a/backend/src/api/mempool.ts +++ b/backend/src/api/mempool.ts @@ -20,6 +20,8 @@ class Mempool { maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 }; private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => void) | undefined; + private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[], + deletedTransactions: TransactionExtended[]) => void) | undefined; private txPerSecondArray: number[] = []; private txPerSecond: number = 0; @@ -63,6 +65,11 @@ class Mempool { this.mempoolChangedCallback = fn; } + public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; }, + newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => Promise) { + this.asyncMempoolChangedCallback = fn; + } + public getMempool(): { [txid: string]: TransactionExtended } { return this.mempoolCache; } @@ -72,6 +79,9 @@ class Mempool { if (this.mempoolChangedCallback) { this.mempoolChangedCallback(this.mempoolCache, [], []); } + if (this.asyncMempoolChangedCallback) { + this.asyncMempoolChangedCallback(this.mempoolCache, [], []); + } } public async $updateMemPoolInfo() { @@ -103,12 +113,11 @@ class Mempool { return txTimes; } - public async $updateMempool() { - logger.debug('Updating mempool'); + public async $updateMempool(): Promise { + logger.debug(`Updating mempool...`); const start = new Date().getTime(); let hasChange: boolean = false; const currentMempoolSize = Object.keys(this.mempoolCache).length; - let txCount = 0; const transactions = await bitcoinApi.$getRawMempool(); const diff = transactions.length - currentMempoolSize; const newTransactions: TransactionExtended[] = []; @@ -124,7 +133,6 @@ class Mempool { try { const transaction = await transactionUtils.$getTransactionExtended(txid); this.mempoolCache[txid] = transaction; - txCount++; if (this.inSync) { this.txPerSecondArray.push(new Date().getTime()); this.vBytesPerSecondArray.push({ @@ -133,14 +141,9 @@ class Mempool { }); } hasChange = true; - if (diff > 0) { - logger.debug('Fetched transaction ' + txCount + ' / ' + diff); - } else { - logger.debug('Fetched transaction ' + txCount); - } newTransactions.push(transaction); } catch (e) { - logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e)); + logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? 
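A condensed sketch of the sync/async callback split used by `$updateMempool` above; the names are illustrative, not the real `Mempool` class:

```ts
// The existing synchronous callback is fired and forgotten, while the new
// async callback is awaited so the update loop does not finish before block
// templates are rebuilt.
type MempoolMap = { [txid: string]: { fee: number } };

let syncCallback: ((m: MempoolMap) => void) | undefined;
let asyncCallback: ((m: MempoolMap) => Promise<void>) | undefined;

function setMempoolChangedCallback(fn: (m: MempoolMap) => void): void { syncCallback = fn; }
function setAsyncMempoolChangedCallback(fn: (m: MempoolMap) => Promise<void>): void { asyncCallback = fn; }

async function notifyMempoolChanged(mempool: MempoolMap): Promise<void> {
  if (syncCallback) {
    syncCallback(mempool);
  }
  if (asyncCallback) {
    await asyncCallback(mempool); // e.g. websocketHandler.handleMempoolChange
  }
}

setAsyncMempoolChangedCallback(async (m) => console.log('rebuilding templates for', Object.keys(m).length, 'txs'));
void notifyMempoolChanged({ abc: { fee: 1000 } });
```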
e.message : e)); } } @@ -194,11 +197,13 @@ class Mempool { if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) { this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions); } + if (this.asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) { + await this.asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions); + } const end = new Date().getTime(); const time = end - start; - logger.debug(`New mempool size: ${Object.keys(this.mempoolCache).length} Change: ${diff}`); - logger.debug('Mempool updated in ' + time / 1000 + ' seconds'); + logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`); } public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) { diff --git a/backend/src/api/mining/mining-routes.ts b/backend/src/api/mining/mining-routes.ts index 47704f993..73d38d841 100644 --- a/backend/src/api/mining/mining-routes.ts +++ b/backend/src/api/mining/mining-routes.ts @@ -1,6 +1,7 @@ import { Application, Request, Response } from 'express'; import config from "../../config"; import logger from '../../logger'; +import audits from '../audit'; import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository'; import BlocksRepository from '../../repositories/BlocksRepository'; import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository'; @@ -26,7 +27,11 @@ class MiningRoutes { .get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight) .get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments) .get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction) + .get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores', this.$getBlockAuditScores) + .get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores/:height', this.$getBlockAuditScores) + .get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore) .get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit) + .get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/timestamp/:timestamp', this.$getHeightFromTimestamp) ; } @@ -252,6 +257,52 @@ class MiningRoutes { res.status(500).send(e instanceof Error ? e.message : e); } } + + private async $getHeightFromTimestamp(req: Request, res: Response) { + try { + const timestamp = parseInt(req.params.timestamp, 10); + // This will prevent people from entering milliseconds etc. + // Block timestamps are allowed to be up to 2 hours off, so 24 hours + // will never put the maximum value before the most recent block + const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24; + // Prevent non-integers that are not seconds + if (!/^[1-9][0-9]*$/.test(req.params.timestamp) || timestamp > nowPlus1day) { + throw new Error(`Invalid timestamp, value must be Unix seconds`); + } + const result = await BlocksRepository.$getBlockHeightFromTimestamp( + timestamp, + ); + res.header('Pragma', 'public'); + res.header('Cache-control', 'public'); + res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString()); + res.json(result); + } catch (e) { + res.status(500).send(e instanceof Error ? 
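Example client usage of the new `mining/blocks/timestamp/:timestamp` route registered above. The path and response shape come from this diff; the hostname and the availability of global `fetch` (Node 18+) are assumptions made for the example:

```ts
interface BlockAtTimestamp { height: number; hash: string; timestamp: number; }

// Looks up the block nearest the given Unix timestamp (in seconds).
async function blockAtTimestamp(unixSeconds: number): Promise<BlockAtTimestamp> {
  const res = await fetch(`https://mempool.space/api/v1/mining/blocks/timestamp/${unixSeconds}`);
  if (!res.ok) {
    throw new Error(`HTTP ${res.status}`);
  }
  return await res.json() as BlockAtTimestamp;
}

// 2022-01-01 00:00:00 UTC
blockAtTimestamp(1640995200).then((block) => console.log(block.height, block.hash));
```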
e.message : e); + } + } + + private async $getBlockAuditScores(req: Request, res: Response) { + try { + const height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10); + res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString()); + res.json(await audits.$getBlockAuditScores(height, 15)); + } catch (e) { + res.status(500).send(e instanceof Error ? e.message : e); + } + } + + public async $getBlockAuditScore(req: Request, res: Response) { + try { + const audit = await BlocksAuditsRepository.$getBlockAuditScore(req.params.hash); + + res.header('Pragma', 'public'); + res.header('Cache-control', 'public'); + res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString()); + res.json(audit || 'null'); + } catch (e) { + res.status(500).send(e instanceof Error ? e.message : e); + } + } } export default new MiningRoutes(); diff --git a/backend/src/api/pools-parser.ts b/backend/src/api/pools-parser.ts index 731653a83..ae6bd52ce 100644 --- a/backend/src/api/pools-parser.ts +++ b/backend/src/api/pools-parser.ts @@ -14,10 +14,10 @@ interface Pool { class PoolsParser { miningPools: any[] = []; unknownPool: any = { - 'name': "Unknown", - 'link': "https://learnmeabitcoin.com/technical/coinbase-transaction", - 'regexes': "[]", - 'addresses': "[]", + 'name': 'Unknown', + 'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction', + 'regexes': '[]', + 'addresses': '[]', 'slug': 'unknown' }; slugWarnFlag = false; @@ -25,7 +25,7 @@ class PoolsParser { /** * Parse the pools.json file, consolidate the data and dump it into the database */ - public async migratePoolsJson(poolsJson: object) { + public async migratePoolsJson(poolsJson: object): Promise { if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) { return; } @@ -81,6 +81,7 @@ class PoolsParser { // Finally, we generate the final consolidated pools data const finalPoolDataAdd: Pool[] = []; const finalPoolDataUpdate: Pool[] = []; + const finalPoolDataRename: Pool[] = []; for (let i = 0; i < poolNames.length; ++i) { let allAddresses: string[] = []; let allRegexes: string[] = []; @@ -127,8 +128,26 @@ class PoolsParser { finalPoolDataUpdate.push(poolObj); } } else { - logger.debug(`Add '${finalPoolName}' mining pool`); - finalPoolDataAdd.push(poolObj); + // Double check that if we're not just renaming a pool (same address same regex) + const [poolToRename]: any[] = await DB.query(` + SELECT * FROM pools + WHERE addresses = ? 
OR regexes = ?`, + [JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)] + ); + if (poolToRename && poolToRename.length > 0) { + // We're actually renaming an existing pool + finalPoolDataRename.push({ + 'name': poolObj.name, + 'link': poolObj.link, + 'regexes': allRegexes, + 'addresses': allAddresses, + 'slug': slug + }); + logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`); + } else { + logger.debug(`Add '${finalPoolName}' mining pool`); + finalPoolDataAdd.push(poolObj); + } } this.miningPools.push({ @@ -145,7 +164,9 @@ class PoolsParser { return; } - if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) { + if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 || + finalPoolDataRename.length > 0 + ) { logger.debug(`Update pools table now`); // Add new mining pools into the database @@ -169,8 +190,22 @@ class PoolsParser { ;`); } + // Rename mining pools + const renameQueries: string[] = []; + for (let i = 0; i < finalPoolDataRename.length; ++i) { + renameQueries.push(` + UPDATE pools + SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}', + slug='${finalPoolDataRename[i].slug}' + WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}' + AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}' + ;`); + } + try { - await this.$deleteBlocskToReindex(finalPoolDataUpdate); + if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) { + await this.$deleteBlocskToReindex(finalPoolDataUpdate); + } if (finalPoolDataAdd.length > 0) { await DB.query({ sql: queryAdd, timeout: 120000 }); @@ -178,6 +213,9 @@ class PoolsParser { for (const query of updateQueries) { await DB.query({ sql: query, timeout: 120000 }); } + for (const query of renameQueries) { + await DB.query({ sql: query, timeout: 120000 }); + } await this.insertUnknownPool(); logger.info('Mining pools.json import completed'); } catch (e) { diff --git a/backend/src/api/tx-selection-worker.ts b/backend/src/api/tx-selection-worker.ts new file mode 100644 index 000000000..10f65000b --- /dev/null +++ b/backend/src/api/tx-selection-worker.ts @@ -0,0 +1,336 @@ +import config from '../config'; +import logger from '../logger'; +import { TransactionExtended, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces'; +import { PairingHeap } from '../utils/pairing-heap'; +import { Common } from './common'; +import { parentPort } from 'worker_threads'; + +if (parentPort) { + parentPort.on('message', (params: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null, condenseRest: boolean}) => { + const { mempool, blocks } = makeBlockTemplates(params); + + // return the result to main thread. + if (parentPort) { + parentPort.postMessage({ mempool, blocks }); + } + }); +} + +/* +* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core +* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp) +* +* blockLimit: number of blocks to build in total. +* weightLimit: maximum weight of transactions to consider using the selection algorithm. +* if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate +* condenseRest: whether to ignore excess transactions or append them to the final block. 
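A minimal self-contained sketch of the main-thread/worker handshake this new worker file relies on, with one file playing both roles and a stub standing in for the real template builder:

```ts
import { Worker, isMainThread, parentPort } from 'worker_threads';

if (isMainThread) {
  const worker = new Worker(__filename);
  worker.once('message', (result) => {
    console.log('templates from worker:', result);
    void worker.terminate();
  });
  // send a structured-clone copy of the job, much like makeTemplatesPool.exec() does
  worker.postMessage({ mempool: { abc: { weight: 560, fee: 1000 } }, blockLimit: 8 });
} else {
  parentPort?.on('message', (params: { mempool: Record<string, unknown>, blockLimit: number }) => {
    // ...build block templates here...
    parentPort?.postMessage({ blocks: [], mempool: params.mempool });
  });
}
```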
+*/ +function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit?: number | null, condenseRest?: boolean | null }) + : { mempool: { [txid: string]: TransactionExtended }, blocks: MempoolBlockWithTransactions[] } { + const start = Date.now(); + const auditPool: { [txid: string]: AuditTransaction } = {}; + const mempoolArray: AuditTransaction[] = []; + const restOfArray: TransactionExtended[] = []; + + let weight = 0; + const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity; + // grab the top feerate txs up to maxWeight + Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => { + weight += tx.weight; + if (weight >= maxWeight) { + restOfArray.push(tx); + return; + } + // initializing everything up front helps V8 optimize property access later + auditPool[tx.txid] = { + txid: tx.txid, + fee: tx.fee, + size: tx.size, + weight: tx.weight, + feePerVsize: tx.feePerVsize, + vin: tx.vin, + relativesSet: false, + ancestorMap: new Map(), + children: new Set(), + ancestorFee: 0, + ancestorWeight: 0, + score: 0, + used: false, + modified: false, + modifiedNode: null, + }; + mempoolArray.push(auditPool[tx.txid]); + }); + + // Build relatives graph & calculate ancestor scores + for (const tx of mempoolArray) { + if (!tx.relativesSet) { + setRelatives(tx, auditPool); + } + } + + // Sort by descending ancestor score + mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0)); + + // Build blocks by greedily choosing the highest feerate package + // (i.e. the package rooted in the transaction with the best ancestor score) + const blocks: MempoolBlockWithTransactions[] = []; + let blockWeight = 4000; + let blockSize = 0; + let transactions: AuditTransaction[] = []; + const modified: PairingHeap = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0)); + let overflow: AuditTransaction[] = []; + let failures = 0; + let top = 0; + while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) { + // skip invalid transactions + while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) { + top++; + } + + // Select best next package + let nextTx; + const nextPoolTx = mempoolArray[top]; + const nextModifiedTx = modified.peek(); + if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) { + nextTx = nextPoolTx; + top++; + } else { + modified.pop(); + if (nextModifiedTx) { + nextTx = nextModifiedTx; + nextTx.modifiedNode = undefined; + } + } + + if (nextTx && !nextTx?.used) { + // Check if the package fits into this block + if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) { + blockWeight += nextTx.ancestorWeight; + const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values()); + // sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count) + const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx]; + const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4); + sortedTxSet.forEach((ancestor, i, arr) => { + const mempoolTx = mempool[ancestor.txid]; + if (ancestor && !ancestor?.used) { + ancestor.used = true; + // update original copy of this tx with effective fee rate & relatives data + mempoolTx.effectiveFeePerVsize = effectiveFeeRate; + mempoolTx.ancestors = 
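The template builder above sorts candidates by ancestor score: the total fee of a transaction plus its unconfirmed ancestors, divided by their combined virtual size (weight / 4). A quick worked CPFP example of why a high-fee child pulls its parent forward:

```ts
function ancestorScore(fees: number[], weights: number[]): number {
  const fee = fees.reduce((a, b) => a + b, 0);
  const weight = weights.reduce((a, b) => a + b, 0);
  return fee / ((weight / 4) || 1); // sat/vB, guarding against zero weight
}

const parent = { fee: 200, weight: 800 };   // 1 sat/vB on its own
const child  = { fee: 4000, weight: 600 };  // ~26.7 sat/vB on its own

console.log(ancestorScore([parent.fee], [parent.weight]));                           // 1
console.log(ancestorScore([parent.fee, child.fee], [parent.weight, child.weight]));  // 12
```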
(Array.from(ancestor.ancestorMap?.values()) as AuditTransaction[]).map((a) => { + return { + txid: a.txid, + fee: a.fee, + weight: a.weight, + }; + }); + mempoolTx.cpfpChecked = true; + if (i < arr.length - 1) { + mempoolTx.bestDescendant = { + txid: arr[arr.length - 1].txid, + fee: arr[arr.length - 1].fee, + weight: arr[arr.length - 1].weight, + }; + } else { + mempoolTx.bestDescendant = null; + } + transactions.push(ancestor); + blockSize += ancestor.size; + } + }); + + // remove these as valid package ancestors for any descendants remaining in the mempool + if (sortedTxSet.length) { + sortedTxSet.forEach(tx => { + updateDescendants(tx, auditPool, modified); + }); + } + + failures = 0; + } else { + // hold this package in an overflow list while we check for smaller options + overflow.push(nextTx); + failures++; + } + } + + // this block is full + const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000); + const queueEmpty = top >= mempoolArray.length && modified.isEmpty(); + if ((exceededPackageTries || queueEmpty) && (!condenseRest || blocks.length < blockLimit - 1)) { + // construct this block + if (transactions.length) { + blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length)); + } + // reset for the next block + transactions = []; + blockSize = 0; + blockWeight = 4000; + + // 'overflow' packages didn't fit in this block, but are valid candidates for the next + for (const overflowTx of overflow.reverse()) { + if (overflowTx.modified) { + overflowTx.modifiedNode = modified.add(overflowTx); + } else { + top--; + mempoolArray[top] = overflowTx; + } + } + overflow = []; + } + } + if (condenseRest) { + // pack any leftover transactions into the last block + for (const tx of overflow) { + if (!tx || tx?.used) { + continue; + } + blockWeight += tx.weight; + blockSize += tx.size; + const mempoolTx = mempool[tx.txid]; + // update original copy of this tx with effective fee rate & relatives data + mempoolTx.effectiveFeePerVsize = tx.score; + mempoolTx.ancestors = (Array.from(tx.ancestorMap?.values()) as AuditTransaction[]).map((a) => { + return { + txid: a.txid, + fee: a.fee, + weight: a.weight, + }; + }); + mempoolTx.bestDescendant = null; + mempoolTx.cpfpChecked = true; + transactions.push(tx); + tx.used = true; + } + const blockTransactions = transactions.map(t => mempool[t.txid]); + restOfArray.forEach(tx => { + blockWeight += tx.weight; + blockSize += tx.size; + tx.effectiveFeePerVsize = tx.feePerVsize; + tx.cpfpChecked = false; + tx.ancestors = []; + tx.bestDescendant = null; + blockTransactions.push(tx); + }); + if (blockTransactions.length) { + blocks.push(dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length)); + } + transactions = []; + } else if (transactions.length) { + blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length)); + } + + const end = Date.now(); + const time = end - start; + logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds'); + + return { + mempool, + blocks + }; +} + +// traverse in-mempool ancestors +// recursion unavoidable, but should be limited to depth < 25 by mempool policy +function setRelatives( + tx: AuditTransaction, + mempool: { [txid: string]: AuditTransaction }, +): void { + for (const parent of tx.vin) { + const parentTx = mempool[parent.txid]; + if (parentTx && !tx.ancestorMap?.has(parent.txid)) { + tx.ancestorMap.set(parent.txid, parentTx); + 
parentTx.children.add(tx); + // visit each node only once + if (!parentTx.relativesSet) { + setRelatives(parentTx, mempool); + } + parentTx.ancestorMap.forEach((ancestor) => { + tx.ancestorMap.set(ancestor.txid, ancestor); + }); + } + }; + tx.ancestorFee = tx.fee || 0; + tx.ancestorWeight = tx.weight || 0; + tx.ancestorMap.forEach((ancestor) => { + tx.ancestorFee += ancestor.fee; + tx.ancestorWeight += ancestor.weight; + }); + tx.score = tx.ancestorFee / ((tx.ancestorWeight / 4) || 1); + tx.relativesSet = true; +} + +// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score +// avoids recursion to limit call stack depth +function updateDescendants( + rootTx: AuditTransaction, + mempool: { [txid: string]: AuditTransaction }, + modified: PairingHeap, +): void { + const descendantSet: Set = new Set(); + // stack of nodes left to visit + const descendants: AuditTransaction[] = []; + let descendantTx; + let tmpScore; + rootTx.children.forEach(childTx => { + if (!descendantSet.has(childTx)) { + descendants.push(childTx); + descendantSet.add(childTx); + } + }); + while (descendants.length) { + descendantTx = descendants.pop(); + if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) { + // remove tx as ancestor + descendantTx.ancestorMap.delete(rootTx.txid); + descendantTx.ancestorFee -= rootTx.fee; + descendantTx.ancestorWeight -= rootTx.weight; + tmpScore = descendantTx.score; + descendantTx.score = descendantTx.ancestorFee / (descendantTx.ancestorWeight / 4); + + if (!descendantTx.modifiedNode) { + descendantTx.modified = true; + descendantTx.modifiedNode = modified.add(descendantTx); + } else { + // rebalance modified heap if score has changed + if (descendantTx.score < tmpScore) { + modified.decreasePriority(descendantTx.modifiedNode); + } else if (descendantTx.score > tmpScore) { + modified.increasePriority(descendantTx.modifiedNode); + } + } + + // add this node's children to the stack + descendantTx.children.forEach(childTx => { + // visit each node only once + if (!descendantSet.has(childTx)) { + descendants.push(childTx); + descendantSet.add(childTx); + } + }); + } + } +} + +function dataToMempoolBlocks(transactions: TransactionExtended[], + blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions { + let rangeLength = 4; + if (blocksIndex === 0) { + rangeLength = 8; + } + if (transactions.length > 4000) { + rangeLength = 6; + } else if (transactions.length > 10000) { + rangeLength = 8; + } + return { + blockSize: blockSize, + blockVSize: blockWeight / 4, + nTx: transactions.length, + totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0), + medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE), + feeRange: Common.getFeesInRange(transactions, rangeLength), + transactionIds: transactions.map((tx) => tx.txid), + transactions: transactions.map((tx) => Common.stripTransaction(tx)), + }; +} \ No newline at end of file diff --git a/backend/src/api/websocket-handler.ts b/backend/src/api/websocket-handler.ts index 4bd7cfc8d..375869902 100644 --- a/backend/src/api/websocket-handler.ts +++ b/backend/src/api/websocket-handler.ts @@ -244,13 +244,18 @@ class WebsocketHandler { }); } - handleMempoolChange(newMempool: { [txid: string]: TransactionExtended }, - newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) { + async handleMempoolChange(newMempool: { [txid: string]: 
TransactionExtended }, + newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]): Promise { if (!this.wss) { throw new Error('WebSocket.Server is not set'); } - mempoolBlocks.updateMempoolBlocks(newMempool); + if (config.MEMPOOL.ADVANCED_TRANSACTION_SELECTION) { + await mempoolBlocks.makeBlockTemplates(newMempool, 8, null, true); + } + else { + mempoolBlocks.updateMempoolBlocks(newMempool); + } const mBlocks = mempoolBlocks.getMempoolBlocks(); const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas(); const mempoolInfo = memPool.getMempoolInfo(); @@ -405,22 +410,25 @@ class WebsocketHandler { } }); } - - handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): void { + + async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise { if (!this.wss) { throw new Error('WebSocket.Server is not set'); } - let mBlocks: undefined | MempoolBlock[]; - let mBlockDeltas: undefined | MempoolBlockDelta[]; - let matchRate; const _memPool = memPool.getMempool(); + let matchRate; - if (Common.indexingEnabled()) { - const mempoolCopy = cloneMempool(_memPool); - const projectedBlocks = mempoolBlocks.makeBlockTemplates(mempoolCopy, 2); + if (config.MEMPOOL.ADVANCED_TRANSACTION_SELECTION) { + await mempoolBlocks.makeBlockTemplates(_memPool, 2); + } else { + mempoolBlocks.updateMempoolBlocks(_memPool); + } - const { censored, added, score } = Audit.auditBlock(transactions, projectedBlocks, mempoolCopy); + if (Common.indexingEnabled() && memPool.isInSync()) { + const projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions(); + + const { censored, added, score } = Audit.auditBlock(transactions, projectedBlocks, _memPool); matchRate = Math.round(score * 100 * 100) / 100; const stripped = projectedBlocks[0]?.transactions ? 
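The audit score computed above is a 0 to 1 fraction that gets published as a percentage rounded to two decimals. A tiny sketch of that conversion:

```ts
function toMatchRate(score: number): number {
  return Math.round(score * 100 * 100) / 100;
}

console.log(toMatchRate(0.987654)); // 98.77
console.log(toMatchRate(1));        // 100
```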
projectedBlocks[0].transactions.map((tx) => { @@ -459,9 +467,13 @@ class WebsocketHandler { delete _memPool[txId]; } - mempoolBlocks.updateMempoolBlocks(_memPool); - mBlocks = mempoolBlocks.getMempoolBlocks(); - mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas(); + if (config.MEMPOOL.ADVANCED_TRANSACTION_SELECTION) { + await mempoolBlocks.makeBlockTemplates(_memPool, 2); + } else { + mempoolBlocks.updateMempoolBlocks(_memPool); + } + const mBlocks = mempoolBlocks.getMempoolBlocks(); + const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas(); const da = difficultyAdjustment.getDifficultyAdjustment(); const fees = feeApi.getRecommendedFee(); @@ -569,14 +581,4 @@ class WebsocketHandler { } } -function cloneMempool(mempool: { [txid: string]: TransactionExtended }): { [txid: string]: TransactionExtended } { - const cloned = {}; - Object.keys(mempool).forEach(id => { - cloned[id] = { - ...mempool[id] - }; - }); - return cloned; -} - export default new WebsocketHandler(); diff --git a/backend/src/config.ts b/backend/src/config.ts index 43ba16cb3..4aab7a306 100644 --- a/backend/src/config.ts +++ b/backend/src/config.ts @@ -4,6 +4,7 @@ const configFromFile = require( interface IConfig { MEMPOOL: { + ENABLED: boolean; NETWORK: 'mainnet' | 'testnet' | 'signet' | 'liquid' | 'liquidtestnet'; BACKEND: 'esplora' | 'electrum' | 'none'; HTTP_PORT: number; @@ -28,6 +29,7 @@ interface IConfig { AUTOMATIC_BLOCK_REINDEXING: boolean; POOLS_JSON_URL: string, POOLS_JSON_TREE_URL: string, + ADVANCED_TRANSACTION_SELECTION: boolean; }; ESPLORA: { REST_API_URL: string; @@ -119,6 +121,7 @@ interface IConfig { const defaults: IConfig = { 'MEMPOOL': { + 'ENABLED': true, 'NETWORK': 'mainnet', 'BACKEND': 'none', 'HTTP_PORT': 8999, @@ -143,6 +146,7 @@ const defaults: IConfig = { 'AUTOMATIC_BLOCK_REINDEXING': false, 'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json', 'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master', + 'ADVANCED_TRANSACTION_SELECTION': false, }, 'ESPLORA': { 'REST_API_URL': 'http://127.0.0.1:3000', @@ -224,11 +228,11 @@ const defaults: IConfig = { 'BISQ_URL': 'https://bisq.markets/api', 'BISQ_ONION': 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api' }, - "MAXMIND": { + 'MAXMIND': { 'ENABLED': false, - "GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb", - "GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb", - "GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb" + 'GEOLITE2_CITY': '/usr/local/share/GeoIP/GeoLite2-City.mmdb', + 'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb', + 'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb' }, }; diff --git a/backend/src/index.ts b/backend/src/index.ts index d1e3cee8d..09a12e200 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -1,4 +1,4 @@ -import express from "express"; +import express from 'express'; import { Application, Request, Response, NextFunction } from 'express'; import * as http from 'http'; import * as WebSocket from 'ws'; @@ -34,7 +34,7 @@ import miningRoutes from './api/mining/mining-routes'; import bisqRoutes from './api/bisq/bisq.routes'; import liquidRoutes from './api/liquid/liquid.routes'; import bitcoinRoutes from './api/bitcoin/bitcoin.routes'; -import fundingTxFetcher from "./tasks/lightning/sync-tasks/funding-tx-fetcher"; +import fundingTxFetcher from './tasks/lightning/sync-tasks/funding-tx-fetcher'; class Server { private wss: WebSocket.Server | undefined; @@ -74,7 +74,7 @@ class Server { 
} } - async startServer(worker = false) { + async startServer(worker = false): Promise { logger.notice(`Starting Mempool Server${worker ? ' (worker)' : ''}... (${backendInfo.getShortCommitHash()})`); this.app @@ -92,7 +92,9 @@ class Server { this.setUpWebsocketHandling(); await syncAssets.syncAssets$(); - diskCache.loadMempoolCache(); + if (config.MEMPOOL.ENABLED) { + diskCache.loadMempoolCache(); + } if (config.DATABASE.ENABLED) { await DB.checkDbConnection(); @@ -127,7 +129,10 @@ class Server { fiatConversion.startService(); this.setUpHttpApiRoutes(); - this.runMainUpdateLoop(); + + if (config.MEMPOOL.ENABLED) { + this.runMainUpdateLoop(); + } if (config.BISQ.ENABLED) { bisq.startBisqService(); @@ -149,7 +154,7 @@ class Server { }); } - async runMainUpdateLoop() { + async runMainUpdateLoop(): Promise { try { try { await memPool.$updateMemPoolInfo(); @@ -183,7 +188,7 @@ class Server { } } - async $runLightningBackend() { + async $runLightningBackend(): Promise { try { await fundingTxFetcher.$init(); await networkSyncService.$startService(); @@ -195,7 +200,7 @@ class Server { }; } - setUpWebsocketHandling() { + setUpWebsocketHandling(): void { if (this.wss) { websocketHandler.setWebsocketServer(this.wss); } @@ -209,19 +214,21 @@ class Server { }); } websocketHandler.setupConnectionHandling(); - statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler)); - blocks.setNewBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler)); - memPool.setMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler)); + if (config.MEMPOOL.ENABLED) { + statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler)); + memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler)); + blocks.setNewAsyncBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler)); + } fiatConversion.setProgressChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler)); loadingIndicators.setProgressChangedCallback(websocketHandler.handleLoadingChanged.bind(websocketHandler)); } - - setUpHttpApiRoutes() { + + setUpHttpApiRoutes(): void { bitcoinRoutes.initRoutes(this.app); - if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED) { + if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && config.MEMPOOL.ENABLED) { statisticsRoutes.initRoutes(this.app); } - if (Common.indexingEnabled()) { + if (Common.indexingEnabled() && config.MEMPOOL.ENABLED) { miningRoutes.initRoutes(this.app); } if (config.BISQ.ENABLED) { @@ -238,4 +245,4 @@ class Server { } } -const server = new Server(); +((): Server => new Server())(); diff --git a/backend/src/mempool.interfaces.ts b/backend/src/mempool.interfaces.ts index 32d87f3dc..24bfa1565 100644 --- a/backend/src/mempool.interfaces.ts +++ b/backend/src/mempool.interfaces.ts @@ -32,6 +32,11 @@ export interface BlockAudit { matchRate: number, } +export interface AuditScore { + hash: string, + matchRate?: number, +} + export interface MempoolBlock { blockSize: number; blockVSize: number; diff --git a/backend/src/repositories/BlocksAuditsRepository.ts b/backend/src/repositories/BlocksAuditsRepository.ts index 188cf4c38..2aa1fb260 100644 --- a/backend/src/repositories/BlocksAuditsRepository.ts +++ b/backend/src/repositories/BlocksAuditsRepository.ts @@ -1,6 +1,6 @@ import DB from '../database'; import logger from '../logger'; -import { BlockAudit } from '../mempool.interfaces'; +import { BlockAudit, AuditScore } from 
'../mempool.interfaces'; class BlocksAuditRepositories { public async $saveAudit(audit: BlockAudit): Promise { @@ -72,10 +72,10 @@ class BlocksAuditRepositories { } } - public async $getShortBlockAudit(hash: string): Promise { + public async $getBlockAuditScore(hash: string): Promise { try { const [rows]: any[] = await DB.query( - `SELECT hash as id, match_rate as matchRate + `SELECT hash, match_rate as matchRate FROM blocks_audits WHERE blocks_audits.hash = "${hash}" `); diff --git a/backend/src/repositories/BlocksRepository.ts b/backend/src/repositories/BlocksRepository.ts index 40f670833..590e9de37 100644 --- a/backend/src/repositories/BlocksRepository.ts +++ b/backend/src/repositories/BlocksRepository.ts @@ -392,6 +392,36 @@ class BlocksRepository { } } + /** + * Get the first block at or directly after a given timestamp + * @param timestamp number unix time in seconds + * @returns The height and timestamp of a block (timestamp might vary from given timestamp) + */ + public async $getBlockHeightFromTimestamp( + timestamp: number, + ): Promise<{ height: number; hash: string; timestamp: number }> { + try { + // Get first block at or after the given timestamp + const query = `SELECT height, hash, blockTimestamp as timestamp FROM blocks + WHERE blockTimestamp <= FROM_UNIXTIME(?) + ORDER BY blockTimestamp DESC + LIMIT 1`; + const params = [timestamp]; + const [rows]: any[][] = await DB.query(query, params); + if (rows.length === 0) { + throw new Error(`No block was found before timestamp ${timestamp}`); + } + + return rows[0]; + } catch (e) { + logger.err( + 'Cannot get block height from timestamp from the db. Reason: ' + + (e instanceof Error ? e.message : e), + ); + throw e; + } + } + /** * Return blocks height */ diff --git a/backend/src/repositories/NodeRecordsRepository.ts b/backend/src/repositories/NodeRecordsRepository.ts new file mode 100644 index 000000000..cf676e35e --- /dev/null +++ b/backend/src/repositories/NodeRecordsRepository.ts @@ -0,0 +1,67 @@ +import { ResultSetHeader, RowDataPacket } from 'mysql2'; +import DB from '../database'; +import logger from '../logger'; + +export interface NodeRecord { + publicKey: string; // node public key + type: number; // TLV extension record type + payload: string; // base64 record payload +} + +class NodesRecordsRepository { + public async $saveRecord(record: NodeRecord): Promise { + try { + const payloadBytes = Buffer.from(record.payload, 'base64'); + await DB.query(` + INSERT INTO nodes_records(public_key, type, payload) + VALUE (?, ?, ?) + ON DUPLICATE KEY UPDATE + payload = ? + `, [record.publicKey, record.type, payloadBytes, payloadBytes]); + } catch (e: any) { + if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this + logger.err(`Cannot save node record (${[record.publicKey, record.type, record.payload]}) into db. Reason: ` + (e instanceof Error ? e.message : e)); + // We don't throw, not a critical issue if we miss some nodes records + } + } + } + + public async $getRecordTypes(publicKey: string): Promise { + try { + const query = ` + SELECT type FROM nodes_records + WHERE public_key = ? + `; + const [rows] = await DB.query(query, [publicKey]); + return rows.map(row => row['type']); + } catch (e) { + logger.err(`Cannot retrieve custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? 
e.message : e)); + return []; + } + } + + public async $deleteUnusedRecords(publicKey: string, recordTypes: number[]): Promise { + try { + let query; + if (recordTypes.length) { + query = ` + DELETE FROM nodes_records + WHERE public_key = ? + AND type NOT IN (${recordTypes.map(type => `${type}`).join(',')}) + `; + } else { + query = ` + DELETE FROM nodes_records + WHERE public_key = ? + `; + } + const [result] = await DB.query(query, [publicKey]); + return result.affectedRows; + } catch (e) { + logger.err(`Cannot delete unused custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e)); + return 0; + } + } +} + +export default new NodesRecordsRepository(); diff --git a/backend/src/tasks/lightning/network-sync.service.ts b/backend/src/tasks/lightning/network-sync.service.ts index 838170a3e..2910f0f9c 100644 --- a/backend/src/tasks/lightning/network-sync.service.ts +++ b/backend/src/tasks/lightning/network-sync.service.ts @@ -13,6 +13,7 @@ import fundingTxFetcher from './sync-tasks/funding-tx-fetcher'; import NodesSocketsRepository from '../../repositories/NodesSocketsRepository'; import { Common } from '../../api/common'; import blocks from '../../api/blocks'; +import NodeRecordsRepository from '../../repositories/NodeRecordsRepository'; class NetworkSyncService { loggerTimer = 0; @@ -63,6 +64,7 @@ class NetworkSyncService { let progress = 0; let deletedSockets = 0; + let deletedRecords = 0; const graphNodesPubkeys: string[] = []; for (const node of nodes) { const latestUpdated = await channelsApi.$getLatestChannelUpdateForNode(node.pub_key); @@ -84,8 +86,23 @@ class NetworkSyncService { addresses.push(socket.addr); } deletedSockets += await NodesSocketsRepository.$deleteUnusedSockets(node.pub_key, addresses); + + const oldRecordTypes = await NodeRecordsRepository.$getRecordTypes(node.pub_key); + const customRecordTypes: number[] = []; + for (const [type, payload] of Object.entries(node.custom_records || {})) { + const numericalType = parseInt(type); + await NodeRecordsRepository.$saveRecord({ + publicKey: node.pub_key, + type: numericalType, + payload, + }); + customRecordTypes.push(numericalType); + } + if (oldRecordTypes.reduce((changed, type) => changed || customRecordTypes.indexOf(type) === -1, false)) { + deletedRecords += await NodeRecordsRepository.$deleteUnusedRecords(node.pub_key, customRecordTypes); + } } - logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted`); + logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted. 
${deletedRecords} custom records deleted.`); // If a channel if not present in the graph, mark it as inactive await nodesApi.$setNodesInactive(graphNodesPubkeys); @@ -309,7 +326,7 @@ class NetworkSyncService { └──────────────────┘ */ - private async $runClosedChannelsForensics(): Promise { + private async $runClosedChannelsForensics(skipUnresolved: boolean = false): Promise { if (!config.ESPLORA.REST_API_URL) { return; } @@ -318,9 +335,18 @@ class NetworkSyncService { try { logger.info(`Started running closed channel forensics...`); - const channels = await channelsApi.$getClosedChannelsWithoutReason(); + let channels; + const closedChannels = await channelsApi.$getClosedChannelsWithoutReason(); + if (skipUnresolved) { + channels = closedChannels; + } else { + const unresolvedChannels = await channelsApi.$getUnresolvedClosedChannels(); + channels = [...closedChannels, ...unresolvedChannels]; + } + for (const channel of channels) { let reason = 0; + let resolvedForceClose = false; // Only Esplora backend can retrieve spent transaction outputs try { let outspends: IEsploraApi.Outspend[] | undefined; @@ -350,6 +376,7 @@ class NetworkSyncService { reason = 3; } else { reason = 2; + resolvedForceClose = true; } } else { /* @@ -374,6 +401,9 @@ class NetworkSyncService { if (reason) { logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.'); await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]); + if (reason === 2 && resolvedForceClose) { + await DB.query(`UPDATE channels SET closing_resolved = ? WHERE id = ?`, [true, channel.id]); + } } } catch (e) { logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? e.message : e}`); diff --git a/backend/src/tasks/lightning/sync-tasks/node-locations.ts b/backend/src/tasks/lightning/sync-tasks/node-locations.ts index ba59e9e48..afd280ec7 100644 --- a/backend/src/tasks/lightning/sync-tasks/node-locations.ts +++ b/backend/src/tasks/lightning/sync-tasks/node-locations.ts @@ -6,6 +6,7 @@ import DB from '../../../database'; import logger from '../../../logger'; import { ResultSetHeader } from 'mysql2'; import * as IPCheck from '../../../utils/ipcheck.js'; +import { Reader } from 'mmdb-lib'; export async function $lookupNodeLocation(): Promise { let loggerTimer = new Date().getTime() / 1000; @@ -18,7 +19,10 @@ export async function $lookupNodeLocation(): Promise { const nodes = await nodesApi.$getAllNodes(); const lookupCity = await maxmind.open(config.MAXMIND.GEOLITE2_CITY); const lookupAsn = await maxmind.open(config.MAXMIND.GEOLITE2_ASN); - const lookupIsp = await maxmind.open(config.MAXMIND.GEOIP2_ISP); + let lookupIsp: Reader | null = null; + try { + lookupIsp = await maxmind.open(config.MAXMIND.GEOIP2_ISP); + } catch (e) { } for (const node of nodes) { const sockets: string[] = node.sockets.split(','); @@ -29,7 +33,10 @@ export async function $lookupNodeLocation(): Promise { if (hasClearnet && ip !== '127.0.1.1' && ip !== '127.0.0.1') { const city = lookupCity.get(ip); const asn = lookupAsn.get(ip); - const isp = lookupIsp.get(ip); + let isp: IspResponse | null = null; + if (lookupIsp) { + isp = lookupIsp.get(ip); + } let asOverwrite: any | undefined; if (asn && (IPCheck.match(ip, '170.75.160.0/20') || IPCheck.match(ip, '172.81.176.0/21'))) { diff --git a/docker/README.md b/docker/README.md index 92cc1df6d..4061f420c 100644 --- a/docker/README.md +++ b/docker/README.md @@ -89,6 +89,7 @@ Below we list all settings from 
`mempool-config.json` and the corresponding over "MEMPOOL": { "NETWORK": "mainnet", "BACKEND": "electrum", + "ENABLED": true, "HTTP_PORT": 8999, "SPAWN_CLUSTER_PROCS": 0, "API_URL_PREFIX": "/api/v1/", diff --git a/docker/backend/mempool-config.json b/docker/backend/mempool-config.json index 930430127..378bba8db 100644 --- a/docker/backend/mempool-config.json +++ b/docker/backend/mempool-config.json @@ -2,6 +2,7 @@ "MEMPOOL": { "NETWORK": "__MEMPOOL_NETWORK__", "BACKEND": "__MEMPOOL_BACKEND__", + "ENABLED": __MEMPOOL_ENABLED__, "HTTP_PORT": __MEMPOOL_HTTP_PORT__, "SPAWN_CLUSTER_PROCS": __MEMPOOL_SPAWN_CLUSTER_PROCS__, "API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__", diff --git a/docker/backend/start.sh b/docker/backend/start.sh index 4933dc2ee..c8e2a1502 100755 --- a/docker/backend/start.sh +++ b/docker/backend/start.sh @@ -3,6 +3,7 @@ # MEMPOOL __MEMPOOL_NETWORK__=${MEMPOOL_NETWORK:=mainnet} __MEMPOOL_BACKEND__=${MEMPOOL_BACKEND:=electrum} +__MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=true} __MEMPOOL_HTTP_PORT__=${BACKEND_HTTP_PORT:=8999} __MEMPOOL_SPAWN_CLUSTER_PROCS__=${MEMPOOL_SPAWN_CLUSTER_PROCS:=0} __MEMPOOL_API_URL_PREFIX__=${MEMPOOL_API_URL_PREFIX:=/api/v1/} @@ -111,6 +112,7 @@ mkdir -p "${__MEMPOOL_CACHE_DIR__}" sed -i "s/__MEMPOOL_NETWORK__/${__MEMPOOL_NETWORK__}/g" mempool-config.json sed -i "s/__MEMPOOL_BACKEND__/${__MEMPOOL_BACKEND__}/g" mempool-config.json +sed -i "s/__MEMPOOL_ENABLED__/${__MEMPOOL_ENABLED__}/g" mempool-config.json sed -i "s/__MEMPOOL_HTTP_PORT__/${__MEMPOOL_HTTP_PORT__}/g" mempool-config.json sed -i "s/__MEMPOOL_SPAWN_CLUSTER_PROCS__/${__MEMPOOL_SPAWN_CLUSTER_PROCS__}/g" mempool-config.json sed -i "s!__MEMPOOL_API_URL_PREFIX__!${__MEMPOOL_API_URL_PREFIX__}!g" mempool-config.json diff --git a/docker/frontend/Dockerfile b/docker/frontend/Dockerfile index d144d5882..b54612e3d 100644 --- a/docker/frontend/Dockerfile +++ b/docker/frontend/Dockerfile @@ -8,7 +8,9 @@ WORKDIR /build COPY . . 
RUN apt-get update RUN apt-get install -y build-essential rsync +RUN cp mempool-frontend-config.sample.json mempool-frontend-config.json RUN npm install --omit=dev --omit=optional + RUN npm run build FROM nginx:1.17.8-alpine @@ -28,7 +30,9 @@ RUN chown -R 1000:1000 /patch && chmod -R 755 /patch && \ chown -R 1000:1000 /var/cache/nginx && \ chown -R 1000:1000 /var/log/nginx && \ chown -R 1000:1000 /etc/nginx/nginx.conf && \ - chown -R 1000:1000 /etc/nginx/conf.d + chown -R 1000:1000 /etc/nginx/conf.d && \ + chown -R 1000:1000 /var/www/mempool + RUN touch /var/run/nginx.pid && \ chown -R 1000:1000 /var/run/nginx.pid diff --git a/docker/frontend/entrypoint.sh b/docker/frontend/entrypoint.sh index 7ebe5632c..6a263de99 100644 --- a/docker/frontend/entrypoint.sh +++ b/docker/frontend/entrypoint.sh @@ -10,4 +10,51 @@ cp /etc/nginx/nginx.conf /patch/nginx.conf sed -i "s/__MEMPOOL_FRONTEND_HTTP_PORT__/${__MEMPOOL_FRONTEND_HTTP_PORT__}/g" /patch/nginx.conf cat /patch/nginx.conf > /etc/nginx/nginx.conf +# Runtime overrides - read env vars defined in docker compose + +__TESTNET_ENABLED__=${TESTNET_ENABLED:=false} +__SIGNET_ENABLED__=${SIGNET_ENABLED:=false} +__LIQUID_ENABLED__=${LIQUID_EANBLED:=false} +__LIQUID_TESTNET_ENABLED__=${LIQUID_TESTNET_ENABLED:=false} +__BISQ_ENABLED__=${BISQ_ENABLED:=false} +__BISQ_SEPARATE_BACKEND__=${BISQ_SEPARATE_BACKEND:=false} +__ITEMS_PER_PAGE__=${ITEMS_PER_PAGE:=10} +__KEEP_BLOCKS_AMOUNT__=${KEEP_BLOCKS_AMOUNT:=8} +__NGINX_PROTOCOL__=${NGINX_PROTOCOL:=http} +__NGINX_HOSTNAME__=${NGINX_HOSTNAME:=localhost} +__NGINX_PORT__=${NGINX_PORT:=8999} +__BLOCK_WEIGHT_UNITS__=${BLOCK_WEIGHT_UNITS:=4000000} +__MEMPOOL_BLOCKS_AMOUNT__=${MEMPOOL_BLOCKS_AMOUNT:=8} +__BASE_MODULE__=${BASE_MODULE:=mempool} +__MEMPOOL_WEBSITE_URL__=${MEMPOOL_WEBSITE_URL:=https://mempool.space} +__LIQUID_WEBSITE_URL__=${LIQUID_WEBSITE_URL:=https://liquid.network} +__BISQ_WEBSITE_URL__=${BISQ_WEBSITE_URL:=https://bisq.markets} +__MINING_DASHBOARD__=${MINING_DASHBOARD:=true} +__LIGHTNING__=${LIGHTNING:=false} + +# Export as environment variables to be used by envsubst +export __TESTNET_ENABLED__ +export __SIGNET_ENABLED__ +export __LIQUID_ENABLED__ +export __LIQUID_TESTNET_ENABLED__ +export __BISQ_ENABLED__ +export __BISQ_SEPARATE_BACKEND__ +export __ITEMS_PER_PAGE__ +export __KEEP_BLOCKS_AMOUNT__ +export __NGINX_PROTOCOL__ +export __NGINX_HOSTNAME__ +export __NGINX_PORT__ +export __BLOCK_WEIGHT_UNITS__ +export __MEMPOOL_BLOCKS_AMOUNT__ +export __BASE_MODULE__ +export __MEMPOOL_WEBSITE_URL__ +export __LIQUID_WEBSITE_URL__ +export __BISQ_WEBSITE_URL__ +export __MINING_DASHBOARD__ +export __LIGHTNING__ + +folder=$(find /var/www/mempool -name "config.js" | xargs dirname) +echo ${folder} +envsubst < ${folder}/config.template.js > ${folder}/config.js + exec "$@" diff --git a/frontend/angular.json b/frontend/angular.json index 1ed29cad9..dedf52d81 100644 --- a/frontend/angular.json +++ b/frontend/angular.json @@ -152,15 +152,14 @@ "assets": [ "src/favicon.ico", "src/resources", - "src/robots.txt" + "src/robots.txt", + "src/config.js", + "src/config.template.js" ], "styles": [ "src/styles.scss", "node_modules/@fortawesome/fontawesome-svg-core/styles.css" ], - "scripts": [ - "generated-config.js" - ], "vendorChunk": true, "extractLicenses": false, "buildOptimizer": false, @@ -222,6 +221,10 @@ "proxyConfig": "proxy.conf.local.js", "verbose": true }, + "local-esplora": { + "proxyConfig": "proxy.conf.local-esplora.js", + "verbose": true + }, "mixed": { "proxyConfig": "proxy.conf.mixed.js", "verbose": true diff --git 
a/frontend/generate-config.js b/frontend/generate-config.js index 1f37953b7..3cc173e00 100644 --- a/frontend/generate-config.js +++ b/frontend/generate-config.js @@ -2,7 +2,8 @@ var fs = require('fs'); const { spawnSync } = require('child_process'); const CONFIG_FILE_NAME = 'mempool-frontend-config.json'; -const GENERATED_CONFIG_FILE_NAME = 'generated-config.js'; +const GENERATED_CONFIG_FILE_NAME = 'src/resources/config.js'; +const GENERATED_TEMPLATE_CONFIG_FILE_NAME = 'src/resources/config.template.js'; let settings = []; let configContent = {}; @@ -67,10 +68,17 @@ if (process.env.DOCKER_COMMIT_HASH) { const newConfig = `(function (window) { window.__env = window.__env || {};${settings.reduce((str, obj) => `${str} - window.__env.${obj.key} = ${ typeof obj.value === 'string' ? `'${obj.value}'` : obj.value };`, '')} + window.__env.${obj.key} = ${typeof obj.value === 'string' ? `'${obj.value}'` : obj.value};`, '')} window.__env.GIT_COMMIT_HASH = '${gitCommitHash}'; window.__env.PACKAGE_JSON_VERSION = '${packetJsonVersion}'; - }(global || this));`; + }(this));`; + +const newConfigTemplate = `(function (window) { + window.__env = window.__env || {};${settings.reduce((str, obj) => `${str} + window.__env.${obj.key} = ${typeof obj.value === 'string' ? `'\${__${obj.key}__}'` : `\${__${obj.key}__}`};`, '')} + window.__env.GIT_COMMIT_HASH = '${gitCommitHash}'; + window.__env.PACKAGE_JSON_VERSION = '${packetJsonVersion}'; + }(this));`; function readConfig(path) { try { @@ -89,6 +97,16 @@ function writeConfig(path, config) { } } +function writeConfigTemplate(path, config) { + try { + fs.writeFileSync(path, config, 'utf8'); + } catch (e) { + throw new Error(e); + } +} + +writeConfigTemplate(GENERATED_TEMPLATE_CONFIG_FILE_NAME, newConfigTemplate); + const currentConfig = readConfig(GENERATED_CONFIG_FILE_NAME); if (currentConfig && currentConfig === newConfig) { @@ -106,4 +124,4 @@ if (currentConfig && currentConfig === newConfig) { console.log('NEW CONFIG: ', newConfig); writeConfig(GENERATED_CONFIG_FILE_NAME, newConfig); console.log(`${GENERATED_CONFIG_FILE_NAME} file updated`); -}; +} diff --git a/frontend/package.json b/frontend/package.json index c5a062e01..9423eb901 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -29,6 +29,7 @@ "serve:local-prod": "npm run generate-config && npm run ng -- serve -c local-prod", "serve:local-staging": "npm run generate-config && npm run ng -- serve -c local-staging", "start": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local", + "start:local-esplora": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-esplora", "start:stg": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c staging", "start:local-prod": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-prod", "start:local-staging": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-staging", diff --git a/frontend/proxy.conf.local-esplora.js b/frontend/proxy.conf.local-esplora.js new file mode 100644 index 000000000..8bb57e623 --- /dev/null +++ b/frontend/proxy.conf.local-esplora.js @@ -0,0 +1,137 @@ +const fs = require('fs'); + +const FRONTEND_CONFIG_FILE_NAME = 'mempool-frontend-config.json'; + +let configContent; + +// Read frontend config +try { + const rawConfig = fs.readFileSync(FRONTEND_CONFIG_FILE_NAME); + configContent = JSON.parse(rawConfig); + console.log(`${FRONTEND_CONFIG_FILE_NAME} file found, using provided config`); +} catch 
(e) { + console.log(e); + if (e.code !== 'ENOENT') { + throw new Error(e); + } else { + console.log(`${FRONTEND_CONFIG_FILE_NAME} file not found, using default config`); + } +} + +let PROXY_CONFIG = []; + +if (configContent && configContent.BASE_MODULE === 'liquid') { + PROXY_CONFIG.push(...[ + { + context: ['/liquid/api/v1/**'], + target: `http://127.0.0.1:8999`, + secure: false, + ws: true, + changeOrigin: true, + proxyTimeout: 30000, + pathRewrite: { + "^/liquid": "" + }, + }, + { + context: ['/liquid/api/**'], + target: `http://127.0.0.1:3000`, + secure: false, + changeOrigin: true, + proxyTimeout: 30000, + pathRewrite: { + "^/liquid/api/": "" + }, + }, + { + context: ['/liquidtestnet/api/v1/**'], + target: `http://127.0.0.1:8999`, + secure: false, + ws: true, + changeOrigin: true, + proxyTimeout: 30000, + pathRewrite: { + "^/liquidtestnet": "" + }, + }, + { + context: ['/liquidtestnet/api/**'], + target: `http://127.0.0.1:3000`, + secure: false, + changeOrigin: true, + proxyTimeout: 30000, + pathRewrite: { + "^/liquidtestnet/api/": "/" + }, + }, + ]); +} + + +if (configContent && configContent.BASE_MODULE === 'bisq') { + PROXY_CONFIG.push(...[ + { + context: ['/bisq/api/v1/ws'], + target: `http://127.0.0.1:8999`, + secure: false, + ws: true, + changeOrigin: true, + proxyTimeout: 30000, + pathRewrite: { + "^/bisq": "" + }, + }, + { + context: ['/bisq/api/v1/**'], + target: `http://127.0.0.1:8999`, + secure: false, + changeOrigin: true, + proxyTimeout: 30000, + }, + { + context: ['/bisq/api/**'], + target: `http://127.0.0.1:8999`, + secure: false, + changeOrigin: true, + proxyTimeout: 30000, + pathRewrite: { + "^/bisq/api/": "/api/v1/bisq/" + }, + } + ]); +} + +PROXY_CONFIG.push(...[ + { + context: ['/testnet/api/v1/lightning/**'], + target: `http://127.0.0.1:8999`, + secure: false, + changeOrigin: true, + proxyTimeout: 30000, + pathRewrite: { + "^/testnet": "" + }, + }, + { + context: ['/api/v1/**'], + target: `http://127.0.0.1:8999`, + secure: false, + ws: true, + changeOrigin: true, + proxyTimeout: 30000, + }, + { + context: ['/api/**'], + target: `http://127.0.0.1:3000`, + secure: false, + changeOrigin: true, + proxyTimeout: 30000, + pathRewrite: { + "^/api": "" + }, + } +]); + +console.log(PROXY_CONFIG); + +module.exports = PROXY_CONFIG; \ No newline at end of file diff --git a/frontend/src/app/app.constants.ts b/frontend/src/app/app.constants.ts index 232578e6b..9cd374cd0 100644 --- a/frontend/src/app/app.constants.ts +++ b/frontend/src/app/app.constants.ts @@ -79,7 +79,7 @@ export const poolsColor = { 'binancepool': '#1E88E5', 'viabtc': '#039BE5', 'btccom': '#00897B', - 'slushpool': '#00ACC1', + 'braiinspool': '#00ACC1', 'sbicrypto': '#43A047', 'marapool': '#7CB342', 'luxor': '#C0CA33', diff --git a/frontend/src/app/components/about/about.component.html b/frontend/src/app/components/about/about.component.html index 5cfb2c905..cc24e7893 100644 --- a/frontend/src/app/components/about/about.component.html +++ b/frontend/src/app/components/about/about.component.html @@ -129,7 +129,7 @@ Gemini - + @@ -274,6 +274,10 @@ Schildbach + + + Nunchuk + diff --git a/frontend/src/app/components/about/about.component.scss b/frontend/src/app/components/about/about.component.scss index 2cc5d5102..1d1d93214 100644 --- a/frontend/src/app/components/about/about.component.scss +++ b/frontend/src/app/components/about/about.component.scss @@ -3,8 +3,8 @@ text-align: center; .image { - width: 80px; - height: 80px; + width: 81px; + height: 81px; background-size: 100%, 100%; border-radius: 50%; margin: 
25px; diff --git a/frontend/src/app/components/block-audit/block-audit.component.html b/frontend/src/app/components/block-audit/block-audit.component.html index 543dbb705..a3f2e2ada 100644 --- a/frontend/src/app/components/block-audit/block-audit.component.html +++ b/frontend/src/app/components/block-audit/block-audit.component.html @@ -41,10 +41,6 @@ - - Transactions - {{ blockAudit.tx_count }} - Size @@ -61,6 +57,10 @@
+ + + + @@ -69,18 +69,10 @@ - - - - - - - -
Transactions{{ blockAudit.tx_count }}
Block health {{ blockAudit.matchRate }}%
Removed txs {{ blockAudit.missingTxs.length }}
Omitted txs {{ numMissing }}
Added txs {{ blockAudit.addedTxs.length }}
Included txs {{ numUnexpected }}
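The (tag-stripped) rows above add an audit summary to the block-audit page: `numMissing` feeds the "Omitted txs" row and `numUnexpected` the "Included txs" row. A hedged sketch of how those counts fall out of the audit payload, mirroring the classification loop in `block-audit.component.ts` further down (the `auditCounts` helper itself is illustrative):

```typescript
// Illustrative helper – same field names as the diff, simplified logic.
interface StrippedTx { txid: string; }
interface Audit {
  template: StrippedTx[];     // transactions the projected block expected
  transactions: StrippedTx[]; // transactions actually mined
  addedTxs: string[];         // deliberately added by the pool
  missingTxs: string[];       // flagged as removed/censored
}

function auditCounts(audit: Audit): { numMissing: number; numUnexpected: number } {
  const inBlock = new Set(audit.transactions.map(tx => tx.txid));
  const inTemplate = new Set(audit.template.map(tx => tx.txid));
  const isCensored = new Set(audit.missingTxs);
  const isAdded = new Set(audit.addedTxs);

  // "Omitted": expected by the template, not flagged as censored, yet absent from the block.
  const numMissing = audit.template
    .filter(tx => !isCensored.has(tx.txid) && !inBlock.has(tx.txid)).length;

  // "Included": mined even though neither the template nor the added list anticipated it.
  // (The component also skips the coinbase at index 0; omitted here for brevity.)
  const numUnexpected = audit.transactions
    .filter(tx => !isAdded.has(tx.txid) && !inTemplate.has(tx.txid)).length;

  return { numMissing, numUnexpected };
}
```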
@@ -97,21 +89,6 @@ -
-

- - Block Audit -   - {{ blockAudit.height }} -   - -

- -
- - -
-
@@ -123,7 +100,6 @@ -
@@ -136,7 +112,6 @@ -
@@ -180,16 +155,16 @@

Projected Block

+ [blockLimit]="stateService.blockVSize" [orientation]="'top'" [flip]="false" [mirrorTxid]="hoverTx" + (txClickEvent)="onTxClick($event)" (txHoverEvent)="onTxHover($event)">

Actual Block

+ [blockLimit]="stateService.blockVSize" [orientation]="'top'" [flip]="false" [mirrorTxid]="hoverTx" + (txClickEvent)="onTxClick($event)" (txHoverEvent)="onTxHover($event)">
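The two added bindings above are what link the projected and actual block graphs: each `app-block-overview-graph` reports hovers through `(txHoverEvent)` and receives the other graph's hovered txid through `[mirrorTxid]`, so hovering a transaction highlights it in both views. A reduced sketch of that parent/child contract, using the same input/output names but otherwise stand-in components:

```typescript
// Reduced sketch of the hover-mirroring contract – not the components from this PR.
import { Component, EventEmitter, Input, Output } from '@angular/core';

@Component({
  selector: 'sketch-block-graph',
  template: `<div (mouseleave)="hover(null)"></div>`,
})
export class SketchBlockGraphComponent {
  @Input() mirrorTxid: string | null = null;           // highlighted because the other graph hovers it
  @Output() txHoverEvent = new EventEmitter<string | null>();

  hover(txid: string | null): void {
    this.txHoverEvent.emit(txid);                       // report our own hover upwards
  }
}

@Component({
  selector: 'sketch-block-audit',
  template: `
    <!-- projected block -->
    <sketch-block-graph [mirrorTxid]="hoverTx" (txHoverEvent)="onTxHover($event)"></sketch-block-graph>
    <!-- actual block -->
    <sketch-block-graph [mirrorTxid]="hoverTx" (txHoverEvent)="onTxHover($event)"></sketch-block-graph>
  `,
})
export class SketchBlockAuditComponent {
  hoverTx: string | null = null;

  onTxHover(txid: string | null): void {
    // an empty id clears the mirror highlight in both children
    this.hoverTx = txid && txid.length ? txid : null;
  }
}
```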
diff --git a/frontend/src/app/components/block-audit/block-audit.component.ts b/frontend/src/app/components/block-audit/block-audit.component.ts index f8ce8d9bb..3787796fd 100644 --- a/frontend/src/app/components/block-audit/block-audit.component.ts +++ b/frontend/src/app/components/block-audit/block-audit.component.ts @@ -1,9 +1,10 @@ import { Component, OnDestroy, OnInit, AfterViewInit, ViewChildren, QueryList } from '@angular/core'; import { ActivatedRoute, ParamMap, Router } from '@angular/router'; -import { Subscription, combineLatest } from 'rxjs'; -import { map, switchMap, startWith, catchError } from 'rxjs/operators'; +import { Subscription, combineLatest, of } from 'rxjs'; +import { map, switchMap, startWith, catchError, filter } from 'rxjs/operators'; import { BlockAudit, TransactionStripped } from '../../interfaces/node-api.interface'; import { ApiService } from '../../services/api.service'; +import { ElectrsApiService } from '../../services/electrs-api.service'; import { StateService } from '../../services/state.service'; import { detectWebGL } from '../../shared/graphs.utils'; import { RelativeUrlPipe } from '../../shared/pipes/relative-url/relative-url.pipe'; @@ -37,6 +38,7 @@ export class BlockAuditComponent implements OnInit, AfterViewInit, OnDestroy { isLoading = true; webGlEnabled = true; isMobile = window.innerWidth <= 767.98; + hoverTx: string; childChangeSubscription: Subscription; @@ -51,7 +53,8 @@ export class BlockAuditComponent implements OnInit, AfterViewInit, OnDestroy { private route: ActivatedRoute, public stateService: StateService, private router: Router, - private apiService: ApiService + private apiService: ApiService, + private electrsApiService: ElectrsApiService, ) { this.webGlEnabled = detectWebGL(); } @@ -76,69 +79,95 @@ export class BlockAuditComponent implements OnInit, AfterViewInit, OnDestroy { this.auditSubscription = this.route.paramMap.pipe( switchMap((params: ParamMap) => { - this.blockHash = params.get('id') || null; - if (!this.blockHash) { + const blockHash = params.get('id') || null; + if (!blockHash) { return null; } + + let isBlockHeight = false; + if (/^[0-9]+$/.test(blockHash)) { + isBlockHeight = true; + } else { + this.blockHash = blockHash; + } + + if (isBlockHeight) { + return this.electrsApiService.getBlockHashFromHeight$(parseInt(blockHash, 10)) + .pipe( + switchMap((hash: string) => { + if (hash) { + this.blockHash = hash; + return this.apiService.getBlockAudit$(this.blockHash) + } else { + return null; + } + }), + catchError((err) => { + this.error = err; + return of(null); + }), + ); + } return this.apiService.getBlockAudit$(this.blockHash) - .pipe( - map((response) => { - const blockAudit = response.body; - const inTemplate = {}; - const inBlock = {}; - const isAdded = {}; - const isCensored = {}; - const isMissing = {}; - const isSelected = {}; - this.numMissing = 0; - this.numUnexpected = 0; - for (const tx of blockAudit.template) { - inTemplate[tx.txid] = true; - } - for (const tx of blockAudit.transactions) { - inBlock[tx.txid] = true; - } - for (const txid of blockAudit.addedTxs) { - isAdded[txid] = true; - } - for (const txid of blockAudit.missingTxs) { - isCensored[txid] = true; - } - // set transaction statuses - for (const tx of blockAudit.template) { - if (isCensored[tx.txid]) { - tx.status = 'censored'; - } else if (inBlock[tx.txid]) { - tx.status = 'found'; - } else { - tx.status = 'missing'; - isMissing[tx.txid] = true; - this.numMissing++; - } - } - for (const [index, tx] of blockAudit.transactions.entries()) { - 
if (isAdded[tx.txid]) { - tx.status = 'added'; - } else if (index === 0 || inTemplate[tx.txid]) { - tx.status = 'found'; - } else { - tx.status = 'selected'; - isSelected[tx.txid] = true; - this.numUnexpected++; - } - } - for (const tx of blockAudit.transactions) { - inBlock[tx.txid] = true; - } - return blockAudit; - }) - ); + }), + filter((response) => response != null), + map((response) => { + const blockAudit = response.body; + const inTemplate = {}; + const inBlock = {}; + const isAdded = {}; + const isCensored = {}; + const isMissing = {}; + const isSelected = {}; + this.numMissing = 0; + this.numUnexpected = 0; + for (const tx of blockAudit.template) { + inTemplate[tx.txid] = true; + } + for (const tx of blockAudit.transactions) { + inBlock[tx.txid] = true; + } + for (const txid of blockAudit.addedTxs) { + isAdded[txid] = true; + } + for (const txid of blockAudit.missingTxs) { + isCensored[txid] = true; + } + // set transaction statuses + for (const tx of blockAudit.template) { + if (isCensored[tx.txid]) { + tx.status = 'censored'; + } else if (inBlock[tx.txid]) { + tx.status = 'found'; + } else { + tx.status = 'missing'; + isMissing[tx.txid] = true; + this.numMissing++; + } + } + for (const [index, tx] of blockAudit.transactions.entries()) { + if (index === 0) { + tx.status = null; + } else if (isAdded[tx.txid]) { + tx.status = 'added'; + } else if (inTemplate[tx.txid]) { + tx.status = 'found'; + } else { + tx.status = 'selected'; + isSelected[tx.txid] = true; + this.numUnexpected++; + } + } + for (const tx of blockAudit.transactions) { + inBlock[tx.txid] = true; + } + return blockAudit; }), catchError((err) => { console.log(err); this.error = err; this.isLoading = false; - return null; + return of(null); }), ).subscribe((blockAudit) => { this.blockAudit = blockAudit; @@ -189,4 +218,12 @@ export class BlockAuditComponent implements OnInit, AfterViewInit, OnDestroy { const url = new RelativeUrlPipe(this.stateService).transform(`/tx/${event.txid}`); this.router.navigate([url]); } + + onTxHover(txid: string): void { + if (txid && txid.length) { + this.hoverTx = txid; + } else { + this.hoverTx = null; + } + } } diff --git a/frontend/src/app/components/block-overview-graph/block-overview-graph.component.ts b/frontend/src/app/components/block-overview-graph/block-overview-graph.component.ts index 14607f398..7bb4378a6 100644 --- a/frontend/src/app/components/block-overview-graph/block-overview-graph.component.ts +++ b/frontend/src/app/components/block-overview-graph/block-overview-graph.component.ts @@ -18,7 +18,9 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On @Input() orientation = 'left'; @Input() flip = true; @Input() disableSpinner = false; + @Input() mirrorTxid: string | void; @Output() txClickEvent = new EventEmitter(); + @Output() txHoverEvent = new EventEmitter(); @Output() readyEvent = new EventEmitter(); @ViewChild('blockCanvas') @@ -37,6 +39,7 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On scene: BlockScene; hoverTx: TxView | void; selectedTx: TxView | void; + mirrorTx: TxView | void; tooltipPosition: Position; readyNextFrame = false; @@ -63,6 +66,9 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On this.scene.setOrientation(this.orientation, this.flip); } } + if (changes.mirrorTxid) { + this.setMirror(this.mirrorTxid); + } } ngOnDestroy(): void { @@ -76,6 +82,7 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On this.exit(direction); 
this.hoverTx = null; this.selectedTx = null; + this.onTxHover(null); this.start(); } @@ -181,7 +188,7 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On this.gl.viewport(0, 0, this.displayWidth, this.displayHeight); } if (this.scene) { - this.scene.resize({ width: this.displayWidth, height: this.displayHeight }); + this.scene.resize({ width: this.displayWidth, height: this.displayHeight, animate: false }); this.start(); } else { this.scene = new BlockScene({ width: this.displayWidth, height: this.displayHeight, resolution: this.resolution, @@ -301,6 +308,7 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On } this.hoverTx = null; this.selectedTx = null; + this.onTxHover(null); } } @@ -352,17 +360,20 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On this.selectedTx = selected; } else { this.hoverTx = selected; + this.onTxHover(this.hoverTx ? this.hoverTx.txid : null); } } else { if (clicked) { this.selectedTx = null; } this.hoverTx = null; + this.onTxHover(null); } } else if (clicked) { if (selected === this.selectedTx) { this.hoverTx = this.selectedTx; this.selectedTx = null; + this.onTxHover(this.hoverTx ? this.hoverTx.txid : null); } else { this.selectedTx = selected; } @@ -370,6 +381,18 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On } } + setMirror(txid: string | void) { + if (this.mirrorTx) { + this.scene.setHover(this.mirrorTx, false); + this.start(); + } + if (txid && this.scene.txs[txid]) { + this.mirrorTx = this.scene.txs[txid]; + this.scene.setHover(this.mirrorTx, true); + this.start(); + } + } + onTxClick(cssX: number, cssY: number) { const x = cssX * window.devicePixelRatio; const y = cssY * window.devicePixelRatio; @@ -378,6 +401,10 @@ export class BlockOverviewGraphComponent implements AfterViewInit, OnDestroy, On this.txClickEvent.emit(selected); } } + + onTxHover(hoverId: string) { + this.txHoverEvent.emit(hoverId); + } } // WebGL shader attributes diff --git a/frontend/src/app/components/block-overview-graph/block-scene.ts b/frontend/src/app/components/block-overview-graph/block-scene.ts index 39ac44e7a..8d3c46af4 100644 --- a/frontend/src/app/components/block-overview-graph/block-scene.ts +++ b/frontend/src/app/components/block-overview-graph/block-scene.ts @@ -29,7 +29,7 @@ export default class BlockScene { this.init({ width, height, resolution, blockLimit, orientation, flip, vertexArray }); } - resize({ width = this.width, height = this.height }: { width?: number, height?: number}): void { + resize({ width = this.width, height = this.height, animate = true }: { width?: number, height?: number, animate: boolean }): void { this.width = width; this.height = height; this.gridSize = this.width / this.gridWidth; @@ -38,7 +38,7 @@ export default class BlockScene { this.dirty = true; if (this.initialised && this.scene) { - this.updateAll(performance.now(), 50); + this.updateAll(performance.now(), 50, 'left', animate); } } @@ -212,7 +212,7 @@ export default class BlockScene { this.vbytesPerUnit = blockLimit / Math.pow(resolution / 1.02, 2); this.gridWidth = resolution; this.gridHeight = resolution; - this.resize({ width, height }); + this.resize({ width, height, animate: true }); this.layout = new BlockLayout({ width: this.gridWidth, height: this.gridHeight }); this.txs = {}; @@ -225,14 +225,14 @@ export default class BlockScene { this.animateUntil = Math.max(this.animateUntil, tx.update(update)); } - private updateTx(tx: TxView, startTime: 
number, delay: number, direction: string = 'left'): void { + private updateTx(tx: TxView, startTime: number, delay: number, direction: string = 'left', animate: boolean = true): void { if (tx.dirty || this.dirty) { this.saveGridToScreenPosition(tx); - this.setTxOnScreen(tx, startTime, delay, direction); + this.setTxOnScreen(tx, startTime, delay, direction, animate); } } - private setTxOnScreen(tx: TxView, startTime: number, delay: number = 50, direction: string = 'left'): void { + private setTxOnScreen(tx: TxView, startTime: number, delay: number = 50, direction: string = 'left', animate: boolean = true): void { if (!tx.initialised) { const txColor = tx.getColor(); this.applyTxUpdate(tx, { @@ -252,30 +252,42 @@ export default class BlockScene { position: tx.screenPosition, color: txColor }, - duration: 1000, + duration: animate ? 1000 : 1, start: startTime, - delay, + delay: animate ? delay : 0, }); } else { this.applyTxUpdate(tx, { display: { position: tx.screenPosition }, - duration: 1000, - minDuration: 500, + duration: animate ? 1000 : 0, + minDuration: animate ? 500 : 0, start: startTime, - delay, - adjust: true + delay: animate ? delay : 0, + adjust: animate }); + if (!animate) { + this.applyTxUpdate(tx, { + display: { + position: tx.screenPosition + }, + duration: 0, + minDuration: 0, + start: startTime, + delay: 0, + adjust: false + }); + } } } - private updateAll(startTime: number, delay: number = 50, direction: string = 'left'): void { + private updateAll(startTime: number, delay: number = 50, direction: string = 'left', animate: boolean = true): void { this.scene.count = 0; const ids = this.getTxList(); startTime = startTime || performance.now(); for (const id of ids) { - this.updateTx(this.txs[id], startTime, delay, direction); + this.updateTx(this.txs[id], startTime, delay, direction, animate); } this.dirty = false; } diff --git a/frontend/src/app/components/block-overview-graph/tx-view.ts b/frontend/src/app/components/block-overview-graph/tx-view.ts index ac2a4655a..f07d96eb0 100644 --- a/frontend/src/app/components/block-overview-graph/tx-view.ts +++ b/frontend/src/app/components/block-overview-graph/tx-view.ts @@ -12,8 +12,8 @@ const auditFeeColors = feeColors.map((color) => desaturate(color, 0.3)); const auditColors = { censored: hexToColor('f344df'), missing: darken(desaturate(hexToColor('f344df'), 0.3), 0.7), - added: hexToColor('03E1E5'), - selected: darken(desaturate(hexToColor('039BE5'), 0.3), 0.7), + added: hexToColor('0099ff'), + selected: darken(desaturate(hexToColor('0099ff'), 0.3), 0.7), } // convert from this class's update format to TxSprite's update format diff --git a/frontend/src/app/components/block-overview-tooltip/block-overview-tooltip.component.html b/frontend/src/app/components/block-overview-tooltip/block-overview-tooltip.component.html index b19b67b06..8c1002025 100644 --- a/frontend/src/app/components/block-overview-tooltip/block-overview-tooltip.component.html +++ b/frontend/src/app/components/block-overview-tooltip/block-overview-tooltip.component.html @@ -37,9 +37,9 @@ match removed - missing + omitted added - included + extra diff --git a/frontend/src/app/components/block/block.component.html b/frontend/src/app/components/block/block.component.html index 819b05c81..ba8f3aef3 100644 --- a/frontend/src/app/components/block/block.component.html +++ b/frontend/src/app/components/block/block.component.html @@ -114,7 +114,7 @@ Block health {{ block.extras.matchRate }}% - Unknown + Unknown
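On the `block-scene.ts` hunks above: `resize()` and `updateAll()` now take an `animate` flag, so a pure container resize can snap transactions straight to their new positions instead of replaying the usual one-second tween. A stripped-down sketch of the pattern (stand-in types; the real scene drives WebGL sprites with more options):

```typescript
// Stand-in types – the real BlockScene passes richer sprite updates.
interface TweenTarget { x: number; y: number; }
interface TweenUpdate {
  target: TweenTarget;
  duration: number;    // ~1000 ms tween when animating
  minDuration: number;
  delay: number;
  adjust: boolean;     // allow extending an in-flight tween instead of restarting it
}

function positionUpdate(target: TweenTarget, animate: boolean): TweenUpdate {
  return animate
    ? { target, duration: 1000, minDuration: 500, delay: 50, adjust: true }
    : { target, duration: 0, minDuration: 0, delay: 0, adjust: false }; // snap immediately
}

// Example: a resize triggered by a layout change rather than new data would pass
// animate = false, which boils down to updates like this one.
const snapped = positionUpdate({ x: 120, y: 40 }, false);
console.log(snapped.duration); // 0
```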
diff --git a/frontend/src/app/components/block/block.component.ts b/frontend/src/app/components/block/block.component.ts index 8f977b81d..aff07a95e 100644 --- a/frontend/src/app/components/block/block.component.ts +++ b/frontend/src/app/components/block/block.component.ts @@ -4,7 +4,7 @@ import { ActivatedRoute, ParamMap, Router } from '@angular/router'; import { ElectrsApiService } from '../../services/electrs-api.service'; import { switchMap, tap, throttleTime, catchError, map, shareReplay, startWith, pairwise } from 'rxjs/operators'; import { Transaction, Vout } from '../../interfaces/electrs.interface'; -import { Observable, of, Subscription, asyncScheduler, EMPTY } from 'rxjs'; +import { Observable, of, Subscription, asyncScheduler, EMPTY, Subject } from 'rxjs'; import { StateService } from '../../services/state.service'; import { SeoService } from '../../services/seo.service'; import { WebsocketService } from '../../services/websocket.service'; @@ -60,6 +60,8 @@ export class BlockComponent implements OnInit, OnDestroy { nextBlockTxListSubscription: Subscription = undefined; timeLtrSubscription: Subscription; timeLtr: boolean; + fetchAuditScore$ = new Subject(); + fetchAuditScoreSubscription: Subscription; @ViewChild('blockGraph') blockGraph: BlockOverviewGraphComponent; @@ -105,12 +107,30 @@ export class BlockComponent implements OnInit, OnDestroy { if (block.id === this.blockHash) { this.block = block; + if (this.block.id && this.block?.extras?.matchRate == null) { + this.fetchAuditScore$.next(this.block.id); + } if (block?.extras?.reward != undefined) { this.fees = block.extras.reward / 100000000 - this.blockSubsidy; } } }); + if (this.indexingAvailable) { + this.fetchAuditScoreSubscription = this.fetchAuditScore$ + .pipe( + switchMap((hash) => this.apiService.getBlockAuditScore$(hash)), + catchError(() => EMPTY), + ) + .subscribe((score) => { + if (score && score.hash === this.block.id) { + this.block.extras.matchRate = score.matchRate || null; + } else { + this.block.extras.matchRate = null; + } + }); + } + const block$ = this.route.paramMap.pipe( switchMap((params: ParamMap) => { const blockHash: string = params.get('id') || ''; @@ -209,6 +229,9 @@ export class BlockComponent implements OnInit, OnDestroy { this.fees = block.extras.reward / 100000000 - this.blockSubsidy; } this.stateService.markBlock$.next({ blockHeight: this.blockHeight }); + if (this.block.id && this.block?.extras?.matchRate == null) { + this.fetchAuditScore$.next(this.block.id); + } this.isLoadingTransactions = true; this.transactions = null; this.transactionsError = null; @@ -311,6 +334,7 @@ export class BlockComponent implements OnInit, OnDestroy { this.networkChangedSubscription.unsubscribe(); this.queryParamsSubscription.unsubscribe(); this.timeLtrSubscription.unsubscribe(); + this.fetchAuditScoreSubscription?.unsubscribe(); this.unsubscribeNextBlockSubscriptions(); } diff --git a/frontend/src/app/components/blocks-list/blocks-list.component.html b/frontend/src/app/components/blocks-list/blocks-list.component.html index 68acf71ea..69bcf3141 100644 --- a/frontend/src/app/components/blocks-list/blocks-list.component.html +++ b/frontend/src/app/components/blocks-list/blocks-list.component.html @@ -46,22 +46,17 @@ - +
+ [ngStyle]="{'width': (100 - (auditScores[block.id] || 0)) + '%' }">
- {{ block.extras.matchRate }}% + {{ auditScores[block.id] }}% + + ~
-
-
-
- ~ -
-
diff --git a/frontend/src/app/components/blocks-list/blocks-list.component.scss b/frontend/src/app/components/blocks-list/blocks-list.component.scss index 6617cec58..713e59640 100644 --- a/frontend/src/app/components/blocks-list/blocks-list.component.scss +++ b/frontend/src/app/components/blocks-list/blocks-list.component.scss @@ -196,6 +196,10 @@ tr, td, th { @media (max-width: 950px) { display: none; } + + .progress-text .skeleton-loader { + top: -8.5px; + } } .health.widget { width: 25%; diff --git a/frontend/src/app/components/blocks-list/blocks-list.component.ts b/frontend/src/app/components/blocks-list/blocks-list.component.ts index 7e4c34eb4..700032225 100644 --- a/frontend/src/app/components/blocks-list/blocks-list.component.ts +++ b/frontend/src/app/components/blocks-list/blocks-list.component.ts @@ -1,6 +1,6 @@ -import { Component, OnInit, ChangeDetectionStrategy, Input } from '@angular/core'; -import { BehaviorSubject, combineLatest, concat, Observable, timer } from 'rxjs'; -import { delayWhen, map, retryWhen, scan, skip, switchMap, tap } from 'rxjs/operators'; +import { Component, OnInit, OnDestroy, ChangeDetectionStrategy, Input } from '@angular/core'; +import { BehaviorSubject, combineLatest, concat, Observable, timer, EMPTY, Subscription, of } from 'rxjs'; +import { catchError, delayWhen, map, retryWhen, scan, skip, switchMap, tap } from 'rxjs/operators'; import { BlockExtended } from '../../interfaces/node-api.interface'; import { ApiService } from '../../services/api.service'; import { StateService } from '../../services/state.service'; @@ -12,10 +12,14 @@ import { WebsocketService } from '../../services/websocket.service'; styleUrls: ['./blocks-list.component.scss'], changeDetection: ChangeDetectionStrategy.OnPush, }) -export class BlocksList implements OnInit { +export class BlocksList implements OnInit, OnDestroy { @Input() widget: boolean = false; blocks$: Observable = undefined; + auditScores: { [hash: string]: number | void } = {}; + + auditScoreSubscription: Subscription; + latestScoreSubscription: Subscription; indexingAvailable = false; isLoading = true; @@ -105,6 +109,53 @@ export class BlocksList implements OnInit { return acc; }, []) ); + + if (this.indexingAvailable) { + this.auditScoreSubscription = this.fromHeightSubject.pipe( + switchMap((fromBlockHeight) => { + return this.apiService.getBlockAuditScores$(this.page === 1 ? undefined : fromBlockHeight) + .pipe( + catchError(() => { + return EMPTY; + }) + ); + }) + ).subscribe((scores) => { + Object.values(scores).forEach(score => { + this.auditScores[score.hash] = score?.matchRate != null ? score.matchRate : null; + }); + }); + + this.latestScoreSubscription = this.stateService.blocks$.pipe( + switchMap((block) => { + if (block[0]?.extras?.matchRate != null) { + return of({ + hash: block[0].id, + matchRate: block[0]?.extras?.matchRate, + }); + } + else if (block[0]?.id && this.auditScores[block[0].id] === undefined) { + return this.apiService.getBlockAuditScore$(block[0].id) + .pipe( + catchError(() => { + return EMPTY; + }) + ); + } else { + return EMPTY; + } + }), + ).subscribe((score) => { + if (score && score.hash) { + this.auditScores[score.hash] = score?.matchRate != null ? 
score.matchRate : null; + } + }); + } + } + + ngOnDestroy(): void { + this.auditScoreSubscription?.unsubscribe(); + this.latestScoreSubscription?.unsubscribe(); } pageChange(page: number) { diff --git a/frontend/src/app/components/search-form/search-form.component.html b/frontend/src/app/components/search-form/search-form.component.html index 1303f4a62..4e38ea6e0 100644 --- a/frontend/src/app/components/search-form/search-form.component.html +++ b/frontend/src/app/components/search-form/search-form.component.html @@ -2,9 +2,7 @@
- - - +
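Circling back to the `block.component.ts` and `blocks-list.component.ts` changes above: a block's `extras.matchRate` may now be missing, so both components keep a small hash-to-score cache and only call `getBlockAuditScore$` for hashes they have never resolved. A hedged sketch of that fetch-on-miss pattern (the fetcher is a stand-in for the ApiService call; the real code wires this up with RxJS subscriptions):

```typescript
// Illustrative cache – the components implement the same idea with RxJS streams.
interface AuditScore { hash: string; matchRate?: number; }

type ScoreFetcher = (hash: string) => Promise<AuditScore | null>;

class AuditScoreCache {
  private scores: { [hash: string]: number | null } = {};

  constructor(private fetchScore: ScoreFetcher) {}

  async matchRate(hash: string, known?: number | null): Promise<number | null> {
    // Prefer a score that already arrived with the block (e.g. over the websocket).
    if (known != null) {
      this.scores[hash] = known;
      return known;
    }
    // Only ask the backend for hashes we have never resolved; null marks "asked, no audit".
    if (this.scores[hash] === undefined) {
      const score = await this.fetchScore(hash);
      this.scores[hash] = score?.matchRate ?? null;
    }
    return this.scores[hash];
  }
}

// Hypothetical usage:
//   const cache = new AuditScoreCache((hash) => api.getBlockAuditScore(hash));
//   const rate = await cache.matchRate(block.id, block.extras?.matchRate);
```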
diff --git a/frontend/src/app/components/transaction/transaction-preview.component.scss b/frontend/src/app/components/transaction/transaction-preview.component.scss index 65c0ca75e..4fa8b661a 100644 --- a/frontend/src/app/components/transaction/transaction-preview.component.scss +++ b/frontend/src/app/components/transaction/transaction-preview.component.scss @@ -29,6 +29,8 @@ .features { font-size: 24px; margin-left: 1em; + margin-top: 0.5em; + margin-right: -4px; } .top-data { @@ -60,6 +62,15 @@ } } +.top-data .field { + &:first-child { + padding-left: 0; + } + &:last-child { + padding-right: 0; + } +} + .tx-link { display: inline; font-size: 28px; @@ -69,7 +80,7 @@ .graph-wrapper { position: relative; background: #181b2d; - padding: 10px; + padding: 10px 0; padding-bottom: 0; .above-bow { @@ -92,26 +103,37 @@ max-width: 90%; margin: auto; overflow: hidden; + display: flex; + flex-direction: row; + justify-content: center; .opreturns { + display: inline-block; width: auto; + max-width: 100%; margin: auto; table-layout: auto; background: #2d3348af; border-top-left-radius: 5px; border-top-right-radius: 5px; - td { - padding: 10px 10px; + .opreturn-row { + width: 100%; + display: flex; + flex-direction: row; + justify-content: flex-start; + padding: 0 10px; + } - &.message { - overflow: hidden; - display: inline-block; - vertical-align: bottom; - text-overflow: ellipsis; - white-space: nowrap; - text-align: left; - } + .label { + margin-right: 1em; + } + + .message { + flex-shrink: 1; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; } } } diff --git a/frontend/src/app/components/transaction/transaction-preview.component.ts b/frontend/src/app/components/transaction/transaction-preview.component.ts index cc9dfac56..6c04af0ab 100644 --- a/frontend/src/app/components/transaction/transaction-preview.component.ts +++ b/frontend/src/app/components/transaction/transaction-preview.component.ts @@ -117,8 +117,9 @@ export class TransactionPreviewComponent implements OnInit, OnDestroy { }), switchMap(() => { let transactionObservable$: Observable; - if (history.state.data && history.state.data.fee !== -1) { - transactionObservable$ = of(history.state.data); + const cached = this.stateService.getTxFromCache(this.txId); + if (cached && cached.fee !== -1) { + transactionObservable$ = of(cached); } else { transactionObservable$ = this.electrsApiService .getTransaction$(this.txId) diff --git a/frontend/src/app/components/transaction/transaction.component.html b/frontend/src/app/components/transaction/transaction.component.html index ec0e824c8..cd0cd716d 100644 --- a/frontend/src/app/components/transaction/transaction.component.html +++ b/frontend/src/app/components/transaction/transaction.component.html @@ -3,7 +3,7 @@
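Finally, on the `transaction-preview.component.ts` hunk near the end: the preview no longer trusts `history.state.data` and instead asks the shared state service for a cached transaction, falling back to Electrs when there is no cache entry or the cached entry still carries the `fee === -1` placeholder. A small sketch of that lookup order (every name except the `fee !== -1` check is a stand-in):

```typescript
// Stand-in types – the real services return RxJS observables rather than promises.
interface Transaction { txid: string; fee: number; }

interface TxSource {
  getTxFromCache(txid: string): Transaction | undefined; // mirrors stateService.getTxFromCache
  fetchTransaction(txid: string): Promise<Transaction>;  // stand-in for the Electrs lookup
}

async function loadTransaction(source: TxSource, txid: string): Promise<Transaction> {
  const cached = source.getTxFromCache(txid);
  // fee === -1 marks a placeholder whose fee is not known yet, so it must be refetched.
  if (cached && cached.fee !== -1) {
    return cached;
  }
  return source.fetchTransaction(txid);
}
```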