Store block first seen time in block audits

This commit is contained in:
natsoni 2024-10-04 21:54:13 +09:00
parent 67295c1b9b
commit 1a75e3e317
No known key found for this signature in database
GPG Key ID: C65917583181743B
8 changed files with 65 additions and 12 deletions

View File

@ -46,7 +46,8 @@
"PASSWORD": "__CORE_RPC_PASSWORD__",
"TIMEOUT": 1000,
"COOKIE": false,
"COOKIE_PATH": "__CORE_RPC_COOKIE_PATH__"
"COOKIE_PATH": "__CORE_RPC_COOKIE_PATH__",
"DEBUG_LOG_PATH": "__CORE_RPC_DEBUG_LOG_PATH__"
},
"ELECTRUM": {
"HOST": "__ELECTRUM_HOST__",

View File

@ -73,7 +73,8 @@ describe('Mempool Backend Config', () => {
PASSWORD: 'mempool',
TIMEOUT: 60000,
COOKIE: false,
COOKIE_PATH: '/bitcoin/.cookie'
COOKIE_PATH: '/bitcoin/.cookie',
DEBUG_LOG_PATH: '',
});
expect(config.SECOND_CORE_RPC).toStrictEqual({

View File

@ -1,3 +1,4 @@
import * as fs from 'fs';
import config from '../config';
import logger from '../logger';
import { MempoolTransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
@ -7,10 +8,10 @@ import transactionUtils from './transaction-utils';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(height: number, transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended })
: { unseen: string[], censored: string[], added: string[], prioritized: string[], fresh: string[], sigop: string[], fullrbf: string[], accelerated: string[], score: number, similarity: number } {
auditBlock(height: number, transactions: MempoolTransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: MempoolTransactionExtended }, hash: string)
: { unseen: string[], censored: string[], added: string[], prioritized: string[], fresh: string[], sigop: string[], fullrbf: string[], accelerated: string[], score: number, similarity: number, firstSeen: string | undefined } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { unseen: [], censored: [], added: [], prioritized: [], fresh: [], sigop: [], fullrbf: [], accelerated: [], score: 1, similarity: 1 };
return { unseen: [], censored: [], added: [], prioritized: [], fresh: [], sigop: [], fullrbf: [], accelerated: [], score: 1, similarity: 1, firstSeen: undefined };
}
const matches: string[] = []; // present in both mined block and template
@ -176,6 +177,8 @@ class Audit {
}
const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;
const firstSeen = this.getFirstSeenFromLogs(hash);
return {
unseen,
censored: Object.keys(isCensored),
@ -187,8 +190,39 @@ class Audit {
accelerated,
score,
similarity,
firstSeen
};
}
/**
 * Scan the tail of the Bitcoin Core debug log for the "Saw new header" entry
 * matching the given block hash, and return the timestamp at which the node
 * first saw that block.
 *
 * @param hash - block hash to look for in the log
 * @returns the first-seen time formatted as "YYYY-MM-DD HH:MM:SS" (with
 *          ".ffffff" microseconds appended when Core runs with logtimemicros=1),
 *          or undefined when DEBUG_LOG_PATH is unset, the entry is not found
 *          in the last 2048 bytes, or reading the log fails.
 */
getFirstSeenFromLogs(hash: string): string | undefined {
  const debugLogPath = config.CORE_RPC.DEBUG_LOG_PATH;
  if (debugLogPath) {
    let fileDescriptor: number | undefined;
    try {
      fileDescriptor = fs.openSync(debugLogPath, 'r');
      const bufferSize = 2048; // Read the last few lines of the file
      const buffer = Buffer.alloc(bufferSize);
      // fstatSync on the open descriptor avoids re-resolving the path
      // (the file could be rotated between open and stat otherwise)
      const fileSize = fs.fstatSync(fileDescriptor).size;
      const chunkSize = Math.min(bufferSize, fileSize);
      fs.readSync(fileDescriptor, buffer, 0, chunkSize, fileSize - chunkSize);
      const lines = buffer.toString('utf8', 0, chunkSize).split('\n');
      // Walk backwards: the newest block header is closest to the end of the log
      for (let i = lines.length - 1; i >= 0; i--) {
        const line = lines[i];
        if (line && line.includes(`Saw new header hash=${hash}`)) {
          // Extract time from log: "2021-08-31T12:34:56Z" or "2021-08-31T12:34:56.123456Z" if logtimemicros=1
          const dateMatch = line.match(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{6})?Z/);
          if (dateMatch) {
            return dateMatch[0].replace("T", " ").replace("Z", "");
          }
        }
      }
    } catch (e) {
      logger.err(`Cannot parse block first seen time from Core logs. Reason: ` + (e instanceof Error ? e.message : e));
    } finally {
      // Always release the descriptor, even when stat/read throws —
      // the original only closed it on the success path (fd leak)
      if (fileDescriptor !== undefined) {
        fs.closeSync(fileDescriptor);
      }
    }
  }
  return undefined;
}
}
export default new Audit();

View File

@ -7,7 +7,7 @@ import cpfpRepository from '../repositories/CpfpRepository';
import { RowDataPacket } from 'mysql2';
class DatabaseMigration {
private static currentVersion = 82;
private static currentVersion = 83;
private queryTimeout = 3600_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@ -705,6 +705,11 @@ class DatabaseMigration {
await this.$fixBadV1AuditBlocks();
await this.updateToSchemaVersion(82);
}
if (databaseSchemaVersion < 83 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD first_seen timestamp(6) DEFAULT NULL');
await this.updateToSchemaVersion(83);
}
}
/**

View File

@ -975,7 +975,7 @@ class WebsocketHandler {
}
if (Common.indexingEnabled()) {
const { unseen, censored, added, prioritized, fresh, sigop, fullrbf, accelerated, score, similarity } = Audit.auditBlock(block.height, blockTransactions, projectedBlocks, auditMempool);
const { unseen, censored, added, prioritized, fresh, sigop, fullrbf, accelerated, score, similarity, firstSeen } = Audit.auditBlock(block.height, blockTransactions, projectedBlocks, auditMempool, block.id);
const matchRate = Math.round(score * 100 * 100) / 100;
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions : [];
@ -1012,6 +1012,7 @@ class WebsocketHandler {
matchRate: matchRate,
expectedFees: totalFees,
expectedWeight: totalWeight,
firstSeen: firstSeen,
});
if (block.extras) {

View File

@ -45,6 +45,7 @@ export interface BlockAudit {
expectedFees?: number,
expectedWeight?: number,
template?: any[];
firstSeen?: string;
}
export interface TransactionAudit {
@ -57,6 +58,7 @@ export interface TransactionAudit {
conflict?: boolean;
coinbase?: boolean;
firstSeen?: number;
blockFirstSeen?: string;
}
export interface AuditScore {

View File

@ -124,7 +124,8 @@ class AuditReplication {
matchRate: auditSummary.matchRate,
expectedFees: auditSummary.expectedFees,
expectedWeight: auditSummary.expectedWeight,
});
firstSeen: auditSummary.firstSeen,
}, true);
// add missing data to cached blocks
const cachedBlock = blocks.getBlocks().find(block => block.id === blockHash);
if (cachedBlock) {

View File

@ -15,11 +15,11 @@ interface MigrationAudit {
}
class BlocksAuditRepositories {
public async $saveAudit(audit: BlockAudit): Promise<void> {
public async $saveAudit(audit: BlockAudit, replication = false): Promise<void> {
try {
await DB.query(`INSERT INTO blocks_audits(version, time, height, hash, unseen_txs, missing_txs, added_txs, prioritized_txs, fresh_txs, sigop_txs, fullrbf_txs, accelerated_txs, match_rate, expected_fees, expected_weight)
VALUE (?, FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [audit.version, audit.time, audit.height, audit.hash, JSON.stringify(audit.unseenTxs), JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.prioritizedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), JSON.stringify(audit.acceleratedTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight]);
await DB.query(`INSERT INTO blocks_audits(version, time, height, hash, unseen_txs, missing_txs, added_txs, prioritized_txs, fresh_txs, sigop_txs, fullrbf_txs, accelerated_txs, match_rate, expected_fees, expected_weight, first_seen)
VALUE (?, FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ${replication ? 'FROM_UNIXTIME(?)' : '?'})`, [audit.version, audit.time, audit.height, audit.hash, JSON.stringify(audit.unseenTxs), JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.prioritizedTxs), JSON.stringify(audit.freshTxs), JSON.stringify(audit.sigopTxs), JSON.stringify(audit.fullrbfTxs), JSON.stringify(audit.acceleratedTxs), audit.matchRate, audit.expectedFees, audit.expectedWeight, audit.firstSeen]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
@ -78,6 +78,7 @@ class BlocksAuditRepositories {
blocks_audits.height,
blocks_audits.hash as id,
UNIX_TIMESTAMP(blocks_audits.time) as timestamp,
UNIX_TIMESTAMP(blocks_audits.first_seen) as firstSeen,
template,
unseen_txs as unseenTxs,
missing_txs as missingTxs,
@ -96,6 +97,7 @@ class BlocksAuditRepositories {
`, [hash]);
if (rows.length) {
console.log(rows[0].firstSeen);
rows[0].unseenTxs = JSON.parse(rows[0].unseenTxs);
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
@ -106,6 +108,10 @@ class BlocksAuditRepositories {
rows[0].acceleratedTxs = JSON.parse(rows[0].acceleratedTxs);
rows[0].template = JSON.parse(rows[0].template);
if (!rows[0].firstSeen) {
delete rows[0].firstSeen;
}
return rows[0];
}
return null;
@ -124,6 +130,7 @@ class BlocksAuditRepositories {
const isPrioritized = blockAudit.prioritizedTxs.includes(txid);
const isAccelerated = blockAudit.acceleratedTxs.includes(txid);
const isConflict = blockAudit.fullrbfTxs.includes(txid);
const blockFirstSeen = blockAudit.firstSeen;
let isExpected = false;
let firstSeen = undefined;
blockAudit.template?.forEach(tx => {
@ -142,6 +149,7 @@ class BlocksAuditRepositories {
conflict: isConflict,
accelerated: isAccelerated,
firstSeen,
...(blockFirstSeen) && { blockFirstSeen },
};
}
return null;