Enable W191 and W291 flake8 checks.

Remove trailing whitespace from Python files.
Convert tabs to spaces.
This commit is contained in:
John Bampton
2018-05-11 01:28:27 +10:00
parent 1c58250350
commit 0d31ef4762
13 changed files with 367 additions and 365 deletions

View File

@ -61,6 +61,8 @@
# F823 local variable name … referenced before assignment # F823 local variable name … referenced before assignment
# F831 duplicate argument name in function definition # F831 duplicate argument name in function definition
# F841 local variable 'foo' is assigned to but never used # F841 local variable 'foo' is assigned to but never used
# W191 indentation contains tabs
# W291 trailing whitespace
# W292 no newline at end of file # W292 no newline at end of file
# W293 blank line contains whitespace # W293 blank line contains whitespace
# W504 line break after binary operator # W504 line break after binary operator
@ -71,4 +73,4 @@
# W605 invalid escape sequence "x" # W605 invalid escape sequence "x"
# W606 'async' and 'await' are reserved keywords starting with Python 3.7 # W606 'async' and 'await' are reserved keywords starting with Python 3.7
flake8 --ignore=B,C,E,F,I,N,W --select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E242,E266,E271,E272,E273,E274,E275,E304,E306,E401,E402,E502,E701,E702,E703,E714,E721,E741,E742,E743,F401,E901,E902,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F821,F822,F823,F831,F841,W292,W293,W504,W601,W602,W603,W604,W605,W606 . flake8 --ignore=B,C,E,F,I,N,W --select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E242,E266,E271,E272,E273,E274,E275,E304,E306,E401,E402,E502,E701,E702,E703,E714,E721,E741,E742,E743,F401,E901,E902,F402,F404,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F821,F822,F823,F831,F841,W191,W291,W292,W293,W504,W601,W602,W603,W604,W605,W606 .

View File

@ -22,300 +22,300 @@ from binascii import hexlify, unhexlify
settings = {}

def hex_switchEndian(s):
    """Switch the endianness of a hex string (in pairs of hex chars)."""
    pairs = [s[i:i + 2].encode() for i in range(0, len(s), 2)]
    return b''.join(reversed(pairs)).decode()
def uint32(x):
    """Truncate x to its low 32 bits (unsigned)."""
    return x % 0x100000000
def bytereverse(x):
    """Reverse the byte order of a 32-bit word."""
    # Same shift/mask formula as before, with the final 32-bit
    # truncation inlined instead of delegated to uint32().
    return ((x << 24) | ((x << 8) & 0x00ff0000) |
            ((x >> 8) & 0x0000ff00) | (x >> 24)) & 0xffffffff
def bufreverse(in_buf):
    """Reverse the byte order within each 32-bit word of in_buf.

    Raises struct.error if in_buf's length is not a multiple of 4,
    matching the original word-at-a-time unpacking.
    """
    out_words = []
    for i in range(0, len(in_buf), 4):
        (word,) = struct.unpack('@I', in_buf[i:i + 4])
        # Packing then slicing reverses the word's four bytes.
        out_words.append(struct.pack('@I', word)[::-1])
    return b''.join(out_words)
def wordreverse(in_buf):
    """Reverse the order of the 32-bit words in in_buf (bytes within each word unchanged)."""
    words = [in_buf[i:i + 4] for i in range(0, len(in_buf), 4)]
    return b''.join(reversed(words))
def calc_hdr_hash(blk_hdr):
    """Return the double-SHA256 digest of a serialized block header."""
    first = hashlib.sha256(blk_hdr).digest()
    return hashlib.sha256(first).digest()
def calc_hash_str(blk_hdr):
    """Return the block hash of blk_hdr as a hex string.

    The double-SHA256 digest is byte-reversed within each word and then
    word-reversed, giving the conventional big-endian hash display form.
    """
    digest = wordreverse(bufreverse(calc_hdr_hash(blk_hdr)))
    return hexlify(digest).decode('utf-8')
def get_blk_dt(blk_hdr):
    """Extract the block timestamp from a header.

    Returns (month_start, ntime): the first-of-the-month datetime for the
    block's nTime field (bytes 68..71, little-endian), and the raw nTime.
    """
    (ntime,) = struct.unpack("<I", blk_hdr[68:68 + 4])
    stamp = datetime.datetime.fromtimestamp(ntime)
    month_start = datetime.datetime(stamp.year, stamp.month, 1)
    return (month_start, ntime)
# When getting the list of block hashes, undo any byte reversals.
def get_block_hashes(settings):
    """Read the hash list file named by settings['hashlist'].

    Returns one hash string per line, byte-order-switched when
    settings['rev_hash_bytes'] == 'true'.

    Fix: the file handle was previously opened and never closed; use a
    'with' block so it is released even on error.
    """
    blkindex = []
    with open(settings['hashlist'], "r") as f:
        for line in f:
            line = line.rstrip()
            if settings['rev_hash_bytes'] == 'true':
                line = hex_switchEndian(line)
            blkindex.append(line)
    print("Read " + str(len(blkindex)) + " hashes")
    return blkindex
# The block map shouldn't give or receive byte-reversed hashes.
def mkblockmap(blkindex):
    """Return a dict mapping block hash -> height for every entry of blkindex."""
    return {blkhash: height for height, blkhash in enumerate(blkindex)}
# Block header and extent on disk: input file number, byte offset of the
# block body, the 8-byte file header, the 80-byte block header, and the size
# of the remaining block data.
BlockExtent = namedtuple('BlockExtent', 'fn offset inhdr blkhdr size')
class BlockDataCopier:
    """Copy blocks from input blk*.dat files into height-ordered output.

    Blocks are written in hash-list (height) order. Out-of-order blocks are
    remembered by extent; their data is cached in memory up to
    settings['out_of_order_cache_sz'] bytes, otherwise re-read from disk.
    """

    def __init__(self, settings, blkindex, blkmap):
        self.settings = settings
        self.blkindex = blkindex    # list of hashes, index == height
        self.blkmap = blkmap        # hash -> height
        self.inFn = 0
        self.inF = None
        self.outFn = 0
        self.outsz = 0
        self.outF = None
        self.outFname = None
        self.blkCountIn = 0
        self.blkCountOut = 0
        self.lastDate = datetime.datetime(2000, 1, 1)
        self.highTS = 1408893517 - 315360000
        self.timestampSplit = False
        self.fileOutput = True
        self.setFileTime = False
        self.maxOutSz = settings['max_out_sz']
        if 'output' in settings:
            self.fileOutput = False
        if settings['file_timestamp'] != 0:
            self.setFileTime = True
        if settings['split_timestamp'] != 0:
            self.timestampSplit = True
        # Extents and cache for out-of-order blocks
        self.blockExtents = {}
        self.outOfOrderData = {}
        self.outOfOrderSize = 0  # running total size for items in outOfOrderData

    def writeBlock(self, inhdr, blk_hdr, rawblock):
        """Append one block to the output, rolling output files as needed."""
        blockSizeOnDisk = len(inhdr) + len(blk_hdr) + len(rawblock)
        # Roll to a new numbered output file when the size cap is exceeded
        # (directory-output mode only).
        if not self.fileOutput and ((self.outsz + blockSizeOnDisk) > self.maxOutSz):
            self.outF.close()
            if self.setFileTime:
                os.utime(self.outFname, (int(time.time()), self.highTS))
            self.outF = None
            self.outFname = None
            self.outFn = self.outFn + 1
            self.outsz = 0

        (blkDate, blkTS) = get_blk_dt(blk_hdr)
        # Optionally start a new output file at each month boundary.
        if self.timestampSplit and (blkDate > self.lastDate):
            print("New month " + blkDate.strftime("%Y-%m") + " @ " + self.hash_str)
            self.lastDate = blkDate
            if self.outF:
                self.outF.close()
                if self.setFileTime:
                    os.utime(self.outFname, (int(time.time()), self.highTS))
                self.outF = None
                self.outFname = None
                self.outFn = self.outFn + 1
                self.outsz = 0

        if not self.outF:
            if self.fileOutput:
                self.outFname = self.settings['output_file']
            else:
                self.outFname = os.path.join(self.settings['output'], "blk%05d.dat" % self.outFn)
            print("Output file " + self.outFname)
            self.outF = open(self.outFname, "wb")

        self.outF.write(inhdr)
        self.outF.write(blk_hdr)
        self.outF.write(rawblock)
        self.outsz = self.outsz + len(inhdr) + len(blk_hdr) + len(rawblock)

        self.blkCountOut = self.blkCountOut + 1
        if blkTS > self.highTS:
            self.highTS = blkTS

        if (self.blkCountOut % 1000) == 0:
            print('%i blocks scanned, %i blocks written (of %i, %.1f%% complete)' %
                  (self.blkCountIn, self.blkCountOut, len(self.blkindex), 100.0 * self.blkCountOut / len(self.blkindex)))

    def inFileName(self, fn):
        """Return the path of input file number fn (blk00000.dat style)."""
        return os.path.join(self.settings['input'], "blk%05d.dat" % fn)

    def fetchBlock(self, extent):
        '''Fetch block contents from disk given extents'''
        with open(self.inFileName(extent.fn), "rb") as f:
            f.seek(extent.offset)
            return f.read(extent.size)

    def copyOneBlock(self):
        '''Find the next block to be written in the input, and copy it to the output.'''
        extent = self.blockExtents.pop(self.blkCountOut)
        if self.blkCountOut in self.outOfOrderData:
            # If the data is cached, use it from memory and remove from the cache
            rawblock = self.outOfOrderData.pop(self.blkCountOut)
            self.outOfOrderSize -= len(rawblock)
        else:  # Otherwise look up data on disk
            rawblock = self.fetchBlock(extent)
        self.writeBlock(extent.inhdr, extent.blkhdr, rawblock)

    def run(self):
        """Scan the input files and emit all known blocks in height order."""
        while self.blkCountOut < len(self.blkindex):
            if not self.inF:
                fname = self.inFileName(self.inFn)
                print("Input file " + fname)
                try:
                    self.inF = open(fname, "rb")
                except IOError:
                    print("Premature end of block data")
                    return

            inhdr = self.inF.read(8)
            # BUGFIX: indexing bytes yields an int in Python 3, so the old
            # comparison inhdr[0] == "\0" was always False and null padding
            # at the end of a blk file was never detected. Compare to 0.
            if (not inhdr or (inhdr[0] == 0)):
                self.inF.close()
                self.inF = None
                self.inFn = self.inFn + 1
                continue

            inMagic = inhdr[:4]
            if (inMagic != self.settings['netmagic']):
                print("Invalid magic: " + hexlify(inMagic).decode('utf-8'))
                return
            inLenLE = inhdr[4:]
            su = struct.unpack("<I", inLenLE)
            inLen = su[0] - 80  # length without header
            blk_hdr = self.inF.read(80)
            inExtent = BlockExtent(self.inFn, self.inF.tell(), inhdr, blk_hdr, inLen)

            self.hash_str = calc_hash_str(blk_hdr)
            # BUGFIX: consult the instance's map / settings (previously read
            # the module-level globals 'blkmap' and 'settings', which only
            # worked when run as the __main__ script).
            if self.hash_str not in self.blkmap:
                # Because blocks can be written to files out-of-order as of 0.10, the script
                # may encounter blocks it doesn't know about. Treat as debug output.
                if self.settings['debug_output'] == 'true':
                    print("Skipping unknown block " + self.hash_str)
                self.inF.seek(inLen, os.SEEK_CUR)
                continue

            blkHeight = self.blkmap[self.hash_str]
            self.blkCountIn += 1

            if self.blkCountOut == blkHeight:
                # If in-order block, just copy
                rawblock = self.inF.read(inLen)
                self.writeBlock(inhdr, blk_hdr, rawblock)

                # See if we can catch up to prior out-of-order blocks
                while self.blkCountOut in self.blockExtents:
                    self.copyOneBlock()

            else:  # If out-of-order, skip over block data for now
                self.blockExtents[blkHeight] = inExtent
                if self.outOfOrderSize < self.settings['out_of_order_cache_sz']:
                    # If there is space in the cache, read the data
                    # Reading the data in file sequence instead of seeking and fetching it later is preferred,
                    # but we don't want to fill up memory
                    self.outOfOrderData[blkHeight] = self.inF.read(inLen)
                    self.outOfOrderSize += inLen
                else:  # If no space in cache, seek forward
                    self.inF.seek(inLen, os.SEEK_CUR)

        print("Done (%i blocks written)" % (self.blkCountOut))
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("Usage: linearize-data.py CONFIG-FILE")
        sys.exit(1)

    # Parse the key=value config file.
    # FIX (W605): use raw strings for the regex patterns; '\s'/'\w'/'\S' in a
    # plain string are invalid escape sequences. Also close the file with
    # 'with' instead of leaving the handle open.
    with open(sys.argv[1]) as f:
        for line in f:
            # skip comment lines
            m = re.search(r'^\s*#', line)
            if m:
                continue
            # parse key=value lines
            m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
            if m is None:
                continue
            settings[m.group(1)] = m.group(2)

    # Force hash byte format setting to be lowercase to make comparisons easier.
    # Also place upfront in case any settings need to know about it.
    if 'rev_hash_bytes' not in settings:
        settings['rev_hash_bytes'] = 'false'
    settings['rev_hash_bytes'] = settings['rev_hash_bytes'].lower()

    # Defaults for any setting the config file did not supply.
    if 'netmagic' not in settings:
        settings['netmagic'] = 'f9beb4d9'
    if 'genesis' not in settings:
        settings['genesis'] = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
    if 'input' not in settings:
        settings['input'] = 'input'
    if 'hashlist' not in settings:
        settings['hashlist'] = 'hashlist.txt'
    if 'file_timestamp' not in settings:
        settings['file_timestamp'] = 0
    if 'split_timestamp' not in settings:
        settings['split_timestamp'] = 0
    if 'max_out_sz' not in settings:
        settings['max_out_sz'] = 1000 * 1000 * 1000
    if 'out_of_order_cache_sz' not in settings:
        settings['out_of_order_cache_sz'] = 100 * 1000 * 1000
    if 'debug_output' not in settings:
        settings['debug_output'] = 'false'

    # Normalize types: config values arrive as strings.
    settings['max_out_sz'] = int(settings['max_out_sz'])
    settings['split_timestamp'] = int(settings['split_timestamp'])
    settings['file_timestamp'] = int(settings['file_timestamp'])
    settings['netmagic'] = unhexlify(settings['netmagic'].encode('utf-8'))
    settings['out_of_order_cache_sz'] = int(settings['out_of_order_cache_sz'])
    settings['debug_output'] = settings['debug_output'].lower()

    if 'output_file' not in settings and 'output' not in settings:
        print("Missing output file / directory")
        sys.exit(1)

    blkindex = get_block_hashes(settings)
    blkmap = mkblockmap(blkindex)

    # Block hash map won't be byte-reversed. Neither should the genesis hash.
    if settings['genesis'] not in blkmap:
        print("Genesis block not found in hashlist")
    else:
        BlockDataCopier(settings, blkindex, blkmap).run()

View File

@ -22,135 +22,135 @@ import os.path
settings = {}

def hex_switchEndian(s):
    """Switch the endianness of a hex string (in pairs of hex chars)."""
    byte_pairs = [s[i:i + 2].encode() for i in range(0, len(s), 2)]
    byte_pairs.reverse()
    return b''.join(byte_pairs).decode()
class BitcoinRPC:
    """Minimal JSON-RPC client for bitcoind over HTTP with basic auth."""

    def __init__(self, host, port, username, password):
        credentials = ("%s:%s" % (username, password)).encode('utf-8')
        self.authhdr = b"Basic " + base64.b64encode(credentials)
        self.conn = httplib.HTTPConnection(host, port=port, timeout=30)

    def execute(self, obj):
        """POST obj as JSON and return the decoded reply, or None on failure."""
        headers = {'Authorization': self.authhdr,
                   'Content-type': 'application/json'}
        try:
            self.conn.request('POST', '/', json.dumps(obj), headers)
        except ConnectionRefusedError:
            print('RPC connection refused. Check RPC settings and the server status.',
                  file=sys.stderr)
            return None

        resp = self.conn.getresponse()
        if resp is None:
            print("JSON-RPC: no response", file=sys.stderr)
            return None

        return json.loads(resp.read().decode('utf-8'))

    @staticmethod
    def build_request(idx, method, params):
        """Build one JSON-RPC 1.1 request object; params may be None."""
        request = {'version': '1.1',
                   'method': method,
                   'id': idx}
        request['params'] = params if params is not None else []
        return request

    @staticmethod
    def response_is_error(resp_obj):
        """True when the reply carries a non-null 'error' member."""
        return resp_obj.get('error') is not None
def get_block_hashes(settings, max_blocks_per_call=10000):
    """Print block hashes from min_height..max_height via batched getblockhash RPC."""
    rpc = BitcoinRPC(settings['host'], settings['port'],
                     settings['rpcuser'], settings['rpcpassword'])

    height = settings['min_height']
    while height < settings['max_height'] + 1:
        num_blocks = min(settings['max_height'] + 1 - height, max_blocks_per_call)
        batch = [rpc.build_request(x, 'getblockhash', [height + x])
                 for x in range(num_blocks)]

        reply = rpc.execute(batch)
        if reply is None:
            print('Cannot continue. Program will halt.')
            return None

        for x, resp_obj in enumerate(reply):
            if rpc.response_is_error(resp_obj):
                print('JSON-RPC: error at height', height + x, ': ', resp_obj['error'], file=sys.stderr)
                sys.exit(1)
            assert(resp_obj['id'] == x)  # assume replies are in-sequence
            if settings['rev_hash_bytes'] == 'true':
                resp_obj['result'] = hex_switchEndian(resp_obj['result'])
            print(resp_obj['result'])

        height += num_blocks
def get_rpc_cookie():
    """Populate settings['rpcuser'/'rpcpassword'] from the datadir's .cookie file.

    The cookie file holds a single "user:password" line written by bitcoind.
    """
    cookie_path = os.path.join(os.path.expanduser(settings['datadir']), '.cookie')
    with open(cookie_path, 'r') as f:
        parts = f.readline().split(":")
    settings['rpcuser'] = parts[0]
    settings['rpcpassword'] = parts[1]
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("Usage: linearize-hashes.py CONFIG-FILE")
        sys.exit(1)

    # Parse the key=value config file.
    # FIX (W605): use raw strings for the regex patterns; '\s'/'\w'/'\S' in a
    # plain string are invalid escape sequences. Also close the file with
    # 'with' instead of leaving the handle open.
    with open(sys.argv[1]) as f:
        for line in f:
            # skip comment lines
            m = re.search(r'^\s*#', line)
            if m:
                continue
            # parse key=value lines
            m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
            if m is None:
                continue
            settings[m.group(1)] = m.group(2)

    # Defaults for any setting the config file did not supply.
    if 'host' not in settings:
        settings['host'] = '127.0.0.1'
    if 'port' not in settings:
        settings['port'] = 8332
    if 'min_height' not in settings:
        settings['min_height'] = 0
    if 'max_height' not in settings:
        settings['max_height'] = 313000
    if 'rev_hash_bytes' not in settings:
        settings['rev_hash_bytes'] = 'false'

    # Either rpcuser+rpcpassword or a datadir (for the .cookie) must be given.
    use_userpass = True
    use_datadir = False
    if 'rpcuser' not in settings or 'rpcpassword' not in settings:
        use_userpass = False
    if 'datadir' in settings and not use_userpass:
        use_datadir = True
    if not use_userpass and not use_datadir:
        print("Missing datadir or username and/or password in cfg file", file=sys.stderr)
        sys.exit(1)

    # Normalize types: config values arrive as strings.
    settings['port'] = int(settings['port'])
    settings['min_height'] = int(settings['min_height'])
    settings['max_height'] = int(settings['max_height'])

    # Force hash byte format setting to be lowercase to make comparisons easier.
    settings['rev_hash_bytes'] = settings['rev_hash_bytes'].lower()

    # Get the rpc user and pass from the cookie if the datadir is set
    if use_datadir:
        get_rpc_cookie()

    get_block_hashes(settings)