Update plutus.py

This commit is contained in:
Isaacdelly
2019-02-03 11:36:44 -08:00
committed by GitHub
parent 3d9598184d
commit caea5a3e8e

View File

@@ -7,10 +7,10 @@ import pickle
import hashlib import hashlib
import binascii import binascii
import multiprocessing import multiprocessing
from ellipticcurve.privateKey import PrivateKey import fastecdsa
from ellipticcurve.publicKey import PublicKey import bloom_filter
DATABASE = r'database/JAN_09_2019/' DATABASE = r'database/FEB_03_2019/'
def generate_private_key(): def generate_private_key():
"""Generate a random 32-byte hex integer which serves as a randomly generated Bitcoin private key. """Generate a random 32-byte hex integer which serves as a randomly generated Bitcoin private key.
@@ -19,12 +19,11 @@ def generate_private_key():
return binascii.hexlify(os.urandom(32)).decode('utf-8').upper() return binascii.hexlify(os.urandom(32)).decode('utf-8').upper()
def private_key_to_public_key(private_key): def private_key_to_public_key(private_key):
"""Accept a hex private key and convert it to its respective public key. Because converting a private key to """Accept a hex private key and convert it to its respective public key using SECP256k1 ECDSA signing.
a public key requires SECP256k1 ECDSA signing, this function is the most time consuming and is a bottleneck Average Time: seconds
in the overall speed of the program.
Average Time: 0.0031567731 seconds
""" """
return '04' + PrivateKey().fromString(bytes.fromhex(private_key)).publicKey().toString().hex().upper() public_key = fastecdsa.keys.get_public_key(private_key, fastecdsa.curve.secp256k1)
return '04' + public_key.x.to_bytes(32, byteorder='big').hex().upper() + public_key.y.to_bytes(32, byteorder='big').hex().upper()
def public_key_to_address(public_key): def public_key_to_address(public_key):
"""Accept a public key and convert it to its respective P2PKH wallet address. """Accept a public key and convert it to its respective P2PKH wallet address.
@@ -48,10 +47,7 @@ def process(private_key, public_key, address, database):
assumed to be empty and printed to the user. This is a fast and efficient query. assumed to be empty and printed to the user. This is a fast and efficient query.
Average Time: 0.0000026941 seconds Average Time: 0.0000026941 seconds
""" """
if address in database[0] or \ if address in database:
address in database[1] or \
address in database[2] or \
address in database[3]:
with open('plutus.txt', 'a') as file: with open('plutus.txt', 'a') as file:
file.write('hex private key: ' + str(private_key) + '\n' + file.write('hex private key: ' + str(private_key) + '\n' +
'WIF private key: ' + str(private_key_to_WIF(private_key)) + '\n' + 'WIF private key: ' + str(private_key_to_WIF(private_key)) + '\n' +
@@ -86,30 +82,19 @@ def main(database):
""" """
while True: while True:
private_key = generate_private_key() # 0.0000061659 seconds private_key = generate_private_key() # 0.0000061659 seconds
public_key = private_key_to_public_key(private_key) # 0.0031567731 seconds public_key = private_key_to_public_key(private_key) # seconds
address = public_key_to_address(public_key) # 0.0000801390 seconds address = public_key_to_address(public_key) # 0.0000801390 seconds
process(private_key, public_key, address, database) # 0.0000026941 seconds process(private_key, public_key, address, database) # 0.0000026941 seconds
# -------------------- # --------------------
# 0.0032457721 seconds # seconds
# brute forces per second = 0.0032457721 ÷ cpu_count() # brute forces per second = ÷ cpu_count()
if __name__ == '__main__': if __name__ == '__main__':
"""Deserialize the database and read into a list of sets for easier selection and O(1) complexity. Initialize """Deserialize the database and load into a bloom filter. Initialize the multiprocessing pool to target the
the multiprocessing pool to target the main function with cpu_count() concurrent processes. main function with cpu_count() * 2 concurrent processes.
""" """
database = [set() for _ in range(4)] with open(DATABASE, 'rb') as file:
count = len(os.listdir(DATABASE)) database = pickle.load(file)
half = count // 2; quarter = half // 2
for c, p in enumerate(os.listdir(DATABASE)):
with open(DATABASE + p, 'rb') as file:
print('\rreading database: ' + str(c+1) + '/' + str(count), end='')
if c < half:
if c < quarter: database[0] = database[0] | pickle.load(file)
else: database[1] = database[1] | pickle.load(file)
else:
if c < half + quarter: database[2] = database[2] | pickle.load(file)
else: database[3] = database[3] | pickle.load(file)
print(' DONE')
with multiprocessing.Pool(multiprocessing.cpu_count()) as pool: with multiprocessing.Pool(multiprocessing.cpu_count()) as pool:
pool.map(main(database), range(multiprocessing.cpu_count() * 2)) pool.map(main(database), range(multiprocessing.cpu_count() * 2))