Mirror of https://github.com/bitcoin/bitcoin.git
Merge #11817: [tests] Change feature_csv_activation.py to use BitcoinTestFramework
12982682a6 [tests] Change feature_csv_activation.py to use BitcoinTestFramework (John Newbery)
db7ffb9d1b [tests] Move utility functions in feature_csv_activation.py out of class. (John Newbery)
0842edf9ee [tests] Remove nested loops from feature_csv_activation.py (John Newbery)
2e511d5424 [tests] improve logging in feature_csv_activation.py (John Newbery)
6f7f5bc002 [tests] fix flake8 nits in feature_csv_activation.py (John Newbery)

Pull request description:

  Next step in #10603.

  - first four commits tidy up bip68-112-113-p2p.py
  - fifth commit removes usage of ComparisonTestFramework

Tree-SHA512: 34466be12280096ad92ac842f9c594ae40c19be9b4edc73c1e37964d03d55f4e75b80cea50c9940404096effc23705671503a883a7b7773b5866a29f653ba710
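For orientation, the sketch below is not part of the patch; it only illustrates, under the same framework used in the diff, the pattern the test is migrated to: instead of subclassing ComparisonTestFramework and yielding TestInstance objects to a TestManager, the test subclasses BitcoinTestFramework, attaches a P2PDataStore connection and submits blocks directly. The class name ExampleP2PTest is invented for illustration; the helper calls (add_p2p_connection, network_thread_start, wait_for_verack, send_blocks_and_test) are the ones that appear in the diff below.

from test_framework.mininode import P2PDataStore, network_thread_start
from test_framework.test_framework import BitcoinTestFramework


class ExampleP2PTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        node = self.nodes[0]
        # Attach a P2P connection that stores blocks and serves them on request.
        node.add_p2p_connection(P2PDataStore())
        network_thread_start()
        node.p2p.wait_for_verack()
        # A block built with create_block()/solve() would be submitted like this;
        # success=False would instead assert that the node's tip does not advance.
        # node.p2p.send_blocks_and_test([block], node, success=True)


if __name__ == '__main__':
    ExampleP2PTest().main()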
@@ -42,98 +42,131 @@ bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {re
 bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
 bip112tx_special - test negative argument to OP_CSV
 """
+from decimal import Decimal
-from test_framework.test_framework import ComparisonTestFramework
+from itertools import product
-from test_framework.util import *
-from test_framework.mininode import ToHex, CTransaction, network_thread_start
-from test_framework.blocktools import create_coinbase, create_block
-from test_framework.comptool import TestInstance, TestManager
-from test_framework.script import *
 from io import BytesIO
 import time

-base_relative_locktime = 10
+from test_framework.blocktools import create_coinbase, create_block
-seq_disable_flag = 1<<31
+from test_framework.messages import ToHex, CTransaction
-seq_random_high_bit = 1<<25
+from test_framework.mininode import network_thread_start, P2PDataStore
-seq_type_flag = 1<<22
+from test_framework.script import (
-seq_random_low_bit = 1<<18
+    CScript,
+    OP_CHECKSEQUENCEVERIFY,
+    OP_DROP,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+    assert_equal,
+    get_bip9_status,
+    hex_str_to_bytes,
+)

-# b31,b25,b22,b18 represent the 31st, 25th, 22nd and 18th bits respectively in the nSequence field
+BASE_RELATIVE_LOCKTIME = 10
-# relative_locktimes[b31][b25][b22][b18] is a base_relative_locktime with the indicated bits set if their indices are 1
+SEQ_DISABLE_FLAG = 1 << 31
-relative_locktimes = []
+SEQ_RANDOM_HIGH_BIT = 1 << 25
-for b31 in range(2):
+SEQ_TYPE_FLAG = 1 << 22
-    b25times = []
+SEQ_RANDOM_LOW_BIT = 1 << 18
-    for b25 in range(2):
-        b22times = []
-        for b22 in range(2):
-            b18times = []
-            for b18 in range(2):
-                rlt = base_relative_locktime
-                if (b31):
-                    rlt = rlt | seq_disable_flag
-                if (b25):
-                    rlt = rlt | seq_random_high_bit
-                if (b22):
-                    rlt = rlt | seq_type_flag
-                if (b18):
-                    rlt = rlt | seq_random_low_bit
-                b18times.append(rlt)
-            b22times.append(b18times)
-        b25times.append(b22times)
-    relative_locktimes.append(b25times)

-def all_rlt_txs(txarray):
+def relative_locktime(sdf, srhb, stf, srlb):
+    """Returns a locktime with certain bits set."""

+    locktime = BASE_RELATIVE_LOCKTIME
+    if sdf:
+        locktime |= SEQ_DISABLE_FLAG
+    if srhb:
+        locktime |= SEQ_RANDOM_HIGH_BIT
+    if stf:
+        locktime |= SEQ_TYPE_FLAG
+    if srlb:
+        locktime |= SEQ_RANDOM_LOW_BIT
+    return locktime

+def all_rlt_txs(txs):
+    return [tx['tx'] for tx in txs]

+def create_transaction(node, txid, to_address, amount):
+    inputs = [{"txid": txid, "vout": 0}]
+    outputs = {to_address: amount}
+    rawtx = node.createrawtransaction(inputs, outputs)
+    tx = CTransaction()
+    f = BytesIO(hex_str_to_bytes(rawtx))
+    tx.deserialize(f)
+    return tx

+def sign_transaction(node, unsignedtx):
+    rawtx = ToHex(unsignedtx)
+    signresult = node.signrawtransactionwithwallet(rawtx)
+    tx = CTransaction()
+    f = BytesIO(hex_str_to_bytes(signresult['hex']))
+    tx.deserialize(f)
+    return tx

+def create_bip112special(node, input, txversion, address):
+    tx = create_transaction(node, input, address, Decimal("49.98"))
+    tx.nVersion = txversion
+    signtx = sign_transaction(node, tx)
+    signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+    return signtx

+def send_generic_input_tx(node, coinbases, address):
+    amount = Decimal("49.99")
+    return node.sendrawtransaction(ToHex(sign_transaction(node, create_transaction(node, node.getblock(coinbases.pop())['tx'][0], address, amount))))

+def create_bip68txs(node, bip68inputs, txversion, address, locktime_delta=0):
+    """Returns a list of bip68 transactions with different bits set."""
     txs = []
-    for b31 in range(2):
+    assert(len(bip68inputs) >= 16)
-        for b25 in range(2):
+    for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
-            for b22 in range(2):
+        locktime = relative_locktime(sdf, srhb, stf, srlb)
-                for b18 in range(2):
+        tx = create_transaction(node, bip68inputs[i], address, Decimal("49.98"))
-                    txs.append(txarray[b31][b25][b22][b18])
+        tx.nVersion = txversion
+        tx.vin[0].nSequence = locktime + locktime_delta
+        tx = sign_transaction(node, tx)
+        tx.rehash()
+        txs.append({'tx': tx, 'sdf': sdf, 'stf': stf})

     return txs

-class BIP68_112_113Test(ComparisonTestFramework):
+def create_bip112txs(node, bip112inputs, varyOP_CSV, txversion, address, locktime_delta=0):
+    """Returns a list of bip68 transactions with different bits set."""
+    txs = []
+    assert(len(bip112inputs) >= 16)
+    for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
+        locktime = relative_locktime(sdf, srhb, stf, srlb)
+        tx = create_transaction(node, bip112inputs[i], address, Decimal("49.98"))
+        if (varyOP_CSV):  # if varying OP_CSV, nSequence is fixed
+            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta
+        else:  # vary nSequence instead, OP_CSV is fixed
+            tx.vin[0].nSequence = locktime + locktime_delta
+        tx.nVersion = txversion
+        signtx = sign_transaction(node, tx)
+        if (varyOP_CSV):
+            signtx.vin[0].scriptSig = CScript([locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+        else:
+            signtx.vin[0].scriptSig = CScript([BASE_RELATIVE_LOCKTIME, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+        tx.rehash()
+        txs.append({'tx': signtx, 'sdf': sdf, 'stf': stf})
+    return txs

+class BIP68_112_113Test(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 1
         self.setup_clean_chain = True
         self.extra_args = [['-whitelist=127.0.0.1', '-blockversion=4', '-addresstype=legacy']]

-    def run_test(self):
+    def generate_blocks(self, number, version, test_blocks=None):
-        test = TestManager(self, self.options.tmpdir)
+        if test_blocks is None:
-        test.add_all_connections(self.nodes)
+            test_blocks = []
-        network_thread_start()
-        test.run()

-    def send_generic_input_tx(self, node, coinbases):
-        amount = Decimal("49.99")
-        return node.sendrawtransaction(ToHex(self.sign_transaction(node, self.create_transaction(node, node.getblock(coinbases.pop())['tx'][0], self.nodeaddress, amount))))

-    def create_transaction(self, node, txid, to_address, amount):
-        inputs = [{ "txid" : txid, "vout" : 0}]
-        outputs = { to_address : amount }
-        rawtx = node.createrawtransaction(inputs, outputs)
-        tx = CTransaction()
-        f = BytesIO(hex_str_to_bytes(rawtx))
-        tx.deserialize(f)
-        return tx

-    def sign_transaction(self, node, unsignedtx):
-        rawtx = ToHex(unsignedtx)
-        signresult = node.signrawtransactionwithwallet(rawtx)
-        tx = CTransaction()
-        f = BytesIO(hex_str_to_bytes(signresult['hex']))
-        tx.deserialize(f)
-        return tx

-    def generate_blocks(self, number, version, test_blocks = []):
         for i in range(number):
             block = self.create_test_block([], version)
-            test_blocks.append([block, True])
+            test_blocks.append(block)
             self.last_block_time += 600
             self.tip = block.sha256
             self.tipheight += 1
         return test_blocks

-    def create_test_block(self, txs, version = 536870912):
+    def create_test_block(self, txs, version=536870912):
         block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600)
         block.nVersion = version
         block.vtx.extend(txs)
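As an aside (not part of the patch), the flag constants introduced above can be exercised on their own: BIP 68 reads nSequence bit 31 as the disable flag and bit 22 as the type flag (time-based rather than height-based lock), while bits 25 and 18 are not consensus-meaningful and are only set here to vary the test inputs. The standalone sketch below, using only the Python standard library, reproduces how relative_locktime() composes a locktime and how product() enumerates the 16 flag combinations that replace the old nested b31/b25/b22/b18 loops.

from itertools import product

BASE_RELATIVE_LOCKTIME = 10
SEQ_DISABLE_FLAG = 1 << 31      # bit 31: sequence locks disabled for this input
SEQ_RANDOM_HIGH_BIT = 1 << 25   # bit 25: not consensus-meaningful, varied for coverage
SEQ_TYPE_FLAG = 1 << 22         # bit 22: lock is time-based (512-second units), not height-based
SEQ_RANDOM_LOW_BIT = 1 << 18    # bit 18: not consensus-meaningful, varied for coverage

def relative_locktime(sdf, srhb, stf, srlb):
    """Compose BASE_RELATIVE_LOCKTIME with the requested flag bits, as the test does."""
    locktime = BASE_RELATIVE_LOCKTIME
    if sdf:
        locktime |= SEQ_DISABLE_FLAG
    if srhb:
        locktime |= SEQ_RANDOM_HIGH_BIT
    if stf:
        locktime |= SEQ_TYPE_FLAG
    if srlb:
        locktime |= SEQ_RANDOM_LOW_BIT
    return locktime

# Enumerate all 16 combinations of the four flags, exactly as create_bip68txs() does.
for sdf, srhb, stf, srlb in product(*[[True, False]] * 4):
    print("sdf=%5s stf=%5s -> nSequence=0x%08x" % (sdf, stf, relative_locktime(sdf, srhb, stf, srlb)))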
@@ -142,184 +175,148 @@ class BIP68_112_113Test(ComparisonTestFramework):
         block.solve()
         return block

-    def create_bip68txs(self, bip68inputs, txversion, locktime_delta = 0):
+    def sync_blocks(self, blocks, success=True, reject_code=None, reject_reason=None, request_block=True):
-        txs = []
+        """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
-        assert(len(bip68inputs) >= 16)
-        i = 0
-        for b31 in range(2):
-            b25txs = []
-            for b25 in range(2):
-                b22txs = []
-                for b22 in range(2):
-                    b18txs = []
-                    for b18 in range(2):
-                        tx = self.create_transaction(self.nodes[0], bip68inputs[i], self.nodeaddress, Decimal("49.98"))
-                        i += 1
-                        tx.nVersion = txversion
-                        tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
-                        b18txs.append(self.sign_transaction(self.nodes[0], tx))
-                    b22txs.append(b18txs)
-                b25txs.append(b22txs)
-            txs.append(b25txs)
-        return txs

-    def create_bip112special(self, input, txversion):
+        Call with success = False if the tip shouldn't advance to the most recent block."""
-        tx = self.create_transaction(self.nodes[0], input, self.nodeaddress, Decimal("49.98"))
+        self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_code=reject_code, reject_reason=reject_reason, request_block=request_block)
-        tx.nVersion = txversion
-        signtx = self.sign_transaction(self.nodes[0], tx)
-        signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
-        return signtx

-    def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta = 0):
+    def run_test(self):
-        txs = []
+        self.nodes[0].add_p2p_connection(P2PDataStore())
-        assert(len(bip112inputs) >= 16)
+        network_thread_start()
-        i = 0
+        self.nodes[0].p2p.wait_for_verack()
-        for b31 in range(2):
-            b25txs = []
-            for b25 in range(2):
-                b22txs = []
-                for b22 in range(2):
-                    b18txs = []
-                    for b18 in range(2):
-                        tx = self.create_transaction(self.nodes[0], bip112inputs[i], self.nodeaddress, Decimal("49.98"))
-                        i += 1
-                        if (varyOP_CSV):  # if varying OP_CSV, nSequence is fixed
-                            tx.vin[0].nSequence = base_relative_locktime + locktime_delta
-                        else:  # vary nSequence instead, OP_CSV is fixed
-                            tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
-                        tx.nVersion = txversion
-                        signtx = self.sign_transaction(self.nodes[0], tx)
-                        if (varyOP_CSV):
-                            signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
-                        else:
-                            signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
-                        b18txs.append(signtx)
-                    b22txs.append(b18txs)
-                b25txs.append(b22txs)
-            txs.append(b25txs)
-        return txs

-    def get_tests(self):
+        self.log.info("Generate blocks in the past for coinbase outputs.")
         long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
         self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
-        self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs
+        self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2 * 32 + 1) # 82 blocks generated for inputs
         self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
         self.tipheight = 82 # height of the next block to build
         self.last_block_time = long_past_time
-        self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+        self.tip = int(self.nodes[0].getbestblockhash(), 16)
         self.nodeaddress = self.nodes[0].getnewaddress()

+        self.log.info("Test that the csv softfork is DEFINED")
         assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
         test_blocks = self.generate_blocks(61, 4)
-        yield TestInstance(test_blocks, sync_every_block=False) # 1
+        self.sync_blocks(test_blocks)
-        # Advanced from DEFINED to STARTED, height = 143
+        self.log.info("Advance from DEFINED to STARTED, height = 143")
         assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

-        # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
+        self.log.info("Fail to achieve LOCKED_IN")
-        # using a variety of bits to simulate multiple parallel softforks
+        # 100 out of 144 signal bit 0. Use a variety of bits to simulate multiple parallel softforks
-        test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
-        test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+        test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
-        test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+        test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
-        test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
+        test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
-        yield TestInstance(test_blocks, sync_every_block=False) # 2
+        test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
-        # Failed to advance past STARTED, height = 287
+        self.sync_blocks(test_blocks)

+        self.log.info("Failed to advance past STARTED, height = 287")
         assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')

+        self.log.info("Generate blocks to achieve LOCK-IN")
         # 108 out of 144 signal bit 0 to achieve lock-in
         # using a variety of bits to simulate multiple parallel softforks
         test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
         test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
         test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
         test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
-        yield TestInstance(test_blocks, sync_every_block=False) # 3
+        self.sync_blocks(test_blocks)
-        # Advanced from STARTED to LOCKED_IN, height = 431
+        self.log.info("Advanced from STARTED to LOCKED_IN, height = 431")
         assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

-        # 140 more version 4 blocks
+        # Generate 140 more version 4 blocks
         test_blocks = self.generate_blocks(140, 4)
-        yield TestInstance(test_blocks, sync_every_block=False) # 4
+        self.sync_blocks(test_blocks)

-        ### Inputs at height = 572
+        # Inputs at height = 572
+        #
         # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
         # Note we reuse inputs for v1 and v2 txs so must test these separately
         # 16 normal inputs
         bip68inputs = []
         for i in range(16):
-            bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+            bip68inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))

         # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
         bip112basicinputs = []
         for j in range(2):
             inputs = []
             for i in range(16):
-                inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+                inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
             bip112basicinputs.append(inputs)

         # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
         bip112diverseinputs = []
         for j in range(2):
             inputs = []
             for i in range(16):
-                inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+                inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
             bip112diverseinputs.append(inputs)

         # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
-        bip112specialinput = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+        bip112specialinput = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)

         # 1 normal input
-        bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+        bip113input = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)

         self.nodes[0].setmocktime(self.last_block_time + 600)
         inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
         self.nodes[0].setmocktime(0)
-        self.tip = int("0x" + inputblockhash, 0)
+        self.tip = int(inputblockhash, 16)
         self.tipheight += 1
         self.last_block_time += 600
-        assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1)
+        assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1)

         # 2 more version 4 blocks
         test_blocks = self.generate_blocks(2, 4)
-        yield TestInstance(test_blocks, sync_every_block=False) # 5
+        self.sync_blocks(test_blocks)
-        # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
+        self.log.info("Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)")
         assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')

         # Test both version 1 and version 2 transactions for all tests
         # BIP113 test transaction will be modified before each use to put in appropriate block time
-        bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+        bip113tx_v1 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
         bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
         bip113tx_v1.nVersion = 1
-        bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+        bip113tx_v2 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
         bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
         bip113tx_v2.nVersion = 2

         # For BIP68 test all 16 relative sequence locktimes
-        bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
+        bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1, self.nodeaddress)
-        bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)
+        bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2, self.nodeaddress)

         # For BIP112 test:
         # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
-        bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
+        bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress)
-        bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
+        bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress)
         # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
-        bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
+        bip112txs_vary_nSequence_9_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1)
-        bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
+        bip112txs_vary_nSequence_9_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1)
         # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
-        bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
+        bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress)
-        bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
+        bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress)
         # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
-        bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
+        bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1)
-        bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
+        bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1)
         # -1 OP_CSV OP_DROP input
-        bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
+        bip112tx_special_v1 = create_bip112special(self.nodes[0], bip112specialinput, 1, self.nodeaddress)
-        bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)
+        bip112tx_special_v2 = create_bip112special(self.nodes[0], bip112specialinput, 2, self.nodeaddress)

+        self.log.info("TESTING")

+        self.log.info("Pre-Soft Fork Tests. All txs should pass.")
+        self.log.info("Test version 1 txs")

-        ### TESTING ###
-        ##################################
-        ### Before Soft Forks Activate ###
-        ##################################
-        # All txs should pass
-        ### Version 1 txs ###
         success_txs = []
         # add BIP113 tx and -1 CSV tx
         bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
-        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
         success_txs.append(bip113signed1)
         success_txs.append(bip112tx_special_v1)
         # add BIP 68 txs
@@ -330,14 +327,15 @@ class BIP68_112_113Test(ComparisonTestFramework):
         # try BIP 112 with seq=9 txs
         success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
         success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
-        yield TestInstance([[self.create_test_block(success_txs), True]]) # 6
+        self.sync_blocks([self.create_test_block(success_txs)])
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

-        ### Version 2 txs ###
+        self.log.info("Test version 2 txs")

         success_txs = []
         # add BIP113 tx and -1 CSV tx
         bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
-        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
         success_txs.append(bip113signed2)
         success_txs.append(bip112tx_special_v2)
         # add BIP 68 txs
@@ -348,187 +346,149 @@ class BIP68_112_113Test(ComparisonTestFramework):
         # try BIP 112 with seq=9 txs
         success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
         success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
-        yield TestInstance([[self.create_test_block(success_txs), True]]) # 7
+        self.sync_blocks([self.create_test_block(success_txs)])
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())


         # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
         test_blocks = self.generate_blocks(1, 4)
-        yield TestInstance(test_blocks, sync_every_block=False) # 8
+        self.sync_blocks(test_blocks)
         assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')

+        self.log.info("Post-Soft Fork Tests.")

-        #################################
+        self.log.info("BIP 113 tests")
-        ### After Soft Forks Activate ###
-        #################################
-        ### BIP 113 ###
         # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
         bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
-        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
         bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
-        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
         for bip113tx in [bip113signed1, bip113signed2]:
-            yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10
+            self.sync_blocks([self.create_test_block([bip113tx])], success=False)
         # BIP 113 tests should now pass if the locktime is < MTP
         bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
-        bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+        bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
         bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
-        bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+        bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
         for bip113tx in [bip113signed1, bip113signed2]:
-            yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12
+            self.sync_blocks([self.create_test_block([bip113tx])])
             self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

         # Next block height = 580 after 4 blocks of random version
         test_blocks = self.generate_blocks(4, 1234)
-        yield TestInstance(test_blocks, sync_every_block=False) # 13
+        self.sync_blocks(test_blocks)

+        self.log.info("BIP 68 tests")
+        self.log.info("Test version 1 txs - all should still pass")

-        ### BIP 68 ###
-        ### Version 1 txs ###
-        # All still pass
         success_txs = []
         success_txs.extend(all_rlt_txs(bip68txs_v1))
-        yield TestInstance([[self.create_test_block(success_txs), True]]) # 14
+        self.sync_blocks([self.create_test_block(success_txs)])
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

-        ### Version 2 txs ###
+        self.log.info("Test version 2 txs")
-        bip68success_txs = []
         # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
-        for b25 in range(2):
+        bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
-            for b22 in range(2):
+        self.sync_blocks([self.create_test_block(bip68success_txs)])
-                for b18 in range(2):
-                    bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
-        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

         # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
-        bip68timetxs = []
+        bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']]
-        for b25 in range(2):
-            for b18 in range(2):
-                bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
         for tx in bip68timetxs:
-            yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19
+            self.sync_blocks([self.create_test_block([tx])], success=False)
-        bip68heighttxs = []
-        for b25 in range(2):
+        bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']]
-            for b18 in range(2):
-                bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
         for tx in bip68heighttxs:
-            yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23
+            self.sync_blocks([self.create_test_block([tx])], success=False)

         # Advance one block to 581
         test_blocks = self.generate_blocks(1, 1234)
-        yield TestInstance(test_blocks, sync_every_block=False) # 24
+        self.sync_blocks(test_blocks)

         # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
         bip68success_txs.extend(bip68timetxs)
-        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25
+        self.sync_blocks([self.create_test_block(bip68success_txs)])
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
         for tx in bip68heighttxs:
-            yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29
+            self.sync_blocks([self.create_test_block([tx])], success=False)

         # Advance one block to 582
         test_blocks = self.generate_blocks(1, 1234)
-        yield TestInstance(test_blocks, sync_every_block=False) # 30
+        self.sync_blocks(test_blocks)

         # All BIP 68 txs should pass
         bip68success_txs.extend(bip68heighttxs)
-        yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31
+        self.sync_blocks([self.create_test_block(bip68success_txs)])
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

+        self.log.info("BIP 112 tests")
+        self.log.info("Test version 1 txs")

-        ### BIP 112 ###
-        ### Version 1 txs ###
         # -1 OP_CSV tx should fail
-        yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32
+        self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False)
         # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
-        success_txs = []
-        for b25 in range(2):
+        success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
-            for b22 in range(2):
+        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']]
-                for b18 in range(2):
+        self.sync_blocks([self.create_test_block(success_txs)])
-                    success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
-                    success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
-        yield TestInstance([[self.create_test_block(success_txs), True]]) # 33
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

         # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
-        fail_txs = []
+        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
-        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
+        fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
-        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
+        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']]
-        for b25 in range(2):
+        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
-            for b22 in range(2):
-                for b18 in range(2):
-                    fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
-                    fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])

         for tx in fail_txs:
-            yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81
+            self.sync_blocks([self.create_test_block([tx])], success=False)

+        self.log.info("Test version 2 txs")

-        ### Version 2 txs ###
         # -1 OP_CSV tx should fail
-        yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82
+        self.sync_blocks([self.create_test_block([bip112tx_special_v2])], success=False)

         # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
-        success_txs = []
+        success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
-        for b25 in range(2):
+        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']]
-            for b22 in range(2):
-                for b18 in range(2):
-                    success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV
-                    success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9

-        yield TestInstance([[self.create_test_block(success_txs), True]]) # 83
+        self.sync_blocks([self.create_test_block(success_txs)])
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

-        ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
+        # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
-        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
-        fail_txs = []
-        fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9
-        for b25 in range(2):
-            for b22 in range(2):
-                for b18 in range(2):
-                    fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9

+        # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
+        fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
+        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
         for tx in fail_txs:
-            yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107
+            self.sync_blocks([self.create_test_block([tx])], success=False)

         # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
-        fail_txs = []
+        fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
-        for b25 in range(2):
-            for b22 in range(2):
-                for b18 in range(2):
-                    fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence
         for tx in fail_txs:
-            yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115
+            self.sync_blocks([self.create_test_block([tx])], success=False)

         # If sequencelock types mismatch, tx should fail
-        fail_txs = []
+        fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
-        for b25 in range(2):
+        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
-            for b18 in range(2):
-                fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence
-                fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV
         for tx in fail_txs:
-            yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123
+            self.sync_blocks([self.create_test_block([tx])], success=False)

         # Remaining txs should pass, just test masking works properly
-        success_txs = []
+        success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
-        for b25 in range(2):
+        success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']]
-            for b18 in range(2):
+        self.sync_blocks([self.create_test_block(success_txs)])
-                success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence
-                success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV
-        yield TestInstance([[self.create_test_block(success_txs), True]]) # 124
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

         # Additional test, of checking that comparison of two time types works properly
         time_txs = []
-        for b25 in range(2):
+        for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]:
-            for b18 in range(2):
+            tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
-                tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
+            signtx = sign_transaction(self.nodes[0], tx)
-                tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
+            time_txs.append(signtx)
-                signtx = self.sign_transaction(self.nodes[0], tx)
-                time_txs.append(signtx)
+        self.sync_blocks([self.create_test_block(time_txs)])
-        yield TestInstance([[self.create_test_block(time_txs), True]]) # 125
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())

-        ### Missing aspects of test
+        # TODO: Test empty stack fails
-        ## Testing empty stack fails


 if __name__ == '__main__':
     BIP68_112_113Test().main()
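As a closing aside (not part of the patch), the inline comments in the test such as "delta time = 8 * 600 < 10 * 512" and "9 * 600 > 10 * 512" compress the BIP 68 arithmetic the pass/fail expectations rely on: a time-based relative lock of 10 units is 10 * 512 = 5120 seconds, and with the test mining one block every 600 mock seconds that threshold is not yet reached after 8 blocks (4800 s) but is after 9 blocks (5400 s), while a height-based lock of 10 still needs 10 confirmations. A minimal check of that arithmetic, assuming the 600-second spacing set by generate_blocks:

# Worked form of the comments in the test above (600s block spacing is an assumption taken from the test setup).
LOCK_UNITS = 10          # relative lock value used throughout the test
TIME_UNIT = 512          # BIP 68 time-based locks count in 512-second granules
BLOCK_SPACING = 600      # the test advances last_block_time by 600s per block

for blocks_since_input in (8, 9, 10):
    elapsed = blocks_since_input * BLOCK_SPACING
    time_lock_met = elapsed >= LOCK_UNITS * TIME_UNIT        # 5120 seconds
    height_lock_met = blocks_since_input >= LOCK_UNITS       # 10 confirmations
    print("after %2d blocks: time lock met=%s, height lock met=%s"
          % (blocks_since_input, time_lock_met, height_lock_met))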