Compare commits


7 Commits

Author SHA1 Message Date
fanquake
624b1b3f78
Merge 20778eb0235df70397fc285f9e3b72270bd4aaf4 into 5f4422d68dc3530c353af1f87499de1c864b60ad
2025-03-17 03:51:25 +01:00
merge-script
5f4422d68d
Merge bitcoin/bitcoin#32010: qa: Fix TxIndex race conditions
3301d2cbe8c3b76c97285d75fa59637cb6952d0b qa: Wait for txindex to avoid race condition (Hodlinator)
9bfb0d75ba10591cc6c9620f9fd1ecc0e55e7a48 qa: Remove unnecessary -txindex args (Hodlinator)
7ac281c19cd3d11f316dbbb3308eabf1ad4f26d6 qa: Add missing coverage of corrupt indexes (Hodlinator)

Pull request description:

  - Add synchronization in 3 places where, if the Transaction Index happens to be slow, querying it for transactions leads to rare test failures (one such case was experienced on Windows, prompting this investigation); the waiting pattern is sketched right after this list.
  - Remove unnecessary TxIndex initialization in some tests.
  - Add missing test coverage for the TxIndex aspect in feature_init.py.
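
  As an illustration only (not part of this changeset), a minimal sketch of that waiting pattern, built on the real `getindexinfo` RPC and the test framework's `wait_until`; the wrapper name is hypothetical:

  ```python
  # Hypothetical wrapper: block until the txindex has caught up with the tip
  # before querying it, so a slow index cannot cause a spurious
  # "transaction not found" failure in the test.
  def query_tx_once_index_synced(test_framework, node, txid):
      # getindexinfo("txindex") reports whether the index is synced.
      test_framework.wait_until(lambda: node.getindexinfo("txindex")["txindex"]["synced"])
      # Only after the wait is it safe to look up arbitrary confirmed transactions.
      return node.getrawtransaction(txid)
  ```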

ACKs for top commit:
  fjahr:
    re-ACK 3301d2cbe8c3b76c97285d75fa59637cb6952d0b
  mzumsande:
    Code Review ACK 3301d2cbe8c3b76c97285d75fa59637cb6952d0b
  furszy:
    Code review ACK 3301d2cbe8c3b76c97285d75fa59637cb6952d0b
  Prabhat1308:
    Concept ACK [`3301d2c`](3301d2cbe8)

Tree-SHA512: 7c2019e38455f344856aaf6b381faafbd88d53dc88d13309deb718c1dcfbee4ccca7c7f1b66917395503a6f94c3b216a007ad432cc8b93d0309db9805f38d602
2025-03-17 10:28:14 +08:00
Hodlinator
3301d2cbe8
qa: Wait for txindex to avoid race condition
Can be verified to be necessary by adding std::this_thread::sleep_for(0.5s) at the beginning of TxIndex::CustomAppend.
2025-03-10 15:24:16 +01:00
Hodlinator
9bfb0d75ba
qa: Remove unnecessary -txindex args
(The parent commit ensured the indexes in feature_init.py are actually used; otherwise they would have been removed here as well.)
2025-03-07 22:22:31 +01:00
Hodlinator
7ac281c19c
qa: Add missing coverage of corrupt indexes
2025-03-07 22:22:31 +01:00
fanquake
20778eb023
RFC: disable compression in macOS gen-sdk script
Starting with Python 3.11, Python's gzip might delegate to zlib.
Depending on the OS, e.g. Ubuntu vs Fedora, the underlying zlib
implementation might differ, resulting in different output.

For now, or until a better solution exists, disable compression. This
results in the SDK increasing in size to ~230 MB, which is not
unreasonable in order to regain determinism (and would be significantly
worse without the previous commit).

See: https://docs.python.org/3/library/gzip.html#gzip.compress
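
For context, a small self-contained sketch of the workaround (payload and output path are placeholders, not part of the change): with compresslevel=0 the deflate stream merely stores the bytes, and mtime=0 pins the gzip header timestamp, so the output should no longer depend on which zlib build Python's gzip module delegates to.

```python
import gzip

payload = b"example payload\n" * 1000

# compresslevel=0 emits "stored" (uncompressed) deflate blocks and mtime=0
# fixes the header timestamp, making the bytes reproducible across platforms.
with open("output.gz", "wb") as fp:
    with gzip.GzipFile(fileobj=fp, mode="wb", compresslevel=0, mtime=0) as gzf:
        gzf.write(payload)
```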
2025-03-06 16:12:07 +00:00
fanquake
6998e933f9
contrib: more selectively pick files for macOS SDK
Only include what we really need. Skip hundreds of MB of manpages. Note that
System/Library is only needed for the Qt build.
2025-03-06 16:10:36 +00:00
8 changed files with 37 additions and 13 deletions

View File

@ -52,7 +52,7 @@ path to `Xcode.app` (extracted in the previous stage) as the first argument.
```
The generated archive should be: `Xcode-15.0-15A240d-extracted-SDK-with-libcxx-headers.tar.gz`.
-The `sha256sum` should be `c0c2e7bb92c1fee0c4e9f3a485e4530786732d6c6dd9e9f418c282aa6892f55d`.
+The `sha256sum` should be `2184c9699203ca32521899ca0deec831f2b6af214549c85bfad6ef8554d7778e`.
## Deterministic macOS App Notes

View File

@ -54,7 +54,7 @@ def run():
out_sdktgz_path = pathlib.Path("./{}.tar.gz".format(out_name))
def tarfp_add_with_base_change(tarfp, dir_to_add, alt_base_dir):
"""Add all files in dir_to_add to tarfp, but prepent alt_base_dir to the files'
"""Add all files in dir_to_add to tarfp, but prepend alt_base_dir to the files'
names
e.g. if the only file under /root/bazdir is /root/bazdir/qux, invoking:
@ -81,11 +81,13 @@ def run():
return tarinfo
with cd(dir_to_add):
# recursion already adds entries in sorted order
tarfp.add(".", recursive=True, filter=change_tarinfo_base)
tarfp.add("./usr/include", recursive=True, filter=change_tarinfo_base)
tarfp.add("./usr/lib", recursive=True, filter=change_tarinfo_base)
tarfp.add("./System/Library/Frameworks", recursive=True, filter=change_tarinfo_base)
print("Creating output .tar.gz file...")
with out_sdktgz_path.open("wb") as fp:
-with gzip.GzipFile(fileobj=fp, mode='wb', compresslevel=9, mtime=0) as gzf:
+with gzip.GzipFile(fileobj=fp, mode='wb', compresslevel=0, mtime=0) as gzf:
with tarfile.open(mode="w", fileobj=gzf, format=tarfile.GNU_FORMAT) as tarfp:
print("Adding MacOSX SDK {} files...".format(sdk_version))
tarfp_add_with_base_change(tarfp, sdk_dir, out_name)

View File

@ -88,7 +88,7 @@ class InitTest(BitcoinTestFramework):
args = ['-txindex=1', '-blockfilterindex=1', '-coinstatsindex=1']
for terminate_line in lines_to_terminate_after:
self.log.info(f"Starting node and will exit after line {terminate_line}")
self.log.info(f"Starting node and will terminate after line {terminate_line}")
with node.busy_wait_for_debug_log([terminate_line]):
if platform.system() == 'Windows':
# CREATE_NEW_PROCESS_GROUP is required in order to be able
@ -108,12 +108,22 @@ class InitTest(BitcoinTestFramework):
'blocks/index/*.ldb': 'Error opening block database.',
'chainstate/*.ldb': 'Error opening coins database.',
'blocks/blk*.dat': 'Error loading block database.',
+'indexes/txindex/MANIFEST*': 'LevelDB error: Corruption: CURRENT points to a non-existent file',
+# Removing these files does not result in a startup error:
+# 'indexes/blockfilter/basic/*.dat', 'indexes/blockfilter/basic/db/*.*', 'indexes/coinstats/db/*.*',
+# 'indexes/txindex/*.log', 'indexes/txindex/CURRENT', 'indexes/txindex/LOCK'
}
files_to_perturb = {
'blocks/index/*.ldb': 'Error loading block database.',
'chainstate/*.ldb': 'Error opening coins database.',
'blocks/blk*.dat': 'Corrupted block database detected.',
+'indexes/blockfilter/basic/db/*.*': 'LevelDB error: Corruption',
+'indexes/coinstats/db/*.*': 'LevelDB error: Corruption',
+'indexes/txindex/*.log': 'LevelDB error: Corruption',
+'indexes/txindex/CURRENT': 'LevelDB error: Corruption',
+# Perturbing these files does not result in a startup error:
+# 'indexes/blockfilter/basic/*.dat', 'indexes/txindex/MANIFEST*', 'indexes/txindex/LOCK'
}
for file_patt, err_fragment in files_to_delete.items():
@ -135,9 +145,10 @@ class InitTest(BitcoinTestFramework):
self.stop_node(0)
self.log.info("Test startup errors after perturbing certain essential files")
dirs = ["blocks", "chainstate", "indexes"]
for file_patt, err_fragment in files_to_perturb.items():
shutil.copytree(node.chain_path / "blocks", node.chain_path / "blocks_bak")
shutil.copytree(node.chain_path / "chainstate", node.chain_path / "chainstate_bak")
for dir in dirs:
shutil.copytree(node.chain_path / dir, node.chain_path / f"{dir}_bak")
target_files = list(node.chain_path.glob(file_patt))
for target_file in target_files:
@ -151,10 +162,9 @@ class InitTest(BitcoinTestFramework):
start_expecting_error(err_fragment)
-shutil.rmtree(node.chain_path / "blocks")
-shutil.rmtree(node.chain_path / "chainstate")
-shutil.move(node.chain_path / "blocks_bak", node.chain_path / "blocks")
-shutil.move(node.chain_path / "chainstate_bak", node.chain_path / "chainstate")
+for dir in dirs:
+shutil.rmtree(node.chain_path / dir)
+shutil.move(node.chain_path / f"{dir}_bak", node.chain_path / dir)
def init_pid_test(self):
BITCOIN_PID_FILENAME_CUSTOM = "my_fancy_bitcoin_pid_file.foobar"

View File

@ -45,6 +45,7 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
+sync_txindex,
)
from test_framework.wallet import MiniWallet
from test_framework.wallet_util import generate_keypair
@ -270,6 +271,7 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
self.log.info('A coinbase transaction')
# Pick the input of the first tx we created, so it has to be a coinbase tx
+sync_txindex(self, node)
raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
tx = tx_from_hex(raw_tx_coinbase_spent)
self.check_mempool_result(

View File

@ -34,6 +34,7 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
+sync_txindex,
)
from test_framework.wallet import (
getnewdestination,
@ -70,7 +71,7 @@ class RawTransactionsTest(BitcoinTestFramework):
self.num_nodes = 3
self.extra_args = [
["-txindex"],
["-txindex"],
[],
["-fastprune", "-prune=1"],
]
# whitelist peers to speed up tx relay / mempool sync
@ -109,6 +110,7 @@ class RawTransactionsTest(BitcoinTestFramework):
self.log.info(f"Test getrawtransaction {'with' if n == 0 else 'without'} -txindex")
if n == 0:
+sync_txindex(self, self.nodes[n])
# With -txindex.
# 1. valid parameters - only supply txid
assert_equal(self.nodes[n].getrawtransaction(txId), tx['hex'])

View File

@ -12,6 +12,7 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
+sync_txindex,
)
from test_framework.wallet import MiniWallet
@ -77,6 +78,7 @@ class MerkleBlockTest(BitcoinTestFramework):
assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2]))), sorted(txlist))
assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid2, txid1]))), sorted(txlist))
# We can always get a proof if we have a -txindex
+sync_txindex(self, self.nodes[1])
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[1].gettxoutproof([txid_spent])), [txid_spent])
# We can't get a proof if we specify transactions from different blocks
assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[0].gettxoutproof, [txid1, txid3])

View File

@ -592,3 +592,10 @@ def find_vout_for_address(node, txid, addr):
if addr == tx["vout"][i]["scriptPubKey"]["address"]:
return i
raise RuntimeError("Vout not found for address: txid=%s, addr=%s" % (txid, addr))
+def sync_txindex(test_framework, node):
+test_framework.log.debug("Waiting for node txindex to sync")
+sync_start = int(time.time())
+test_framework.wait_until(lambda: node.getindexinfo("txindex")["txindex"]["synced"])
+test_framework.log.debug(f"Synced in {time.time() - sync_start} seconds")

View File

@ -117,7 +117,6 @@ class AddressInputTypeGrouping(BitcoinTestFramework):
self.extra_args = [
[
"-addresstype=bech32",
"-txindex",
],
[
"-addresstype=p2sh-segwit",