mirror of
https://github.com/bitcoin/bitcoin.git
synced 2026-04-06 05:37:50 +02:00
Add a test case to interface_ipc_mining.py to verify that the IPC server correctly handles and reports serialization errors rather than crashing the node. This covers the scenario where submitSolution is called with data that cannot be deserialized, as discussed in #33341. Also introduces the assert_capnp_failed helper in ipc_util.py to cleanly handle macOS-specific Cap'n Proto exception strings, and refactors an existing block weight test to use it.
173 lines
6.5 KiB
Python
173 lines
6.5 KiB
Python
#!/usr/bin/env python3
|
|
# Copyright (c) The Bitcoin Core developers
|
|
# Distributed under the MIT software license, see the accompanying
|
|
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
|
"""Shared utilities for IPC (multiprocess) interface tests."""
|
|
import asyncio
|
|
import inspect
|
|
from contextlib import asynccontextmanager
|
|
from dataclasses import dataclass
|
|
from io import BytesIO
|
|
from pathlib import Path
|
|
import shutil
|
|
import platform
|
|
from typing import Optional
|
|
|
|
from test_framework.messages import CBlock
|
|
from test_framework.util import (
|
|
assert_equal
|
|
)
|
|
|
|
# Test may be skipped and not have capnp installed
|
|
try:
|
|
import capnp # type: ignore[import] # noqa: F401
|
|
except ModuleNotFoundError:
|
|
pass
|
|
|
|
|
|
# Stores the result of getCoinbaseTx()
@dataclass
class CoinbaseTxData:
    """Plain-Python snapshot of a BlockTemplate.getCoinbaseTx() response.

    All bytes fields hold copies of the capnp Data fields (see
    mining_get_coinbase_tx, which builds this from the ephemeral
    capnp response struct).
    """
    # Coinbase transaction version number
    version: int
    # Sequence number of the coinbase input
    sequence: int
    # Leading part of the coinbase input's scriptSig
    scriptSigPrefix: bytes
    # Coinbase witness data; None when the response carried no witness field
    witness: Optional[bytes]
    # Remaining block reward amount (presumably in satoshis — confirm against server)
    blockRewardRemaining: int
    # Serialized outputs that must be included in the coinbase transaction
    requiredOutputs: list[bytes]
    # nLockTime value for the coinbase transaction
    lockTime: int
|
|
|
|
|
|
@asynccontextmanager
async def destroying(obj, ctx):
    """Call obj.destroy(ctx) at end of with: block. Similar to contextlib.closing.

    Yields obj unchanged; destroy() is awaited in a finally clause, so the
    remote object is released even when the with-body raises.
    """
    try:
        yield obj
    finally:
        await obj.destroy(ctx)
|
|
|
|
|
|
async def wait_and_do(wait_fn, do_fn):
    """Await wait_fn while triggering do_fn from a parallel task.

    The waiting task signals an event before blocking on wait_fn; the
    trigger task waits for that signal, sleeps briefly so the wait is
    actually pending, then invokes do_fn (either a plain callable or an
    awaitable). Returns wait_fn's result once both tasks complete.
    """
    started = asyncio.Event()

    async def _waiter():
        # Announce to the trigger task that waiting is about to begin.
        started.set()
        return await wait_fn

    async def _trigger():
        await started.wait()
        # Small delay to give _waiter a chance to actually block on wait_fn.
        await asyncio.sleep(0.1)
        # do_fn may be either a callable or an awaitable object.
        if inspect.isawaitable(do_fn):
            await do_fn
        else:
            do_fn()

    outcome, _ = await asyncio.gather(_waiter(), _trigger())
    return outcome
|
|
|
|
|
|
def load_capnp_modules(config):
    """Load the proxy/init/echo/mining capnp schemas and return them in a dict."""
    capnp_exe = shutil.which("capnp")
    if capnp_exe:
        # A system cap'nproto exists: use its include directory so
        # include/capnp/c++.capnp can be found.
        include_dir = Path(capnp_exe).resolve().parent.parent / "include"
    else:
        # No system cap'nproto: fall back to pycapnp's own "bundled" includes.
        # If pycapnp was installed with bundled capnp, capnp/c++.capnp can be
        # found at this location.
        include_dir = Path(capnp.__path__[0]).parent
    src_dir = Path(config['environment']['SRCDIR']) / "src"
    mp_dir = src_dir / "ipc" / "libmultiprocess" / "include"
    # Import search path. mp_dir must come first: if other libmultiprocess
    # installations exist on the system, `import "/mp/proxy.capnp"` lines must
    # resolve to the same file capnp.load() reads directly below, otherwise
    # capnp reports "failed: Duplicate ID @0xcc316e3f71a040fb" errors.
    imports = [str(mp_dir), str(include_dir), str(src_dir)]
    schemas = {
        "proxy": mp_dir / "mp" / "proxy.capnp",
        "init": src_dir / "ipc" / "capnp" / "init.capnp",
        "echo": src_dir / "ipc" / "capnp" / "echo.capnp",
        "mining": src_dir / "ipc" / "capnp" / "mining.capnp",
    }
    return {name: capnp.load(str(path), imports=imports) for name, path in schemas.items()}
|
|
|
|
|
|
async def make_capnp_init_ctx(self):
    """Connect to node 0's IPC socket and return (ctx, Init proxy)."""
    node = self.nodes[0]
    # Open the unix-socket connection and bootstrap the Init interface.
    stream = await capnp.AsyncIoStream.create_unix_connection(node.ipc_socket_path)
    rpc_client = capnp.TwoPartyClient(stream)
    init = rpc_client.bootstrap().cast_as(self.capnp_modules['init'].Init)
    # Create a remote thread on the server for the IPC calls to be executed
    # in (pipelined: construct() / makeThread() results are used directly).
    thread = init.construct().threadMap.makeThread("pythread").result
    ctx = self.capnp_modules['proxy'].Context()
    ctx.thread = thread
    return ctx, init
|
|
|
|
|
|
async def mining_create_block_template(mining, stack, ctx, opts):
    """Call mining.createNewBlock() and return the template (destroyed when
    stack exits), or None if the response carries no result."""
    response = await mining.createNewBlock(ctx, opts)
    if response._has("result"):
        return await stack.enter_async_context(destroying(response.result, ctx))
    return None
|
|
|
|
|
|
async def mining_wait_next_template(template, stack, ctx, opts):
    """Call template.waitNext() and return the next template (destroyed when
    stack exits), or None if the response carries no result."""
    response = await template.waitNext(ctx, opts)
    if response._has("result"):
        return await stack.enter_async_context(destroying(response.result, ctx))
    return None
|
|
|
|
|
|
async def mining_get_block(block_template, ctx):
    """Fetch the template's serialized block and deserialize it into a CBlock."""
    raw = (await block_template.getBlock(ctx)).result
    block = CBlock()
    block.deserialize(BytesIO(raw))
    return block
|
|
|
|
|
|
async def mining_get_coinbase_tx(block_template, ctx) -> CoinbaseTxData:
    """Fetch getCoinbaseTx() from the template and copy it into CoinbaseTxData."""
    assert block_template is not None
    # The capnp response struct is garbage-collected when this function
    # returns, and starting with pycapnp v2.2.0 its Data fields are ephemeral
    # `memoryview`s — so every Data field that outlives this call is copied
    # out with a bytes() cast.
    tx = (await block_template.getCoinbaseTx(ctx)).result
    witness: Optional[bytes] = bytes(tx.witness) if tx._has("witness") else None
    return CoinbaseTxData(
        version=int(tx.version),
        sequence=int(tx.sequence),
        scriptSigPrefix=bytes(tx.scriptSigPrefix),
        witness=witness,
        blockRewardRemaining=int(tx.blockRewardRemaining),
        requiredOutputs=[bytes(output) for output in tx.requiredOutputs],
        lockTime=int(tx.lockTime),
    )
|
|
|
|
async def make_mining_ctx(self):
    """Create IPC context and Mining proxy object."""
    ctx, init = await make_capnp_init_ctx(self)
    self.log.debug("Create Mining proxy object")
    return ctx, init.makeMining(ctx).result
|
|
|
|
def assert_capnp_failed(e, description_prefix):
    """Assert that capnp exception e is a FAILED error whose description
    starts with description_prefix, tolerating the macOS quirk where the
    description is replaced by a generic kj::Exception message."""
    generic_kj = "remote exception: unknown non-KJ exception of type: kj::Exception"
    if e.description == generic_kj:
        # macOS + REDUCE_EXPORTS bug: Cap'n Proto fails to recognize
        # its own exception type and returns a generic error instead.
        # https://github.com/bitcoin/bitcoin/pull/34422#discussion_r2863852691
        # Assert this only occurs on Darwin until fixed.
        assert_equal(platform.system(), "Darwin")
    else:
        assert e.description.startswith(description_prefix), f"Expected description starting with '{description_prefix}', got '{e.description}'"
    assert_equal(e.type, "FAILED")
|