mirror of
https://github.com/bitcoin/bitcoin.git
synced 2026-01-23 00:24:48 +01:00
Compare commits
42 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fa91ad3420 | ||
|
|
b834447fb2 | ||
|
|
e9c978391f | ||
|
|
e973b61dbb | ||
|
|
f4b78c42e5 | ||
|
|
c6e7765c0a | ||
|
|
bab1ac827b | ||
|
|
71633a9b5c | ||
|
|
daef5852f0 | ||
|
|
7a71850a6d | ||
|
|
2e4688618b | ||
|
|
6e7ea3cf2a | ||
|
|
3af199531b | ||
|
|
76cdeb7b06 | ||
|
|
9405e915e7 | ||
|
|
5e8ad98163 | ||
|
|
a7e2d106db | ||
|
|
9ea84c08d7 | ||
|
|
833848e9b8 | ||
|
|
a074d36254 | ||
|
|
d91f56e1e3 | ||
|
|
cc324aa2be | ||
|
|
01c04d32aa | ||
|
|
abaf1e37a7 | ||
|
|
7a33cb9062 | ||
|
|
2cf352fd8e | ||
|
|
8a16165ab7 | ||
|
|
6f136cd391 | ||
|
|
be0857745a | ||
|
|
65bcbbc538 | ||
|
|
f24291bd96 | ||
|
|
73d3ab8fc9 | ||
|
|
ddfb9150b8 | ||
|
|
354d46bc10 | ||
|
|
5a0506eea0 | ||
|
|
020ed613be | ||
|
|
56626300b8 | ||
|
|
97088fa75a | ||
|
|
4917d0c0de | ||
|
|
554ff3f7f3 | ||
|
|
16e10f928c | ||
|
|
c7979f429a |
8
.github/actions/configure-docker/action.yml
vendored
8
.github/actions/configure-docker/action.yml
vendored
@@ -19,8 +19,12 @@ runs:
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: |
|
||||
core.exportVariable('ACTIONS_CACHE_URL', process.env['ACTIONS_CACHE_URL'])
|
||||
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env['ACTIONS_RUNTIME_TOKEN'])
|
||||
Object.keys(process.env).forEach(function (key) {
|
||||
if (key.startsWith('ACTIONS_')) {
|
||||
core.info(`Exporting ${key}`);
|
||||
core.exportVariable(key, process.env[key]);
|
||||
}
|
||||
});
|
||||
|
||||
- name: Construct docker build cache args
|
||||
shell: bash
|
||||
|
||||
@@ -17,7 +17,7 @@ runs:
|
||||
- name: Set cache hashes
|
||||
shell: bash
|
||||
run: |
|
||||
echo "DEPENDS_HASH=$(git ls-tree HEAD depends "ci/test/$FILE_ENV" | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
|
||||
echo "DEPENDS_HASH=$(git ls-tree HEAD depends "$FILE_ENV" | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
|
||||
echo "PREVIOUS_RELEASES_HASH=$(git ls-tree HEAD test/get_previous_releases.py | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
|
||||
|
||||
- name: Get container name
|
||||
|
||||
@@ -28,9 +28,9 @@ get_directory_property(precious_variables CACHE_VARIABLES)
|
||||
#=============================
|
||||
set(CLIENT_NAME "Bitcoin Core")
|
||||
set(CLIENT_VERSION_MAJOR 29)
|
||||
set(CLIENT_VERSION_MINOR 2)
|
||||
set(CLIENT_VERSION_MINOR 3)
|
||||
set(CLIENT_VERSION_BUILD 0)
|
||||
set(CLIENT_VERSION_RC 0)
|
||||
set(CLIENT_VERSION_RC 1)
|
||||
set(CLIENT_VERSION_IS_RELEASE "true")
|
||||
set(COPYRIGHT_YEAR "2025")
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
((gnu packages bash) #:select (bash-minimal))
|
||||
(gnu packages bison)
|
||||
((gnu packages certs) #:select (nss-certs))
|
||||
((gnu packages check) #:select (libfaketime))
|
||||
((gnu packages cmake) #:select (cmake-minimal))
|
||||
(gnu packages commencement)
|
||||
(gnu packages compression)
|
||||
@@ -209,7 +210,17 @@ and abstract ELF, PE and MachO formats.")
|
||||
(base32
|
||||
"1j47vwq4caxfv0xw68kw5yh00qcpbd56d7rq6c483ma3y7s96yyz"))))
|
||||
(build-system cmake-build-system)
|
||||
(inputs (list openssl))
|
||||
(arguments
|
||||
(list
|
||||
#:phases
|
||||
#~(modify-phases %standard-phases
|
||||
(replace 'check
|
||||
(lambda* (#:key tests? #:allow-other-keys)
|
||||
(if tests?
|
||||
(invoke "faketime" "-f" "@2025-01-01 00:00:00" ;; Tests fail after 2025.
|
||||
"ctest" "--output-on-failure" "--no-tests=error")
|
||||
(format #t "test suite not run~%")))))))
|
||||
(inputs (list libfaketime openssl))
|
||||
(home-page "https://github.com/mtrojnar/osslsigncode")
|
||||
(synopsis "Authenticode signing and timestamping tool")
|
||||
(description "osslsigncode is a small tool that implements part of the
|
||||
|
||||
@@ -10,14 +10,13 @@ to addrman with).
|
||||
|
||||
Update `MIN_BLOCKS` in `makeseeds.py` and the `-m`/`--minblocks` arguments below, as needed.
|
||||
|
||||
The seeds compiled into the release are created from sipa's, achow101's and luke-jr's
|
||||
The seeds compiled into the release are created from sipa's and achow101's
|
||||
DNS seed, virtu's crawler, and asmap community AS map data. Run the following commands
|
||||
from the `/contrib/seeds` directory:
|
||||
|
||||
```
|
||||
curl https://bitcoin.sipa.be/seeds.txt.gz | gzip -dc > seeds_main.txt
|
||||
curl https://21.ninja/seeds.txt.gz | gzip -dc >> seeds_main.txt
|
||||
curl https://luke.dashjr.org/programs/bitcoin/files/charts/seeds.txt >> seeds_main.txt
|
||||
curl https://mainnet.achownodes.xyz/seeds.txt.gz | gzip -dc >> seeds_main.txt
|
||||
curl https://signet.achownodes.xyz/seeds.txt.gz | gzip -dc > seeds_signet.txt
|
||||
curl https://testnet.achownodes.xyz/seeds.txt.gz | gzip -dc > seeds_test.txt
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
package=native_libmultiprocess
|
||||
$(package)_version=1954f7f65661d49e700c344eae0fc8092decf975
|
||||
$(package)_version=v5.0
|
||||
$(package)_download_path=https://github.com/bitcoin-core/libmultiprocess/archive
|
||||
$(package)_file_name=$($(package)_version).tar.gz
|
||||
$(package)_sha256_hash=fc014bd74727c1d5d30b396813685012c965d079244dd07b53bc1c75c610a2cb
|
||||
$(package)_sha256_hash=401984715b271a3446e1910f21adf048ba390d31cc93cc3073742e70d56fa3ea
|
||||
$(package)_dependencies=native_capnp
|
||||
|
||||
define $(package)_config_cmds
|
||||
|
||||
@@ -36,3 +36,7 @@ Bitcoin Core requires one of the following compilers.
|
||||
| [SQLite](../depends/packages/sqlite.mk) (wallet) | [link](https://sqlite.org) | [3.38.5](https://github.com/bitcoin/bitcoin/pull/25378) | [3.7.17](https://github.com/bitcoin/bitcoin/pull/19077) | No |
|
||||
| Python (scripts, tests) | [link](https://www.python.org) | N/A | [3.10](https://github.com/bitcoin/bitcoin/pull/30527) | No |
|
||||
| [systemtap](../depends/packages/systemtap.mk) ([tracing](tracing.md)) | [link](https://sourceware.org/systemtap/) | [4.8](https://github.com/bitcoin/bitcoin/pull/26945)| N/A | No |
|
||||
| [capnproto](../depends/packages/capnp.mk) ([multiprocess](multiprocess.md)) | [link](https://capnproto.org/) | [1.2.0](https://github.com/bitcoin/bitcoin/pull/32760)| [0.7.0](https://github.com/bitcoin-core/libmultiprocess/pull/88) | No |
|
||||
| [libmultiprocess](../depends/packages/libmultiprocess.mk) ([multiprocess](multiprocess.md)) | [link](https://github.com/bitcoin-core/libmultiprocess) | [5.0](https://github.com/bitcoin/bitcoin/pull/31945)| [v5.0-pre1](https://github.com/bitcoin/bitcoin/pull/31740)* | No |
|
||||
|
||||
\* Libmultiprocess 5.x versions should be compatible, but 6.0 and later are not due to bitcoin-core/libmultiprocess#160.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
|
||||
.TH BITCOIN-CLI "1" "October 2025" "bitcoin-cli v29.2.0" "User Commands"
|
||||
.TH BITCOIN-CLI "1" "January 2026" "bitcoin-cli v29.3.0rc1" "User Commands"
|
||||
.SH NAME
|
||||
bitcoin-cli \- manual page for bitcoin-cli v29.2.0
|
||||
bitcoin-cli \- manual page for bitcoin-cli v29.3.0rc1
|
||||
.SH SYNOPSIS
|
||||
.B bitcoin-cli
|
||||
[\fI\,options\/\fR] \fI\,<command> \/\fR[\fI\,params\/\fR]
|
||||
@@ -15,7 +15,7 @@ bitcoin-cli \- manual page for bitcoin-cli v29.2.0
|
||||
.B bitcoin-cli
|
||||
[\fI\,options\/\fR] \fI\,help <command>\/\fR
|
||||
.SH DESCRIPTION
|
||||
Bitcoin Core RPC client version v29.2.0
|
||||
Bitcoin Core RPC client version v29.3.0rc1
|
||||
.PP
|
||||
The bitcoin\-cli utility provides a command line interface to interact with a Bitcoin Core RPC server.
|
||||
.PP
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
|
||||
.TH BITCOIN-QT "1" "October 2025" "bitcoin-qt v29.2.0" "User Commands"
|
||||
.TH BITCOIN-QT "1" "January 2026" "bitcoin-qt v29.3.0rc1" "User Commands"
|
||||
.SH NAME
|
||||
bitcoin-qt \- manual page for bitcoin-qt v29.2.0
|
||||
bitcoin-qt \- manual page for bitcoin-qt v29.3.0rc1
|
||||
.SH SYNOPSIS
|
||||
.B bitcoin-qt
|
||||
[\fI\,options\/\fR] [\fI\,URI\/\fR]
|
||||
.SH DESCRIPTION
|
||||
Bitcoin Core version v29.2.0
|
||||
Bitcoin Core version v29.3.0rc1
|
||||
.PP
|
||||
The bitcoin\-qt application provides a graphical interface for interacting with Bitcoin Core.
|
||||
.PP
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
|
||||
.TH BITCOIN-TX "1" "October 2025" "bitcoin-tx v29.2.0" "User Commands"
|
||||
.TH BITCOIN-TX "1" "January 2026" "bitcoin-tx v29.3.0rc1" "User Commands"
|
||||
.SH NAME
|
||||
bitcoin-tx \- manual page for bitcoin-tx v29.2.0
|
||||
bitcoin-tx \- manual page for bitcoin-tx v29.3.0rc1
|
||||
.SH SYNOPSIS
|
||||
.B bitcoin-tx
|
||||
[\fI\,options\/\fR] \fI\,<hex-tx> \/\fR[\fI\,commands\/\fR]
|
||||
@@ -9,7 +9,7 @@ bitcoin-tx \- manual page for bitcoin-tx v29.2.0
|
||||
.B bitcoin-tx
|
||||
[\fI\,options\/\fR] \fI\,-create \/\fR[\fI\,commands\/\fR]
|
||||
.SH DESCRIPTION
|
||||
Bitcoin Core bitcoin\-tx utility version v29.2.0
|
||||
Bitcoin Core bitcoin\-tx utility version v29.3.0rc1
|
||||
.PP
|
||||
The bitcoin\-tx tool is used for creating and modifying bitcoin transactions.
|
||||
.PP
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
|
||||
.TH BITCOIN-UTIL "1" "October 2025" "bitcoin-util v29.2.0" "User Commands"
|
||||
.TH BITCOIN-UTIL "1" "January 2026" "bitcoin-util v29.3.0rc1" "User Commands"
|
||||
.SH NAME
|
||||
bitcoin-util \- manual page for bitcoin-util v29.2.0
|
||||
bitcoin-util \- manual page for bitcoin-util v29.3.0rc1
|
||||
.SH SYNOPSIS
|
||||
.B bitcoin-util
|
||||
[\fI\,options\/\fR] [\fI\,command\/\fR]
|
||||
@@ -9,7 +9,7 @@ bitcoin-util \- manual page for bitcoin-util v29.2.0
|
||||
.B bitcoin-util
|
||||
[\fI\,options\/\fR] \fI\,grind <hex-block-header>\/\fR
|
||||
.SH DESCRIPTION
|
||||
Bitcoin Core bitcoin\-util utility version v29.2.0
|
||||
Bitcoin Core bitcoin\-util utility version v29.3.0rc1
|
||||
.PP
|
||||
The bitcoin\-util tool provides bitcoin related functionality that does not rely on the ability to access a running node. Available [commands] are listed below.
|
||||
.SH OPTIONS
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
|
||||
.TH BITCOIN-WALLET "1" "October 2025" "bitcoin-wallet v29.2.0" "User Commands"
|
||||
.TH BITCOIN-WALLET "1" "January 2026" "bitcoin-wallet v29.3.0rc1" "User Commands"
|
||||
.SH NAME
|
||||
bitcoin-wallet \- manual page for bitcoin-wallet v29.2.0
|
||||
bitcoin-wallet \- manual page for bitcoin-wallet v29.3.0rc1
|
||||
.SH SYNOPSIS
|
||||
.B bitcoin-wallet
|
||||
[\fI\,options\/\fR] \fI\,<command>\/\fR
|
||||
.SH DESCRIPTION
|
||||
Bitcoin Core bitcoin\-wallet utility version v29.2.0
|
||||
Bitcoin Core bitcoin\-wallet utility version v29.3.0rc1
|
||||
.PP
|
||||
bitcoin\-wallet is an offline tool for creating and interacting with Bitcoin Core wallet files.
|
||||
.PP
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
|
||||
.TH BITCOIND "1" "October 2025" "bitcoind v29.2.0" "User Commands"
|
||||
.TH BITCOIND "1" "January 2026" "bitcoind v29.3.0rc1" "User Commands"
|
||||
.SH NAME
|
||||
bitcoind \- manual page for bitcoind v29.2.0
|
||||
bitcoind \- manual page for bitcoind v29.3.0rc1
|
||||
.SH SYNOPSIS
|
||||
.B bitcoind
|
||||
[\fI\,options\/\fR]
|
||||
.SH DESCRIPTION
|
||||
Bitcoin Core daemon version v29.2.0
|
||||
Bitcoin Core daemon version v29.3.0rc1
|
||||
.PP
|
||||
The Bitcoin Core daemon (bitcoind) is a headless program that connects to the Bitcoin network to validate and relay transactions and blocks, as well as relaying addresses.
|
||||
.PP
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
Bitcoin Core version 29.2 is now available from:
|
||||
Bitcoin Core version 29.3rc1 is now available from:
|
||||
|
||||
<https://bitcoincore.org/bin/bitcoin-core-29.2/>
|
||||
<https://bitcoincore.org/bin/bitcoin-core-29.3/test.rc1/>
|
||||
|
||||
This release includes various bug fixes and performance
|
||||
improvements, as well as updated translations.
|
||||
@@ -39,53 +39,60 @@ Notable changes
|
||||
|
||||
### P2P
|
||||
|
||||
- #32646 p2p: Add witness mutation check inside FillBlock
|
||||
- #33296 net: check for empty header before calling FillBlock
|
||||
- #33395 net: do not apply whitelist permissions to onion inbounds
|
||||
- #33050 net, validation: don't punish peers for consensus-invalid txs
|
||||
- #33723 chainparams: remove dnsseed.bitcoin.dashjr-list-of-p2p-nodes.us
|
||||
|
||||
### Mempool
|
||||
### Validation
|
||||
|
||||
- #33504 mempool: Do not enforce TRUC checks on reorg
|
||||
- #32473 Introduce per-txin sighash midstate cache for legacy/p2sh/segwitv0 scripts
|
||||
- #33105 validation: detect witness stripping without re-running Script checks
|
||||
|
||||
### RPC
|
||||
### Wallet
|
||||
|
||||
- #33446 rpc: fix getblock(header) returns target for tip
|
||||
- #33268 wallet: Identify transactions spending 0-value outputs, and add tests for anchor outputs in a wallet
|
||||
- #34156 wallet: fix unnamed legacy wallet migration failure
|
||||
- #34226 wallet: test: Relative wallet failed migration cleanup
|
||||
- #34123 wallet: migration, avoid creating spendable wallet from a watch-only legacy wallet
|
||||
- #34215 wallettool: fix unnamed createfromdump failure walletsdir deletion
|
||||
|
||||
### CI
|
||||
### Mining
|
||||
|
||||
- #32989 ci: Migrate CI to hosted Cirrus Runners
|
||||
- #32999 ci: Use APT_LLVM_V in msan task
|
||||
- #33099 ci: allow for any libc++ intrumentation & use it for TSAN
|
||||
- #33258 ci: use LLVM 21
|
||||
- #33364 ci: always use tag for LLVM checkout
|
||||
- #33475 bugfix: miner: fix `addPackageTxs` unsigned integer overflow
|
||||
|
||||
### Doc
|
||||
### Build
|
||||
|
||||
- #33484 doc: rpc: fix case typo in `finalizepsbt` help
|
||||
- #34227 guix: Fix `osslsigncode` tests
|
||||
|
||||
### Documentation
|
||||
|
||||
- #33623 doc: document capnproto and libmultiprocess deps in 29.x
|
||||
|
||||
### Test
|
||||
|
||||
- #33612 test: change log rate limit version gate
|
||||
|
||||
### Misc
|
||||
|
||||
- #33310 trace: Workaround GCC bug compiling with old systemtap
|
||||
- #33340 Fix benchmark CSV output
|
||||
- #33482 contrib: fix macOS deployment with no translations
|
||||
- #33508 ci: fix buildx gha cache authentication on forks
|
||||
- #33581 ci: Properly include $FILE_ENV in DEPENDS_HASH
|
||||
|
||||
Credits
|
||||
=======
|
||||
|
||||
Thanks to everyone who directly contributed to this release:
|
||||
|
||||
- Amisha Chhajed
|
||||
- Anthony Towns
|
||||
- Antoine Poinsot
|
||||
- Ava Chow
|
||||
- David Gumberg
|
||||
- Eugene Siegel
|
||||
- fanquake
|
||||
- Greg Sanders
|
||||
- furszy
|
||||
- Hennadii Stepanov
|
||||
- Luke Dashjr
|
||||
- MarcoFalke
|
||||
- Martin Zumsande
|
||||
- Sebastian Falbesoner
|
||||
- Sjors Provoost
|
||||
- Vasil Dimov
|
||||
- Will Clark
|
||||
- ismaelsadeeq
|
||||
- Pieter Wuille
|
||||
- SatsAndSports
|
||||
- willcl-ark
|
||||
|
||||
As well as to everyone that helped with translations on
|
||||
[Transifex](https://explore.transifex.com/bitcoin/bitcoin/).
|
||||
|
||||
@@ -146,7 +146,6 @@ public:
|
||||
// release ASAP to avoid it where possible.
|
||||
vSeeds.emplace_back("seed.bitcoin.sipa.be."); // Pieter Wuille, only supports x1, x5, x9, and xd
|
||||
vSeeds.emplace_back("dnsseed.bluematt.me."); // Matt Corallo, only supports x9
|
||||
vSeeds.emplace_back("dnsseed.bitcoin.dashjr-list-of-p2p-nodes.us."); // Luke Dashjr
|
||||
vSeeds.emplace_back("seed.bitcoin.jonasschnelli.ch."); // Jonas Schnelli, only supports x1, x5, x9, and xd
|
||||
vSeeds.emplace_back("seed.btc.petertodd.net."); // Peter Todd, only supports x1, x5, x9, and xd
|
||||
vSeeds.emplace_back("seed.bitcoin.sprovoost.nl."); // Sjors Provoost
|
||||
|
||||
@@ -553,12 +553,6 @@ private:
|
||||
bool via_compact_block, const std::string& message = "")
|
||||
EXCLUSIVE_LOCKS_REQUIRED(!m_peer_mutex);
|
||||
|
||||
/**
|
||||
* Potentially disconnect and discourage a node based on the contents of a TxValidationState object
|
||||
*/
|
||||
void MaybePunishNodeForTx(NodeId nodeid, const TxValidationState& state)
|
||||
EXCLUSIVE_LOCKS_REQUIRED(!m_peer_mutex);
|
||||
|
||||
/** Maybe disconnect a peer and discourage future connections from its address.
|
||||
*
|
||||
* @param[in] pnode The node to check.
|
||||
@@ -1805,32 +1799,6 @@ void PeerManagerImpl::MaybePunishNodeForBlock(NodeId nodeid, const BlockValidati
|
||||
}
|
||||
}
|
||||
|
||||
void PeerManagerImpl::MaybePunishNodeForTx(NodeId nodeid, const TxValidationState& state)
|
||||
{
|
||||
PeerRef peer{GetPeerRef(nodeid)};
|
||||
switch (state.GetResult()) {
|
||||
case TxValidationResult::TX_RESULT_UNSET:
|
||||
break;
|
||||
// The node is providing invalid data:
|
||||
case TxValidationResult::TX_CONSENSUS:
|
||||
if (peer) Misbehaving(*peer, "");
|
||||
return;
|
||||
// Conflicting (but not necessarily invalid) data or different policy:
|
||||
case TxValidationResult::TX_INPUTS_NOT_STANDARD:
|
||||
case TxValidationResult::TX_NOT_STANDARD:
|
||||
case TxValidationResult::TX_MISSING_INPUTS:
|
||||
case TxValidationResult::TX_PREMATURE_SPEND:
|
||||
case TxValidationResult::TX_WITNESS_MUTATED:
|
||||
case TxValidationResult::TX_WITNESS_STRIPPED:
|
||||
case TxValidationResult::TX_CONFLICT:
|
||||
case TxValidationResult::TX_MEMPOOL_POLICY:
|
||||
case TxValidationResult::TX_NO_MEMPOOL:
|
||||
case TxValidationResult::TX_RECONSIDERABLE:
|
||||
case TxValidationResult::TX_UNKNOWN:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
bool PeerManagerImpl::BlockRequestAllowed(const CBlockIndex* pindex)
|
||||
{
|
||||
AssertLockHeld(cs_main);
|
||||
@@ -2987,8 +2955,6 @@ std::optional<node::PackageToValidate> PeerManagerImpl::ProcessInvalidTx(NodeId
|
||||
if (peer) AddKnownTx(*peer, parent_txid);
|
||||
}
|
||||
|
||||
MaybePunishNodeForTx(nodeid, state);
|
||||
|
||||
return package_to_validate;
|
||||
}
|
||||
|
||||
|
||||
@@ -394,8 +394,8 @@ void BlockAssembler::addPackageTxs(int& nPackagesSelected, int& nDescendantsUpda
|
||||
|
||||
++nConsecutiveFailed;
|
||||
|
||||
if (nConsecutiveFailed > MAX_CONSECUTIVE_FAILURES && nBlockWeight >
|
||||
m_options.nBlockMaxWeight - m_options.block_reserved_weight) {
|
||||
if (nConsecutiveFailed > MAX_CONSECUTIVE_FAILURES && nBlockWeight +
|
||||
m_options.block_reserved_weight > m_options.nBlockMaxWeight) {
|
||||
// Give up if we're close to full and haven't succeeded in a while
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -344,6 +344,42 @@ bool IsWitnessStandard(const CTransaction& tx, const CCoinsViewCache& mapInputs)
|
||||
return true;
|
||||
}
|
||||
|
||||
bool SpendsNonAnchorWitnessProg(const CTransaction& tx, const CCoinsViewCache& prevouts)
|
||||
{
|
||||
if (tx.IsCoinBase()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
int version;
|
||||
std::vector<uint8_t> program;
|
||||
for (const auto& txin: tx.vin) {
|
||||
const auto& prev_spk{prevouts.AccessCoin(txin.prevout).out.scriptPubKey};
|
||||
|
||||
// Note this includes not-yet-defined witness programs.
|
||||
if (prev_spk.IsWitnessProgram(version, program) && !prev_spk.IsPayToAnchor(version, program)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// For P2SH extract the redeem script and check if it spends a non-Taproot witness program. Note
|
||||
// this is fine to call EvalScript (as done in AreInputsStandard/IsWitnessStandard) because this
|
||||
// function is only ever called after IsStandardTx, which checks the scriptsig is pushonly.
|
||||
if (prev_spk.IsPayToScriptHash()) {
|
||||
// If EvalScript fails or results in an empty stack, the transaction is invalid by consensus.
|
||||
std::vector <std::vector<uint8_t>> stack;
|
||||
if (!EvalScript(stack, txin.scriptSig, SCRIPT_VERIFY_NONE, BaseSignatureChecker{}, SigVersion::BASE)
|
||||
|| stack.empty()) {
|
||||
continue;
|
||||
}
|
||||
const CScript redeem_script{stack.back().begin(), stack.back().end()};
|
||||
if (redeem_script.IsWitnessProgram(version, program)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
int64_t GetVirtualTransactionSize(int64_t nWeight, int64_t nSigOpCost, unsigned int bytes_per_sigop)
|
||||
{
|
||||
return (std::max(nWeight, nSigOpCost * bytes_per_sigop) + WITNESS_SCALE_FACTOR - 1) / WITNESS_SCALE_FACTOR;
|
||||
|
||||
@@ -167,6 +167,11 @@ bool AreInputsStandard(const CTransaction& tx, const CCoinsViewCache& mapInputs)
|
||||
* Also enforce a maximum stack item size limit and no annexes for tapscript spends.
|
||||
*/
|
||||
bool IsWitnessStandard(const CTransaction& tx, const CCoinsViewCache& mapInputs);
|
||||
/**
|
||||
* Check whether this transaction spends any witness program but P2A, including not-yet-defined ones.
|
||||
* May return `false` early for consensus-invalid transactions.
|
||||
*/
|
||||
bool SpendsNonAnchorWitnessProg(const CTransaction& tx, const CCoinsViewCache& prevouts);
|
||||
|
||||
/** Compute the virtual transaction size (weight reinterpreted as bytes). */
|
||||
int64_t GetVirtualTransactionSize(int64_t nWeight, int64_t nSigOpCost, unsigned int bytes_per_sigop);
|
||||
|
||||
@@ -1564,11 +1564,57 @@ bool SignatureHashSchnorr(uint256& hash_out, ScriptExecutionData& execdata, cons
|
||||
return true;
|
||||
}
|
||||
|
||||
int SigHashCache::CacheIndex(int32_t hash_type) const noexcept
|
||||
{
|
||||
// Note that we do not distinguish between BASE and WITNESS_V0 to determine the cache index,
|
||||
// because no input can simultaneously use both.
|
||||
return 3 * !!(hash_type & SIGHASH_ANYONECANPAY) +
|
||||
2 * ((hash_type & 0x1f) == SIGHASH_SINGLE) +
|
||||
1 * ((hash_type & 0x1f) == SIGHASH_NONE);
|
||||
}
|
||||
|
||||
bool SigHashCache::Load(int32_t hash_type, const CScript& script_code, HashWriter& writer) const noexcept
|
||||
{
|
||||
auto& entry = m_cache_entries[CacheIndex(hash_type)];
|
||||
if (entry.has_value()) {
|
||||
if (script_code == entry->first) {
|
||||
writer = HashWriter(entry->second);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void SigHashCache::Store(int32_t hash_type, const CScript& script_code, const HashWriter& writer) noexcept
|
||||
{
|
||||
auto& entry = m_cache_entries[CacheIndex(hash_type)];
|
||||
entry.emplace(script_code, writer);
|
||||
}
|
||||
|
||||
template <class T>
|
||||
uint256 SignatureHash(const CScript& scriptCode, const T& txTo, unsigned int nIn, int32_t nHashType, const CAmount& amount, SigVersion sigversion, const PrecomputedTransactionData* cache)
|
||||
uint256 SignatureHash(const CScript& scriptCode, const T& txTo, unsigned int nIn, int32_t nHashType, const CAmount& amount, SigVersion sigversion, const PrecomputedTransactionData* cache, SigHashCache* sighash_cache)
|
||||
{
|
||||
assert(nIn < txTo.vin.size());
|
||||
|
||||
if (sigversion != SigVersion::WITNESS_V0) {
|
||||
// Check for invalid use of SIGHASH_SINGLE
|
||||
if ((nHashType & 0x1f) == SIGHASH_SINGLE) {
|
||||
if (nIn >= txTo.vout.size()) {
|
||||
// nOut out of range
|
||||
return uint256::ONE;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
HashWriter ss{};
|
||||
|
||||
// Try to compute using cached SHA256 midstate.
|
||||
if (sighash_cache && sighash_cache->Load(nHashType, scriptCode, ss)) {
|
||||
// Add sighash type and hash.
|
||||
ss << nHashType;
|
||||
return ss.GetHash();
|
||||
}
|
||||
|
||||
if (sigversion == SigVersion::WITNESS_V0) {
|
||||
uint256 hashPrevouts;
|
||||
uint256 hashSequence;
|
||||
@@ -1583,16 +1629,14 @@ uint256 SignatureHash(const CScript& scriptCode, const T& txTo, unsigned int nIn
|
||||
hashSequence = cacheready ? cache->hashSequence : SHA256Uint256(GetSequencesSHA256(txTo));
|
||||
}
|
||||
|
||||
|
||||
if ((nHashType & 0x1f) != SIGHASH_SINGLE && (nHashType & 0x1f) != SIGHASH_NONE) {
|
||||
hashOutputs = cacheready ? cache->hashOutputs : SHA256Uint256(GetOutputsSHA256(txTo));
|
||||
} else if ((nHashType & 0x1f) == SIGHASH_SINGLE && nIn < txTo.vout.size()) {
|
||||
HashWriter ss{};
|
||||
ss << txTo.vout[nIn];
|
||||
hashOutputs = ss.GetHash();
|
||||
HashWriter inner_ss{};
|
||||
inner_ss << txTo.vout[nIn];
|
||||
hashOutputs = inner_ss.GetHash();
|
||||
}
|
||||
|
||||
HashWriter ss{};
|
||||
// Version
|
||||
ss << txTo.version;
|
||||
// Input prevouts/nSequence (none/all, depending on flags)
|
||||
@@ -1609,26 +1653,21 @@ uint256 SignatureHash(const CScript& scriptCode, const T& txTo, unsigned int nIn
|
||||
ss << hashOutputs;
|
||||
// Locktime
|
||||
ss << txTo.nLockTime;
|
||||
// Sighash type
|
||||
ss << nHashType;
|
||||
} else {
|
||||
// Wrapper to serialize only the necessary parts of the transaction being signed
|
||||
CTransactionSignatureSerializer<T> txTmp(txTo, scriptCode, nIn, nHashType);
|
||||
|
||||
return ss.GetHash();
|
||||
// Serialize
|
||||
ss << txTmp;
|
||||
}
|
||||
|
||||
// Check for invalid use of SIGHASH_SINGLE
|
||||
if ((nHashType & 0x1f) == SIGHASH_SINGLE) {
|
||||
if (nIn >= txTo.vout.size()) {
|
||||
// nOut out of range
|
||||
return uint256::ONE;
|
||||
}
|
||||
// If a cache object was provided, store the midstate there.
|
||||
if (sighash_cache != nullptr) {
|
||||
sighash_cache->Store(nHashType, scriptCode, ss);
|
||||
}
|
||||
|
||||
// Wrapper to serialize only the necessary parts of the transaction being signed
|
||||
CTransactionSignatureSerializer<T> txTmp(txTo, scriptCode, nIn, nHashType);
|
||||
|
||||
// Serialize and hash
|
||||
HashWriter ss{};
|
||||
ss << txTmp << nHashType;
|
||||
// Add sighash type and hash.
|
||||
ss << nHashType;
|
||||
return ss.GetHash();
|
||||
}
|
||||
|
||||
@@ -1661,7 +1700,7 @@ bool GenericTransactionSignatureChecker<T>::CheckECDSASignature(const std::vecto
|
||||
// Witness sighashes need the amount.
|
||||
if (sigversion == SigVersion::WITNESS_V0 && amount < 0) return HandleMissingData(m_mdb);
|
||||
|
||||
uint256 sighash = SignatureHash(scriptCode, *txTo, nIn, nHashType, amount, sigversion, this->txdata);
|
||||
uint256 sighash = SignatureHash(scriptCode, *txTo, nIn, nHashType, amount, sigversion, this->txdata, &m_sighash_cache);
|
||||
|
||||
if (!VerifyECDSASignature(vchSig, pubkey, sighash))
|
||||
return false;
|
||||
|
||||
@@ -239,8 +239,27 @@ extern const HashWriter HASHER_TAPSIGHASH; //!< Hasher with tag "TapSighash" pre
|
||||
extern const HashWriter HASHER_TAPLEAF; //!< Hasher with tag "TapLeaf" pre-fed to it.
|
||||
extern const HashWriter HASHER_TAPBRANCH; //!< Hasher with tag "TapBranch" pre-fed to it.
|
||||
|
||||
/** Data structure to cache SHA256 midstates for the ECDSA sighash calculations
|
||||
* (bare, P2SH, P2WPKH, P2WSH). */
|
||||
class SigHashCache
|
||||
{
|
||||
/** For each sighash mode (ALL, SINGLE, NONE, ALL|ANYONE, SINGLE|ANYONE, NONE|ANYONE),
|
||||
* optionally store a scriptCode which the hash is for, plus a midstate for the SHA256
|
||||
* computation just before adding the hash_type itself. */
|
||||
std::optional<std::pair<CScript, HashWriter>> m_cache_entries[6];
|
||||
|
||||
/** Given a hash_type, find which of the 6 cache entries is to be used. */
|
||||
int CacheIndex(int32_t hash_type) const noexcept;
|
||||
|
||||
public:
|
||||
/** Load into writer the SHA256 midstate if found in this cache. */
|
||||
[[nodiscard]] bool Load(int32_t hash_type, const CScript& script_code, HashWriter& writer) const noexcept;
|
||||
/** Store into this cache object the provided SHA256 midstate. */
|
||||
void Store(int32_t hash_type, const CScript& script_code, const HashWriter& writer) noexcept;
|
||||
};
|
||||
|
||||
template <class T>
|
||||
uint256 SignatureHash(const CScript& scriptCode, const T& txTo, unsigned int nIn, int32_t nHashType, const CAmount& amount, SigVersion sigversion, const PrecomputedTransactionData* cache = nullptr);
|
||||
uint256 SignatureHash(const CScript& scriptCode, const T& txTo, unsigned int nIn, int32_t nHashType, const CAmount& amount, SigVersion sigversion, const PrecomputedTransactionData* cache = nullptr, SigHashCache* sighash_cache = nullptr);
|
||||
|
||||
class BaseSignatureChecker
|
||||
{
|
||||
@@ -289,6 +308,7 @@ private:
|
||||
unsigned int nIn;
|
||||
const CAmount amount;
|
||||
const PrecomputedTransactionData* txdata;
|
||||
mutable SigHashCache m_sighash_cache;
|
||||
|
||||
protected:
|
||||
virtual bool VerifyECDSASignature(const std::vector<unsigned char>& vchSig, const CPubKey& vchPubKey, const uint256& sighash) const;
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
#include <test/fuzz/FuzzedDataProvider.h>
|
||||
#include <test/fuzz/fuzz.h>
|
||||
#include <test/fuzz/util.h>
|
||||
#include <util/check.h>
|
||||
|
||||
#include <cstdint>
|
||||
#include <optional>
|
||||
@@ -45,3 +46,27 @@ FUZZ_TARGET(script_interpreter)
|
||||
(void)CastToBool(ConsumeRandomLengthByteVector(fuzzed_data_provider));
|
||||
}
|
||||
}
|
||||
|
||||
/** Differential fuzzing for SignatureHash with and without cache. */
|
||||
FUZZ_TARGET(sighash_cache)
|
||||
{
|
||||
FuzzedDataProvider provider(buffer.data(), buffer.size());
|
||||
|
||||
// Get inputs to the sighash function that won't change across types.
|
||||
const auto scriptcode{ConsumeScript(provider)};
|
||||
const auto tx{ConsumeTransaction(provider, std::nullopt)};
|
||||
if (tx.vin.empty()) return;
|
||||
const auto in_index{provider.ConsumeIntegralInRange<uint32_t>(0, tx.vin.size() - 1)};
|
||||
const auto amount{ConsumeMoney(provider)};
|
||||
const auto sigversion{(SigVersion)provider.ConsumeIntegralInRange(0, 1)};
|
||||
|
||||
// Check the sighash function will give the same result for 100 fuzzer-generated hash types whether or not a cache is
|
||||
// provided. The cache is conserved across types to exercise cache hits.
|
||||
SigHashCache sighash_cache{};
|
||||
for (int i{0}; i < 100; ++i) {
|
||||
const auto hash_type{((i & 2) == 0) ? provider.ConsumeIntegral<int8_t>() : provider.ConsumeIntegral<int32_t>()};
|
||||
const auto nocache_res{SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion)};
|
||||
const auto cache_res{SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &sighash_cache)};
|
||||
Assert(nocache_res == cache_res);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -207,4 +207,94 @@ BOOST_AUTO_TEST_CASE(sighash_from_data)
|
||||
BOOST_CHECK_MESSAGE(sh.GetHex() == sigHashHex, strTest);
|
||||
}
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(sighash_caching)
|
||||
{
|
||||
// Get a script, transaction and parameters as inputs to the sighash function.
|
||||
CScript scriptcode;
|
||||
RandomScript(scriptcode);
|
||||
CScript diff_scriptcode{scriptcode};
|
||||
diff_scriptcode << OP_1;
|
||||
CMutableTransaction tx;
|
||||
RandomTransaction(tx, /*fSingle=*/false);
|
||||
const auto in_index{static_cast<uint32_t>(m_rng.randrange(tx.vin.size()))};
|
||||
const auto amount{m_rng.rand<CAmount>()};
|
||||
|
||||
// Exercise the sighash function under both legacy and segwit v0.
|
||||
for (const auto sigversion: {SigVersion::BASE, SigVersion::WITNESS_V0}) {
|
||||
// For each, run it against all the 6 standard hash types and a few additional random ones.
|
||||
std::vector<int32_t> hash_types{{SIGHASH_ALL, SIGHASH_SINGLE, SIGHASH_NONE, SIGHASH_ALL | SIGHASH_ANYONECANPAY,
|
||||
SIGHASH_SINGLE | SIGHASH_ANYONECANPAY, SIGHASH_NONE | SIGHASH_ANYONECANPAY,
|
||||
SIGHASH_ANYONECANPAY, 0, std::numeric_limits<int32_t>::max()}};
|
||||
for (int i{0}; i < 10; ++i) {
|
||||
hash_types.push_back(i % 2 == 0 ? m_rng.rand<int8_t>() : m_rng.rand<int32_t>());
|
||||
}
|
||||
|
||||
// Reuse the same cache across script types. This must not cause any issue as the cached value for one hash type must never
|
||||
// be confused for another (instantiating the cache within the loop instead would prevent testing this).
|
||||
SigHashCache cache;
|
||||
for (const auto hash_type: hash_types) {
|
||||
const bool expect_one{sigversion == SigVersion::BASE && ((hash_type & 0x1f) == SIGHASH_SINGLE) && in_index >= tx.vout.size()};
|
||||
|
||||
// The result of computing the sighash should be the same with or without cache.
|
||||
const auto sighash_with_cache{SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &cache)};
|
||||
const auto sighash_no_cache{SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, nullptr)};
|
||||
BOOST_CHECK_EQUAL(sighash_with_cache, sighash_no_cache);
|
||||
|
||||
// Calling the cached version again should return the same value again.
|
||||
BOOST_CHECK_EQUAL(sighash_with_cache, SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &cache));
|
||||
|
||||
// While here we might as well also check that the result for legacy is the same as for the old SignatureHash() function.
|
||||
if (sigversion == SigVersion::BASE) {
|
||||
BOOST_CHECK_EQUAL(sighash_with_cache, SignatureHashOld(scriptcode, CTransaction(tx), in_index, hash_type));
|
||||
}
|
||||
|
||||
// Calling with a different scriptcode (for instance in case a CODESEP is encountered) will not return the cache value but
|
||||
// overwrite it. The sighash will always be different except in case of legacy SIGHASH_SINGLE bug.
|
||||
const auto sighash_with_cache2{SignatureHash(diff_scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &cache)};
|
||||
const auto sighash_no_cache2{SignatureHash(diff_scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, nullptr)};
|
||||
BOOST_CHECK_EQUAL(sighash_with_cache2, sighash_no_cache2);
|
||||
if (!expect_one) {
|
||||
BOOST_CHECK_NE(sighash_with_cache, sighash_with_cache2);
|
||||
} else {
|
||||
BOOST_CHECK_EQUAL(sighash_with_cache, sighash_with_cache2);
|
||||
BOOST_CHECK_EQUAL(sighash_with_cache, uint256::ONE);
|
||||
}
|
||||
|
||||
// Calling the cached version again should return the same value again.
|
||||
BOOST_CHECK_EQUAL(sighash_with_cache2, SignatureHash(diff_scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &cache));
|
||||
|
||||
// And if we store a different value for this scriptcode and hash type it will return that instead.
|
||||
{
|
||||
HashWriter h{};
|
||||
h << 42;
|
||||
cache.Store(hash_type, scriptcode, h);
|
||||
const auto stored_hash{h.GetHash()};
|
||||
BOOST_CHECK(cache.Load(hash_type, scriptcode, h));
|
||||
const auto loaded_hash{h.GetHash()};
|
||||
BOOST_CHECK_EQUAL(stored_hash, loaded_hash);
|
||||
}
|
||||
|
||||
// And using this mutated cache with the sighash function will return the new value (except in the legacy SIGHASH_SINGLE bug
|
||||
// case in which it'll return 1).
|
||||
if (!expect_one) {
|
||||
BOOST_CHECK_NE(SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &cache), sighash_with_cache);
|
||||
HashWriter h{};
|
||||
BOOST_CHECK(cache.Load(hash_type, scriptcode, h));
|
||||
h << hash_type;
|
||||
const auto new_hash{h.GetHash()};
|
||||
BOOST_CHECK_EQUAL(SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &cache), new_hash);
|
||||
} else {
|
||||
BOOST_CHECK_EQUAL(SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &cache), uint256::ONE);
|
||||
}
|
||||
|
||||
// Wipe the cache and restore the correct cached value for this scriptcode and hash_type before starting the next iteration.
|
||||
HashWriter dummy{};
|
||||
cache.Store(hash_type, diff_scriptcode, dummy);
|
||||
(void)SignatureHash(scriptcode, tx, in_index, hash_type, amount, sigversion, nullptr, &cache);
|
||||
BOOST_CHECK(cache.Load(hash_type, scriptcode, dummy) || expect_one);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_SUITE_END()
|
||||
|
||||
@@ -1144,4 +1144,159 @@ BOOST_AUTO_TEST_CASE(max_standard_legacy_sigops)
|
||||
BOOST_CHECK(!::AreInputsStandard(CTransaction(tx_max_sigops), coins));
|
||||
}
|
||||
|
||||
/** Sanity check the return value of SpendsNonAnchorWitnessProg for various output types. */
|
||||
BOOST_AUTO_TEST_CASE(spends_witness_prog)
|
||||
{
|
||||
CCoinsView coins_dummy;
|
||||
CCoinsViewCache coins(&coins_dummy);
|
||||
CKey key;
|
||||
key.MakeNewKey(true);
|
||||
const CPubKey pubkey{key.GetPubKey()};
|
||||
CMutableTransaction tx_create{}, tx_spend{};
|
||||
tx_create.vout.emplace_back(0, CScript{});
|
||||
tx_spend.vin.emplace_back(Txid{}, 0);
|
||||
std::vector<std::vector<uint8_t>> sol_dummy;
|
||||
|
||||
// CNoDestination, PubKeyDestination, PKHash, ScriptHash, WitnessV0ScriptHash, WitnessV0KeyHash,
|
||||
// WitnessV1Taproot, PayToAnchor, WitnessUnknown.
|
||||
static_assert(std::variant_size_v<CTxDestination> == 9);
|
||||
|
||||
// Go through all defined output types and sanity check SpendsNonAnchorWitnessProg.
|
||||
|
||||
// P2PK
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(PubKeyDestination{pubkey});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::PUBKEY);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2PKH
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(PKHash{pubkey});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::PUBKEYHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2SH
|
||||
auto redeem_script{CScript{} << OP_1 << OP_CHECKSIG};
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(ScriptHash{redeem_script});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::SCRIPTHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
tx_spend.vin[0].scriptSig = CScript{} << OP_0 << ToByteVector(redeem_script);
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
tx_spend.vin[0].scriptSig.clear();
|
||||
|
||||
// native P2WSH
|
||||
const auto witness_script{CScript{} << OP_12 << OP_HASH160 << OP_DUP << OP_EQUAL};
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(WitnessV0ScriptHash{witness_script});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::WITNESS_V0_SCRIPTHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2SH-wrapped P2WSH
|
||||
redeem_script = tx_create.vout[0].scriptPubKey;
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(ScriptHash(redeem_script));
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::SCRIPTHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
tx_spend.vin[0].scriptSig = CScript{} << ToByteVector(redeem_script);
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
tx_spend.vin[0].scriptSig.clear();
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// native P2WPKH
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(WitnessV0KeyHash{pubkey});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::WITNESS_V0_KEYHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2SH-wrapped P2WPKH
|
||||
redeem_script = tx_create.vout[0].scriptPubKey;
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(ScriptHash(redeem_script));
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::SCRIPTHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
tx_spend.vin[0].scriptSig = CScript{} << ToByteVector(redeem_script);
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
tx_spend.vin[0].scriptSig.clear();
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2TR
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(WitnessV1Taproot{XOnlyPubKey{pubkey}});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::WITNESS_V1_TAPROOT);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2SH-wrapped P2TR (undefined, non-standard)
|
||||
redeem_script = tx_create.vout[0].scriptPubKey;
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(ScriptHash(redeem_script));
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::SCRIPTHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
tx_spend.vin[0].scriptSig = CScript{} << ToByteVector(redeem_script);
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
tx_spend.vin[0].scriptSig.clear();
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2A
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(PayToAnchor{});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::ANCHOR);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2SH-wrapped P2A (undefined, non-standard)
|
||||
redeem_script = tx_create.vout[0].scriptPubKey;
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(ScriptHash(redeem_script));
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::SCRIPTHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
tx_spend.vin[0].scriptSig = CScript{} << ToByteVector(redeem_script);
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
tx_spend.vin[0].scriptSig.clear();
|
||||
|
||||
// Undefined version 1 witness program
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(WitnessUnknown{1, {0x42, 0x42}});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::WITNESS_UNKNOWN);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// P2SH-wrapped undefined version 1 witness program
|
||||
redeem_script = tx_create.vout[0].scriptPubKey;
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(ScriptHash(redeem_script));
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::SCRIPTHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
tx_spend.vin[0].scriptSig = CScript{} << ToByteVector(redeem_script);
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
tx_spend.vin[0].scriptSig.clear();
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// Various undefined version >1 32-byte witness programs.
|
||||
const auto program{ToByteVector(XOnlyPubKey{pubkey})};
|
||||
for (int i{2}; i <= 16; ++i) {
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(WitnessUnknown{i, program});
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::WITNESS_UNKNOWN);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
|
||||
// It's also detected within P2SH.
|
||||
redeem_script = tx_create.vout[0].scriptPubKey;
|
||||
tx_create.vout[0].scriptPubKey = GetScriptForDestination(ScriptHash(redeem_script));
|
||||
BOOST_CHECK_EQUAL(Solver(tx_create.vout[0].scriptPubKey, sol_dummy), TxoutType::SCRIPTHASH);
|
||||
tx_spend.vin[0].prevout.hash = tx_create.GetHash();
|
||||
tx_spend.vin[0].scriptSig = CScript{} << ToByteVector(redeem_script);
|
||||
AddCoins(coins, CTransaction{tx_create}, 0, false);
|
||||
BOOST_CHECK(::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
tx_spend.vin[0].scriptSig.clear();
|
||||
BOOST_CHECK(!::SpendsNonAnchorWitnessProg(CTransaction{tx_spend}, coins));
|
||||
}
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_SUITE_END()
|
||||
|
||||
@@ -1238,13 +1238,8 @@ bool MemPoolAccept::PolicyScriptChecks(const ATMPArgs& args, Workspace& ws)
|
||||
// Check input scripts and signatures.
|
||||
// This is done last to help prevent CPU exhaustion denial-of-service attacks.
|
||||
if (!CheckInputScripts(tx, state, m_view, scriptVerifyFlags, true, false, ws.m_precomputed_txdata, GetValidationCache())) {
|
||||
// SCRIPT_VERIFY_CLEANSTACK requires SCRIPT_VERIFY_WITNESS, so we
|
||||
// need to turn both off, and compare against just turning off CLEANSTACK
|
||||
// to see if the failure is specifically due to witness validation.
|
||||
TxValidationState state_dummy; // Want reported failures to be from first CheckInputScripts
|
||||
if (!tx.HasWitness() && CheckInputScripts(tx, state_dummy, m_view, scriptVerifyFlags & ~(SCRIPT_VERIFY_WITNESS | SCRIPT_VERIFY_CLEANSTACK), true, false, ws.m_precomputed_txdata, GetValidationCache()) &&
|
||||
!CheckInputScripts(tx, state_dummy, m_view, scriptVerifyFlags & ~SCRIPT_VERIFY_CLEANSTACK, true, false, ws.m_precomputed_txdata, GetValidationCache())) {
|
||||
// Only the witness is missing, so the transaction itself may be fine.
|
||||
// Detect a failure due to a missing witness so that p2p code can handle rejection caching appropriately.
|
||||
if (!tx.HasWitness() && SpendsNonAnchorWitnessProg(tx, m_view)) {
|
||||
state.Invalid(TxValidationResult::TX_WITNESS_STRIPPED,
|
||||
state.GetRejectReason(), state.GetDebugMessage());
|
||||
}
|
||||
@@ -2214,34 +2209,17 @@ bool CheckInputScripts(const CTransaction& tx, TxValidationState& state,
|
||||
if (pvChecks) {
|
||||
pvChecks->emplace_back(std::move(check));
|
||||
} else if (auto result = check(); result.has_value()) {
|
||||
// Tx failures never trigger disconnections/bans.
|
||||
// This is so that network splits aren't triggered
|
||||
// either due to non-consensus relay policies (such as
|
||||
// non-standard DER encodings or non-null dummy
|
||||
// arguments) or due to new consensus rules introduced in
|
||||
// soft forks.
|
||||
if (flags & STANDARD_NOT_MANDATORY_VERIFY_FLAGS) {
|
||||
// Check whether the failure was caused by a
|
||||
// non-mandatory script verification check, such as
|
||||
// non-standard DER encodings or non-null dummy
|
||||
// arguments; if so, ensure we return NOT_STANDARD
|
||||
// instead of CONSENSUS to avoid downstream users
|
||||
// splitting the network between upgraded and
|
||||
// non-upgraded nodes by banning CONSENSUS-failing
|
||||
// data providers.
|
||||
CScriptCheck check2(txdata.m_spent_outputs[i], tx, validation_cache.m_signature_cache, i,
|
||||
flags & ~STANDARD_NOT_MANDATORY_VERIFY_FLAGS, cacheSigStore, &txdata);
|
||||
auto mandatory_result = check2();
|
||||
if (!mandatory_result.has_value()) {
|
||||
return state.Invalid(TxValidationResult::TX_NOT_STANDARD, strprintf("non-mandatory-script-verify-flag (%s)", ScriptErrorString(result->first)), result->second);
|
||||
} else {
|
||||
// If the second check failed, it failed due to a mandatory script verification
|
||||
// flag, but the first check might have failed on a non-mandatory script
|
||||
// verification flag.
|
||||
//
|
||||
// Avoid reporting a mandatory script check failure with a non-mandatory error
|
||||
// string by reporting the error from the second check.
|
||||
result = mandatory_result;
|
||||
}
|
||||
return state.Invalid(TxValidationResult::TX_NOT_STANDARD, strprintf("mempool-script-verify-flag-failed (%s)", ScriptErrorString(result->first)), result->second);
|
||||
} else {
|
||||
return state.Invalid(TxValidationResult::TX_CONSENSUS, strprintf("mandatory-script-verify-flag-failed (%s)", ScriptErrorString(result->first)), result->second);
|
||||
}
|
||||
|
||||
// MANDATORY flag failures correspond to
|
||||
// TxValidationResult::TX_CONSENSUS.
|
||||
return state.Invalid(TxValidationResult::TX_CONSENSUS, strprintf("mandatory-script-verify-flag-failed (%s)", ScriptErrorString(result->first)), result->second);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -132,6 +132,21 @@ public:
|
||||
/** Return path to main database filename */
|
||||
std::string Filename() override { return fs::PathToString(env->Directory() / m_filename); }
|
||||
|
||||
std::vector<fs::path> Files() override
|
||||
{
|
||||
std::vector<fs::path> files;
|
||||
files.emplace_back(env->Directory() / m_filename);
|
||||
if (env->m_databases.size() == 1) {
|
||||
files.emplace_back(env->Directory() / "db.log");
|
||||
files.emplace_back(env->Directory() / ".walletlock");
|
||||
files.emplace_back(env->Directory() / "database" / "log.0000000001");
|
||||
files.emplace_back(env->Directory() / "database");
|
||||
// Note that this list is not exhaustive as BDB may create more log files, and possibly other ones too
|
||||
// However it should be good enough for the only calls to Files()
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
std::string Format() override { return "bdb"; }
|
||||
/**
|
||||
* Pointer to shared database environment.
|
||||
|
||||
@@ -170,6 +170,9 @@ public:
|
||||
/** Return path to main database file for logs and error messages. */
|
||||
virtual std::string Filename() = 0;
|
||||
|
||||
/** Return paths to all database created files */
|
||||
virtual std::vector<fs::path> Files() = 0;
|
||||
|
||||
virtual std::string Format() = 0;
|
||||
|
||||
std::atomic<unsigned int> nUpdateCounter;
|
||||
|
||||
@@ -288,11 +288,17 @@ bool CreateFromDump(const ArgsManager& args, const std::string& name, const fs::
|
||||
|
||||
dump_file.close();
|
||||
}
|
||||
// On failure, gather the paths to remove
|
||||
std::vector<fs::path> paths_to_remove = wallet->GetDatabase().Files();
|
||||
if (!name.empty()) paths_to_remove.push_back(wallet_path);
|
||||
|
||||
wallet.reset(); // The pointer deleter will close the wallet for us.
|
||||
|
||||
// Remove the wallet dir if we have a failure
|
||||
if (!ret) {
|
||||
fs::remove_all(wallet_path);
|
||||
for (const auto& p : paths_to_remove) {
|
||||
fs::remove(p);
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
|
||||
@@ -65,6 +65,7 @@ public:
|
||||
|
||||
/** Return path to main database file for logs and error messages. */
|
||||
std::string Filename() override { return fs::PathToString(m_filepath); }
|
||||
std::vector<fs::path> Files() override { return {m_filepath}; }
|
||||
|
||||
std::string Format() override { return "bdb_ro"; }
|
||||
|
||||
|
||||
@@ -1486,7 +1486,6 @@ RPCHelpMan sendall()
|
||||
CoinFilterParams coins_params;
|
||||
coins_params.min_amount = 0;
|
||||
for (const COutput& output : AvailableCoins(*pwallet, &coin_control, fee_rate, coins_params).All()) {
|
||||
CHECK_NONFATAL(output.input_bytes > 0);
|
||||
if (send_max && fee_rate.GetFee(output.input_bytes) > output.txout.nValue) {
|
||||
continue;
|
||||
}
|
||||
@@ -1505,6 +1504,9 @@ RPCHelpMan sendall()
|
||||
|
||||
// estimate final size of tx
|
||||
const TxSize tx_size{CalculateMaximumSignedTxSize(CTransaction(rawTx), pwallet.get())};
|
||||
if (tx_size.vsize == -1) {
|
||||
throw JSONRPCError(RPC_WALLET_ERROR, "Unable to determine the size of the transaction, the wallet contains unsolvable descriptors");
|
||||
}
|
||||
const CAmount fee_from_size{fee_rate.GetFee(tx_size.vsize)};
|
||||
const std::optional<CAmount> total_bump_fees{pwallet->chain().calculateCombinedBumpFee(outpoints_spent, fee_rate)};
|
||||
CAmount effective_value = total_input_value - fee_from_size - total_bump_fees.value_or(0);
|
||||
|
||||
@@ -63,6 +63,7 @@ public:
|
||||
void IncrementUpdateCounter() override { ++nUpdateCounter; }
|
||||
void ReloadDbEnv() override {}
|
||||
std::string Filename() override { return "dummy"; }
|
||||
std::vector<fs::path> Files() override { return {}; }
|
||||
std::string Format() override { return "dummy"; }
|
||||
std::unique_ptr<DatabaseBatch> MakeBatch(bool flush_on_close = true) override { return std::make_unique<DummyBatch>(); }
|
||||
};
|
||||
|
||||
@@ -112,12 +112,12 @@ Mutex SQLiteDatabase::g_sqlite_mutex;
|
||||
int SQLiteDatabase::g_sqlite_count = 0;
|
||||
|
||||
SQLiteDatabase::SQLiteDatabase(const fs::path& dir_path, const fs::path& file_path, const DatabaseOptions& options, bool mock)
|
||||
: WalletDatabase(), m_mock(mock), m_dir_path(fs::PathToString(dir_path)), m_file_path(fs::PathToString(file_path)), m_write_semaphore(1), m_use_unsafe_sync(options.use_unsafe_sync)
|
||||
: WalletDatabase(), m_mock(mock), m_dir_path(dir_path), m_file_path(fs::PathToString(file_path)), m_write_semaphore(1), m_use_unsafe_sync(options.use_unsafe_sync)
|
||||
{
|
||||
{
|
||||
LOCK(g_sqlite_mutex);
|
||||
LogPrintf("Using SQLite Version %s\n", SQLiteDatabaseVersion());
|
||||
LogPrintf("Using wallet %s\n", m_dir_path);
|
||||
LogPrintf("Using wallet %s\n", fs::PathToString(m_dir_path));
|
||||
|
||||
if (++g_sqlite_count == 1) {
|
||||
// Setup logging
|
||||
@@ -253,7 +253,7 @@ void SQLiteDatabase::Open()
|
||||
|
||||
if (m_db == nullptr) {
|
||||
if (!m_mock) {
|
||||
TryCreateDirectories(fs::PathFromString(m_dir_path));
|
||||
TryCreateDirectories(m_dir_path);
|
||||
}
|
||||
int ret = sqlite3_open_v2(m_file_path.c_str(), &m_db, flags, nullptr);
|
||||
if (ret != SQLITE_OK) {
|
||||
|
||||
@@ -105,7 +105,7 @@ class SQLiteDatabase : public WalletDatabase
|
||||
private:
|
||||
const bool m_mock{false};
|
||||
|
||||
const std::string m_dir_path;
|
||||
const fs::path m_dir_path;
|
||||
|
||||
const std::string m_file_path;
|
||||
|
||||
@@ -166,6 +166,14 @@ public:
|
||||
void IncrementUpdateCounter() override { ++nUpdateCounter; }
|
||||
|
||||
std::string Filename() override { return m_file_path; }
|
||||
/** Return paths to all database created files */
|
||||
std::vector<fs::path> Files() override
|
||||
{
|
||||
std::vector<fs::path> files;
|
||||
files.emplace_back(m_dir_path / fs::PathFromString(m_file_path));
|
||||
files.emplace_back(m_dir_path / fs::PathFromString(m_file_path + "-journal"));
|
||||
return files;
|
||||
}
|
||||
std::string Format() override { return "sqlite"; }
|
||||
|
||||
/** Make a SQLiteBatch connected to this database */
|
||||
|
||||
@@ -123,6 +123,7 @@ public:
|
||||
void ReloadDbEnv() override {}
|
||||
|
||||
std::string Filename() override { return "mockable"; }
|
||||
std::vector<fs::path> Files() override { return {}; }
|
||||
std::string Format() override { return "mock"; }
|
||||
std::unique_ptr<DatabaseBatch> MakeBatch(bool flush_on_close = true) override { return std::make_unique<MockableBatch>(m_records, m_pass); }
|
||||
};
|
||||
|
||||
@@ -501,6 +501,8 @@ std::shared_ptr<CWallet> RestoreWallet(WalletContext& context, const fs::path& b
|
||||
const fs::path wallet_path = fsbridge::AbsPathJoin(GetWalletDir(), fs::u8path(wallet_name));
|
||||
auto wallet_file = wallet_path / "wallet.dat";
|
||||
std::shared_ptr<CWallet> wallet;
|
||||
bool wallet_file_copied = false;
|
||||
bool created_parent_dir = false;
|
||||
|
||||
try {
|
||||
if (!fs::exists(backup_file)) {
|
||||
@@ -509,13 +511,34 @@ std::shared_ptr<CWallet> RestoreWallet(WalletContext& context, const fs::path& b
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (fs::exists(wallet_path) || !TryCreateDirectories(wallet_path)) {
|
||||
error = Untranslated(strprintf("Failed to create database path '%s'. Database already exists.", fs::PathToString(wallet_path)));
|
||||
status = DatabaseStatus::FAILED_ALREADY_EXISTS;
|
||||
return nullptr;
|
||||
// Wallet directories are allowed to exist, but must not contain a .dat file.
|
||||
// Any existing wallet database is treated as a hard failure to prevent overwriting.
|
||||
if (fs::exists(wallet_path)) {
|
||||
// If this is a file, it is the db and we don't want to overwrite it.
|
||||
if (!fs::is_directory(wallet_path)) {
|
||||
error = Untranslated(strprintf("Failed to restore wallet. Database file exists '%s'.", fs::PathToString(wallet_path)));
|
||||
status = DatabaseStatus::FAILED_ALREADY_EXISTS;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Check we are not going to overwrite an existing db file
|
||||
if (fs::exists(wallet_file)) {
|
||||
error = Untranslated(strprintf("Failed to restore wallet. Database file exists in '%s'.", fs::PathToString(wallet_file)));
|
||||
status = DatabaseStatus::FAILED_ALREADY_EXISTS;
|
||||
return nullptr;
|
||||
}
|
||||
} else {
|
||||
// The directory doesn't exist, create it
|
||||
if (!TryCreateDirectories(wallet_path)) {
|
||||
error = Untranslated(strprintf("Failed to restore database path '%s'.", fs::PathToString(wallet_path)));
|
||||
status = DatabaseStatus::FAILED_ALREADY_EXISTS;
|
||||
return nullptr;
|
||||
}
|
||||
created_parent_dir = true;
|
||||
}
|
||||
|
||||
fs::copy_file(backup_file, wallet_file, fs::copy_options::none);
|
||||
wallet_file_copied = true;
|
||||
|
||||
if (load_after_restore) {
|
||||
wallet = LoadWallet(context, wallet_name, load_on_start, options, status, error, warnings);
|
||||
@@ -528,7 +551,13 @@ std::shared_ptr<CWallet> RestoreWallet(WalletContext& context, const fs::path& b
|
||||
|
||||
// Remove created wallet path only when loading fails
|
||||
if (load_after_restore && !wallet) {
|
||||
fs::remove_all(wallet_path);
|
||||
if (wallet_file_copied) fs::remove(wallet_file);
|
||||
// Clean up the parent directory if we created it during restoration.
|
||||
// As we have created it, it must be empty after deleting the wallet file.
|
||||
if (created_parent_dir) {
|
||||
Assume(fs::is_empty(wallet_path));
|
||||
fs::remove(wallet_path);
|
||||
}
|
||||
}
|
||||
|
||||
return wallet;
|
||||
@@ -1673,7 +1702,13 @@ isminetype CWallet::IsMine(const COutPoint& outpoint) const
|
||||
|
||||
bool CWallet::IsFromMe(const CTransaction& tx) const
|
||||
{
|
||||
return (GetDebit(tx, ISMINE_ALL) > 0);
|
||||
LOCK(cs_wallet);
|
||||
for (const CTxIn& txin : tx.vin) {
|
||||
if (IsMine(txin.prevout)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
CAmount CWallet::GetDebit(const CTransaction& tx, const isminefilter& filter) const
|
||||
@@ -4297,6 +4332,15 @@ bool CWallet::CanGrindR() const
|
||||
return !IsWalletFlagSet(WALLET_FLAG_EXTERNAL_SIGNER);
|
||||
}
|
||||
|
||||
// Returns wallet prefix for migration.
|
||||
// Used to name the backup file and newly created wallets.
|
||||
// E.g. a watch-only wallet is named "<prefix>_watchonly".
|
||||
static std::string MigrationPrefixName(CWallet& wallet)
|
||||
{
|
||||
const std::string& name{wallet.GetName()};
|
||||
return name.empty() ? "default_wallet" : name;
|
||||
}
|
||||
|
||||
bool DoMigration(CWallet& wallet, WalletContext& context, bilingual_str& error, MigrationResult& res) EXCLUSIVE_LOCKS_REQUIRED(wallet.cs_wallet)
|
||||
{
|
||||
AssertLockHeld(wallet.cs_wallet);
|
||||
@@ -4328,7 +4372,7 @@ bool DoMigration(CWallet& wallet, WalletContext& context, bilingual_str& error,
|
||||
|
||||
DatabaseStatus status;
|
||||
std::vector<bilingual_str> warnings;
|
||||
std::string wallet_name = wallet.GetName() + "_watchonly";
|
||||
std::string wallet_name = MigrationPrefixName(wallet) + "_watchonly";
|
||||
std::unique_ptr<WalletDatabase> database = MakeWalletDatabase(wallet_name, options, status, error);
|
||||
if (!database) {
|
||||
error = strprintf(_("Wallet file creation failed: %s"), error);
|
||||
@@ -4365,7 +4409,7 @@ bool DoMigration(CWallet& wallet, WalletContext& context, bilingual_str& error,
|
||||
|
||||
DatabaseStatus status;
|
||||
std::vector<bilingual_str> warnings;
|
||||
std::string wallet_name = wallet.GetName() + "_solvables";
|
||||
std::string wallet_name = MigrationPrefixName(wallet) + "_solvables";
|
||||
std::unique_ptr<WalletDatabase> database = MakeWalletDatabase(wallet_name, options, status, error);
|
||||
if (!database) {
|
||||
error = strprintf(_("Wallet file creation failed: %s"), error);
|
||||
@@ -4486,7 +4530,12 @@ util::Result<MigrationResult> MigrateLegacyToDescriptor(std::shared_ptr<CWallet>
|
||||
std::string name = to_reload->GetName();
|
||||
to_reload.reset();
|
||||
to_reload = LoadWallet(context, name, /*load_on_start=*/std::nullopt, options, status, error, warnings);
|
||||
return to_reload != nullptr;
|
||||
if (!to_reload) {
|
||||
LogError("Failed to load wallet '%s' after migration. Rolling back migration to preserve consistency. "
|
||||
"Error cause: %s\n", wallet_name, error.original);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
// Before anything else, check if there is something to migrate.
|
||||
@@ -4499,7 +4548,7 @@ util::Result<MigrationResult> MigrateLegacyToDescriptor(std::shared_ptr<CWallet>
|
||||
|
||||
// Make a backup of the DB
|
||||
fs::path this_wallet_dir = fs::absolute(fs::PathFromString(local_wallet->GetDatabase().Filename())).parent_path();
|
||||
fs::path backup_filename = fs::PathFromString(strprintf("%s_%d.legacy.bak", (wallet_name.empty() ? "default_wallet" : wallet_name), GetTime()));
|
||||
fs::path backup_filename = fs::PathFromString(strprintf("%s_%d.legacy.bak", MigrationPrefixName(*local_wallet), GetTime()));
|
||||
fs::path backup_path = this_wallet_dir / backup_filename;
|
||||
if (!local_wallet->BackupWallet(fs::PathToString(backup_path))) {
|
||||
if (was_loaded) {
|
||||
@@ -4542,26 +4591,44 @@ util::Result<MigrationResult> MigrateLegacyToDescriptor(std::shared_ptr<CWallet>
|
||||
}
|
||||
}
|
||||
|
||||
// In case of reloading failure, we need to remember the wallet dirs to remove
|
||||
// Set is used as it may be populated with the same wallet directory paths multiple times,
|
||||
// both before and after reloading. This ensures the set is complete even if one of the wallets
|
||||
// fails to reload.
|
||||
std::set<fs::path> wallet_dirs;
|
||||
// In case of loading failure, we need to remember the wallet files we have created to remove.
|
||||
// A `set` is used as it may be populated with the same wallet directory paths multiple times,
|
||||
// both before and after loading. This ensures the set is complete even if one of the wallets
|
||||
// fails to load.
|
||||
std::set<fs::path> wallet_files_to_remove;
|
||||
std::set<fs::path> wallet_empty_dirs_to_remove;
|
||||
|
||||
// Helper to track wallet files and directories for cleanup on failure.
|
||||
// Only directories of wallets created during migration (not the main wallet) are tracked.
|
||||
auto track_for_cleanup = [&](const CWallet& wallet) {
|
||||
const auto files = wallet.GetDatabase().Files();
|
||||
wallet_files_to_remove.insert(files.begin(), files.end());
|
||||
if (wallet.GetName() != wallet_name) {
|
||||
// If this isn’t the main wallet, mark its directory for removal.
|
||||
// This applies to the watch-only and solvable wallets.
|
||||
// Wallets stored directly as files in the top-level directory
|
||||
// (e.g. default unnamed wallets) don’t have a removable parent directory.
|
||||
wallet_empty_dirs_to_remove.insert(fs::PathFromString(wallet.GetDatabase().Filename()).parent_path());
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
if (success) {
|
||||
// Migration successful, unload all wallets locally, then reload them.
|
||||
// Reload the main wallet
|
||||
wallet_dirs.insert(fs::PathFromString(local_wallet->GetDatabase().Filename()).parent_path());
|
||||
LogInfo("Loading new wallets after migration...\n");
|
||||
track_for_cleanup(*local_wallet);
|
||||
success = reload_wallet(local_wallet);
|
||||
res.wallet = local_wallet;
|
||||
res.wallet_name = wallet_name;
|
||||
if (success && res.watchonly_wallet) {
|
||||
// Reload watchonly
|
||||
wallet_dirs.insert(fs::PathFromString(res.watchonly_wallet->GetDatabase().Filename()).parent_path());
|
||||
track_for_cleanup(*res.watchonly_wallet);
|
||||
success = reload_wallet(res.watchonly_wallet);
|
||||
}
|
||||
if (success && res.solvables_wallet) {
|
||||
// Reload solvables
|
||||
wallet_dirs.insert(fs::PathFromString(res.solvables_wallet->GetDatabase().Filename()).parent_path());
|
||||
track_for_cleanup(*res.solvables_wallet);
|
||||
success = reload_wallet(res.solvables_wallet);
|
||||
}
|
||||
}
|
||||
@@ -4569,7 +4636,7 @@ util::Result<MigrationResult> MigrateLegacyToDescriptor(std::shared_ptr<CWallet>
|
||||
// Migration failed, cleanup
|
||||
// Before deleting the wallet's directory, copy the backup file to the top-level wallets dir
|
||||
fs::path temp_backup_location = fsbridge::AbsPathJoin(GetWalletDir(), backup_filename);
|
||||
fs::copy_file(backup_path, temp_backup_location, fs::copy_options::none);
|
||||
fs::rename(backup_path, temp_backup_location);
|
||||
|
||||
// Make list of wallets to cleanup
|
||||
std::vector<std::shared_ptr<CWallet>> created_wallets;
|
||||
@@ -4578,8 +4645,8 @@ util::Result<MigrationResult> MigrateLegacyToDescriptor(std::shared_ptr<CWallet>
|
||||
if (res.solvables_wallet) created_wallets.push_back(std::move(res.solvables_wallet));
|
||||
|
||||
// Get the directories to remove after unloading
|
||||
for (std::shared_ptr<CWallet>& w : created_wallets) {
|
||||
wallet_dirs.emplace(fs::PathFromString(w->GetDatabase().Filename()).parent_path());
|
||||
for (std::shared_ptr<CWallet>& wallet : created_wallets) {
|
||||
track_for_cleanup(*wallet);
|
||||
}
|
||||
|
||||
// Unload the wallets
|
||||
@@ -4598,9 +4665,15 @@ util::Result<MigrationResult> MigrateLegacyToDescriptor(std::shared_ptr<CWallet>
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the wallet directories
|
||||
for (const fs::path& dir : wallet_dirs) {
|
||||
fs::remove_all(dir);
|
||||
// First, delete the db files we have created throughout this process and nothing else
|
||||
for (const fs::path& file : wallet_files_to_remove) {
|
||||
fs::remove(file);
|
||||
}
|
||||
|
||||
// Second, delete the created wallet directories and nothing else. They must be empty at this point.
|
||||
for (const fs::path& dir : wallet_empty_dirs_to_remove) {
|
||||
Assume(fs::is_empty(dir));
|
||||
fs::remove(dir);
|
||||
}
|
||||
|
||||
// Restore the backup
|
||||
@@ -4614,8 +4687,7 @@ util::Result<MigrationResult> MigrateLegacyToDescriptor(std::shared_ptr<CWallet>
|
||||
}
|
||||
|
||||
// The wallet directory has been restored, but just in case, copy the previously created backup to the wallet dir
|
||||
fs::copy_file(temp_backup_location, backup_path, fs::copy_options::none);
|
||||
fs::remove(temp_backup_location);
|
||||
fs::rename(temp_backup_location, backup_path);
|
||||
|
||||
// Verify that there is no dangling wallet: when the wallet wasn't loaded before, expect null.
|
||||
// This check is performed after restoration to avoid an early error before saving the backup.
|
||||
|
||||
@@ -69,9 +69,6 @@ class BadTxTemplate:
|
||||
# Only specified if it differs from mempool acceptance error.
|
||||
block_reject_reason = ""
|
||||
|
||||
# Do we expect to be disconnected after submitting this tx?
|
||||
expect_disconnect = False
|
||||
|
||||
# Is this tx considered valid when included in a block, but not for acceptance into
|
||||
# the mempool (i.e. does it violate policy but not consensus)?
|
||||
valid_in_block = False
|
||||
@@ -89,7 +86,6 @@ class BadTxTemplate:
|
||||
|
||||
class OutputMissing(BadTxTemplate):
|
||||
reject_reason = "bad-txns-vout-empty"
|
||||
expect_disconnect = True
|
||||
|
||||
def get_tx(self):
|
||||
tx = CTransaction()
|
||||
@@ -100,7 +96,6 @@ class OutputMissing(BadTxTemplate):
|
||||
|
||||
class InputMissing(BadTxTemplate):
|
||||
reject_reason = "bad-txns-vin-empty"
|
||||
expect_disconnect = True
|
||||
|
||||
# We use a blank transaction here to make sure
|
||||
# it is interpreted as a non-witness transaction.
|
||||
@@ -117,7 +112,6 @@ class InputMissing(BadTxTemplate):
|
||||
# tree depth commitment (CVE-2017-12842)
|
||||
class SizeTooSmall(BadTxTemplate):
|
||||
reject_reason = "tx-size-small"
|
||||
expect_disconnect = False
|
||||
valid_in_block = True
|
||||
|
||||
def get_tx(self):
|
||||
@@ -134,7 +128,6 @@ class BadInputOutpointIndex(BadTxTemplate):
|
||||
# Won't be rejected - nonexistent outpoint index is treated as an orphan since the coins
|
||||
# database can't distinguish between spent outpoints and outpoints which never existed.
|
||||
reject_reason = None
|
||||
expect_disconnect = False
|
||||
|
||||
def get_tx(self):
|
||||
num_indices = len(self.spend_tx.vin)
|
||||
@@ -149,7 +142,6 @@ class BadInputOutpointIndex(BadTxTemplate):
|
||||
|
||||
class DuplicateInput(BadTxTemplate):
|
||||
reject_reason = 'bad-txns-inputs-duplicate'
|
||||
expect_disconnect = True
|
||||
|
||||
def get_tx(self):
|
||||
tx = CTransaction()
|
||||
@@ -162,7 +154,6 @@ class DuplicateInput(BadTxTemplate):
|
||||
|
||||
class PrevoutNullInput(BadTxTemplate):
|
||||
reject_reason = 'bad-txns-prevout-null'
|
||||
expect_disconnect = True
|
||||
|
||||
def get_tx(self):
|
||||
tx = CTransaction()
|
||||
@@ -175,7 +166,6 @@ class PrevoutNullInput(BadTxTemplate):
|
||||
|
||||
class NonexistentInput(BadTxTemplate):
|
||||
reject_reason = None # Added as an orphan tx.
|
||||
expect_disconnect = False
|
||||
|
||||
def get_tx(self):
|
||||
tx = CTransaction()
|
||||
@@ -188,7 +178,6 @@ class NonexistentInput(BadTxTemplate):
|
||||
|
||||
class SpendTooMuch(BadTxTemplate):
|
||||
reject_reason = 'bad-txns-in-belowout'
|
||||
expect_disconnect = True
|
||||
|
||||
def get_tx(self):
|
||||
return create_tx_with_script(
|
||||
@@ -197,7 +186,6 @@ class SpendTooMuch(BadTxTemplate):
|
||||
|
||||
class CreateNegative(BadTxTemplate):
|
||||
reject_reason = 'bad-txns-vout-negative'
|
||||
expect_disconnect = True
|
||||
|
||||
def get_tx(self):
|
||||
return create_tx_with_script(self.spend_tx, 0, amount=-1)
|
||||
@@ -205,7 +193,6 @@ class CreateNegative(BadTxTemplate):
|
||||
|
||||
class CreateTooLarge(BadTxTemplate):
|
||||
reject_reason = 'bad-txns-vout-toolarge'
|
||||
expect_disconnect = True
|
||||
|
||||
def get_tx(self):
|
||||
return create_tx_with_script(self.spend_tx, 0, amount=MAX_MONEY + 1)
|
||||
@@ -213,7 +200,6 @@ class CreateTooLarge(BadTxTemplate):
|
||||
|
||||
class CreateSumTooLarge(BadTxTemplate):
|
||||
reject_reason = 'bad-txns-txouttotal-toolarge'
|
||||
expect_disconnect = True
|
||||
|
||||
def get_tx(self):
|
||||
tx = create_tx_with_script(self.spend_tx, 0, amount=MAX_MONEY)
|
||||
@@ -223,8 +209,7 @@ class CreateSumTooLarge(BadTxTemplate):
|
||||
|
||||
|
||||
class InvalidOPIFConstruction(BadTxTemplate):
|
||||
reject_reason = "mandatory-script-verify-flag-failed (Invalid OP_IF construction)"
|
||||
expect_disconnect = True
|
||||
reject_reason = "mempool-script-verify-flag-failed (Invalid OP_IF construction)"
|
||||
valid_in_block = True
|
||||
|
||||
def get_tx(self):
|
||||
@@ -236,7 +221,6 @@ class InvalidOPIFConstruction(BadTxTemplate):
|
||||
class TooManySigops(BadTxTemplate):
|
||||
reject_reason = "bad-txns-too-many-sigops"
|
||||
block_reject_reason = "bad-blk-sigops, out-of-bounds SigOpCount"
|
||||
expect_disconnect = False
|
||||
|
||||
def get_tx(self):
|
||||
lotsa_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
|
||||
@@ -258,15 +242,14 @@ def getDisabledOpcodeTemplate(opcode):
|
||||
|
||||
return type('DisabledOpcode_' + str(opcode), (BadTxTemplate,), {
|
||||
'reject_reason': "disabled opcode",
|
||||
'expect_disconnect': True,
|
||||
'get_tx': get_tx,
|
||||
'valid_in_block' : True
|
||||
})
|
||||
|
||||
class NonStandardAndInvalid(BadTxTemplate):
|
||||
"""A non-standard transaction which is also consensus-invalid should return the consensus error."""
|
||||
reject_reason = "mandatory-script-verify-flag-failed (OP_RETURN was encountered)"
|
||||
expect_disconnect = True
|
||||
"""A non-standard transaction which is also consensus-invalid should return the first error."""
|
||||
reject_reason = "mempool-script-verify-flag-failed (Using OP_CODESEPARATOR in non-witness script)"
|
||||
block_reject_reason = "mandatory-script-verify-flag-failed (OP_RETURN was encountered)"
|
||||
valid_in_block = False
|
||||
|
||||
def get_tx(self):
|
||||
|
||||
@@ -164,9 +164,12 @@ class FullBlockTest(BitcoinTestFramework):
|
||||
self.sign_tx(badtx, attempt_spend_tx)
|
||||
badtx.rehash()
|
||||
badblock = self.update_block(blockname, [badtx])
|
||||
reject_reason = (template.block_reject_reason or template.reject_reason)
|
||||
if reject_reason and reject_reason.startswith("mempool-script-verify-flag-failed"):
|
||||
reject_reason = "mandatory-script-verify-flag-failed" + reject_reason[33:]
|
||||
self.send_blocks(
|
||||
[badblock], success=False,
|
||||
reject_reason=(template.block_reject_reason or template.reject_reason),
|
||||
reject_reason=reject_reason,
|
||||
reconnect=True, timeout=2)
|
||||
|
||||
self.move_tip(2)
|
||||
|
||||
@@ -154,12 +154,14 @@ class BIP65Test(BitcoinTestFramework):
|
||||
coin_vout = coin.prevout.n
|
||||
cltv_invalidate(spendtx, i)
|
||||
|
||||
blk_rej = "mandatory-script-verify-flag-failed"
|
||||
tx_rej = "mempool-script-verify-flag-failed"
|
||||
expected_cltv_reject_reason = [
|
||||
"mandatory-script-verify-flag-failed (Operation not valid with the current stack size)",
|
||||
"mandatory-script-verify-flag-failed (Negative locktime)",
|
||||
"mandatory-script-verify-flag-failed (Locktime requirement not satisfied)",
|
||||
"mandatory-script-verify-flag-failed (Locktime requirement not satisfied)",
|
||||
"mandatory-script-verify-flag-failed (Locktime requirement not satisfied)",
|
||||
" (Operation not valid with the current stack size)",
|
||||
" (Negative locktime)",
|
||||
" (Locktime requirement not satisfied)",
|
||||
" (Locktime requirement not satisfied)",
|
||||
" (Locktime requirement not satisfied)",
|
||||
][i]
|
||||
# First we show that this tx is valid except for CLTV by getting it
|
||||
# rejected from the mempool for exactly that reason.
|
||||
@@ -170,8 +172,8 @@ class BIP65Test(BitcoinTestFramework):
|
||||
'txid': spendtx_txid,
|
||||
'wtxid': spendtx_wtxid,
|
||||
'allowed': False,
|
||||
'reject-reason': expected_cltv_reject_reason,
|
||||
'reject-details': expected_cltv_reject_reason + f", input 0 of {spendtx_txid} (wtxid {spendtx_wtxid}), spending {coin_txid}:{coin_vout}"
|
||||
'reject-reason': tx_rej + expected_cltv_reject_reason,
|
||||
'reject-details': tx_rej + expected_cltv_reject_reason + f", input 0 of {spendtx_txid} (wtxid {spendtx_wtxid}), spending {coin_txid}:{coin_vout}"
|
||||
}],
|
||||
self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()], maxfeerate=0),
|
||||
)
|
||||
@@ -181,7 +183,7 @@ class BIP65Test(BitcoinTestFramework):
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
block.solve()
|
||||
|
||||
with self.nodes[0].assert_debug_log(expected_msgs=[f'Block validation error: {expected_cltv_reject_reason}']):
|
||||
with self.nodes[0].assert_debug_log(expected_msgs=[f'Block validation error: {blk_rej + expected_cltv_reject_reason}']):
|
||||
peer.send_and_ping(msg_block(block))
|
||||
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
|
||||
peer.sync_with_ping()
|
||||
|
||||
@@ -123,8 +123,8 @@ class BIP66Test(BitcoinTestFramework):
|
||||
'txid': spendtx_txid,
|
||||
'wtxid': spendtx_wtxid,
|
||||
'allowed': False,
|
||||
'reject-reason': 'mandatory-script-verify-flag-failed (Non-canonical DER signature)',
|
||||
'reject-details': 'mandatory-script-verify-flag-failed (Non-canonical DER signature), ' +
|
||||
'reject-reason': 'mempool-script-verify-flag-failed (Non-canonical DER signature)',
|
||||
'reject-details': 'mempool-script-verify-flag-failed (Non-canonical DER signature), ' +
|
||||
f"input 0 of {spendtx_txid} (wtxid {spendtx_wtxid}), spending {coin_txid}:0"
|
||||
}],
|
||||
self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()], maxfeerate=0),
|
||||
|
||||
@@ -37,8 +37,8 @@ from test_framework.util import (
|
||||
from test_framework.wallet import getnewdestination
|
||||
from test_framework.wallet_util import generate_keypair
|
||||
|
||||
NULLDUMMY_ERROR = "mandatory-script-verify-flag-failed (Dummy CHECKMULTISIG argument must be zero)"
|
||||
|
||||
NULLDUMMY_TX_ERROR = "mempool-script-verify-flag-failed (Dummy CHECKMULTISIG argument must be zero)"
|
||||
NULLDUMMY_BLK_ERROR = "mandatory-script-verify-flag-failed (Dummy CHECKMULTISIG argument must be zero)"
|
||||
|
||||
def invalidate_nulldummy_tx(tx):
|
||||
"""Transform a NULLDUMMY compliant tx (i.e. scriptSig starts with OP_0)
|
||||
@@ -105,7 +105,7 @@ class NULLDUMMYTest(BitcoinTestFramework):
|
||||
addr=self.ms_address, amount=47,
|
||||
privkey=self.privkey)
|
||||
invalidate_nulldummy_tx(test2tx)
|
||||
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test2tx.serialize_with_witness().hex(), 0)
|
||||
assert_raises_rpc_error(-26, NULLDUMMY_TX_ERROR, self.nodes[0].sendrawtransaction, test2tx.serialize_with_witness().hex(), 0)
|
||||
|
||||
self.log.info(f"Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [{COINBASE_MATURITY + 4}]")
|
||||
self.block_submit(self.nodes[0], [test2tx], accept=True)
|
||||
@@ -116,7 +116,7 @@ class NULLDUMMYTest(BitcoinTestFramework):
|
||||
privkey=self.privkey)
|
||||
test6txs = [CTransaction(test4tx)]
|
||||
invalidate_nulldummy_tx(test4tx)
|
||||
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test4tx.serialize_with_witness().hex(), 0)
|
||||
assert_raises_rpc_error(-26, NULLDUMMY_TX_ERROR, self.nodes[0].sendrawtransaction, test4tx.serialize_with_witness().hex(), 0)
|
||||
self.block_submit(self.nodes[0], [test4tx], accept=False)
|
||||
|
||||
self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
|
||||
@@ -126,7 +126,7 @@ class NULLDUMMYTest(BitcoinTestFramework):
|
||||
privkey=self.privkey)
|
||||
test6txs.append(CTransaction(test5tx))
|
||||
test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
|
||||
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test5tx.serialize_with_witness().hex(), 0)
|
||||
assert_raises_rpc_error(-26, NULLDUMMY_TX_ERROR, self.nodes[0].sendrawtransaction, test5tx.serialize_with_witness().hex(), 0)
|
||||
self.block_submit(self.nodes[0], [test5tx], with_witness=True, accept=False)
|
||||
|
||||
self.log.info(f"Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [{COINBASE_MATURITY + 5}]")
|
||||
@@ -142,7 +142,7 @@ class NULLDUMMYTest(BitcoinTestFramework):
|
||||
if with_witness:
|
||||
add_witness_commitment(block)
|
||||
block.solve()
|
||||
assert_equal(None if accept else NULLDUMMY_ERROR, node.submitblock(block.serialize().hex()))
|
||||
assert_equal(None if accept else NULLDUMMY_BLK_ERROR, node.submitblock(block.serialize().hex()))
|
||||
if accept:
|
||||
assert_equal(node.getbestblockhash(), block.hash)
|
||||
self.lastblockhash = block.hash
|
||||
|
||||
@@ -193,8 +193,8 @@ class SegWitTest(BitcoinTestFramework):
|
||||
assert_equal(self.nodes[2].getbalance(), 20 * Decimal("49.999"))
|
||||
|
||||
self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid")
|
||||
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WPKH][1], sign=False)
|
||||
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WSH][1], sign=False)
|
||||
self.fail_accept(self.nodes[2], "mempool-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WPKH][1], sign=False)
|
||||
self.fail_accept(self.nodes[2], "mempool-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WSH][1], sign=False)
|
||||
|
||||
self.generate(self.nodes[0], 1) # block 164
|
||||
|
||||
@@ -213,13 +213,13 @@ class SegWitTest(BitcoinTestFramework):
|
||||
|
||||
self.log.info("Verify default node can't accept txs with missing witness")
|
||||
# unsigned, no scriptsig
|
||||
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program hash mismatch)", wit_ids[NODE_0][P2WPKH][0], sign=False)
|
||||
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program was passed an empty witness)", wit_ids[NODE_0][P2WSH][0], sign=False)
|
||||
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WPKH][0], sign=False)
|
||||
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WSH][0], sign=False)
|
||||
self.fail_accept(self.nodes[0], "mempool-script-verify-flag-failed (Witness program hash mismatch)", wit_ids[NODE_0][P2WPKH][0], sign=False)
|
||||
self.fail_accept(self.nodes[0], "mempool-script-verify-flag-failed (Witness program was passed an empty witness)", wit_ids[NODE_0][P2WSH][0], sign=False)
|
||||
self.fail_accept(self.nodes[0], "mempool-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WPKH][0], sign=False)
|
||||
self.fail_accept(self.nodes[0], "mempool-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WSH][0], sign=False)
|
||||
# unsigned with redeem script
|
||||
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program hash mismatch)", p2sh_ids[NODE_0][P2WPKH][0], sign=False, redeem_script=witness_script(False, self.pubkey[0]))
|
||||
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program was passed an empty witness)", p2sh_ids[NODE_0][P2WSH][0], sign=False, redeem_script=witness_script(True, self.pubkey[0]))
|
||||
self.fail_accept(self.nodes[0], "mempool-script-verify-flag-failed (Witness program hash mismatch)", p2sh_ids[NODE_0][P2WPKH][0], sign=False, redeem_script=witness_script(False, self.pubkey[0]))
|
||||
self.fail_accept(self.nodes[0], "mempool-script-verify-flag-failed (Witness program was passed an empty witness)", p2sh_ids[NODE_0][P2WSH][0], sign=False, redeem_script=witness_script(True, self.pubkey[0]))
|
||||
|
||||
# Coinbase contains the witness commitment nonce, check that RPC shows us
|
||||
coinbase_txid = self.nodes[2].getblock(blockhash)['tx'][0]
|
||||
@@ -230,10 +230,10 @@ class SegWitTest(BitcoinTestFramework):
|
||||
assert_equal(witnesses[0], '00' * 32)
|
||||
|
||||
self.log.info("Verify witness txs without witness data are invalid after the fork")
|
||||
self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False)
|
||||
self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False)
|
||||
self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program hash mismatch)', p2sh_ids[NODE_2][P2WPKH][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
|
||||
self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program was passed an empty witness)', p2sh_ids[NODE_2][P2WSH][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))
|
||||
self.fail_accept(self.nodes[2], 'mempool-script-verify-flag-failed (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False)
|
||||
self.fail_accept(self.nodes[2], 'mempool-script-verify-flag-failed (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False)
|
||||
self.fail_accept(self.nodes[2], 'mempool-script-verify-flag-failed (Witness program hash mismatch)', p2sh_ids[NODE_2][P2WPKH][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
|
||||
self.fail_accept(self.nodes[2], 'mempool-script-verify-flag-failed (Witness program was passed an empty witness)', p2sh_ids[NODE_2][P2WSH][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))
|
||||
|
||||
self.log.info("Verify default node can now use witness txs")
|
||||
self.success_mine(self.nodes[0], wit_ids[NODE_0][P2WPKH][0], True)
|
||||
|
||||
@@ -71,6 +71,7 @@ from test_framework.script import (
|
||||
OP_PUSHDATA1,
|
||||
OP_RETURN,
|
||||
OP_SWAP,
|
||||
OP_TUCK,
|
||||
OP_VERIFY,
|
||||
SIGHASH_DEFAULT,
|
||||
SIGHASH_ALL,
|
||||
@@ -171,9 +172,9 @@ def get(ctx, name):
|
||||
ctx[name] = expr
|
||||
return expr.value
|
||||
|
||||
def getter(name):
|
||||
def getter(name, **kwargs):
|
||||
"""Return a callable that evaluates name in its passed context."""
|
||||
return lambda ctx: get(ctx, name)
|
||||
return lambda ctx: get({**ctx, **kwargs}, name)
|
||||
|
||||
def override(expr, **kwargs):
|
||||
"""Return a callable that evaluates expr in a modified context."""
|
||||
@@ -217,6 +218,20 @@ def default_controlblock(ctx):
|
||||
"""Default expression for "controlblock": combine leafversion, negflag, pubkey_internal, merklebranch."""
|
||||
return bytes([get(ctx, "leafversion") + get(ctx, "negflag")]) + get(ctx, "pubkey_internal") + get(ctx, "merklebranch")
|
||||
|
||||
def default_scriptcode_suffix(ctx):
|
||||
"""Default expression for "scriptcode_suffix", the actually used portion of the scriptcode."""
|
||||
scriptcode = get(ctx, "scriptcode")
|
||||
codesepnum = get(ctx, "codesepnum")
|
||||
if codesepnum == -1:
|
||||
return scriptcode
|
||||
codeseps = 0
|
||||
for (opcode, data, sop_idx) in scriptcode.raw_iter():
|
||||
if opcode == OP_CODESEPARATOR:
|
||||
if codeseps == codesepnum:
|
||||
return CScript(scriptcode[sop_idx+1:])
|
||||
codeseps += 1
|
||||
assert False
|
||||
|
||||
def default_sigmsg(ctx):
|
||||
"""Default expression for "sigmsg": depending on mode, compute BIP341, BIP143, or legacy sigmsg."""
|
||||
tx = get(ctx, "tx")
|
||||
@@ -236,12 +251,12 @@ def default_sigmsg(ctx):
|
||||
return TaprootSignatureMsg(tx, utxos, hashtype, idx, scriptpath=False, annex=annex)
|
||||
elif mode == "witv0":
|
||||
# BIP143 signature hash
|
||||
scriptcode = get(ctx, "scriptcode")
|
||||
scriptcode = get(ctx, "scriptcode_suffix")
|
||||
utxos = get(ctx, "utxos")
|
||||
return SegwitV0SignatureMsg(scriptcode, tx, idx, hashtype, utxos[idx].nValue)
|
||||
else:
|
||||
# Pre-segwit signature hash
|
||||
scriptcode = get(ctx, "scriptcode")
|
||||
scriptcode = get(ctx, "scriptcode_suffix")
|
||||
return LegacySignatureMsg(scriptcode, tx, idx, hashtype)[0]
|
||||
|
||||
def default_sighash(ctx):
|
||||
@@ -301,7 +316,12 @@ def default_hashtype_actual(ctx):
|
||||
|
||||
def default_bytes_hashtype(ctx):
|
||||
"""Default expression for "bytes_hashtype": bytes([hashtype_actual]) if not 0, b"" otherwise."""
|
||||
return bytes([x for x in [get(ctx, "hashtype_actual")] if x != 0])
|
||||
mode = get(ctx, "mode")
|
||||
hashtype_actual = get(ctx, "hashtype_actual")
|
||||
if mode != "taproot" or hashtype_actual != 0:
|
||||
return bytes([hashtype_actual])
|
||||
else:
|
||||
return bytes()
|
||||
|
||||
def default_sign(ctx):
|
||||
"""Default expression for "sign": concatenation of signature and bytes_hashtype."""
|
||||
@@ -379,6 +399,8 @@ DEFAULT_CONTEXT = {
|
||||
"key_tweaked": default_key_tweaked,
|
||||
# The tweak to use (None for script path spends, the actual tweak for key path spends).
|
||||
"tweak": default_tweak,
|
||||
# The part of the scriptcode after the last executed OP_CODESEPARATOR.
|
||||
"scriptcode_suffix": default_scriptcode_suffix,
|
||||
# The sigmsg value (preimage of sighash)
|
||||
"sigmsg": default_sigmsg,
|
||||
# The sighash value (32 bytes)
|
||||
@@ -409,6 +431,8 @@ DEFAULT_CONTEXT = {
|
||||
"annex": None,
|
||||
# The codeseparator position (only when mode=="taproot").
|
||||
"codeseppos": -1,
|
||||
# Which OP_CODESEPARATOR is the last executed one in the script (in legacy/P2SH/P2WSH).
|
||||
"codesepnum": -1,
|
||||
# The redeemscript to add to the scriptSig (if P2SH; None implies not P2SH).
|
||||
"script_p2sh": None,
|
||||
# The script to add to the witness in (if P2WSH; None implies P2WPKH)
|
||||
@@ -1210,6 +1234,70 @@ def spenders_taproot_active():
|
||||
standard = hashtype in VALID_SIGHASHES_ECDSA and (p2sh or witv0)
|
||||
add_spender(spenders, "compat/nocsa", hashtype=hashtype, p2sh=p2sh, witv0=witv0, standard=standard, script=CScript([OP_IF, OP_11, pubkey1, OP_CHECKSIGADD, OP_12, OP_EQUAL, OP_ELSE, pubkey1, OP_CHECKSIG, OP_ENDIF]), key=eckey1, sigops_weight=4-3*witv0, inputs=[getter("sign"), b''], failure={"inputs": [getter("sign"), b'\x01']}, **ERR_UNDECODABLE)
|
||||
|
||||
# == sighash caching tests ==
|
||||
|
||||
# Sighash caching in legacy.
|
||||
for p2sh in [False, True]:
|
||||
for witv0 in [False, True]:
|
||||
eckey1, pubkey1 = generate_keypair(compressed=compressed)
|
||||
for _ in range(10):
|
||||
# Construct a script with 20 checksig operations (10 sighash types, each 2 times),
|
||||
# randomly ordered and interleaved with 4 OP_CODESEPARATORS.
|
||||
ops = [1, 2, 3, 0x21, 0x42, 0x63, 0x81, 0x83, 0xe1, 0xc2, -1, -1] * 2
|
||||
# Make sure no OP_CODESEPARATOR appears last.
|
||||
while True:
|
||||
random.shuffle(ops)
|
||||
if ops[-1] != -1:
|
||||
break
|
||||
script = [pubkey1]
|
||||
inputs = []
|
||||
codeseps = -1
|
||||
for pos, op in enumerate(ops):
|
||||
if op == -1:
|
||||
codeseps += 1
|
||||
script.append(OP_CODESEPARATOR)
|
||||
elif pos + 1 != len(ops):
|
||||
script += [OP_TUCK, OP_CHECKSIGVERIFY]
|
||||
inputs.append(getter("sign", codesepnum=codeseps, hashtype=op))
|
||||
else:
|
||||
script += [OP_CHECKSIG]
|
||||
inputs.append(getter("sign", codesepnum=codeseps, hashtype=op))
|
||||
inputs.reverse()
|
||||
script = CScript(script)
|
||||
add_spender(spenders, "sighashcache/legacy", p2sh=p2sh, witv0=witv0, standard=False, script=script, inputs=inputs, key=eckey1, sigops_weight=12*8*(4-3*witv0), no_fail=True)
|
||||
|
||||
# Sighash caching in tapscript.
|
||||
for _ in range(10):
|
||||
# Construct a script with 700 checksig operations (7 sighash types, each 100 times),
|
||||
# randomly ordered and interleaved with 100 OP_CODESEPARATORS.
|
||||
ops = [0, 1, 2, 3, 0x81, 0x82, 0x83, -1] * 100
|
||||
# Make sure no OP_CODESEPARATOR appears last.
|
||||
while True:
|
||||
random.shuffle(ops)
|
||||
if ops[-1] != -1:
|
||||
break
|
||||
script = [pubs[1]]
|
||||
inputs = []
|
||||
opcount = 1
|
||||
codeseppos = -1
|
||||
for pos, op in enumerate(ops):
|
||||
if op == -1:
|
||||
codeseppos = opcount
|
||||
opcount += 1
|
||||
script.append(OP_CODESEPARATOR)
|
||||
elif pos + 1 != len(ops):
|
||||
opcount += 2
|
||||
script += [OP_TUCK, OP_CHECKSIGVERIFY]
|
||||
inputs.append(getter("sign", codeseppos=codeseppos, hashtype=op))
|
||||
else:
|
||||
opcount += 1
|
||||
script += [OP_CHECKSIG]
|
||||
inputs.append(getter("sign", codeseppos=codeseppos, hashtype=op))
|
||||
inputs.reverse()
|
||||
script = CScript(script)
|
||||
tap = taproot_construct(pubs[0], [("leaf", script)])
|
||||
add_spender(spenders, "sighashcache/taproot", tap=tap, leaf="leaf", inputs=inputs, standard=True, key=secs[1], no_fail=True)
|
||||
|
||||
return spenders
|
||||
|
||||
|
||||
|
||||
@@ -441,7 +441,7 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
|
||||
nested_anchor_spend.rehash()
|
||||
|
||||
self.check_mempool_result(
|
||||
result_expected=[{'txid': nested_anchor_spend.rehash(), 'allowed': False, 'reject-reason': 'non-mandatory-script-verify-flag (Witness version reserved for soft-fork upgrades)'}],
|
||||
result_expected=[{'txid': nested_anchor_spend.rehash(), 'allowed': False, 'reject-reason': 'mempool-script-verify-flag-failed (Witness version reserved for soft-fork upgrades)'}],
|
||||
rawtxs=[nested_anchor_spend.serialize().hex()],
|
||||
maxfeerate=0,
|
||||
)
|
||||
|
||||
@@ -73,14 +73,9 @@ class InvalidTxRequestTest(BitcoinTestFramework):
|
||||
tx = template.get_tx()
|
||||
node.p2ps[0].send_txs_and_test(
|
||||
[tx], node, success=False,
|
||||
expect_disconnect=template.expect_disconnect,
|
||||
reject_reason=template.reject_reason,
|
||||
)
|
||||
|
||||
if template.expect_disconnect:
|
||||
self.log.info("Reconnecting to peer")
|
||||
self.reconnect_p2p()
|
||||
|
||||
# Make two p2p connections to provide the node with orphans
|
||||
# * p2ps[0] will send valid orphan txs (one with low fee)
|
||||
# * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
|
||||
@@ -144,7 +139,6 @@ class InvalidTxRequestTest(BitcoinTestFramework):
|
||||
# tx_orphan_2_no_fee, because it has too low fee (p2ps[0] is not disconnected for relaying that tx)
|
||||
# tx_orphan_2_invalid, because it has negative fee (p2ps[1] is disconnected for relaying that tx)
|
||||
|
||||
self.wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12) # p2ps[1] is no longer connected
|
||||
assert_equal(expected_mempool, set(node.getrawmempool()))
|
||||
|
||||
self.log.info('Test orphan pool overflow')
|
||||
@@ -165,7 +159,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
|
||||
node.p2ps[0].send_txs_and_test([rejected_parent], node, success=False)
|
||||
|
||||
self.log.info('Test that a peer disconnection causes erase its transactions from the orphan pool')
|
||||
with node.assert_debug_log(['Erased 100 orphan transaction(s) from peer=26']):
|
||||
with node.assert_debug_log(['Erased 100 orphan transaction(s) from peer=']):
|
||||
self.reconnect_p2p(num_connections=1)
|
||||
|
||||
self.log.info('Test that a transaction in the orphan pool is included in a new tip block causes erase this transaction from the orphan pool')
|
||||
|
||||
@@ -251,8 +251,10 @@ class PackageRelayTest(BitcoinTestFramework):
|
||||
assert tx_orphan_bad_wit.rehash() not in node_mempool
|
||||
|
||||
# 5. Have the other peer send the tx too, so that tx_orphan_bad_wit package is attempted.
|
||||
bad_orphan_sender.send_message(msg_tx(low_fee_parent["tx"]))
|
||||
bad_orphan_sender.wait_for_disconnect()
|
||||
bad_orphan_sender.send_and_ping(msg_tx(low_fee_parent["tx"]))
|
||||
|
||||
# The bad orphan sender should not be disconnected.
|
||||
bad_orphan_sender.sync_with_ping()
|
||||
|
||||
# The peer that didn't provide the orphan should not be disconnected.
|
||||
parent_sender.sync_with_ping()
|
||||
|
||||
@@ -704,14 +704,20 @@ class SegWitTest(BitcoinTestFramework):
|
||||
# segwit activation. Note that older bitcoind's that are not
|
||||
# segwit-aware would also reject this for failing CLEANSTACK.
|
||||
with self.nodes[0].assert_debug_log(
|
||||
expected_msgs=[spend_tx.hash, 'was not accepted: mandatory-script-verify-flag-failed (Witness program was passed an empty witness)']):
|
||||
expected_msgs=[spend_tx.hash, 'was not accepted: mempool-script-verify-flag-failed (Witness program was passed an empty witness)']):
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False)
|
||||
|
||||
# The transaction was detected as witness stripped above and not added to the reject
|
||||
# filter. Trying again will check it again and result in the same error.
|
||||
with self.nodes[0].assert_debug_log(
|
||||
expected_msgs=[spend_tx.hash, 'was not accepted: mempool-script-verify-flag-failed (Witness program was passed an empty witness)']):
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False)
|
||||
|
||||
# Try to put the witness script in the scriptSig, should also fail.
|
||||
spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a'])
|
||||
spend_tx.rehash()
|
||||
with self.nodes[0].assert_debug_log(
|
||||
expected_msgs=[spend_tx.hash, 'was not accepted: mandatory-script-verify-flag-failed (Script evaluated without error but finished with a false/empty top stack element)']):
|
||||
expected_msgs=[spend_tx.hash, 'was not accepted: mempool-script-verify-flag-failed (Script evaluated without error but finished with a false/empty top stack element)']):
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False)
|
||||
|
||||
# Now put the witness script in the witness, should succeed after
|
||||
@@ -1282,6 +1288,13 @@ class SegWitTest(BitcoinTestFramework):
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, with_witness=True, accepted=True)
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=False)
|
||||
|
||||
# Now do the opposite: strip the witness entirely. This will be detected as witness stripping and
|
||||
# the (w)txid won't be added to the reject filter: we can try again and get the same error.
|
||||
tx3.wit.vtxinwit[0].scriptWitness.stack = []
|
||||
reason = "was not accepted: mempool-script-verify-flag-failed (Witness program was passed an empty witness)"
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=False, accepted=False, reason=reason)
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=False, accepted=False, reason=reason)
|
||||
|
||||
# Get rid of the extra witness, and verify acceptance.
|
||||
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_script]
|
||||
# Also check that old_node gets a tx announcement, even though this is
|
||||
@@ -1477,7 +1490,7 @@ class SegWitTest(BitcoinTestFramework):
|
||||
sign_input_segwitv0(tx2, 0, script, tx.vout[0].nValue, key)
|
||||
|
||||
# Should fail policy test.
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, True, False, 'mempool-script-verify-flag-failed (Using non-compressed keys in segwit)')
|
||||
# But passes consensus.
|
||||
block = self.build_next_block()
|
||||
self.update_witness_block_with_transactions(block, [tx2])
|
||||
@@ -1496,7 +1509,7 @@ class SegWitTest(BitcoinTestFramework):
|
||||
sign_p2pk_witness_input(witness_script, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key)
|
||||
|
||||
# Should fail policy test.
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, True, False, 'mempool-script-verify-flag-failed (Using non-compressed keys in segwit)')
|
||||
# But passes consensus.
|
||||
block = self.build_next_block()
|
||||
self.update_witness_block_with_transactions(block, [tx3])
|
||||
@@ -1513,7 +1526,7 @@ class SegWitTest(BitcoinTestFramework):
|
||||
sign_p2pk_witness_input(witness_script, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key)
|
||||
|
||||
# Should fail policy test.
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx4, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
|
||||
test_transaction_acceptance(self.nodes[0], self.test_node, tx4, True, False, 'mempool-script-verify-flag-failed (Using non-compressed keys in segwit)')
|
||||
block = self.build_next_block()
|
||||
self.update_witness_block_with_transactions(block, [tx4])
|
||||
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
|
||||
|
||||
@@ -122,8 +122,8 @@ class RPCPackagesTest(BitcoinTestFramework):
|
||||
assert_equal(testres_bad_sig, self.independent_txns_testres + [{
|
||||
"txid": tx_bad_sig_txid,
|
||||
"wtxid": tx_bad_sig_wtxid, "allowed": False,
|
||||
"reject-reason": "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)",
|
||||
"reject-details": "mandatory-script-verify-flag-failed (Operation not valid with the current stack size), " +
|
||||
"reject-reason": "mempool-script-verify-flag-failed (Operation not valid with the current stack size)",
|
||||
"reject-details": "mempool-script-verify-flag-failed (Operation not valid with the current stack size), " +
|
||||
f"input 0 of {tx_bad_sig_txid} (wtxid {tx_bad_sig_wtxid}), spending {coin['txid']}:{coin['vout']}"
|
||||
}])
|
||||
|
||||
|
||||
@@ -893,13 +893,12 @@ class P2PDataStore(P2PInterface):
|
||||
else:
|
||||
assert node.getbestblockhash() != blocks[-1].hash
|
||||
|
||||
def send_txs_and_test(self, txs, node, *, success=True, expect_disconnect=False, reject_reason=None):
|
||||
def send_txs_and_test(self, txs, node, *, success=True, reject_reason=None):
|
||||
"""Send txs to test node and test whether they're accepted to the mempool.
|
||||
|
||||
- add all txs to our tx_store
|
||||
- send tx messages for all txs
|
||||
- if success is True/False: assert that the txs are/are not accepted to the mempool
|
||||
- if expect_disconnect is True: Skip the sync with ping
|
||||
- if reject_reason is set: assert that the correct reject message is logged."""
|
||||
|
||||
with p2p_lock:
|
||||
@@ -911,10 +910,7 @@ class P2PDataStore(P2PInterface):
|
||||
for tx in txs:
|
||||
self.send_message(msg_tx(tx))
|
||||
|
||||
if expect_disconnect:
|
||||
self.wait_for_disconnect()
|
||||
else:
|
||||
self.sync_with_ping()
|
||||
self.sync_with_ping()
|
||||
|
||||
raw_mempool = node.getrawmempool()
|
||||
if success:
|
||||
|
||||
@@ -50,6 +50,7 @@ DUMMY_MIN_OP_RETURN_SCRIPT = CScript([OP_RETURN] + ([OP_0] * (MIN_PADDING - 1)))
|
||||
assert len(DUMMY_MIN_OP_RETURN_SCRIPT) == MIN_PADDING
|
||||
|
||||
PAY_TO_ANCHOR = CScript([OP_1, bytes.fromhex("4e73")])
|
||||
ANCHOR_ADDRESS = "bcrt1pfeesnyr2tx"
|
||||
|
||||
def key_to_p2pk_script(key):
|
||||
key = check_key(key)
|
||||
|
||||
@@ -137,7 +137,7 @@ class TestNode():
|
||||
self.args.append("-logsourcelocations")
|
||||
if self.version_is_at_least(239000):
|
||||
self.args.append("-loglevel=trace")
|
||||
if self.version_is_at_least(299900):
|
||||
if self.version_is_at_least(290100):
|
||||
self.args.append("-nologratelimit")
|
||||
|
||||
# Default behavior from global -v2transport flag is added to args to persist it over restarts.
|
||||
|
||||
@@ -170,6 +170,8 @@ BASE_SCRIPTS = [
|
||||
'wallet_listreceivedby.py --descriptors',
|
||||
'wallet_abandonconflict.py --legacy-wallet',
|
||||
'wallet_abandonconflict.py --descriptors',
|
||||
'wallet_anchor.py --legacy-wallet',
|
||||
'wallet_anchor.py --descriptors',
|
||||
'feature_reindex.py',
|
||||
'feature_reindex_readonly.py',
|
||||
'wallet_labels.py --legacy-wallet',
|
||||
|
||||
@@ -409,6 +409,18 @@ class ToolWalletTest(BitcoinTestFramework):
|
||||
self.write_dump(dump_data, bad_sum_wallet_dump)
|
||||
self.assert_raises_tool_error('Error: Checksum is not the correct size', '-wallet=badload', '-dumpfile={}'.format(bad_sum_wallet_dump), 'createfromdump')
|
||||
assert not (self.nodes[0].wallets_path / "badload").is_dir()
|
||||
if not self.options.descriptors:
|
||||
os.rename(self.nodes[0].wallets_path / "wallet.dat", self.nodes[0].wallets_path / "default.wallet.dat")
|
||||
self.assert_raises_tool_error('Error: Checksum is not the correct size', '-wallet=', '-dumpfile={}'.format(bad_sum_wallet_dump), 'createfromdump')
|
||||
assert self.nodes[0].wallets_path.exists()
|
||||
assert not (self.nodes[0].wallets_path / "wallet.dat").exists()
|
||||
|
||||
self.log.info('Checking createfromdump with an unnamed wallet')
|
||||
self.do_tool_createfromdump("", "wallet.dump")
|
||||
assert (self.nodes[0].wallets_path / "wallet.dat").exists()
|
||||
os.unlink(self.nodes[0].wallets_path / "wallet.dat")
|
||||
if not self.options.descriptors:
|
||||
os.rename(self.nodes[0].wallets_path / "default.wallet.dat", self.nodes[0].wallets_path / "wallet.dat")
|
||||
|
||||
def test_chainless_conflicts(self):
|
||||
self.log.info("Test wallet tool when wallet contains conflicting transactions")
|
||||
|
||||
128
test/functional/wallet_anchor.py
Executable file
128
test/functional/wallet_anchor.py
Executable file
@@ -0,0 +1,128 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2025-present The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or https://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
import time
|
||||
|
||||
from test_framework.blocktools import MAX_FUTURE_BLOCK_TIME
|
||||
from test_framework.descriptors import descsum_create
|
||||
from test_framework.messages import (
|
||||
COutPoint,
|
||||
CTxIn,
|
||||
CTxInWitness,
|
||||
CTxOut,
|
||||
)
|
||||
from test_framework.script_util import (
|
||||
ANCHOR_ADDRESS,
|
||||
PAY_TO_ANCHOR,
|
||||
)
|
||||
from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
assert_raises_rpc_error,
|
||||
)
|
||||
from test_framework.wallet import MiniWallet
|
||||
|
||||
class WalletAnchorTest(BitcoinTestFramework):
|
||||
def add_options(self, parser):
|
||||
self.add_wallet_options(parser)
|
||||
|
||||
def set_test_params(self):
|
||||
self.num_nodes = 1
|
||||
|
||||
def skip_test_if_missing_module(self):
|
||||
self.skip_if_no_wallet()
|
||||
|
||||
def test_0_value_anchor_listunspent(self):
|
||||
self.log.info("Test that 0-value anchor outputs are detected as UTXOs")
|
||||
|
||||
# Create an anchor output, and spend it
|
||||
sender = MiniWallet(self.nodes[0])
|
||||
anchor_tx = sender.create_self_transfer(fee_rate=0, version=3)["tx"]
|
||||
anchor_tx.vout.append(CTxOut(0, PAY_TO_ANCHOR))
|
||||
anchor_tx.rehash() # Rehash after modifying anchor_tx
|
||||
anchor_spend = sender.create_self_transfer(version=3)["tx"]
|
||||
anchor_spend.vin.append(CTxIn(COutPoint(anchor_tx.sha256, 1), b""))
|
||||
anchor_spend.wit.vtxinwit.append(CTxInWitness())
|
||||
anchor_spend.rehash() # Rehash after modifying anchor_spend
|
||||
submit_res = self.nodes[0].submitpackage([anchor_tx.serialize().hex(), anchor_spend.serialize().hex()])
|
||||
assert_equal(submit_res["package_msg"], "success")
|
||||
anchor_txid = anchor_tx.hash
|
||||
anchor_spend_txid = anchor_spend.hash
|
||||
|
||||
# Mine each tx in separate blocks
|
||||
self.generateblock(self.nodes[0], sender.get_address(), [anchor_tx.serialize().hex()])
|
||||
anchor_tx_height = self.nodes[0].getblockcount()
|
||||
self.generateblock(self.nodes[0], sender.get_address(), [anchor_spend.serialize().hex()])
|
||||
|
||||
# Mock time forward and generate some blocks to avoid rescanning of latest blocks
|
||||
self.nodes[0].setmocktime(int(time.time()) + MAX_FUTURE_BLOCK_TIME + 1)
|
||||
self.generate(self.nodes[0], 10)
|
||||
|
||||
self.nodes[0].createwallet(wallet_name="anchor", disable_private_keys=True)
|
||||
wallet = self.nodes[0].get_wallet_rpc("anchor")
|
||||
|
||||
wallet.importaddress(ANCHOR_ADDRESS, rescan=False)
|
||||
|
||||
# The wallet should have no UTXOs, and not know of the anchor tx or its spend
|
||||
assert_equal(wallet.listunspent(), [])
|
||||
assert_raises_rpc_error(-5, "Invalid or non-wallet transaction id", wallet.gettransaction, anchor_txid)
|
||||
assert_raises_rpc_error(-5, "Invalid or non-wallet transaction id", wallet.gettransaction, anchor_spend_txid)
|
||||
|
||||
# Rescanning the block containing the anchor so that listunspent will list the output
|
||||
wallet.rescanblockchain(0, anchor_tx_height)
|
||||
utxos = wallet.listunspent()
|
||||
assert_equal(len(utxos), 1)
|
||||
assert_equal(utxos[0]["txid"], anchor_txid)
|
||||
assert_equal(utxos[0]["address"], ANCHOR_ADDRESS)
|
||||
assert_equal(utxos[0]["amount"], 0)
|
||||
wallet.gettransaction(anchor_txid)
|
||||
assert_raises_rpc_error(-5, "Invalid or non-wallet transaction id", wallet.gettransaction, anchor_spend_txid)
|
||||
|
||||
# Rescan the rest of the blockchain to see the anchor was spent
|
||||
wallet.rescanblockchain()
|
||||
assert_equal(wallet.listunspent(), [])
|
||||
wallet.gettransaction(anchor_spend_txid)
|
||||
|
||||
def test_cannot_sign_anchors(self):
|
||||
self.log.info("Test that the wallet cannot spend anchor outputs")
|
||||
for disable_privkeys in [False, True]:
|
||||
self.nodes[0].createwallet(wallet_name=f"anchor_spend_{disable_privkeys}", disable_private_keys=disable_privkeys)
|
||||
wallet = self.nodes[0].get_wallet_rpc(f"anchor_spend_{disable_privkeys}")
|
||||
if self.options.descriptors:
|
||||
import_res = wallet.importdescriptors([
|
||||
{"desc": descsum_create(f"addr({ANCHOR_ADDRESS})"), "timestamp": "now"},
|
||||
{"desc": descsum_create(f"raw({PAY_TO_ANCHOR.hex()})"), "timestamp": "now"}
|
||||
])
|
||||
assert_equal(import_res[0]["success"], disable_privkeys)
|
||||
assert_equal(import_res[1]["success"], disable_privkeys)
|
||||
else:
|
||||
wallet.importaddress(ANCHOR_ADDRESS)
|
||||
|
||||
anchor_txid = self.default_wallet.sendtoaddress(ANCHOR_ADDRESS, 1)
|
||||
self.generate(self.nodes[0], 1)
|
||||
|
||||
wallet = self.nodes[0].get_wallet_rpc("anchor_spend_True")
|
||||
utxos = wallet.listunspent()
|
||||
assert_equal(len(utxos), 1)
|
||||
assert_equal(utxos[0]["txid"], anchor_txid)
|
||||
assert_equal(utxos[0]["address"], ANCHOR_ADDRESS)
|
||||
assert_equal(utxos[0]["amount"], 1)
|
||||
|
||||
if self.options.descriptors:
|
||||
assert_raises_rpc_error(-4, "Missing solving data for estimating transaction size", wallet.send, [{self.default_wallet.getnewaddress(): 0.9999}])
|
||||
assert_raises_rpc_error(-4, "Unable to determine the size of the transaction, the wallet contains unsolvable descriptors", wallet.sendall, recipients=[self.default_wallet.getnewaddress()])
|
||||
else:
|
||||
assert_raises_rpc_error(-4, "Insufficient funds", wallet.send, [{self.default_wallet.getnewaddress(): 0.9999}])
|
||||
assert_raises_rpc_error(-6, "Total value of UTXO pool too low to pay for transaction. Try using lower feerate or excluding uneconomic UTXOs with 'send_max' option.", wallet.sendall, recipients=[self.default_wallet.getnewaddress()])
|
||||
assert_raises_rpc_error(-4, "Error: Private keys are disabled for this wallet", wallet.sendtoaddress, self.default_wallet.getnewaddress(), 0.9999)
|
||||
assert_raises_rpc_error(-4, "Unable to determine the size of the transaction, the wallet contains unsolvable descriptors", wallet.sendall, recipients=[self.default_wallet.getnewaddress()], inputs=utxos)
|
||||
|
||||
def run_test(self):
|
||||
self.default_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
|
||||
self.test_0_value_anchor_listunspent()
|
||||
self.test_cannot_sign_anchors()
|
||||
|
||||
if __name__ == '__main__':
|
||||
WalletAnchorTest(__file__).main()
|
||||
@@ -40,6 +40,7 @@ from test_framework.test_framework import BitcoinTestFramework
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
assert_raises_rpc_error,
|
||||
sha256sum_file,
|
||||
)
|
||||
|
||||
|
||||
@@ -136,10 +137,71 @@ class WalletBackupTest(BitcoinTestFramework):
|
||||
backup_file = self.nodes[0].datadir_path / 'wallet.bak'
|
||||
wallet_name = "res0"
|
||||
wallet_file = node.wallets_path / wallet_name
|
||||
error_message = "Failed to create database path '{}'. Database already exists.".format(wallet_file)
|
||||
error_message = "Failed to restore wallet. Database file exists in '{}'.".format(wallet_file / "wallet.dat")
|
||||
assert_raises_rpc_error(-36, error_message, node.restorewallet, wallet_name, backup_file)
|
||||
assert wallet_file.exists()
|
||||
|
||||
def test_restore_existent_dir(self):
|
||||
self.log.info("Test restore on an existent empty directory")
|
||||
node = self.nodes[3]
|
||||
backup_file = self.nodes[0].datadir_path / 'wallet.bak'
|
||||
wallet_name = "restored_wallet"
|
||||
wallet_dir = node.wallets_path / wallet_name
|
||||
os.mkdir(wallet_dir)
|
||||
res = node.restorewallet(wallet_name, backup_file)
|
||||
assert_equal(res['name'], wallet_name)
|
||||
node.unloadwallet(wallet_name)
|
||||
|
||||
self.log.info("Test restore succeeds when the target directory contains non-wallet files")
|
||||
wallet_file = node.wallets_path / wallet_name / "wallet.dat"
|
||||
os.remove(wallet_file)
|
||||
extra_file = node.wallets_path / wallet_name / "not_a_wallet.txt"
|
||||
extra_file.touch()
|
||||
res = node.restorewallet(wallet_name, backup_file)
|
||||
assert_equal(res['name'], wallet_name)
|
||||
assert extra_file.exists() # extra file was not removed by mistake
|
||||
node.unloadwallet(wallet_name)
|
||||
|
||||
self.log.info("Test restore failure due to existing db file in the destination directory")
|
||||
original_shasum = sha256sum_file(wallet_file)
|
||||
error_message = "Failed to restore wallet. Database file exists in '{}'.".format(wallet_dir / "wallet.dat")
|
||||
assert_raises_rpc_error(-36, error_message, node.restorewallet, wallet_name, backup_file)
|
||||
# Ensure the wallet file remains untouched
|
||||
assert wallet_dir.exists()
|
||||
assert_equal(original_shasum, sha256sum_file(wallet_file))
|
||||
|
||||
self.log.info("Test restore succeeds when the .dat file in the destination has a different name")
|
||||
second_wallet = wallet_dir / "hidden_storage.dat"
|
||||
os.rename(wallet_dir / "wallet.dat", second_wallet)
|
||||
original_shasum = sha256sum_file(second_wallet)
|
||||
res = node.restorewallet(wallet_name, backup_file)
|
||||
assert_equal(res['name'], wallet_name)
|
||||
assert (wallet_dir / "hidden_storage.dat").exists()
|
||||
assert_equal(original_shasum, sha256sum_file(second_wallet))
|
||||
node.unloadwallet(wallet_name)
|
||||
|
||||
# Clean for follow-up tests
|
||||
os.remove(wallet_file)
|
||||
|
||||
def test_restore_into_unnamed_wallet(self):
|
||||
self.log.info("Test restore into a default unnamed wallet")
|
||||
# This is also useful to test the migration recovery after failure logic
|
||||
node = self.nodes[3]
|
||||
if not self.options.descriptors:
|
||||
node.unloadwallet("")
|
||||
os.rename(node.wallets_path / "wallet.dat", node.wallets_path / "default.wallet.dat")
|
||||
backup_file = self.nodes[0].datadir_path / 'wallet.bak'
|
||||
wallet_name = ""
|
||||
res = node.restorewallet(wallet_name, backup_file)
|
||||
assert_equal(res['name'], "")
|
||||
assert (node.wallets_path / "wallet.dat").exists()
|
||||
# Clean for follow-up tests
|
||||
node.unloadwallet("")
|
||||
os.remove(node.wallets_path / "wallet.dat")
|
||||
if not self.options.descriptors:
|
||||
os.rename(node.wallets_path / "default.wallet.dat", node.wallets_path / "wallet.dat")
|
||||
node.loadwallet("")
|
||||
|
||||
def test_pruned_wallet_backup(self):
|
||||
self.log.info("Test loading backup on a pruned node when the backup was created close to the prune height of the restoring node")
|
||||
node = self.nodes[3]
|
||||
@@ -159,6 +221,19 @@ class WalletBackupTest(BitcoinTestFramework):
|
||||
# the backup to load successfully this close to the prune height
|
||||
node.restorewallet('pruned', node.datadir_path / 'wallet_pruned.bak')
|
||||
|
||||
self.log.info("Test restore on a pruned node when the backup was beyond the pruning point")
|
||||
if not self.options.descriptors:
|
||||
node.unloadwallet("")
|
||||
os.rename(node.wallets_path / "wallet.dat", node.wallets_path / "default.wallet.dat")
|
||||
backup_file = self.nodes[0].datadir_path / 'wallet.bak'
|
||||
wallet_name = ""
|
||||
error_message = "Wallet loading failed. Prune: last wallet synchronisation goes beyond pruned data. You need to -reindex (download the whole blockchain again in case of pruned node)"
|
||||
assert_raises_rpc_error(-4, error_message, node.restorewallet, wallet_name, backup_file)
|
||||
assert node.wallets_path.exists() # ensure the wallets dir exists
|
||||
if not self.options.descriptors:
|
||||
os.rename(node.wallets_path / "default.wallet.dat", node.wallets_path / "wallet.dat")
|
||||
node.loadwallet("")
|
||||
|
||||
def run_test(self):
|
||||
self.log.info("Generating initial blockchain")
|
||||
self.generate(self.nodes[0], 1)
|
||||
@@ -227,6 +302,8 @@ class WalletBackupTest(BitcoinTestFramework):
|
||||
assert_equal(res2_rpc.getbalance(), balance2)
|
||||
|
||||
self.restore_wallet_existent_name()
|
||||
self.test_restore_existent_dir()
|
||||
self.test_restore_into_unnamed_wallet()
|
||||
|
||||
if not self.options.descriptors:
|
||||
self.log.info("Restoring using dumped wallet")
|
||||
|
||||
@@ -5,9 +5,11 @@
|
||||
"""Test the listtransactions API."""
|
||||
|
||||
from decimal import Decimal
|
||||
import time
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from test_framework.blocktools import MAX_FUTURE_BLOCK_TIME
|
||||
from test_framework.messages import (
|
||||
COIN,
|
||||
tx_from_hex,
|
||||
@@ -17,7 +19,9 @@ from test_framework.util import (
|
||||
assert_array_result,
|
||||
assert_equal,
|
||||
assert_raises_rpc_error,
|
||||
find_vout_for_address,
|
||||
)
|
||||
from test_framework.wallet_util import get_generate_key
|
||||
|
||||
|
||||
class ListTransactionsTest(BitcoinTestFramework):
|
||||
@@ -114,6 +118,8 @@ class ListTransactionsTest(BitcoinTestFramework):
|
||||
self.run_invalid_parameters_test()
|
||||
self.test_op_return()
|
||||
|
||||
self.test_from_me_status_change()
|
||||
|
||||
def run_rbf_opt_in_test(self):
|
||||
"""Test the opt-in-rbf flag for sent and received transactions."""
|
||||
|
||||
@@ -327,6 +333,47 @@ class ListTransactionsTest(BitcoinTestFramework):
|
||||
|
||||
assert 'address' not in op_ret_tx
|
||||
|
||||
def test_from_me_status_change(self):
|
||||
self.log.info("Test gettransaction after changing a transaction's 'from me' status")
|
||||
self.nodes[0].createwallet("fromme")
|
||||
default_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
|
||||
wallet = self.nodes[0].get_wallet_rpc("fromme")
|
||||
|
||||
# The 'fee' field of gettransaction is only added when the transaction is 'from me'
|
||||
# Run twice, once for a transaction in the mempool, again when it confirms
|
||||
for confirm in [False, True]:
|
||||
key = get_generate_key()
|
||||
default_wallet.importprivkey(key.privkey)
|
||||
|
||||
send_res = default_wallet.send(outputs=[{key.p2wpkh_addr: 1}, {wallet.getnewaddress(): 1}])
|
||||
assert_equal(send_res["complete"], True)
|
||||
vout = find_vout_for_address(self.nodes[0], send_res["txid"], key.p2wpkh_addr)
|
||||
utxos = [{"txid": send_res["txid"], "vout": vout}]
|
||||
self.generate(self.nodes[0], 1, sync_fun=self.no_op)
|
||||
|
||||
# Send to the test wallet, ensuring that one input is for the descriptor we will import,
|
||||
# and that there are other inputs belonging to only the sending wallet
|
||||
send_res = default_wallet.send(outputs=[{wallet.getnewaddress(): 1.5}], inputs=utxos, add_inputs=True)
|
||||
assert_equal(send_res["complete"], True)
|
||||
txid = send_res["txid"]
|
||||
self.nodes[0].syncwithvalidationinterfacequeue()
|
||||
tx_info = wallet.gettransaction(txid)
|
||||
assert "fee" not in tx_info
|
||||
assert_equal(any(detail["category"] == "send" for detail in tx_info["details"]), False)
|
||||
|
||||
if confirm:
|
||||
self.generate(self.nodes[0], 1, sync_fun=self.no_op)
|
||||
# Mock time forward and generate blocks so that the import does not rescan the transaction
|
||||
self.nodes[0].setmocktime(int(time.time()) + MAX_FUTURE_BLOCK_TIME + 1)
|
||||
self.generate(self.nodes[0], 10, sync_fun=self.no_op)
|
||||
|
||||
wallet.importprivkey(key.privkey)
|
||||
# TODO: We should check that the fee matches, but since the transaction spends inputs
|
||||
# not known to the wallet, it is incorrectly calculating the fee.
|
||||
# assert_equal(wallet.gettransaction(txid)["fee"], fee)
|
||||
tx_info = wallet.gettransaction(txid)
|
||||
assert "fee" in tx_info
|
||||
assert_equal(any(detail["category"] == "send" for detail in tx_info["details"]), True)
|
||||
|
||||
if __name__ == '__main__':
|
||||
ListTransactionsTest(__file__).main()
|
||||
|
||||
@@ -4,6 +4,8 @@
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
"""Test Migrating a wallet from legacy to descriptor."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
import random
|
||||
import shutil
|
||||
import struct
|
||||
@@ -24,6 +26,7 @@ from test_framework.script import hash160
|
||||
from test_framework.script_util import key_to_p2pkh_script, key_to_p2pk_script, script_to_p2sh_script, script_to_p2wsh_script
|
||||
from test_framework.util import (
|
||||
assert_equal,
|
||||
assert_greater_than,
|
||||
assert_raises_rpc_error,
|
||||
find_vout_for_address,
|
||||
sha256sum_file,
|
||||
@@ -522,6 +525,14 @@ class WalletMigrationTest(BitcoinTestFramework):
|
||||
|
||||
assert_equal(bals, wallet.getbalances())
|
||||
|
||||
def clear_default_wallet(self, backup_file):
|
||||
# Test cleanup: Clear unnamed default wallet for subsequent tests
|
||||
(self.old_node.wallets_path / "wallet.dat").unlink()
|
||||
(self.master_node.wallets_path / "wallet.dat").unlink(missing_ok=True)
|
||||
shutil.rmtree(self.master_node.wallets_path / "default_wallet_watchonly", ignore_errors=True)
|
||||
shutil.rmtree(self.master_node.wallets_path / "default_wallet_solvables", ignore_errors=True)
|
||||
backup_file.unlink()
|
||||
|
||||
def test_default_wallet(self):
|
||||
self.log.info("Test migration of the wallet named as the empty string")
|
||||
wallet = self.create_legacy_wallet("")
|
||||
@@ -548,6 +559,103 @@ class WalletMigrationTest(BitcoinTestFramework):
|
||||
|
||||
self.master_node.setmocktime(0)
|
||||
|
||||
wallet.unloadwallet()
|
||||
self.clear_default_wallet(backup_file=Path(res["backup_path"]))
|
||||
|
||||
def test_default_wallet_watch_only(self):
|
||||
self.log.info("Test unnamed (default) watch-only wallet migration")
|
||||
master_wallet = self.master_node.get_wallet_rpc(self.default_wallet_name)
|
||||
wallet = self.create_legacy_wallet("", blank=True)
|
||||
wallet.importaddress(master_wallet.getnewaddress(address_type="legacy"))
|
||||
|
||||
res, def_wallet = self.migrate_and_get_rpc("")
|
||||
wallet = self.master_node.get_wallet_rpc("default_wallet_watchonly")
|
||||
|
||||
info = wallet.getwalletinfo()
|
||||
assert_equal(info["descriptors"], True)
|
||||
assert_equal(info["format"], "sqlite")
|
||||
assert_equal(info["private_keys_enabled"], False)
|
||||
assert_equal(info["walletname"], "default_wallet_watchonly")
|
||||
|
||||
# The default wallet will still exist and have newly generated descriptors
|
||||
assert (self.master_node.wallets_path / "wallet.dat").exists()
|
||||
def_wallet_info = def_wallet.getwalletinfo()
|
||||
assert_equal(def_wallet_info["descriptors"], True)
|
||||
assert_equal(def_wallet_info["format"], "sqlite")
|
||||
assert_equal(def_wallet_info["private_keys_enabled"], True)
|
||||
assert_equal(def_wallet_info["walletname"], "")
|
||||
assert_greater_than(def_wallet_info["keypoolsize"], 0)
|
||||
|
||||
wallet.unloadwallet()
|
||||
self.clear_default_wallet(backup_file=Path(res["backup_path"]))
|
||||
|
||||
def test_migration_failure(self, wallet_name):
|
||||
is_default = wallet_name == ""
|
||||
wallet_pretty_name = "unnamed (default)" if is_default else f'"{wallet_name}"'
|
||||
self.log.info(f"Test failure during migration of wallet named: {wallet_pretty_name}")
|
||||
# Preface, set up legacy wallet and unload it
|
||||
master_wallet = self.master_node.get_wallet_rpc(self.default_wallet_name)
|
||||
wallet = self.create_legacy_wallet(wallet_name, blank=True)
|
||||
wallet.importaddress(master_wallet.getnewaddress(address_type="legacy"))
|
||||
wallet.unloadwallet()
|
||||
|
||||
if os.path.isabs(wallet_name):
|
||||
old_path = master_path = Path(wallet_name)
|
||||
else:
|
||||
old_path = self.old_node.wallets_path / wallet_name
|
||||
master_path = self.master_node.wallets_path / wallet_name
|
||||
os.makedirs(master_path, exist_ok=True)
|
||||
shutil.copyfile(old_path / "wallet.dat", master_path / "wallet.dat")
|
||||
|
||||
# This will be the watch-only directory the migration tries to create,
|
||||
# we make migration fail by placing a wallet.dat file there.
|
||||
wo_prefix = wallet_name or "default_wallet"
|
||||
# wo_prefix might have path characters in it, this corresponds with
|
||||
# DoMigration().
|
||||
wo_dirname = f"{wo_prefix}_watchonly"
|
||||
watch_only_dir = self.master_node.wallets_path / wo_dirname
|
||||
os.mkdir(watch_only_dir)
|
||||
shutil.copyfile(old_path / "wallet.dat", watch_only_dir / "wallet.dat")
|
||||
|
||||
# Make a file in the wallets dir that must still exist after migration
|
||||
survive_path = self.master_node.wallets_path / "survive"
|
||||
open(survive_path, "wb").close()
|
||||
assert survive_path.exists()
|
||||
|
||||
mocked_time = int(time.time())
|
||||
self.master_node.setmocktime(mocked_time)
|
||||
assert_raises_rpc_error(-4, "Failed to create database", self.master_node.migratewallet, wallet_name)
|
||||
self.master_node.setmocktime(0)
|
||||
|
||||
# Verify the /wallets/ path exists.
|
||||
assert self.master_node.wallets_path.exists()
|
||||
|
||||
# Verify survive is still there
|
||||
assert survive_path.exists()
|
||||
# Verify both wallet paths exist.
|
||||
assert Path(old_path / "wallet.dat").exists()
|
||||
assert Path(master_path / "wallet.dat").exists()
|
||||
|
||||
backup_prefix = "default_wallet" if is_default else wallet_name
|
||||
backup_path = master_path / f"{backup_prefix}_{mocked_time}.legacy.bak"
|
||||
assert backup_path.exists()
|
||||
|
||||
with open(self.master_node.wallets_path / wallet_name / self.wallet_data_filename, "rb") as f:
|
||||
data = f.read(16)
|
||||
_, _, magic = struct.unpack("QII", data)
|
||||
assert_equal(magic, BTREE_MAGIC)
|
||||
|
||||
|
||||
# Cleanup
|
||||
if is_default:
|
||||
self.clear_default_wallet(backup_path)
|
||||
else:
|
||||
backup_path.unlink()
|
||||
Path(watch_only_dir / "wallet.dat").unlink()
|
||||
Path(watch_only_dir).rmdir()
|
||||
Path(master_path / "wallet.dat").unlink()
|
||||
Path(old_path / "wallet.dat").unlink(missing_ok=True)
|
||||
|
||||
def test_direct_file(self):
|
||||
self.log.info("Test migration of a wallet that is not in a wallet directory")
|
||||
wallet = self.create_legacy_wallet("plainfile")
|
||||
@@ -1357,6 +1465,42 @@ class WalletMigrationTest(BitcoinTestFramework):
|
||||
assert_equal(addr_info["solvable"], True)
|
||||
assert "hex" in addr_info
|
||||
|
||||
def unsynced_wallet_on_pruned_node_fails(self):
    """Check that migrating a wallet whose best block has been pruned fails
    gracefully: the migratewallet RPC errors out, the wallet file is left as
    BDB (not converted), and the pre-migration backup file is kept on disk.

    Side effects: restarts node 0 in prune mode and prunes its first blocks,
    so later tests cannot rely on node 0 having the full chain.
    """
    self.log.info("Test migration of an unsynced wallet on a pruned node fails gracefully")
    # Record the wallet's last synced height, then unload it so the file can
    # be copied onto the master node below.
    wallet = self.create_legacy_wallet("", load_on_startup=False)
    last_wallet_synced_block = wallet.getwalletinfo()['lastprocessedblock']['height']
    wallet.unloadwallet()

    shutil.copyfile(self.old_node.wallets_path / "wallet.dat", self.master_node.wallets_path / "wallet.dat")

    # Generate blocks just so the wallet best block is pruned
    self.restart_node(0, ["-fastprune", "-prune=1", "-nowallet"])
    self.connect_nodes(0, 1)
    self.generate(self.master_node, 450, sync_fun=self.no_op)
    self.master_node.pruneblockchain(250)
    # Ensure next block to sync is unavailable
    assert_raises_rpc_error(-1, "Block not available (pruned data)", self.master_node.getblock, self.master_node.getblockhash(last_wallet_synced_block + 1))

    # Check migration failure. Mock time is fixed so the timestamped backup
    # filename created by migratewallet can be predicted and asserted below.
    mocked_time = int(time.time())
    self.master_node.setmocktime(mocked_time)
    assert_raises_rpc_error(-4, "last wallet synchronisation goes beyond pruned data. You need to -reindex (download the whole blockchain again in case of pruned node)", self.master_node.migratewallet, wallet_name="")
    self.master_node.setmocktime(0)

    # Verify the /wallets/ path exists, the wallet is still BDB and the backup file is there.
    assert self.master_node.wallets_path.exists()

    # The first 16 bytes of a BDB file contain the btree magic; if migration
    # had gone through, the file would have been replaced by SQLite.
    with open(self.master_node.wallets_path / "wallet.dat", "rb") as f:
        data = f.read(16)
        _, _, magic = struct.unpack("QII", data)
    assert_equal(magic, BTREE_MAGIC)

    # The failed migration must still have left the timestamped legacy backup.
    backup_path = self.master_node.wallets_path / f"default_wallet_{mocked_time}.legacy.bak"
    assert backup_path.exists()

    # Remove the default wallet and its backup so later tests start clean.
    self.clear_default_wallet(backup_path)
def run_test(self):
|
||||
self.master_node = self.nodes[0]
|
||||
self.old_node = self.nodes[1]
|
||||
@@ -1372,7 +1516,18 @@ class WalletMigrationTest(BitcoinTestFramework):
|
||||
self.test_encrypted()
|
||||
self.test_nonexistent()
|
||||
self.test_unloaded_by_path()
|
||||
|
||||
migration_failure_cases = [
|
||||
"",
|
||||
"../",
|
||||
os.path.abspath(self.master_node.datadir_path / "absolute_path"),
|
||||
"normallynamedwallet"
|
||||
]
|
||||
for wallet_name in migration_failure_cases:
|
||||
self.test_migration_failure(wallet_name=wallet_name)
|
||||
|
||||
self.test_default_wallet()
|
||||
self.test_default_wallet_watch_only()
|
||||
self.test_direct_file()
|
||||
self.test_addressbook()
|
||||
self.test_migrate_raw_p2sh()
|
||||
@@ -1390,5 +1545,8 @@ class WalletMigrationTest(BitcoinTestFramework):
|
||||
self.test_taproot()
|
||||
self.test_solvable_no_privs()
|
||||
|
||||
# Note: After this test the first 250 blocks of 'master_node' are pruned
|
||||
self.unsynced_wallet_on_pruned_node_fails()
|
||||
|
||||
if __name__ == '__main__':
    # Standard test-framework entry point: run this functional test directly.
    WalletMigrationTest(__file__).main()
|
||||
|
||||
Reference in New Issue
Block a user