diff --git a/arcanist/linter/ISortLinter.php b/arcanist/linter/ISortLinter.php index b2caa3ddb..150a3f2b9 100644 --- a/arcanist/linter/ISortLinter.php +++ b/arcanist/linter/ISortLinter.php @@ -1,83 +1,81 @@ getProjectRoot(); $path = Filesystem::resolvePath($path, $root); $orig = file_get_contents($path); if ($orig == $stdout) { return array(); } $message = id(new ArcanistLintMessage()) ->setPath($path) ->setLine(1) ->setChar(1) ->setGranularity(ArcanistLinter::GRANULARITY_FILE) ->setCode('ISORT') ->setSeverity(ArcanistLintSeverity::SEVERITY_AUTOFIX) ->setName('Sorting Python imports') ->setDescription("'$path' has unsorted imports.") ->setOriginalText($orig) ->setReplacementText($stdout); return array($message); } } diff --git a/test/functional/abc-mempool-coherence-on-activations.py b/test/functional/abc-mempool-coherence-on-activations.py index eb7fac54e..39ba68698 100755 --- a/test/functional/abc-mempool-coherence-on-activations.py +++ b/test/functional/abc-mempool-coherence-on-activations.py @@ -1,376 +1,369 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Copyright (c) 2017 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ This test checks the mempool coherence when changing validation rulesets, which happens on (de)activations of network upgrades (forks). We test the mempool coherence in 3 cases: 1) on activations, pre-fork-only transactions are evicted from the mempool, while always-valid transactions remain. 2) on deactivations, post-fork-only transactions (unconfirmed or once confirmed) are evicted from the mempool, while always-valid transactions are reincluded. 3) on a reorg to a chain that deactivates and reactivates the fork, post-fork-only and always-valid transactions (unconfirmed and/or once confirmed on the shorter chain) are kept or reincluded in the mempool. """ from test_framework.blocktools import ( create_block, create_coinbase, create_tx_with_script, make_conform_to_ctor, ) from test_framework.key import ECKey -from test_framework.messages import ( - COIN, - COutPoint, - CTransaction, - CTxIn, - CTxOut, - ToHex, -) +from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, ToHex from test_framework.p2p import P2PDataStore from test_framework.script import ( OP_CHECKSIG, OP_TRUE, SIGHASH_ALL, SIGHASH_FORKID, CScript, SignatureHashForkId, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error # ---Code specific to the activation used for this test--- # It might change depending on the activation code currently existing in the # client software. We use the replay protection activation for this test. ACTIVATION_TIME = 2000000000 EXTRA_ARG = f"-replayprotectionactivationtime={ACTIVATION_TIME}" # simulation starts before activation FIRST_BLOCK_TIME = ACTIVATION_TIME - 86400 # Expected RPC error when trying to send an activation specific spend txn. RPC_EXPECTED_ERROR = "mandatory-script-verify-flag-failed (Signature must be zero for failed CHECK(MULTI)SIG operation)" def create_fund_and_activation_specific_spending_tx(spend, pre_fork_only): # Creates 2 transactions: # 1) txfund: create outputs to be used by txspend. Must be valid pre-fork. # 2) txspend: spending transaction that is specific to the activation # being used and can be pre-fork-only or post-fork-only, depending on the # function parameter. 
# This specific implementation uses the replay protection mechanism to # create transactions that are only valid before or after the fork. # Generate a key pair to test private_key = ECKey() private_key.generate() public_key = private_key.get_pubkey().get_bytes() # Fund transaction script = CScript([public_key, OP_CHECKSIG]) txfund = create_tx_with_script( spend.tx, spend.n, b'', amount=50 * COIN, script_pub_key=script) txfund.rehash() # Activation specific spending tx txspend = CTransaction() txspend.vout.append(CTxOut(50 * COIN - 1000, CScript([OP_TRUE]))) txspend.vin.append(CTxIn(COutPoint(txfund.sha256, 0), b'')) # Sign the transaction # Use forkvalues that create pre-fork-only or post-fork-only # transactions. forkvalue = 0 if pre_fork_only else 0xffdead sighashtype = (forkvalue << 8) | SIGHASH_ALL | SIGHASH_FORKID sighash = SignatureHashForkId( script, txspend, 0, sighashtype, 50 * COIN) sig = private_key.sign_ecdsa(sighash) + \ bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])) txspend.vin[0].scriptSig = CScript([sig]) txspend.rehash() return txfund, txspend def create_fund_and_pre_fork_only_tx(spend): return create_fund_and_activation_specific_spending_tx( spend, pre_fork_only=True) def create_fund_and_post_fork_only_tx(spend): return create_fund_and_activation_specific_spending_tx( spend, pre_fork_only=False) # ---Mempool coherence on activations test--- class PreviousSpendableOutput(object): def __init__(self, tx=CTransaction(), n=-1): self.tx = tx self.n = n class MempoolCoherenceOnActivationsTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.block_heights = {} self.tip = None self.blocks = {} self.extra_args = [[ '-whitelist=noban@127.0.0.1', EXTRA_ARG, '-acceptnonstdtxn=1', '-automaticunparking=1', ]] def next_block(self, number): if self.tip is None: base_block_hash = self.genesis_hash block_time = FIRST_BLOCK_TIME else: base_block_hash = self.tip.sha256 block_time = self.tip.nTime + 1 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height) coinbase.rehash() block = create_block(base_block_hash, coinbase, block_time) # Do PoW, which is cheap on regnet block.solve() self.tip = block self.block_heights[block.sha256] = height assert number not in self.blocks self.blocks[number] = block return block def run_test(self): node = self.nodes[0] peer = node.add_p2p_connection(P2PDataStore()) node.setmocktime(ACTIVATION_TIME) self.genesis_hash = int(node.getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] # save the current tip so it can be spent by a later block def save_spendable_output(): spendable_outputs.append(self.tip) # get an output that we previously marked as spendable def get_spendable_output(): return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) # adds transactions to the block and updates state def update_block(block_number, new_transactions): block = self.blocks[block_number] block.vtx.extend(new_transactions) old_sha256 = block.sha256 make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Update the internal state just like in next_block self.tip = block if block.sha256 != old_sha256: self.block_heights[ block.sha256] = self.block_heights[old_sha256] del self.block_heights[old_sha256] self.blocks[block_number] = block return block # send a txn to the mempool and check it was accepted def send_transaction_to_mempool(tx): tx_id = node.sendrawtransaction(ToHex(tx)) assert 
tx_id in node.getrawmempool() # checks the mempool has exactly the same txns as in the provided list def check_mempool_equal(txns): assert set(node.getrawmempool()) == set(tx.hash for tx in txns) # Create an always-valid chained transaction. It spends a # scriptPub=OP_TRUE coin into another. Returns the transaction and its # spendable output for further chaining. def create_always_valid_chained_tx(spend): tx = create_tx_with_script( spend.tx, spend.n, b'', amount=spend.tx.vout[0].nValue - 1000, script_pub_key=CScript([OP_TRUE])) tx.rehash() return tx, PreviousSpendableOutput(tx, 0) # shorthand block = self.next_block # Create a new block block(0) save_spendable_output() peer.send_blocks_and_test([self.tip], node) # Now we need that block to mature so we can spend the coinbase. maturity_blocks = [] for i in range(110): block(5000 + i) maturity_blocks.append(self.tip) save_spendable_output() peer.send_blocks_and_test(maturity_blocks, node) # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(100): out.append(get_spendable_output()) # Create 2 pre-fork-only txns (tx_pre0, tx_pre1). Fund txns are valid # pre-fork, so we can mine them right away. txfund0, tx_pre0 = create_fund_and_pre_fork_only_tx(out[0]) txfund1, tx_pre1 = create_fund_and_pre_fork_only_tx(out[1]) # Create 2 post-fork-only txns (tx_post0, tx_post1). Fund txns are # valid pre-fork, so we can mine them right away. txfund2, tx_post0 = create_fund_and_post_fork_only_tx(out[2]) txfund3, tx_post1 = create_fund_and_post_fork_only_tx(out[3]) # Create blocks to activate the fork. Mine all funding transactions. bfork = block(5555) bfork.nTime = ACTIVATION_TIME - 1 update_block(5555, [txfund0, txfund1, txfund2, txfund3]) peer.send_blocks_and_test([self.tip], node) for i in range(5): peer.send_blocks_and_test([block(5200 + i)], node) # Check we are just before the activation time assert_equal( node.getblockchaininfo()['mediantime'], ACTIVATION_TIME - 1) # We are just before the fork. Pre-fork-only and always-valid chained # txns (tx_chain0, tx_chain1) are valid, post-fork-only txns are # rejected. send_transaction_to_mempool(tx_pre0) send_transaction_to_mempool(tx_pre1) tx_chain0, last_chained_output = create_always_valid_chained_tx(out[4]) tx_chain1, last_chained_output = create_always_valid_chained_tx( last_chained_output) send_transaction_to_mempool(tx_chain0) send_transaction_to_mempool(tx_chain1) assert_raises_rpc_error(-26, RPC_EXPECTED_ERROR, node.sendrawtransaction, ToHex(tx_post0)) assert_raises_rpc_error(-26, RPC_EXPECTED_ERROR, node.sendrawtransaction, ToHex(tx_post1)) check_mempool_equal([tx_chain0, tx_chain1, tx_pre0, tx_pre1]) # Activate the fork. Mine the 1st always-valid chained txn and a # pre-fork-only txn. block(5556) update_block(5556, [tx_chain0, tx_pre0]) peer.send_blocks_and_test([self.tip], node) forkblockid = node.getbestblockhash() # Check we just activated the fork assert_equal(node.getblockheader(forkblockid)['mediantime'], ACTIVATION_TIME) # Check mempool coherence when activating the fork. Pre-fork-only txns # were evicted from the mempool, while always-valid txns remain. # Evicted: tx_pre1 check_mempool_equal([tx_chain1]) # Post-fork-only and always-valid txns are accepted, pre-fork-only txns # are rejected. 
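# A minimal illustrative sketch (not from the patch) of the fork-value
# arithmetic used by create_fund_and_activation_specific_spending_tx above;
# the 0x01/0x40 constants mirror SIGHASH_ALL/SIGHASH_FORKID from
# test_framework.script, and the helper name is hypothetical.
def replay_sighashtype(pre_fork_only: bool) -> int:
    # Shifting the fork value into the upper bits selects which ruleset
    # will accept the resulting signature.
    sighash_all, sighash_forkid = 0x01, 0x40
    forkvalue = 0 if pre_fork_only else 0xFFDEAD
    return (forkvalue << 8) | sighash_all | sighash_forkid

assert replay_sighashtype(True) == 0x41          # accepted only pre-fork
assert replay_sighashtype(False) == 0xFFDEAD41   # accepted only post-fork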
send_transaction_to_mempool(tx_post0) send_transaction_to_mempool(tx_post1) tx_chain2, _ = create_always_valid_chained_tx(last_chained_output) send_transaction_to_mempool(tx_chain2) assert_raises_rpc_error(-26, RPC_EXPECTED_ERROR, node.sendrawtransaction, ToHex(tx_pre1)) check_mempool_equal([tx_chain1, tx_chain2, tx_post0, tx_post1]) # Mine the 2nd always-valid chained txn and a post-fork-only txn. block(5557) update_block(5557, [tx_chain1, tx_post0]) peer.send_blocks_and_test([self.tip], node) postforkblockid = node.getbestblockhash() # The mempool contains the 3rd chained txn and a post-fork-only txn. check_mempool_equal([tx_chain2, tx_post1]) # In the following we will be testing block disconnections and reorgs. # - tx_chain2 will always be retained in the mempool since it is always # valid. Its continued presence shows that we are never simply # clearing the entire mempool. # - tx_post1 may be evicted from mempool if we land before the fork. # - tx_post0 is in a block and if 'de-mined', it will either be evicted # or end up in mempool depending on whether we land before/after the fork. # - tx_pre0 is in a block and if 'de-mined', it will either be evicted # or end up in mempool depending on whether we land after/before the fork. # First we do a disconnection of the post-fork block, which is a # normal disconnection that merely returns the block contents into # the mempool -- nothing is lost. node.invalidateblock(postforkblockid) # In old mempool: tx_chain2, tx_post1 # Recovered from blocks: tx_chain1 and tx_post0. # Lost from blocks: NONE # Retained from old mempool: tx_chain2, tx_post1 # Evicted from old mempool: NONE check_mempool_equal([tx_chain1, tx_chain2, tx_post0, tx_post1]) # Now, disconnect the fork block. This is a special disconnection # that requires reprocessing the mempool due to the change in rules. node.invalidateblock(forkblockid) # In old mempool: tx_chain1, tx_chain2, tx_post0, tx_post1 # Recovered from blocks: tx_chain0, tx_pre0 # Lost from blocks: NONE # Retained from old mempool: tx_chain1, tx_chain2 # Evicted from old mempool: tx_post0, tx_post1 check_mempool_equal([tx_chain0, tx_chain1, tx_chain2, tx_pre0]) # Restore state node.reconsiderblock(postforkblockid) node.reconsiderblock(forkblockid) send_transaction_to_mempool(tx_post1) check_mempool_equal([tx_chain2, tx_post1]) # Test a reorg that crosses the fork. # If such a reorg happens, most likely it will both start *and end* # after the fork. We will test such a case here and make sure that # post-fork-only transactions are not unnecessarily discarded from # the mempool in such a reorg. Pre-fork-only transactions, however, can # get lost. # Set up a longer competing chain that doesn't confirm any of our txns. # This starts after 5204, so it contains neither the forkblockid nor # the postforkblockid from above. 
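# An aside on the length arithmetic behind the reorg below (reasoning from
# the test itself, not part of the patch): the active tip sits two blocks
# past block 5204 (the fork block 5556 and the post-fork block 5557), while
# the competing branch adds three blocks (5900-5902) on top of 5204, so the
# node reorgs onto it. The branch's timestamps keep the median time past
# the activation, which is why the post-fork txns survive the reorg.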
self.tip = self.blocks[5204] reorg_blocks = [] for i in range(3): reorg_blocks.append(block(5900 + i)) # Perform the reorg peer.send_blocks_and_test(reorg_blocks, node) # reorg finishes after the fork assert_equal( node.getblockchaininfo()['mediantime'], ACTIVATION_TIME + 2) # In old mempool: tx_chain2, tx_post1 # Recovered from blocks: tx_chain0, tx_chain1, tx_post0 # Lost from blocks: tx_pre0 # Retained from old mempool: tx_chain2, tx_post1 # Evicted from old mempool: NONE check_mempool_equal( [tx_chain0, tx_chain1, tx_chain2, tx_post0, tx_post1]) if __name__ == '__main__': MempoolCoherenceOnActivationsTest().main() diff --git a/test/functional/abc-replay-protection.py b/test/functional/abc-replay-protection.py index 478aa4a0f..89a80a677 100755 --- a/test/functional/abc-replay-protection.py +++ b/test/functional/abc-replay-protection.py @@ -1,321 +1,314 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Copyright (c) 2017 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ This test checks activation of UAHF and the different consensus rules related to this activation. It is derived from the much more complex p2p-fullblocktest. """ import time from test_framework.blocktools import ( create_block, create_coinbase, create_tx_with_script, make_conform_to_ctor, ) from test_framework.key import ECKey -from test_framework.messages import ( - COIN, - COutPoint, - CTransaction, - CTxIn, - CTxOut, - ToHex, -) +from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, ToHex from test_framework.p2p import P2PDataStore from test_framework.script import ( OP_CHECKSIG, OP_TRUE, SIGHASH_ALL, SIGHASH_FORKID, CScript, SignatureHashForkId, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error # far into the future REPLAY_PROTECTION_START_TIME = 2000000000 # Error due to invalid signature RPC_INVALID_SIGNATURE_ERROR = "mandatory-script-verify-flag-failed (Signature must be zero for failed CHECK(MULTI)SIG operation)" class PreviousSpendableOutput(object): def __init__(self, tx=CTransaction(), n=-1): self.tx = tx self.n = n class ReplayProtectionTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.block_heights = {} self.tip = None self.blocks = {} self.extra_args = [['-whitelist=noban@127.0.0.1', "-replayprotectionactivationtime={}".format( REPLAY_PROTECTION_START_TIME), "-acceptnonstdtxn=1"]] def next_block(self, number): if self.tip is None: base_block_hash = self.genesis_hash block_time = int(time.time()) + 1 else: base_block_hash = self.tip.sha256 block_time = self.tip.nTime + 1 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height) block = create_block(base_block_hash, coinbase, block_time) # Do PoW, which is cheap on regnet block.solve() self.tip = block self.block_heights[block.sha256] = height assert number not in self.blocks self.blocks[number] = block return block def set_tip(self, number: int): """ Move the tip back to a previous block. 
""" self.tip = self.blocks[number] def run_test(self): node = self.nodes[0] peer = node.add_p2p_connection(P2PDataStore()) node.setmocktime(REPLAY_PROTECTION_START_TIME) self.genesis_hash = int(node.getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] # save the current tip so it can be spent by a later block def save_spendable_output(): spendable_outputs.append(self.tip) # get an output that we previously marked as spendable def get_spendable_output(): return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) # adds transactions to the block and updates state def update_block(block_number, new_transactions): block = self.blocks[block_number] block.vtx.extend(new_transactions) old_sha256 = block.sha256 make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Update the internal state just like in next_block self.tip = block if block.sha256 != old_sha256: self.block_heights[ block.sha256] = self.block_heights[old_sha256] del self.block_heights[old_sha256] self.blocks[block_number] = block return block # shorthand block = self.next_block # Create a new block block(0) save_spendable_output() peer.send_blocks_and_test([self.tip], node) # Now we need that block to mature so we can spend the coinbase. maturity_blocks = [] for i in range(99): block(5000 + i) maturity_blocks.append(self.tip) save_spendable_output() peer.send_blocks_and_test(maturity_blocks, node) # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(100): out.append(get_spendable_output()) # Generate a key pair to test P2SH sigCheck count private_key = ECKey() private_key.generate() public_key = private_key.get_pubkey().get_bytes() # This is a little handier to use than the version in blocktools.py def create_fund_and_spend_tx(spend, forkvalue=0): # Fund transaction script = CScript([public_key, OP_CHECKSIG]) txfund = create_tx_with_script( spend.tx, spend.n, b'', amount=50 * COIN - 1000, script_pub_key=script) txfund.rehash() # Spend transaction txspend = CTransaction() txspend.vout.append(CTxOut(50 * COIN - 2000, CScript([OP_TRUE]))) txspend.vin.append(CTxIn(COutPoint(txfund.sha256, 0), b'')) # Sign the transaction sighashtype = (forkvalue << 8) | SIGHASH_ALL | SIGHASH_FORKID sighash = SignatureHashForkId( script, txspend, 0, sighashtype, 50 * COIN - 1000) sig = private_key.sign_ecdsa(sighash) + \ bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])) txspend.vin[0].scriptSig = CScript([sig]) txspend.rehash() return [txfund, txspend] def send_transaction_to_mempool(tx): tx_id = node.sendrawtransaction(ToHex(tx)) assert tx_id in set(node.getrawmempool()) return tx_id # Before the fork, no replay protection required to get in the mempool. txns = create_fund_and_spend_tx(out[0]) send_transaction_to_mempool(txns[0]) send_transaction_to_mempool(txns[1]) # And txns get mined in a block properly. block(1) update_block(1, txns) peer.send_blocks_and_test([self.tip], node) # Replay protected transactions are rejected. replay_txns = create_fund_and_spend_tx(out[1], 0xffdead) send_transaction_to_mempool(replay_txns[0]) assert_raises_rpc_error(-26, RPC_INVALID_SIGNATURE_ERROR, node.sendrawtransaction, ToHex(replay_txns[1])) # And block containing them are rejected as well. block(2) update_block(2, replay_txns) peer.send_blocks_and_test( [self.tip], node, success=False, reject_reason='blk-bad-inputs') # Rewind bad block self.set_tip(1) # Create a block that would activate the replay protection. 
bfork = block(5555) bfork.nTime = REPLAY_PROTECTION_START_TIME - 1 update_block(5555, []) peer.send_blocks_and_test([self.tip], node) activation_blocks = [] for i in range(5): block(5100 + i) activation_blocks.append(self.tip) peer.send_blocks_and_test(activation_blocks, node) # Check we are just before the activation time assert_equal( node.getblockchaininfo()['mediantime'], REPLAY_PROTECTION_START_TIME - 1) # We are just before the fork, replay protected txns are still rejected assert_raises_rpc_error(-26, RPC_INVALID_SIGNATURE_ERROR, node.sendrawtransaction, ToHex(replay_txns[1])) block(3) update_block(3, replay_txns) peer.send_blocks_and_test( [self.tip], node, success=False, reject_reason='blk-bad-inputs') # Rewind bad block self.set_tip(5104) # Send some non replay protected txns into the mempool to check # they get cleaned at activation. txns = create_fund_and_spend_tx(out[2]) send_transaction_to_mempool(txns[0]) tx_id = send_transaction_to_mempool(txns[1]) # Activate the replay protection block(5556) peer.send_blocks_and_test([self.tip], node) # Check we just activated the replay protection assert_equal( node.getblockchaininfo()['mediantime'], REPLAY_PROTECTION_START_TIME) # Non replay protected transactions are not valid anymore, # so they should be removed from the mempool. assert tx_id not in set(node.getrawmempool()) # Good old transactions are now invalid. send_transaction_to_mempool(txns[0]) assert_raises_rpc_error(-26, RPC_INVALID_SIGNATURE_ERROR, node.sendrawtransaction, ToHex(txns[1])) # They also cannot be mined block(4) update_block(4, txns) peer.send_blocks_and_test( [self.tip], node, success=False, reject_reason='blk-bad-inputs') # Rewind bad block self.set_tip(5556) # The replay protected transaction is now valid replay_tx0_id = send_transaction_to_mempool(replay_txns[0]) replay_tx1_id = send_transaction_to_mempool(replay_txns[1]) # Make sure the transactions are ready to be mined. tmpl = node.getblocktemplate() found_id0 = False found_id1 = False for txn in tmpl['transactions']: txid = txn['txid'] if txid == replay_tx0_id: found_id0 = True elif txid == replay_tx1_id: found_id1 = True assert found_id0 and found_id1 # And the mempool is still in good shape. assert replay_tx0_id in set(node.getrawmempool()) assert replay_tx1_id in set(node.getrawmempool()) # They can also be mined block(5) update_block(5, replay_txns) peer.send_blocks_and_test([self.tip], node) # Ok, now we check if a reorg works properly across the activation. postforkblockid = node.getbestblockhash() node.invalidateblock(postforkblockid) assert replay_tx0_id in set(node.getrawmempool()) assert replay_tx1_id in set(node.getrawmempool()) # Deactivating replay protection. forkblockid = node.getbestblockhash() node.invalidateblock(forkblockid) # The funding tx is not evicted from the mempool, since it's valid on # both sides of the fork assert replay_tx0_id in set(node.getrawmempool()) assert replay_tx1_id not in set(node.getrawmempool()) # Check that we also do it properly on a deeper reorg. 
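# An aside on the deeper-reorg check below (reasoning from the test, not
# part of the patch): both blocks are first reconsidered, then the fork
# block itself is invalidated, disconnecting two blocks in one step. The
# funding tx keeps its mempool slot because a plain P2PK output is valid
# under both rulesets; only the spending signature commits to the 0xffdead
# fork value, so only replay_txns[1] is evicted.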
node.reconsiderblock(forkblockid) node.reconsiderblock(postforkblockid) node.invalidateblock(forkblockid) assert replay_tx0_id in set(node.getrawmempool()) assert replay_tx1_id not in set(node.getrawmempool()) if __name__ == '__main__': ReplayProtectionTest().main() diff --git a/test/functional/abc-segwit-recovery.py b/test/functional/abc-segwit-recovery.py index 3e8799743..ce6d1c6bc 100755 --- a/test/functional/abc-segwit-recovery.py +++ b/test/functional/abc-segwit-recovery.py @@ -1,227 +1,221 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ This test checks that blocks containing segwit recovery transactions will be accepted, that segwit recovery transactions are rejected from mempool acceptance (even with -acceptnonstdtxn=1), and that segwit recovery transactions don't result in bans. """ import time from typing import Optional, Sequence from test_framework.blocktools import ( create_block, create_coinbase, make_conform_to_ctor, ) from test_framework.messages import ( COIN, CBlock, COutPoint, CTransaction, CTxIn, CTxOut, ToHex, ) from test_framework.p2p import P2PDataStore -from test_framework.script import ( - OP_EQUAL, - OP_HASH160, - OP_TRUE, - CScript, - hash160, -) +from test_framework.script import OP_EQUAL, OP_HASH160, OP_TRUE, CScript, hash160 from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_raises_rpc_error TEST_TIME = int(time.time()) # Error due to non clean stack CLEANSTACK_ERROR = 'non-mandatory-script-verify-flag (Stack size must be exactly one after execution)' RPC_CLEANSTACK_ERROR = CLEANSTACK_ERROR EVAL_FALSE_ERROR = 'non-mandatory-script-verify-flag (Script evaluated without error but finished with a false/empty top stack elem' RPC_EVAL_FALSE_ERROR = f"{EVAL_FALSE_ERROR}ent)" class PreviousSpendableOutput(object): def __init__(self, tx=CTransaction(), n=-1): self.tx = tx self.n = n class SegwitRecoveryTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.setup_clean_chain = True self.tip_height = 0 # We have 2 nodes: # 1) node_nonstd (nodes[0]) accepts non-standard txns. It does not # accept Segwit recovery transactions, since the check is included in # standard flags, and transactions that violate these flags are # never accepted into the mempool. # 2) node_std (nodes[1]) doesn't accept non-standard txns and # doesn't have us whitelisted. It's used to test for bans, as we # connect directly to it via mininode and send a segwit spending # txn. This transaction is non-standard. We check that sending # this transaction doesn't result in a ban. # Nodes are connected to each other, so node_std receives blocks and # transactions that node_nonstd has accepted. Since we are checking # that segwit spending txns do not result in bans, node_nonstd # doesn't get banned when forwarding these transactions to # node_std. self.extra_args = [['-whitelist=noban@127.0.0.1', "-acceptnonstdtxn"], ["-acceptnonstdtxn=0"]] def make_block(self, base_block: Optional[CBlock]) -> CBlock: """ Build a new block and return it. Increment the tip_height counter. If base_block is None, use the genesis block as base block. 
""" if base_block is None: base_block_hash = self.genesis_hash block_time = TEST_TIME else: base_block_hash = base_block.sha256 block_time = base_block.nTime + 1 # First create the coinbase self.tip_height += 1 coinbase = create_coinbase(self.tip_height) block = create_block(base_block_hash, coinbase, block_time) # Do PoW, which is cheap on regnet block.solve() return block def run_test(self): self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16) spendable_outputs = [] # shorthand node_nonstd = self.nodes[0] node_std = self.nodes[1] peer_nonstd = node_nonstd.add_p2p_connection(P2PDataStore()) peer_std = node_std.add_p2p_connection(P2PDataStore()) # adds transactions to the block and updates state def update_block(block: CBlock, new_transactions: Sequence[CTransaction]): block.vtx.extend(new_transactions) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Returns 2 transactions: # 1) txfund: create outputs in segwit addresses # 2) txspend: spends outputs from segwit addresses def create_segwit_fund_and_spend_tx(spend, case0=False): if not case0: # Spending from a P2SH-P2WPKH coin, # txhash:a45698363249312f8d3d93676aa714be59b0bd758e62fa054fb1ea6218480691 redeem_script0 = bytearray.fromhex( '0014fcf9969ce1c98a135ed293719721fb69f0b686cb') # Spending from a P2SH-P2WSH coin, # txhash:6b536caf727ccd02c395a1d00b752098ec96e8ec46c96bee8582be6b5060fa2f redeem_script1 = bytearray.fromhex( '0020fc8b08ed636cb23afcb425ff260b3abd03380a2333b54cfa5d51ac52d803baf4') else: redeem_script0 = bytearray.fromhex('51020000') redeem_script1 = bytearray.fromhex('53020080') redeem_scripts = [redeem_script0, redeem_script1] # Fund transaction to segwit addresses txfund = CTransaction() txfund.vin = [CTxIn(COutPoint(spend.tx.sha256, spend.n))] amount = (50 * COIN - 1000) // len(redeem_scripts) for redeem_script in redeem_scripts: txfund.vout.append( CTxOut(amount, CScript([OP_HASH160, hash160(redeem_script), OP_EQUAL]))) txfund.rehash() # Segwit spending transaction # We'll test if a node that checks for standardness accepts this # txn. It should fail exclusively because of the restriction in # the scriptSig (non clean stack..), so all other characteristcs # must pass standardness checks. For this reason, we create # standard P2SH outputs. txspend = CTransaction() for i in range(len(redeem_scripts)): txspend.vin.append( CTxIn(COutPoint(txfund.sha256, i), CScript([redeem_scripts[i]]))) txspend.vout = [CTxOut(50 * COIN - 2000, CScript([OP_HASH160, hash160(CScript([OP_TRUE])), OP_EQUAL]))] txspend.rehash() return txfund, txspend # Create a new block block = self.make_block(base_block=None) spendable_outputs.append(block) peer_nonstd.send_blocks_and_test([block], node_nonstd) # Now we need that block to mature so we can spend the coinbase. matureblocks = [] for _ in range(199): block = self.make_block(block) matureblocks.append(block) spendable_outputs.append(block) peer_nonstd.send_blocks_and_test(matureblocks, node_nonstd) # collect spendable outputs now to avoid cluttering the code later on out = [] for _ in range(100): out.append( PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)) # Create segwit funding and spending transactions txfund, txspend = create_segwit_fund_and_spend_tx(out[0]) txfund_case0, txspend_case0 = create_segwit_fund_and_spend_tx( out[1], True) # Mine txfund, as it can't go into node_std mempool because it's # nonstandard. 
block = self.make_block(block) update_block(block, [txfund, txfund_case0]) peer_nonstd.send_blocks_and_test([block], node_nonstd) # Check both nodes are synchronized before continuing. self.sync_blocks() # Check that upgraded nodes checking for standardness are not banning # nodes sending segwit spending txns. peer_nonstd.send_txs_and_test([txspend], node_nonstd, success=False, reject_reason=CLEANSTACK_ERROR) peer_nonstd.send_txs_and_test([txspend_case0], node_nonstd, success=False, reject_reason=EVAL_FALSE_ERROR) peer_std.send_txs_and_test([txspend], node_std, success=False, reject_reason=CLEANSTACK_ERROR) peer_std.send_txs_and_test([txspend_case0], node_std, success=False, reject_reason=EVAL_FALSE_ERROR) # Segwit recovery txns are never accepted into the mempool, # as the rules they violate are included in the standard flags. assert_raises_rpc_error(-26, RPC_CLEANSTACK_ERROR, node_nonstd.sendrawtransaction, ToHex(txspend)) assert_raises_rpc_error(-26, RPC_EVAL_FALSE_ERROR, node_nonstd.sendrawtransaction, ToHex(txspend_case0)) assert_raises_rpc_error(-26, RPC_CLEANSTACK_ERROR, node_std.sendrawtransaction, ToHex(txspend)) assert_raises_rpc_error(-26, RPC_EVAL_FALSE_ERROR, node_std.sendrawtransaction, ToHex(txspend_case0)) # Blocks containing segwit spending txns are accepted by both nodes. block = self.make_block(block) update_block(block, [txspend, txspend_case0]) peer_nonstd.send_blocks_and_test([block], node_nonstd) self.sync_blocks() if __name__ == '__main__': SegwitRecoveryTest().main() diff --git a/test/functional/abc_feature_proof_cleanup.py b/test/functional/abc_feature_proof_cleanup.py index 588d8dfb4..321676d42 100644 --- a/test/functional/abc_feature_proof_cleanup.py +++ b/test/functional/abc_feature_proof_cleanup.py @@ -1,138 +1,134 @@ #!/usr/bin/env python3 # Copyright (c) 2022 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
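# A note on the test below (my annotation, not from the patch): it drives
# time with two regtest-only RPCs, setmocktime to cross wall-clock timeouts
# such as AVALANCHE_DANGLING_PROOF_TIMEOUT, and mockscheduler to fast-forward
# the periodic cleanup job. A sketch of the recurring pattern:
#   node.setmocktime(mocktime + AVALANCHE_DANGLING_PROOF_TIMEOUT)
#   node.mockscheduler(AVALANCHE_CLEANUP_INTERVAL)  # force a cleanup run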
""" Test the dangling proofs cleanup """ import time from test_framework.avatools import ( gen_proof, get_ava_p2p_interface, get_ava_p2p_interface_no_handshake, get_proof_ids, wait_for_proof, ) from test_framework.p2p import P2PInterface from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import ( - assert_equal, - assert_raises_rpc_error, - uint256_hex, -) +from test_framework.util import assert_equal, assert_raises_rpc_error, uint256_hex from test_framework.wallet_util import bytes_to_wif # Interval between 2 proof cleanups AVALANCHE_CLEANUP_INTERVAL = 5 * 60 # Dangling proof timeout AVALANCHE_DANGLING_PROOF_TIMEOUT = 15 * 60 class ProofsCleanupTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [[ '-avaproofstakeutxodustthreshold=1000000', '-avaproofstakeutxoconfirmations=1', # Get rid of the getdata delay penalty for inbounds '-whitelist=noban@127.0.0.1', ]] * self.num_nodes def run_test(self): node = self.nodes[0] master_key, local_proof = gen_proof(self, node) self.restart_node(0, self.extra_args[0] + [ f"-avaproof={local_proof.serialize().hex()}", f"-avamasterkey={bytes_to_wif(master_key.get_bytes())}", ]) # Add an inbound so the node proof can be registered and advertised node.add_p2p_connection(P2PInterface()) self.generate(node, 1, sync_fun=self.no_op) wait_for_proof(node, uint256_hex(local_proof.proofid)) mocktime = int(time.time()) node.setmocktime(mocktime) proofs = [local_proof] keys = [master_key] peers = [] # The first 5 peers have a node attached for _ in range(5): peer = get_ava_p2p_interface(self, node) proofs.append(peer.proof) keys.append(peer.master_privkey) peers.append(peer) # The last 5 peers have no node attached for _ in range(5): _, proof = gen_proof(self, node) node.sendavalancheproof(proof.serialize().hex()) proofs.append(proof) peer_info = node.getavalanchepeerinfo() assert_equal(len(peer_info), 11) assert_equal(set(get_proof_ids(node)), set([proof.proofid for proof in proofs])) self.log.info("No proof is cleaned before the timeout expires") mocktime += AVALANCHE_DANGLING_PROOF_TIMEOUT - 1 node.setmocktime(mocktime) # Run the cleanup, the proofs are still there node.mockscheduler(AVALANCHE_CLEANUP_INTERVAL) assert_equal(len(peer_info), 11) self.log.info("Check the proofs with attached nodes are not cleaned") # Run the cleanup, the proofs with no node are cleaned excepted our # local proof with node.assert_debug_log([f"Proof dropped for dangling too long (no connected node): {uint256_hex(p.proofid)}" for p in proofs[6:]]): # Expire the dangling proof timeout mocktime += 1 node.setmocktime(mocktime) node.mockscheduler(AVALANCHE_CLEANUP_INTERVAL) self.wait_until(lambda: set(get_proof_ids(node)) == set( [proof.proofid for proof in proofs[:6]]), timeout=5) self.log.info( "Check the proofs are cleaned on next cleanup after the nodes disconnected") for peer in peers: peer.peer_disconnect() peer.wait_for_disconnect() node.mockscheduler(AVALANCHE_CLEANUP_INTERVAL) self.wait_until(lambda: get_proof_ids(node) == [local_proof.proofid]) self.log.info("Check the cleaned up proofs are no longer accepted...") sender = get_ava_p2p_interface_no_handshake(node) for proof in proofs[1:]: with node.assert_debug_log(["dangling-proof"]): sender.send_avaproof(proof) assert_raises_rpc_error(-8, "dangling-proof", node.sendavalancheproof, proof.serialize().hex()) assert_equal(get_proof_ids(node), [local_proof.proofid]) self.log.info("...until there is a node to attach") node.disconnect_p2ps() 
assert_equal(len(node.p2ps), 0) avanode = get_ava_p2p_interface(self, node) avanode.wait_until(lambda: avanode.last_message.get( "getdata") and avanode.last_message["getdata"].inv[-1].hash == avanode.proof.proofid) avanode.send_avaproof(avanode.proof) self.wait_until(lambda: avanode.proof.proofid in get_proof_ids(node)) if __name__ == '__main__': ProofsCleanupTest().main() diff --git a/test/functional/abc_rpc_addavalanchenode.py b/test/functional/abc_rpc_addavalanchenode.py index 6b04f71a1..0ebc3edc0 100644 --- a/test/functional/abc_rpc_addavalanchenode.py +++ b/test/functional/abc_rpc_addavalanchenode.py @@ -1,176 +1,173 @@ #!/usr/bin/env python3 # Copyright (c) 2021 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the addavalanchenode RPC""" -from test_framework.avatools import ( - avalanche_proof_from_hex, - create_coinbase_stakes, -) +from test_framework.avatools import avalanche_proof_from_hex, create_coinbase_stakes from test_framework.key import ECKey from test_framework.messages import ( AvalancheDelegation, AvalancheDelegationLevel, hash256, ser_string, ) from test_framework.p2p import P2PInterface from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_raises_rpc_error from test_framework.wallet_util import bytes_to_wif def add_interface_node(test_node) -> int: """Create a peer, connect it to test_node, return the nodeid of the peer as registered by test_node. """ n = P2PInterface() test_node.add_p2p_connection(n) n.wait_for_verack() return test_node.getpeerinfo()[-1]['id'] class AddAvalancheNodeTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [['-avaproofstakeutxodustthreshold=1000000', '-avaproofstakeutxoconfirmations=1', '-avacooldown=0']] def run_test(self): node = self.nodes[0] addrkey0 = node.get_deterministic_priv_key() blockhashes = self.generatetoaddress( node, 2, addrkey0.address, sync_fun=self.no_op) stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key) privkey = ECKey() privkey.generate() wif_privkey = bytes_to_wif(privkey.get_bytes()) proof_master = privkey.get_pubkey().get_bytes().hex() proof_sequence = 42 proof_expiration = 2000000000 proof = node.buildavalancheproof( proof_sequence, proof_expiration, wif_privkey, stakes) nodeid = add_interface_node(node) def check_addavalanchenode_error( error_code, error_message, nodeid=nodeid, proof=proof, pubkey=proof_master, delegation=None): assert_raises_rpc_error( error_code, error_message, node.addavalanchenode, nodeid, pubkey, proof, delegation, ) self.log.info("Invalid proof") check_addavalanchenode_error(-22, "Proof must be an hexadecimal string", proof="not a proof") check_addavalanchenode_error(-22, "Proof has invalid format", proof="f000") no_stake = node.buildavalancheproof( proof_sequence, proof_expiration, wif_privkey, []) check_addavalanchenode_error(-8, "The proof is invalid: no-stake", proof=no_stake) self.log.info("Node doesn't exist") check_addavalanchenode_error(-8, f"The node does not exist: {nodeid + 1}", nodeid=nodeid + 1) self.log.info("Invalid delegation") dg_privkey = ECKey() dg_privkey.generate() dg_pubkey = dg_privkey.get_pubkey().get_bytes() check_addavalanchenode_error(-22, "Delegation must be an hexadecimal string", pubkey=dg_pubkey.hex(), delegation="not a delegation") check_addavalanchenode_error(-22, "Delegation has invalid format", pubkey=dg_pubkey.hex(), delegation="f000") 
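# A sketch of the delegation-level signature that the checks below exercise
# (my annotation, using helpers already imported in this test): each level
# commits to the running delegation id plus the delegated pubkey:
#   preimage = hash256(delegation.getid() + ser_string(dg_pubkey))
#   level.sig = privkey.sign_schnorr(preimage)
# A level carrying no signature (the "bad signature" case) therefore fails
# validation.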
self.log.info("Delegation mismatch with the proof") delegation_wrong_proofid = AvalancheDelegation() check_addavalanchenode_error(-8, "The delegation does not match the proof", pubkey=dg_pubkey.hex(), delegation=delegation_wrong_proofid.serialize().hex()) proofobj = avalanche_proof_from_hex(proof) delegation = AvalancheDelegation( limited_proofid=proofobj.limited_proofid, proof_master=proofobj.master, ) self.log.info("Delegation with bad signature") bad_level = AvalancheDelegationLevel( pubkey=dg_pubkey, ) delegation.levels.append(bad_level) check_addavalanchenode_error(-8, "The delegation is invalid", pubkey=dg_pubkey.hex(), delegation=delegation.serialize().hex()) delegation.levels = [] level = AvalancheDelegationLevel( pubkey=dg_pubkey, sig=privkey.sign_schnorr( hash256( delegation.getid() + ser_string(dg_pubkey) ) ) ) delegation.levels.append(level) self.log.info("Key mismatch with the proof") check_addavalanchenode_error( -5, "The public key does not match the proof", pubkey=dg_pubkey.hex(), ) self.log.info("Key mismatch with the delegation") random_privkey = ECKey() random_privkey.generate() random_pubkey = random_privkey.get_pubkey() check_addavalanchenode_error( -5, "The public key does not match the delegation", pubkey=random_pubkey.get_bytes().hex(), delegation=delegation.serialize().hex(), ) self.log.info("Happy path") assert node.addavalanchenode(nodeid, proof_master, proof) # Adding several times is OK assert node.addavalanchenode(nodeid, proof_master, proof) self.log.info("Add a node with a valid delegation") assert node.addavalanchenode( nodeid, dg_pubkey.hex(), proof, delegation.serialize().hex(), ) self.log.info("Several nodes can share a proof") nodeid2 = add_interface_node(node) assert node.addavalanchenode(nodeid2, proof_master, proof) if __name__ == '__main__': AddAvalancheNodeTest().main() diff --git a/test/functional/abc_rpc_getavalanchepeerinfo.py b/test/functional/abc_rpc_getavalanchepeerinfo.py index 534fd70c6..2ba904639 100755 --- a/test/functional/abc_rpc_getavalanchepeerinfo.py +++ b/test/functional/abc_rpc_getavalanchepeerinfo.py @@ -1,99 +1,95 @@ #!/usr/bin/env python3 # Copyright (c) 2020-2021 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test the getavalanchepeerinfo RPC.""" from random import choice from test_framework.avatools import ( avalanche_proof_from_hex, create_coinbase_stakes, get_ava_p2p_interface_no_handshake, ) from test_framework.key import ECKey from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import ( - assert_equal, - assert_raises_rpc_error, - uint256_hex, -) +from test_framework.util import assert_equal, assert_raises_rpc_error, uint256_hex from test_framework.wallet_util import bytes_to_wif class GetAvalanchePeerInfoTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 self.extra_args = [['-avaproofstakeutxodustthreshold=1000000', '-avaproofstakeutxoconfirmations=1', '-avacooldown=0']] def run_test(self): node = self.nodes[0] peercount = 5 nodecount = 10 self.log.info( f"Generating {peercount} peers with {nodecount} nodes each") addrkey0 = node.get_deterministic_priv_key() blockhashes = self.generatetoaddress( node, peercount, addrkey0.address, sync_fun=self.no_op) # Use the first coinbase to create a stake stakes = create_coinbase_stakes(node, blockhashes, addrkey0.key) def getProof(stake): privkey = ECKey() privkey.generate() pubkey = privkey.get_pubkey() proof_sequence = 11 proof_expiration = 0 proof = node.buildavalancheproof( proof_sequence, proof_expiration, bytes_to_wif( privkey.get_bytes()), [stake]) return (pubkey.get_bytes().hex(), proof) # Create peercount * nodecount node array nodes = [[get_ava_p2p_interface_no_handshake(node) for _ in range( nodecount)] for _ in range(peercount)] # Add peercount peers and bind all the nodes to each proofs = [] for i in range(peercount): pubkey_hex, proof = getProof(stakes[i]) proofs.append(proof) [node.addavalanchenode(n.nodeid, pubkey_hex, proof) for n in nodes[i]] self.log.info("Testing getavalanchepeerinfo...") avapeerinfo = node.getavalanchepeerinfo() assert_equal(len(avapeerinfo), peercount) for i, peer in enumerate(avapeerinfo): proofid_hex = uint256_hex( avalanche_proof_from_hex( proofs[i]).proofid) assert_equal(peer["avalanche_peerid"], i) assert_equal(peer["proofid"], proofid_hex) assert_equal(peer["proof"], proofs[i]) assert_equal(peer["nodecount"], nodecount) assert_equal(set(peer["node_list"]), set( [n.nodeid for n in nodes[i]])) self.log.info("Testing with a specified proofid") assert_raises_rpc_error(-8, "Proofid not found", node.getavalanchepeerinfo, proofid="0" * 64) target_proof = choice(proofs) target_proofid = avalanche_proof_from_hex(target_proof).proofid avapeerinfo = node.getavalanchepeerinfo( proofid=uint256_hex(target_proofid)) assert_equal(len(avapeerinfo), 1) assert_equal(avapeerinfo[0]["proof"], target_proof) if __name__ == '__main__': GetAvalanchePeerInfoTest().main() diff --git a/test/functional/abc_rpc_isfinal.py b/test/functional/abc_rpc_isfinal.py index d4119f889..da8d812ed 100755 --- a/test/functional/abc_rpc_isfinal.py +++ b/test/functional/abc_rpc_isfinal.py @@ -1,236 +1,232 @@ #!/usr/bin/env python3 # Copyright (c) 2022 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test the isfinalxxx RPCS.""" import random from test_framework.address import ADDRESS_ECREG_UNSPENDABLE from test_framework.authproxy import JSONRPCException from test_framework.avatools import AvaP2PInterface from test_framework.blocktools import create_block, create_coinbase from test_framework.messages import ( AvalancheVote, AvalancheVoteError, CBlockHeader, msg_headers, ) from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import ( - assert_equal, - assert_raises_rpc_error, - uint256_hex, -) +from test_framework.util import assert_equal, assert_raises_rpc_error, uint256_hex QUORUM_NODE_COUNT = 16 class AvalancheIsFinalTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 self.extra_args = [ [ '-avaproofstakeutxodustthreshold=1000000', '-avaproofstakeutxoconfirmations=1', '-avacooldown=0', '-avaminquorumstake=0', '-avaminavaproofsnodecount=0', ] ] def run_test(self): node = self.nodes[0] tip = node.getbestblockhash() assert_raises_rpc_error( -1, "Avalanche is not ready to poll yet.", self.nodes[0].isfinalblock, tip, ) assert_raises_rpc_error( -1, "Avalanche is not ready to poll yet.", self.nodes[0].isfinaltransaction, node.getblock(tip)['tx'][0], tip, ) # Build a fake quorum of nodes. def get_quorum(): return [node.add_p2p_connection(AvaP2PInterface(self, node)) for _ in range(0, QUORUM_NODE_COUNT)] # Pick one node from the quorum for polling. quorum = get_quorum() def is_quorum_established(): return node.getavalancheinfo()['ready_to_poll'] is True self.wait_until(is_quorum_established) def can_find_block_in_poll( blockhash, resp=AvalancheVoteError.ACCEPTED): found_hash = False for n in quorum: poll = n.get_avapoll_if_available() # That node has not received a poll if poll is None: continue # We got a poll, check for the hash and repond votes = [] for inv in poll.invs: # Vote yes to everything r = AvalancheVoteError.ACCEPTED # Look for what we expect if inv.hash == int(blockhash, 16): r = resp found_hash = True votes.append(AvalancheVote(r, inv.hash)) n.send_avaresponse(poll.round, votes, n.delegated_privkey) return found_hash blockhash = self.generate(node, 1, sync_fun=self.no_op)[0] cb_txid = node.getblock(blockhash)['tx'][0] assert not node.isfinalblock(blockhash) assert not node.isfinaltransaction(cb_txid, blockhash) def is_finalblock(blockhash): can_find_block_in_poll(blockhash) return node.isfinalblock(blockhash) with node.assert_debug_log([f"Avalanche finalized block {blockhash}"]): self.wait_until(lambda: is_finalblock(blockhash)) assert node.isfinaltransaction(cb_txid, blockhash) self.log.info("Check block ancestors are finalized as well") tip_height = node.getblockheader(blockhash)['height'] for height in range(0, tip_height): hash = node.getblockhash(height) assert node.isfinalblock(hash) txid = node.getblock(hash)['tx'][0] assert node.isfinaltransaction(txid, hash) if self.is_wallet_compiled(): self.log.info("Check mempool transactions are not finalized") # Mature some utxos tip = self.generate(node, 100, sync_fun=self.no_op)[-1] wallet_txid = node.sendtoaddress( ADDRESS_ECREG_UNSPENDABLE, 1_000_000) assert wallet_txid in node.getrawmempool() assert_raises_rpc_error( -5, "No such transaction found in the provided block.", node.isfinaltransaction, wallet_txid, tip, ) self.log.info( "A transaction is only finalized if the containing block is finalized") tip = self.generate(node, 1, sync_fun=self.no_op)[0] assert wallet_txid not in node.getrawmempool() assert not 
node.isfinaltransaction(wallet_txid, tip) self.wait_until(lambda: is_finalblock(tip)) assert node.isfinaltransaction(wallet_txid, tip) # Needs -txindex assert_raises_rpc_error( -5, "No such transaction. Use -txindex or provide a block hash to enable blockchain transaction queries.", node.isfinaltransaction, wallet_txid, ) self.log.info( "Repeat with -txindex so we don't need the blockhash") self.restart_node(0, self.extra_args[0] + ['-txindex']) quorum = get_quorum() self.wait_until(is_quorum_established) # Try to raise a -txindex not synced yet error. This is not # guaranteed because syncing is fast! try: node.isfinaltransaction( uint256_hex(random.randint(0, 2**256 - 1)), ) except JSONRPCException as e: assert_equal(e.error['code'], -5) if e.error['message'] == "No such mempool or blockchain transaction.": # If we got a regular "not found" error, the txindex should # have synced. assert node.getindexinfo()['txindex']['synced'] is True else: # Otherwise we might have successfully raised before the # indexer completed. Checking the status now is useless as # the indexer might have completed the synchronization in # the meantime and the status is no longer relevant. assert e.error['message'] == "No such transaction. Blockchain transactions are still in the process of being indexed." else: assert False, "The isfinaltransaction RPC call did not throw as expected." self.wait_until(lambda: node.getindexinfo()[ 'txindex']['synced'] is True) self.wait_until(lambda: is_finalblock(tip)) assert node.isfinaltransaction(wallet_txid) wallet_txid = node.sendtoaddress( ADDRESS_ECREG_UNSPENDABLE, 1_000_000) assert wallet_txid in node.getrawmempool() assert not node.isfinaltransaction(wallet_txid) assert_raises_rpc_error( -5, "No such mempool or blockchain transaction.", node.isfinaltransaction, uint256_hex(random.randint(0, 2**256 - 1)), ) self.log.info("Check unknown item") for _ in range(10): assert_raises_rpc_error( -8, "Block not found", node.isfinalblock, uint256_hex(random.randint(0, 2**256 - 1)), ) assert_raises_rpc_error( -8, "Block not found", node.isfinaltransaction, uint256_hex(random.randint(0, 2**256 - 1)), uint256_hex(random.randint(0, 2**256 - 1)), ) tip = node.getbestblockhash() height = node.getblockcount() + 1 time = node.getblock(tip)['time'] + 1 block = create_block(int(tip, 16), create_coinbase(height), time) block.solve() peer = node.add_p2p_connection(AvaP2PInterface()) msg = msg_headers() msg.headers = [CBlockHeader(block)] peer.send_message(msg) self.wait_until(lambda: node.getchaintips()[0]['height'] == height) assert_raises_rpc_error( -1, "Block data not downloaded yet.", node.isfinaltransaction, uint256_hex(random.randint(0, 2**256 - 1)), uint256_hex(block.sha256), ) if __name__ == '__main__': AvalancheIsFinalTest().main() diff --git a/test/functional/data/invalid_txs.py b/test/functional/data/invalid_txs.py index 6e4d38a93..7f8cd4062 100644 --- a/test/functional/data/invalid_txs.py +++ b/test/functional/data/invalid_txs.py @@ -1,268 +1,262 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ Templates for constructing various sorts of invalid transactions. These templates (or an iterator over all of them) can be reused in different contexts to test using a number of invalid transaction types. 
Hopefully this makes it easier to get coverage of a full variety of tx validation checks through different interfaces (AcceptBlock, AcceptToMemPool, etc.) without repeating ourselves. Invalid tx cases not covered here can be found by running: $ diff \ <(grep -IREho "bad-txns[a-zA-Z-]+" src | sort -u) \ <(grep -IEho "bad-txns[a-zA-Z-]+" test/functional/data/invalid_txs.py | sort -u) """ import abc from typing import Optional from test_framework import script as sc from test_framework.blocktools import create_tx_with_script -from test_framework.messages import ( - MAX_MONEY, - COutPoint, - CTransaction, - CTxIn, - CTxOut, -) +from test_framework.messages import MAX_MONEY, COutPoint, CTransaction, CTxIn, CTxOut from test_framework.script import ( OP_2DIV, OP_2MUL, OP_INVERT, OP_LSHIFT, OP_MUL, OP_RSHIFT, CScript, ) from test_framework.txtools import pad_tx basic_p2sh = sc.CScript( [sc.OP_HASH160, sc.hash160(sc.CScript([sc.OP_0])), sc.OP_EQUAL]) class BadTxTemplate: """Allows simple construction of a certain kind of invalid tx. Base class to be subclassed.""" __metaclass__ = abc.ABCMeta # The expected error code given by bitcoind upon submission of the tx. reject_reason: Optional[str] = "" # Only specified if it differs from mempool acceptance error. block_reject_reason = "" # Do we expect to be disconnected after submitting this tx? expect_disconnect = False # Is this tx considered valid when included in a block, but not for acceptance into # the mempool (i.e. does it violate policy but not consensus)? valid_in_block = False def __init__(self, *, spend_tx=None, spend_block=None): self.spend_tx = spend_block.vtx[0] if spend_block else spend_tx self.spend_avail = sum(o.nValue for o in self.spend_tx.vout) self.valid_txin = CTxIn( COutPoint( self.spend_tx.sha256, 0), b"", 0xffffffff) @abc.abstractmethod def get_tx(self, *args, **kwargs): """Return a CTransaction that is invalid per the subclass.""" pass class OutputMissing(BadTxTemplate): reject_reason = "bad-txns-vout-empty" expect_disconnect = True def get_tx(self): tx = CTransaction() tx.vin.append(self.valid_txin) tx.calc_sha256() return tx class InputMissing(BadTxTemplate): reject_reason = "bad-txns-vin-empty" expect_disconnect = True def get_tx(self): tx = CTransaction() tx.vout.append(CTxOut(0, sc.CScript([sc.OP_TRUE] * 100))) tx.calc_sha256() return tx class SizeTooSmall(BadTxTemplate): reject_reason = "bad-txns-undersize" expect_disconnect = False valid_in_block = True def get_tx(self): tx = CTransaction() tx.vin.append(self.valid_txin) tx.vout.append(CTxOut(0, sc.CScript([sc.OP_TRUE]))) tx.calc_sha256() return tx class BadInputOutpointIndex(BadTxTemplate): # Won't be rejected - nonexistent outpoint index is treated as an orphan since the coins # database can't distinguish between spent outpoints and outpoints which # never existed. 
reject_reason = None expect_disconnect = False def get_tx(self): num_indices = len(self.spend_tx.vin) bad_idx = num_indices + 100 tx = CTransaction() tx.vin.append( CTxIn( COutPoint( self.spend_tx.sha256, bad_idx), b"", 0xffffffff)) tx.vout.append(CTxOut(0, basic_p2sh)) tx.calc_sha256() return tx class DuplicateInput(BadTxTemplate): reject_reason = 'bad-txns-inputs-duplicate' expect_disconnect = True def get_tx(self): tx = CTransaction() tx.vin.append(self.valid_txin) tx.vin.append(self.valid_txin) tx.vout.append(CTxOut(1, basic_p2sh)) tx.calc_sha256() return tx class PrevoutNullInput(BadTxTemplate): reject_reason = 'bad-txns-prevout-null' expect_disconnect = True def get_tx(self): tx = CTransaction() tx.vin.append(self.valid_txin) tx.vin.append(CTxIn(COutPoint(hash=0, n=0xffffffff))) tx.vout.append(CTxOut(1, basic_p2sh)) tx.calc_sha256() return tx class NonexistentInput(BadTxTemplate): # Added as an orphan tx. reject_reason = None expect_disconnect = False def get_tx(self): tx = CTransaction() tx.vin.append( CTxIn( COutPoint( self.spend_tx.sha256 + 1, 0), b"", 0xffffffff)) tx.vin.append(self.valid_txin) tx.vout.append(CTxOut(1, basic_p2sh)) tx.calc_sha256() return tx class SpendTooMuch(BadTxTemplate): reject_reason = 'bad-txns-in-belowout' expect_disconnect = True def get_tx(self): return create_tx_with_script( self.spend_tx, 0, script_pub_key=basic_p2sh, amount=(self.spend_avail + 1)) class CreateNegative(BadTxTemplate): reject_reason = 'bad-txns-vout-negative' expect_disconnect = True def get_tx(self): return create_tx_with_script(self.spend_tx, 0, amount=-1) class CreateTooLarge(BadTxTemplate): reject_reason = 'bad-txns-vout-toolarge' expect_disconnect = True def get_tx(self): return create_tx_with_script(self.spend_tx, 0, amount=MAX_MONEY + 1) class CreateSumTooLarge(BadTxTemplate): reject_reason = 'bad-txns-txouttotal-toolarge' expect_disconnect = True def get_tx(self): tx = create_tx_with_script(self.spend_tx, 0, amount=MAX_MONEY) tx.vout = [tx.vout[0]] * 2 tx.calc_sha256() return tx class InvalidOPIFConstruction(BadTxTemplate): reject_reason = "mandatory-script-verify-flag-failed (Invalid OP_IF construction)" expect_disconnect = True valid_in_block = True def get_tx(self): return create_tx_with_script( self.spend_tx, 0, script_sig=b'\x64' * 35, amount=(self.spend_avail // 2)) def getDisabledOpcodeTemplate(opcode): """ Creates disabled opcode tx template class""" def get_tx(self): tx = CTransaction() vin = self.valid_txin vin.scriptSig = CScript([opcode]) tx.vin.append(vin) tx.vout.append(CTxOut(1, basic_p2sh)) pad_tx(tx) tx.calc_sha256() return tx return type(f"DisabledOpcode_{str(opcode)}", (BadTxTemplate,), { 'reject_reason': "disabled opcode", 'expect_disconnect': True, 'get_tx': get_tx, 'valid_in_block': True }) # Disabled opcode tx templates (CVE-2010-5137) DisabledOpcodeTemplates = [getDisabledOpcodeTemplate(opcode) for opcode in [ OP_INVERT, OP_2MUL, OP_2DIV, OP_MUL, OP_LSHIFT, OP_RSHIFT]] def iter_all_templates(): """Iterate through all bad transaction template types.""" return BadTxTemplate.__subclasses__() diff --git a/test/functional/feature_blocksdir.py b/test/functional/feature_blocksdir.py index d3e883dc2..7a0e07fec 100755 --- a/test/functional/feature_blocksdir.py +++ b/test/functional/feature_blocksdir.py @@ -1,47 +1,44 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the blocksdir option. 
""" import os import shutil -from test_framework.test_framework import ( - BitcoinTestFramework, - initialize_datadir, -) +from test_framework.test_framework import BitcoinTestFramework, initialize_datadir class BlocksdirTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): self.stop_node(0) assert os.path.isdir(os.path.join( self.nodes[0].datadir, self.chain, "blocks")) assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "blocks")) shutil.rmtree(self.nodes[0].datadir) initialize_datadir(self.options.tmpdir, 0, self.chain) self.log.info("Starting with nonexistent blocksdir ...") blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir') self.nodes[0].assert_start_raises_init_error( [f"-blocksdir={blocksdir_path}"], f'Error: Specified blocks directory "{blocksdir_path}" does not exist.') os.mkdir(blocksdir_path) self.log.info("Starting with existing blocksdir ...") self.start_node(0, [f"-blocksdir={blocksdir_path}"]) self.log.info("mining blocks..") self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address) assert os.path.isfile(os.path.join( blocksdir_path, self.chain, "blocks", "blk00000.dat")) assert os.path.isdir(os.path.join( self.nodes[0].datadir, self.chain, "blocks", "index")) if __name__ == '__main__': BlocksdirTest().main() diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py index e4c66cbab..bffbe9668 100755 --- a/test/functional/feature_csv_activation.py +++ b/test/functional/feature_csv_activation.py @@ -1,678 +1,673 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test activation of the first version bits soft fork. This soft fork will activate the following BIPS: BIP 68 - nSequence relative lock times BIP 112 - CHECKSEQUENCEVERIFY BIP 113 - MedianTimePast semantics for nLockTime regtest lock-in with 108/144 block signalling activation after a further 144 blocks mine 82 blocks whose coinbases will be used to generate inputs for our tests mine 489 blocks and seed block chain with the 82 inputs will use for our tests at height 572 mine 3 blocks and verify still at LOCKED_IN and test that enforcement has not triggered mine 1 block and test that enforcement has triggered (which triggers ACTIVE) Test BIP 113 is enforced Mine 4 blocks so next height is 580 and test BIP 68 is enforced for time and height Mine 1 block so next height is 581 and test BIP 68 now passes time but not height Mine 1 block so next height is 582 and test BIP 68 now passes time and height Test that BIP 112 is enforced Various transactions will be used to test that the BIPs rules are not enforced before the soft fork activates And that after the soft fork activates transactions pass and fail as they should according to the rules. For each BIP, transactions of versions 1 and 2 will be tested. 
---------------- BIP 113: bip113tx - modify the nLocktime variable BIP 68: bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below BIP 112: bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP bip112tx_special - test negative argument to OP_CSV """ import time from decimal import Decimal from itertools import product from test_framework.blocktools import ( create_block, create_coinbase, make_conform_to_ctor, ) from test_framework.messages import XEC, CTransaction, FromHex, ToHex from test_framework.p2p import P2PDataStore -from test_framework.script import ( - OP_CHECKSEQUENCEVERIFY, - OP_DROP, - OP_TRUE, - CScript, -) +from test_framework.script import OP_CHECKSEQUENCEVERIFY, OP_DROP, OP_TRUE, CScript from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_tx from test_framework.util import assert_equal BASE_RELATIVE_LOCKTIME = 10 SEQ_DISABLE_FLAG = 1 << 31 SEQ_RANDOM_HIGH_BIT = 1 << 25 SEQ_TYPE_FLAG = 1 << 22 SEQ_RANDOM_LOW_BIT = 1 << 18 def relative_locktime(sdf, srhb, stf, srlb): """Returns a locktime with certain bits set.""" locktime = BASE_RELATIVE_LOCKTIME if sdf: locktime |= SEQ_DISABLE_FLAG if srhb: locktime |= SEQ_RANDOM_HIGH_BIT if stf: locktime |= SEQ_TYPE_FLAG if srlb: locktime |= SEQ_RANDOM_LOW_BIT return locktime def all_rlt_txs(txs): return [tx['tx'] for tx in txs] def get_csv_status(node): height = node.getblockchaininfo()['blocks'] return height >= 576 def create_transaction(node, txid, to_address, *, amount): inputs = [{"txid": txid, "vout": 0}] outputs = {to_address: amount} rawtx = node.createrawtransaction(inputs, outputs) tx = FromHex(CTransaction(), rawtx) return tx def sign_transaction(node, unsignedtx): rawtx = ToHex(unsignedtx) signresult = node.signrawtransactionwithwallet(rawtx) tx = FromHex(CTransaction(), signresult['hex']) return tx def spend_tx(node, prev_tx, address): spendtx = create_transaction( node, prev_tx.hash, address, amount=(prev_tx.vout[0].nValue - 1000) / XEC) spendtx.nVersion = prev_tx.nVersion pad_tx(spendtx) spendtx.rehash() return spendtx def create_bip112special(node, input, txversion, address): tx = create_transaction( node, input, address, amount=Decimal("49980000")) tx.nVersion = txversion tx.vout[0].scriptPubKey = CScript( [-1, OP_CHECKSEQUENCEVERIFY, OP_DROP, OP_TRUE]) tx.rehash() signtx = sign_transaction(node, tx) signtx.rehash() return signtx def send_generic_input_tx(node, coinbases, address): return node.sendrawtransaction(ToHex(sign_transaction(node, create_transaction( node, node.getblock(coinbases.pop())['tx'][0], address, amount=Decimal("49990000"))))) def create_bip68txs(node, bip68inputs, txversion, address, locktime_delta=0): """Returns a list of bip68 transactions with different bits set.""" txs = [] assert len(bip68inputs) >= 16 for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)): locktime = relative_locktime(sdf, srhb, stf, srlb) tx = create_transaction( node, bip68inputs[i], address, amount=Decimal("49980000")) tx.nVersion = txversion tx.vin[0].nSequence = locktime + locktime_delta tx = 
sign_transaction(node, tx) tx.rehash() txs.append({'tx': tx, 'sdf': sdf, 'stf': stf}) return txs def create_bip112txs(node, bip112inputs, varyOP_CSV, txversion, address, locktime_delta=0): """Returns a list of bip112 transactions with different bits set.""" txs = [] assert len(bip112inputs) >= 16 for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)): locktime = relative_locktime(sdf, srhb, stf, srlb) tx = create_transaction( node, bip112inputs[i], address, amount=Decimal("49980000")) if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta else: # vary nSequence instead, OP_CSV is fixed tx.vin[0].nSequence = locktime + locktime_delta tx.nVersion = txversion if (varyOP_CSV): tx.vout[0].scriptPubKey = CScript( [locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP, OP_TRUE]) else: tx.vout[0].scriptPubKey = CScript( [BASE_RELATIVE_LOCKTIME, OP_CHECKSEQUENCEVERIFY, OP_DROP, OP_TRUE]) tx.rehash() signtx = sign_transaction(node, tx) signtx.rehash() txs.append({'tx': signtx, 'sdf': sdf, 'stf': stf}) return txs class BIP68_112_113Test(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.extra_args = [['-whitelist=noban@127.0.0.1']] def skip_test_if_missing_module(self): self.skip_if_no_wallet() def generate_blocks(self, number): test_blocks = [] for _ in range(number): block = self.create_test_block([]) test_blocks.append(block) self.last_block_time += 600 self.tip = block.sha256 self.tipheight += 1 return test_blocks def create_test_block(self, txs, version=536870912): block = create_block(self.tip, create_coinbase( self.tipheight + 1), self.last_block_time + 600) block.nVersion = version block.vtx.extend(txs) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() return block # Create a block with given txs, and spend these txs in the same block. # Spending utxos in the same block is OK as long as nSequence is not enforced. # Otherwise a number of intermediate blocks should be generated, and this # method should not be used. def create_test_block_spend_utxos(self, node, txs, version=536870912): block = self.create_test_block(txs, version) block.vtx.extend([spend_tx(node, tx, self.nodeaddress) for tx in txs]) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() return block def send_blocks(self, blocks, success=True): """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block. 
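The nSequence values assembled above are interpreted under BIP 68. A small decoder sketch using the standard field layout (disable flag, type flag, 16-bit value mask) makes the block/time distinction behind the later assertions explicit:

    SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31
    SEQUENCE_LOCKTIME_TYPE_FLAG = 1 << 22
    SEQUENCE_LOCKTIME_MASK = 0x0000ffff

    def decode_bip68(nsequence: int) -> str:
        """Describe an nSequence value as a BIP 68 relative lock."""
        if nsequence & SEQUENCE_LOCKTIME_DISABLE_FLAG:
            return "no relative lock"
        value = nsequence & SEQUENCE_LOCKTIME_MASK
        if nsequence & SEQUENCE_LOCKTIME_TYPE_FLAG:
            return f"{value * 512} seconds"  # time-based lock, 512 s units
        return f"{value} blocks"             # height-based lock

    assert decode_bip68(10) == "10 blocks"
    assert decode_bip68(10 | SEQUENCE_LOCKTIME_TYPE_FLAG) == "5120 seconds"
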
Call with success = False if the tip shouldn't advance to the most recent block.""" self.helper_peer.send_blocks_and_test( blocks, self.nodes[0], success=success) def run_test(self): self.helper_peer = self.nodes[0].add_p2p_connection(P2PDataStore()) self.log.info("Generate blocks in the past for coinbase outputs.") # Enough to build up to 1000 blocks 10 minutes apart without worrying # about getting into the future long_past_time = int(time.time()) - 600 * 1000 # Enough so that the generated blocks will still all be before # long_past_time self.nodes[0].setmocktime(long_past_time - 100) # 82 blocks generated for inputs self.coinbase_blocks = self.generate( self.nodes[0], 1 + 16 + 2 * 32 + 1) # Set time back to present so yielded blocks aren't in the future as # we advance last_block_time self.nodes[0].setmocktime(0) # height of the next block to build self.tipheight = 82 self.last_block_time = long_past_time self.tip = int(self.nodes[0].getbestblockhash(), 16) self.nodeaddress = self.nodes[0].getnewaddress() # CSV is not activated yet. assert_equal(get_csv_status(self.nodes[0]), False) # Generate 489 more version 4 blocks test_blocks = self.generate_blocks(489) # Test #1 self.send_blocks(test_blocks) # Still not activated. assert_equal(get_csv_status(self.nodes[0]), False) # Inputs at height = 572 # # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block) # Note we reuse inputs for v1 and v2 txs so must test these separately # 16 normal inputs bip68inputs = [] for _ in range(16): bip68inputs.append(send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress)) # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be # prepended to spending scriptSig) bip112basicinputs = [] for _ in range(2): inputs = [] for _ in range(16): inputs.append(send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress)) bip112basicinputs.append(inputs) # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP # (actually will be prepended to spending scriptSig) bip112diverseinputs = [] for _ in range(2): inputs = [] for _ in range(16): inputs.append(send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress)) bip112diverseinputs.append(inputs) # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to # spending scriptSig) bip112specialinput = send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress) # 1 normal input bip113input = send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress) self.nodes[0].setmocktime(self.last_block_time + 600) # 1 block generated for inputs to be in chain at height 572 inputblockhash = self.generate(self.nodes[0], 1)[0] self.nodes[0].setmocktime(0) self.tip = int(inputblockhash, 16) self.tipheight += 1 self.last_block_time += 600 assert_equal(len(self.nodes[0].getblock( inputblockhash, True)["tx"]), 82 + 1) # 2 more version 4 blocks test_blocks = self.generate_blocks(2) # Test #2 self.send_blocks(test_blocks) self.log.info( "Not yet activated, height = 574 (will activate for block 576, not 575)") assert_equal(get_csv_status(self.nodes[0]), False) # Test both version 1 and version 2 transactions for all tests # BIP113 test transaction will be modified before each use to # put in appropriate block time bip113tx_v1 = create_transaction( self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49980000")) bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE bip113tx_v1.nVersion = 1 bip113tx_v2 = create_transaction( 
self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49980000")) bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE bip113tx_v2.nVersion = 2 # For BIP68 test all 16 relative sequence locktimes bip68txs_v1 = create_bip68txs( self.nodes[0], bip68inputs, 1, self.nodeaddress) bip68txs_v2 = create_bip68txs( self.nodes[0], bip68inputs, 2, self.nodeaddress) # For BIP112 test: # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs bip112txs_vary_nSequence_v1 = create_bip112txs( self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress) bip112txs_vary_nSequence_v2 = create_bip112txs( self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress) # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs bip112txs_vary_nSequence_9_v1 = create_bip112txs( self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1) bip112txs_vary_nSequence_9_v2 = create_bip112txs( self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1) # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV # OP_DROP inputs bip112txs_vary_OP_CSV_v1 = create_bip112txs( self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress) bip112txs_vary_OP_CSV_v2 = create_bip112txs( self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress) # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV # OP_DROP inputs bip112txs_vary_OP_CSV_9_v1 = create_bip112txs( self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1) bip112txs_vary_OP_CSV_9_v2 = create_bip112txs( self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1) # -1 OP_CSV OP_DROP input bip112tx_special_v1 = create_bip112special( self.nodes[0], bip112specialinput, 1, self.nodeaddress) bip112tx_special_v2 = create_bip112special( self.nodes[0], bip112specialinput, 2, self.nodeaddress) self.log.info("TESTING") self.log.info("Pre-Soft Fork Tests. 
All txs should pass.") self.log.info("Test version 1 txs") success_txs = [] # add BIP113 tx and -1 CSV tx # = MTP of prior block (not <) but < time put on current block bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) success_txs.append(bip113signed1) success_txs.append(bip112tx_special_v1) success_txs.append( spend_tx(self.nodes[0], bip112tx_special_v1, self.nodeaddress)) # add BIP 68 txs success_txs.extend(all_rlt_txs(bip68txs_v1)) # add BIP 112 with seq=10 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_nSequence_v1)]) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_OP_CSV_v1)]) # try BIP 112 with seq=9 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_nSequence_9_v1)]) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_OP_CSV_9_v1)]) # Test #3 self.send_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("Test version 2 txs") success_txs = [] # add BIP113 tx and -1 CSV tx # = MTP of prior block (not <) but < time put on current block bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) success_txs.append(bip113signed2) success_txs.append(bip112tx_special_v2) success_txs.append( spend_tx(self.nodes[0], bip112tx_special_v2, self.nodeaddress)) # add BIP 68 txs success_txs.extend(all_rlt_txs(bip68txs_v2)) # add BIP 112 with seq=10 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_nSequence_v2)]) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_OP_CSV_v2)]) # try BIP 112 with seq=9 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_nSequence_9_v2)]) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_OP_CSV_9_v2)]) # Test #4 self.send_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # 1 more version 4 block to get us to height 575 so the fork should # now be active for the next block test_blocks = self.generate_blocks(1) # Test #5 self.send_blocks(test_blocks) assert_equal(get_csv_status(self.nodes[0]), False) self.generate(self.nodes[0], 1) assert_equal(get_csv_status(self.nodes[0]), True) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("Post-Soft Fork Tests.") self.log.info("BIP 113 tests") # BIP 113 tests should now fail regardless of version number # if nLockTime isn't satisfied by new rules # = MTP of prior block (not <) but < time put on current block bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) # = MTP of prior block 
(not <) but < time put on current block bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) for bip113tx in [bip113signed1, bip113signed2]: # Test #6, Test #7 self.send_blocks( [self.create_test_block([bip113tx])], success=False) # BIP 113 tests should now pass if the locktime is < MTP # < MTP of prior block bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) # < MTP of prior block bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) for bip113tx in [bip113signed1, bip113signed2]: # Test #8, Test #9 self.send_blocks([self.create_test_block([bip113tx])]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Next block height = 580 after 4 blocks of random version test_blocks = self.generate_blocks(4) # Test #10 self.send_blocks(test_blocks) self.log.info("BIP 68 tests") self.log.info("Test version 1 txs - all should still pass") success_txs = [] success_txs.extend(all_rlt_txs(bip68txs_v1)) # Test #11 self.send_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("Test version 2 txs") # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']] self.send_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # All txs without flag fail as we are at delta height = 8 < 10 and # delta time = 8 * 600 < 10 * 512 bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']] for tx in bip68timetxs: # Test #13 - Test #16 self.send_blocks([self.create_test_block([tx])], success=False) bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']] for tx in bip68heighttxs: # Test #17 - Test #20 self.send_blocks([self.create_test_block([tx])], success=False) # Advance one block to 581 test_blocks = self.generate_blocks(1) # Test #21 self.send_blocks(test_blocks,) # Height txs should fail and time txs should now pass 9 * 600 > 10 * # 512 bip68success_txs.extend(bip68timetxs) # Test #22 self.send_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) for tx in bip68heighttxs: # Test #23 - Test #26 self.send_blocks([self.create_test_block([tx])], success=False) # Advance one block to 582 test_blocks = self.generate_blocks(1) # Test #27 self.send_blocks(test_blocks) # All BIP 68 txs should pass bip68success_txs.extend(bip68heighttxs) # Test #28 self.send_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("BIP 112 tests") self.log.info("Test version 1 txs") # -1 OP_CSV tx should fail # Test #29 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [bip112tx_special_v1])], success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, # version 1 txs should still pass success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']] success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']] # Test #30 self.send_blocks( [self.create_test_block_spend_utxos(self.nodes[0], success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, # version 1 txs should now fail fail_txs = 
all_rlt_txs(bip112txs_vary_nSequence_v1) fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1) fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']] fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']] for tx in fail_txs: # Test #31 - Test #78 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [tx])], success=False) self.log.info("Test version 2 txs") # -1 OP_CSV tx should fail # Test #79 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [bip112tx_special_v2])], success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, # version 2 txs should pass success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']] success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']] # Test #80 self.send_blocks( [self.create_test_block_spend_utxos(self.nodes[0], success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all # remaining txs ## # All txs with nSequence 9 should fail either due to earlier mismatch # or failing the CSV check fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2) fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']] for tx in fail_txs: # Test #81 - Test #104 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [tx])], success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']] for tx in fail_txs: # Test #105 - Test #112 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [tx])], success=False) # If sequencelock types mismatch, tx should fail fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']] fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']] for tx in fail_txs: # Test #113 - Test #120 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [tx])], success=False) # Remaining txs should pass, just test masking works properly success_txs = [ tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']] success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']] # Test #121 self.send_blocks([self.create_test_block(success_txs)]) # Spending the previous block utxos requires a difference of 10 blocks (nSequence = 10). # Generate 9 blocks then spend in the 10th block = self.nodes[0].getbestblockhash() self.last_block_time += 600 self.tip = int(f"0x{block}", 0) self.tipheight += 1 # Test #122 self.send_blocks(self.generate_blocks(9)) spend_txs = [] for tx in success_txs: raw_tx = spend_tx(self.nodes[0], tx, self.nodeaddress) raw_tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME raw_tx.rehash() spend_txs.append(raw_tx) # Test #123 self.send_blocks([self.create_test_block(spend_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Additional test, checking that comparison of two time types works # properly time_txs = [] for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]: signtx = sign_transaction(self.nodes[0], tx) time_txs.append(signtx) # Test #124 self.send_blocks([self.create_test_block(time_txs)]) # Spending the previous block utxos requires a block time difference of # at least 10 * 512s (nSequence = 10).
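The arithmetic behind this comment and the one that follows, spelled out (the test spaces blocks 600 seconds apart):

    BLOCK_SPACING = 600        # seconds between blocks in this test
    TIME_LOCK = 10 * 512       # nSequence = 10 with the type flag set
    # 8 blocks are not enough, 9 are: 4800 < 5120 < 5400
    assert 8 * BLOCK_SPACING < TIME_LOCK < 9 * BLOCK_SPACING
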
# Generate 8 blocks then spend in the 9th (9 * 600 > 10 * 512) block = self.nodes[0].getbestblockhash() self.last_block_time += 600 self.tip = int(f"0x{block}", 0) self.tipheight += 1 # Test #125 self.send_blocks(self.generate_blocks(8)) spend_txs = [] for tx in time_txs: raw_tx = spend_tx(self.nodes[0], tx, self.nodeaddress) raw_tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG raw_tx.rehash() spend_txs.append(raw_tx) # Test #126 self.send_blocks([self.create_test_block(spend_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # TODO: Test empty stack fails if __name__ == '__main__': BIP68_112_113Test().main() diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py index 3304ad9ab..2131bb534 100755 --- a/test/functional/feature_dbcrash.py +++ b/test/functional/feature_dbcrash.py @@ -1,332 +1,325 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test recovery from a crash during chainstate writing. - 4 nodes * node0, node1, and node2 will have different dbcrash ratios, and different dbcache sizes * node3 will be a regular node, with no crashing. * The nodes will not connect to each other. - use default test framework starting chain. initialize starting_tip_height to tip height. - Main loop: * generate lots of transactions on node3, enough to fill up a block. * uniformly randomly pick a tip height from starting_tip_height to tip_height; with probability 1/(height_difference+4), invalidate this block. * mine enough blocks to overtake tip_height at start of loop. * for each node in [node0,node1,node2]: - for each mined block: * submit block to node * if node crashed on/after submitting: - restart until recovery succeeds - check that utxo matches node3 using gettxoutsetinfo""" import errno import http.client import random import time from test_framework.blocktools import create_confirmed_utxos from test_framework.cdefs import DEFAULT_MAX_BLOCK_SIZE -from test_framework.messages import ( - XEC, - COutPoint, - CTransaction, - CTxIn, - CTxOut, - ToHex, -) +from test_framework.messages import XEC, COutPoint, CTransaction, CTxIn, CTxOut, ToHex from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal class ChainstateWriteCrashTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 4 self.rpc_timeout = 480 self.supports_cli = False # Set -maxmempool=0 to turn off mempool memory sharing with dbcache # Set -rpcservertimeout=900 to reduce socket disconnects in this # long-running test self.base_args = ["-limitdescendantsize=0", "-maxmempool=0", "-rpcservertimeout=900", "-dbbatchsize=200000", "-noparkdeepreorg"] # Set different crash ratios and cache sizes. Note that not all of # -dbcache goes to the in-memory coins cache. self.node0_args = ["-dbcrashratio=8", "-dbcache=4"] + self.base_args self.node1_args = ["-dbcrashratio=16", "-dbcache=8"] + self.base_args self.node2_args = ["-dbcrashratio=24", "-dbcache=16"] + self.base_args # Node3 is a normal node with default args, except will mine full blocks # and non-standard txs (e.g. 
txs with "dust" outputs) self.node3_args = [ f"-blockmaxsize={DEFAULT_MAX_BLOCK_SIZE}", "-acceptnonstdtxn"] self.extra_args = [self.node0_args, self.node1_args, self.node2_args, self.node3_args] def skip_test_if_missing_module(self): self.skip_if_no_wallet() def setup_network(self): self.add_nodes(self.num_nodes, extra_args=self.extra_args) self.start_nodes() self.import_deterministic_coinbase_privkeys() # Leave them unconnected, we'll use submitblock directly in this test def restart_node(self, node_index, expected_tip): """Start up a given node id, wait for the tip to reach the given block hash, and calculate the utxo hash. Exceptions on startup should indicate node crash (due to -dbcrashratio), in which case we try again. Give up after 120 seconds. Returns the utxo hash of the given node.""" time_start = time.time() while time.time() - time_start < 120: try: # Any of these RPC calls could throw due to node crash self.start_node(node_index) self.nodes[node_index].waitforblock(expected_tip) utxo_hash = self.nodes[node_index].gettxoutsetinfo()[ 'hash_serialized'] return utxo_hash except Exception: # An exception here should mean the node is about to crash. # If bitcoind exits, then try again. wait_for_node_exit() # should raise an exception if bitcoind doesn't exit. self.wait_for_node_exit(node_index, timeout=15) self.crashed_on_restart += 1 time.sleep(1) # If we got here, bitcoind isn't coming back up on restart. Could be a # bug in bitcoind, or we've gotten unlucky with our dbcrash ratio -- # perhaps we generated a test case that blew up our cache? # TODO: If this happens a lot, we should try to restart without -dbcrashratio # and make sure that recovery happens. raise AssertionError( f"Unable to successfully restart node {node_index} in allotted time") def submit_block_catch_error(self, node_index, block): """Try submitting a block to the given node. Catch any exceptions that indicate the node has crashed. Returns true if the block was submitted successfully; false otherwise.""" try: self.nodes[node_index].submitblock(block) return True except (http.client.CannotSendRequest, http.client.RemoteDisconnected) as e: self.log.debug( f"node {node_index} submitblock raised exception: {e}") return False except OSError as e: self.log.debug( f"node {node_index} submitblock raised OSError exception: errno={e.errno}") if e.errno in [errno.EPIPE, errno.ECONNREFUSED, errno.ECONNRESET]: # The node has likely crashed return False else: # Unexpected exception, raise raise def sync_node3blocks(self, block_hashes): """Use submitblock to sync node3's chain with the other nodes If submitblock fails, restart the node and get the new utxo hash. If any nodes crash while updating, we'll compare utxo hashes to ensure recovery was successful.""" node3_utxo_hash = self.nodes[3].gettxoutsetinfo()['hash_serialized'] # Retrieve all the blocks from node3 blocks = [] for block_hash in block_hashes: blocks.append( [block_hash, self.nodes[3].getblock(block_hash, False)]) # Deliver each block to each other node for i in range(3): nodei_utxo_hash = None self.log.debug(f"Syncing blocks to node {i}") for (block_hash, block) in blocks: # Get the block from node3, and submit to node_i self.log.debug(f"submitting block {block_hash}") if not self.submit_block_catch_error(i, block): # TODO: more carefully check that the crash is due to -dbcrashratio # (change the exit code perhaps, and check that here?)
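Stripped of the RPC details, restart_node above is a retry-until-deadline loop; a generic sketch of the pattern (the names here are illustrative, not part of the framework):

    import time

    def restart_until_synced(start, wait_exit, deadline=120):
        """Keep restarting while startup RPCs fail; give up after `deadline` s."""
        t0 = time.time()
        while time.time() - t0 < deadline:
            try:
                return start()     # may raise if the node crashes again
            except Exception:
                wait_exit()        # make sure the old process really exited
                time.sleep(1)
        raise AssertionError("node did not come back within the deadline")
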
self.wait_for_node_exit(i, timeout=30) self.log.debug( f"Restarting node {i} after block hash {block_hash}") nodei_utxo_hash = self.restart_node(i, block_hash) assert nodei_utxo_hash is not None self.restart_counts[i] += 1 else: # Clear it out after successful submitblock calls -- the cached # utxo hash will no longer be correct nodei_utxo_hash = None # Check that the utxo hash matches node3's utxo set # NOTE: we only check the utxo set if we had to restart the node # after the last block submitted: # - checking the utxo hash causes a cache flush, which we don't # want to do every time; so # - we only update the utxo cache after a node restart, since flushing # the cache is a no-op at that point if nodei_utxo_hash is not None: self.log.debug( f"Checking txoutsetinfo matches for node {i}") assert_equal(nodei_utxo_hash, node3_utxo_hash) def verify_utxo_hash(self): """Verify that the utxo hash of each node matches node3. Restart any nodes that crash while querying.""" node3_utxo_hash = self.nodes[3].gettxoutsetinfo()['hash_serialized'] self.log.info("Verifying utxo hash matches for all nodes") for i in range(3): try: nodei_utxo_hash = self.nodes[i].gettxoutsetinfo()[ 'hash_serialized'] except OSError: # probably a crash on db flushing nodei_utxo_hash = self.restart_node( i, self.nodes[3].getbestblockhash()) assert_equal(nodei_utxo_hash, node3_utxo_hash) def generate_small_transactions(self, node, count, utxo_list): FEE = 1000 # TODO: replace this with node relay fee based calculation num_transactions = 0 random.shuffle(utxo_list) while len(utxo_list) >= 2 and num_transactions < count: tx = CTransaction() input_amount = 0 for _ in range(2): utxo = utxo_list.pop() tx.vin.append( CTxIn(COutPoint(int(utxo['txid'], 16), utxo['vout']))) input_amount += int(utxo['amount'] * XEC) output_amount = (input_amount - FEE) // 3 if output_amount <= 0: # Sanity check -- if we chose inputs that are too small, skip continue for _ in range(3): tx.vout.append( CTxOut(output_amount, bytes.fromhex(utxo['scriptPubKey']))) # Sign and send the transaction to get into the mempool tx_signed_hex = node.signrawtransactionwithwallet(ToHex(tx))['hex'] node.sendrawtransaction(tx_signed_hex) num_transactions += 1 def run_test(self): # Track test coverage statistics self.restart_counts = [0, 0, 0] # Track the restarts for nodes 0-2 self.crashed_on_restart = 0 # Track count of crashes during recovery # Start by creating a lot of utxos on node3 initial_height = self.nodes[3].getblockcount() utxo_list = create_confirmed_utxos( self, self.nodes[3], 5000, sync_fun=self.no_op) self.log.info(f"Prepped {len(utxo_list)} utxo entries") # Sync these blocks with the other nodes block_hashes_to_sync = [] for height in range(initial_height + 1, self.nodes[3].getblockcount() + 1): block_hashes_to_sync.append(self.nodes[3].getblockhash(height)) self.log.debug( f"Syncing {len(block_hashes_to_sync)} blocks with other nodes") # Syncing the blocks could cause nodes to crash, so the test begins # here. self.sync_node3blocks(block_hashes_to_sync) starting_tip_height = self.nodes[3].getblockcount() # Set mock time to the last block time. This will allow us to increase # the time at each loop so the block hash will always differ for the # same block height, and avoid duplication. # Note that the current time can be behind the block time due to the # way the miner sets the block time. 
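The docstring's reorg rule shows up in the main loop below; isolated as a sketch, the tip is invalidated with probability 1/4, its parent with 1/5, and so on (the function name is illustrative):

    import random

    def pick_reorg_point(starting_height, current_height):
        """Return a height to invalidate, or None to extend the tip."""
        h = random.randint(starting_height, current_height)
        if h > starting_height and random.random() < 1.0 / (current_height + 4 - h):
            return h
        return None
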
tip = self.nodes[3].getbestblockhash() block_time = self.nodes[3].getblockheader(tip)['time'] self.nodes[3].setmocktime(block_time) # Main test loop: # each time through the loop, generate a bunch of transactions, # and then either mine a single new block on the tip, or some-sized # reorg. for i in range(40): block_time += 10 self.nodes[3].setmocktime(block_time) self.log.info( f"Iteration {i}, generating 2500 transactions {self.restart_counts}") # Generate a bunch of small-ish transactions self.generate_small_transactions(self.nodes[3], 2500, utxo_list) # Pick a random block between current tip, and starting tip current_height = self.nodes[3].getblockcount() random_height = random.randint(starting_tip_height, current_height) self.log.debug( f"At height {current_height}, considering height {random_height}") if random_height > starting_tip_height: # Randomly reorg from this point with some probability (1/4 for # tip, 1/5 for tip-1, ...) if random.random() < 1.0 / (current_height + 4 - random_height): self.log.debug( f"Invalidating block at height {random_height}") self.nodes[3].invalidateblock( self.nodes[3].getblockhash(random_height)) # Now generate new blocks until we pass the old tip height self.log.debug("Mining longer tip") block_hashes = [] while current_height + 1 > self.nodes[3].getblockcount(): block_hashes.extend( self.generatetoaddress( self.nodes[3], nblocks=min(10, current_height + 1 - self.nodes[3].getblockcount()), # new address to avoid mining a block that has just been # invalidated address=self.nodes[3].getnewaddress(), sync_fun=self.no_op, )) self.log.debug(f"Syncing {len(block_hashes)} new blocks...") self.sync_node3blocks(block_hashes) utxo_list = self.nodes[3].listunspent() self.log.debug(f"Node3 utxo count: {len(utxo_list)}") # Check that the utxo hashes agree with node3 # Useful side effect: each utxo cache gets flushed here, so that we # won't get crashes on shutdown at the end of the test. self.verify_utxo_hash() # Check the test coverage self.log.info( f"Restarted nodes: {self.restart_counts}; " f"crashes on restart: {self.crashed_on_restart}") # If no nodes were restarted, we didn't test anything. assert self.restart_counts != [0, 0, 0] # Make sure we tested the case of crash-during-recovery. assert self.crashed_on_restart > 0 # Warn if any of the nodes escaped restart. for i in range(3): if self.restart_counts[i] == 0: self.log.warning( f"Node {i} never crashed during utxo flush!") if __name__ == "__main__": ChainstateWriteCrashTest().main() diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py index abb9bc527..e0b526739 100755 --- a/test/functional/feature_dersig.py +++ b/test/functional/feature_dersig.py @@ -1,116 +1,112 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test BIP66 (DER SIG). Test that the DERSIG soft-fork activates at (regtest) height 1251. 
""" -from test_framework.blocktools import ( - create_block, - create_coinbase, - create_transaction, -) +from test_framework.blocktools import create_block, create_coinbase, create_transaction from test_framework.messages import msg_block from test_framework.p2p import P2PInterface from test_framework.script import CScript from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal DERSIG_HEIGHT = 1251 # A canonical signature consists of: # <30> <02> <02> def unDERify(tx): """ Make the signature in vin 0 of a tx non-DER-compliant, by adding padding after the S-value. """ scriptSig = CScript(tx.vin[0].scriptSig) newscript = [] for i in scriptSig: if (len(newscript) == 0): newscript.append(i[0:-1] + b'\0' + i[-1:]) else: newscript.append(i) tx.vin[0].scriptSig = CScript(newscript) class BIP66Test(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [['-whitelist=noban@127.0.0.1']] self.setup_clean_chain = True self.rpc_timeout = 240 def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): peer = self.nodes[0].add_p2p_connection(P2PInterface()) self.log.info(f"Mining {DERSIG_HEIGHT - 1} blocks") self.coinbase_txids = [self.nodes[0].getblock( b)['tx'][0] for b in self.generate(self.nodes[0], DERSIG_HEIGHT - 1)] self.nodeaddress = self.nodes[0].getnewaddress() self.log.info("Test that blocks must now be at least version 3") tip = self.nodes[0].getbestblockhash() block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1 block = create_block( int(tip, 16), create_coinbase(DERSIG_HEIGHT), block_time) block.nVersion = 2 block.rehash() block.solve() with self.nodes[0].assert_debug_log(expected_msgs=[f'{block.hash}, bad-version(0x00000002)']): peer.send_and_ping(msg_block(block)) assert_equal(self.nodes[0].getbestblockhash(), tip) peer.sync_with_ping() self.log.info( "Test that transactions with non-DER signatures cannot appear in a block") block.nVersion = 3 spendtx = create_transaction(self.nodes[0], self.coinbase_txids[1], self.nodeaddress, amount=1000000) unDERify(spendtx) spendtx.rehash() # First we show that this tx is valid except for DERSIG by getting it # rejected from the mempool for exactly that reason. assert_equal( [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': 'mandatory-script-verify-flag-failed (Non-canonical DER signature)'}], self.nodes[0].testmempoolaccept( rawtxs=[spendtx.serialize().hex()], maxfeerate=0) ) # Now we verify that a block with this transaction is also invalid. 
block.vtx.append(spendtx) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() with self.nodes[0].assert_debug_log(expected_msgs=[f'ConnectBlock {block.hash} failed, blk-bad-inputs']): peer.send_and_ping(msg_block(block)) assert_equal(self.nodes[0].getbestblockhash(), tip) peer.sync_with_ping() self.log.info( "Test that a version 3 block with a DERSIG-compliant transaction is accepted") block.vtx[1] = create_transaction(self.nodes[0], self.coinbase_txids[1], self.nodeaddress, amount=1.0) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() peer.send_and_ping(msg_block(block)) assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256) if __name__ == '__main__': BIP66Test().main() diff --git a/test/functional/feature_deterministic_chain_setup.py b/test/functional/feature_deterministic_chain_setup.py index 0ad0b4f15..2bc82f4d4 100644 --- a/test/functional/feature_deterministic_chain_setup.py +++ b/test/functional/feature_deterministic_chain_setup.py @@ -1,115 +1,108 @@ #!/usr/bin/env python3 # Copyright (c) 2022 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """This test reproduces the unittest deterministic chain setup and verifies the checkpoints and coinstatindexes.""" from test_framework.key import ECKey -from test_framework.messages import ( - COIN, - CBlock, - COutPoint, - CTransaction, - CTxIn, - CTxOut, -) +from test_framework.messages import COIN, CBlock, COutPoint, CTransaction, CTxIn, CTxOut from test_framework.script import OP_CHECKSIG, CScript, CScriptNum, CScriptOp from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal INITIAL_MOCKTIME = 1598887952 def get_coinbase_scriptsig(height: int) -> bytes: OP_1 = 0x51 if height <= 16: bip34_coinbase_height = bytes([OP_1 + height - 1]) else: bip34_coinbase_height = CScriptNum.encode(CScriptNum(height)) extra_nonce = CScriptNum.encode(CScriptNum(1)) excessive_blocksize_sig = CScriptOp.encode_op_pushdata(b'/EB32.0/') return bip34_coinbase_height + extra_nonce + excessive_blocksize_sig def get_coinbase(height: int, pubkey: bytes) -> CTransaction: coinbase = CTransaction() coinbase.nVersion = 2 coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), get_coinbase_scriptsig(height), 0xffffffff)) coinbaseoutput = CTxOut() coinbaseoutput.nValue = 50 * COIN regtest_halvings = int(height / 150) coinbaseoutput.nValue >>= regtest_halvings coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG]) coinbase.vout = [coinbaseoutput] coinbase.calc_sha256() return coinbase def get_empty_block(height: int, base_block_hash: str, block_time: int, coinbase_pubkey: bytes) -> CBlock: block = CBlock() block.nVersion = 0x20000000 block.nTime = block_time block.hashPrevBlock = int(base_block_hash, 16) # difficulty retargeting is disabled in REGTEST chainparams block.nBits = 0x207fffff block.vtx.append(get_coinbase(height, coinbase_pubkey)) block.hashMerkleRoot = block.calc_merkle_root() block.solve() return block class DeterministicChainSetupTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True def run_test(self): node = self.nodes[0] genesis_hash = node.getbestblockhash() coinbase_key = ECKey() coinbase_key.set(31 * b"\x00" + b"\x01", compressed=True) coinbase_pubkey = coinbase_key.get_pubkey().get_bytes() tip = genesis_hash chain_height = 1 mock_time = INITIAL_MOCKTIME def 
mine_blocks(num_blocks: int): nonlocal tip nonlocal chain_height nonlocal mock_time for _ in range(num_blocks): block = get_empty_block(chain_height, tip, mock_time, coinbase_pubkey) assert node.submitblock(block.serialize().hex()) is None tip = node.getbestblockhash() chain_height += 1 mock_time += 1 self.log.info( "Reproduce the assertion in the TestChain100Setup constructor.") mine_blocks(100) assert_equal(tip, "7487ae41496da318b430ad04cc5039507a9365bdb26275d79b3fc148c6eea1e9") self.log.info("Check m_assumeutxo_data at height 110.") mine_blocks(10) assert_equal(node.getblockchaininfo()["blocks"], 110) assert_equal(node.gettxoutsetinfo()["hash_serialized"], "ff755939f6fd81bf966e2f347f5d3660d6239334050eb557a6f005d7d8184ea9") self.log.info("Check m_assumeutxo_data at height 210.") mine_blocks(100) assert_equal(node.getblockchaininfo()["blocks"], 210) assert_equal(node.gettxoutsetinfo()["hash_serialized"], "d6089fa8d2100926326cacdd452231e30bb4e64f07aa5bfec96e055ac2a9a87a") if __name__ == '__main__': DeterministicChainSetupTest().main() diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py index aafdc7b45..ff2601c6a 100755 --- a/test/functional/interface_zmq.py +++ b/test/functional/interface_zmq.py @@ -1,681 +1,678 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the ZMQ notification interface.""" import struct from io import BytesIO from time import sleep -from test_framework.address import ( - ADDRESS_ECREG_P2SH_OP_TRUE, - ADDRESS_ECREG_UNSPENDABLE, -) +from test_framework.address import ADDRESS_ECREG_P2SH_OP_TRUE, ADDRESS_ECREG_UNSPENDABLE from test_framework.blocktools import ( create_block, create_coinbase, make_conform_to_ctor, ) from test_framework.messages import CTransaction, FromHex, hash256 from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error # Test may be skipped and not have zmq installed try: import zmq except ImportError: pass def hash256_reversed(byte_str): return hash256(byte_str)[::-1] class ZMQSubscriber: def __init__(self, socket, topic): # no sequence number received yet self.sequence = None self.socket = socket self.topic = topic self.socket.setsockopt(zmq.SUBSCRIBE, self.topic) # Receive message from publisher and verify that topic and sequence match def _receive_from_publisher_and_check(self): topic, body, seq = self.socket.recv_multipart() # Topic should match the subscriber topic. assert_equal(topic, self.topic) # Sequence should be incremental. 
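A self-contained sketch of that incremental check: each ZMQ message carries a per-topic little-endian uint32 counter, and every message must follow its predecessor:

    import struct

    class SequenceChecker:
        def __init__(self):
            self.expected = None   # no message seen yet

        def check(self, seq_bytes: bytes):
            received = struct.unpack('<I', seq_bytes)[-1]
            if self.expected is not None:
                assert received == self.expected
            self.expected = received + 1
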
received_seq = struct.unpack('C : Blockhash connected <32-byte hash>D : Blockhash disconnected <32-byte hash>R<8-byte LE uint> : Transactionhash removed from mempool for non-block inclusion reason <32-byte hash>A<8-byte LE uint> : Transactionhash added mempool """ self.log.info("Testing 'sequence' publisher") [seq] = self.setup_zmq_test([("sequence", "tcp://127.0.0.1:28333")]) self.disconnect_nodes(0, 1) # Mempool sequence number starts at 1 seq_num = 1 # Generate 1 block in nodes[0] and receive all notifications dc_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_ECREG_UNSPENDABLE, sync_fun=self.no_op)[0] # Note: We are not notified of any block transactions, coinbase or # mined assert_equal((self.nodes[0].getbestblockhash(), "C", None), seq.receive_sequence()) # Generate 2 blocks in nodes[1] to a different address to ensure # a chain split self.generatetoaddress( self.nodes[1], 2, ADDRESS_ECREG_P2SH_OP_TRUE, sync_fun=self.no_op) # nodes[0] will reorg chain after connecting back nodes[1] self.connect_nodes(0, 1) # Then we receive all block (dis)connect notifications for the # 2 block reorg assert_equal((dc_block, "D", None), seq.receive_sequence()) block_count = self.nodes[1].getblockcount() assert_equal((self.nodes[1].getblockhash(block_count - 1), "C", None), seq.receive_sequence()) assert_equal((self.nodes[1].getblockhash(block_count), "C", None), seq.receive_sequence()) # Rest of test requires wallet functionality if self.is_wallet_compiled(): (block_hash, txid_to_be_replaced, replacement_txid ) = self.create_conflicting_tx() self.log.info( "Testing sequence notifications with mempool sequence values") # Should receive the initially broadcasted txid. assert_equal((txid_to_be_replaced, "A", seq_num), seq.receive_sequence()) seq_num += 1 self.log.info("Testing a tx removal notification") # Next we receive a notification for the transaction removal assert_equal((txid_to_be_replaced, "R", seq_num), seq.receive_sequence()) seq_num += 1 # Then we see the block notification assert_equal((block_hash, "C", None), seq.receive_sequence()) # There is no sequence notification for the transaction that was # never in node0's mempool, but it can be found in the block. assert replacement_txid in self.nodes[0].getblock(block_hash)["tx"] self.log.info("Wait for tx from second node") payment_txid = self.nodes[1].sendtoaddress( address=self.nodes[0].getnewaddress(), amount=5_000_000) self.sync_all() assert_equal((payment_txid, "A", seq_num), seq.receive_sequence()) seq_num += 1 # Doesn't get published when mined, make a block and tx to "flush" # the possibility though the mempool sequence number does go up by # the number of transactions removed from the mempool by the block # mining it. 
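That accounting, restated as a sketch (an RPC handle `node` and a mining address `addr` are assumed; the -1 discounts the coinbase, which was never in the mempool):

    before = len(node.getrawmempool())
    block_hash = node.generatetoaddress(1, addr)[0]
    removed = before - len(node.getrawmempool())
    # each removed mempool tx advances the mempool sequence number by one
    assert removed == len(node.getblock(block_hash)["tx"]) - 1
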
mempool_size = len(self.nodes[0].getrawmempool()) c_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_ECREG_UNSPENDABLE)[0] # Make sure the number of mined transactions matches the number of # txs out of mempool mempool_size_delta = mempool_size - \ len(self.nodes[0].getrawmempool()) assert_equal(len(self.nodes[0].getblock(c_block)["tx"]) - 1, mempool_size_delta) seq_num += mempool_size_delta payment_txid_2 = self.nodes[1].sendtoaddress( self.nodes[0].getnewaddress(), 1_000_000) self.sync_all() assert_equal((c_block, "C", None), seq.receive_sequence()) assert_equal((payment_txid_2, "A", seq_num), seq.receive_sequence()) seq_num += 1 # Spot check getrawmempool results that they only show up when # asked for assert isinstance(self.nodes[0].getrawmempool(), list) assert isinstance( self.nodes[0].getrawmempool(mempool_sequence=False), list) assert "mempool_sequence" not in self.nodes[0].getrawmempool( verbose=True) assert_raises_rpc_error( -8, "Verbose results cannot contain mempool sequence values.", self.nodes[0].getrawmempool, True, True) assert_equal(self.nodes[0].getrawmempool( mempool_sequence=True)["mempool_sequence"], seq_num) self.log.info("Testing reorg notifications") # Manually invalidate the last block to test mempool re-entry # N.B. This part could be made more lenient in exact ordering # since it greatly depends on inner-workings of blocks/mempool # during "deep" re-orgs. Probably should "re-construct" # blockchain/mempool state from notifications instead. block_count = self.nodes[0].getblockcount() best_hash = self.nodes[0].getbestblockhash() self.nodes[0].invalidateblock(best_hash) # Bit of room to make sure transaction things happened sleep(2) # Make sure getrawmempool mempool_sequence results aren't "queued" # but immediately reflective of the time they were gathered. 
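The same snapshot semantics drive the mempool-sync recipe exercised later in this file; in sketch form (receive_sequence() and the RPC handle `node` are assumed):

    # 1) snapshot the mempool together with its sequence counter
    snapshot = node.getrawmempool(mempool_sequence=True)
    view = set(snapshot["txids"])
    snap_seq = snapshot["mempool_sequence"]   # next number to be issued
    # 2) drain ZMQ messages that the snapshot already reflects
    while True:
        txhash, label, mem_seq = receive_sequence()
        if mem_seq is None or mem_seq < snap_seq:
            continue
        break
    # 3) this message and everything after it are deltas: "A" adds txhash
    # to view, "R" removes it, "C" removes that block's transactions.
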
assert self.nodes[0].getrawmempool( mempool_sequence=True)["mempool_sequence"] > seq_num assert_equal((payment_txid_2, "R", seq_num), seq.receive_sequence()) seq_num += 1 assert_equal((best_hash, "D", None), seq.receive_sequence()) assert_equal((payment_txid, "A", seq_num), seq.receive_sequence()) seq_num += 1 # Other things may happen but aren't wallet-deterministic so we # don't test for them currently self.nodes[0].reconsiderblock(best_hash) self.generatetoaddress(self.nodes[1], 1, ADDRESS_ECREG_UNSPENDABLE) self.log.info("Evict mempool transaction by block conflict") orig_txid = self.nodes[0].sendtoaddress( address=self.nodes[0].getnewaddress(), amount=1_000_000) # More to be simply mined more_tx = [] for _ in range(5): more_tx.append(self.nodes[0].sendtoaddress( self.nodes[0].getnewaddress(), 100_000)) raw_tx = self.nodes[0].getrawtransaction(orig_txid) block = create_block( int(self.nodes[0].getbestblockhash(), 16), create_coinbase(self.nodes[0].getblockcount() + 1)) tx = FromHex(CTransaction(), raw_tx) block.vtx.append(tx) for txid in more_tx: tx = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid)) block.vtx.append(tx) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() assert_equal(self.nodes[0].submitblock(block.serialize().hex()), None) tip = self.nodes[0].getbestblockhash() assert_equal(int(tip, 16), block.sha256) orig_txid_2 = self.nodes[0].sendtoaddress( address=self.nodes[0].getnewaddress(), amount=1_000_000) # Flush old notifications until evicted tx original entry (hash_str, label, mempool_seq) = seq.receive_sequence() while hash_str != orig_txid: (hash_str, label, mempool_seq) = seq.receive_sequence() mempool_seq += 1 # Added original tx assert_equal(label, "A") # More transactions to be simply mined for i in range(len(more_tx)): assert_equal((more_tx[i], "A", mempool_seq), seq.receive_sequence()) mempool_seq += 1 # Removed RBF tests mempool_seq += 1 assert_equal((tip, "C", None), seq.receive_sequence()) mempool_seq += len(more_tx) # Last tx assert_equal((orig_txid_2, "A", mempool_seq), seq.receive_sequence()) mempool_seq += 1 self.generatetoaddress(self.nodes[0], 1, ADDRESS_ECREG_UNSPENDABLE) # want to make sure we didn't break "consensus" for other tests self.sync_all() def test_mempool_sync(self): """ Use sequence notification plus getrawmempool sequence results to "sync mempool" """ if not self.is_wallet_compiled(): self.log.info("Skipping mempool sync test") return self.log.info("Testing 'mempool sync' usage of sequence notifier") [seq] = self.setup_zmq_test([("sequence", "tcp://127.0.0.1:28333")]) # In-memory counter, should always start at 1 next_mempool_seq = self.nodes[0].getrawmempool( mempool_sequence=True)["mempool_sequence"] assert_equal(next_mempool_seq, 1) # Some transactions have been happening but we aren't consuming # zmq notifications yet or we lost a ZMQ message somehow and want # to start over txids = [] num_txs = 5 for _ in range(num_txs): txids.append(self.nodes[1].sendtoaddress( address=self.nodes[0].getnewaddress(), amount=1_000_000)) self.sync_all() # 1) Consume backlog until we get a mempool sequence number (hash_str, label, zmq_mem_seq) = seq.receive_sequence() while zmq_mem_seq is None: (hash_str, label, zmq_mem_seq) = seq.receive_sequence() assert label == "A" assert hash_str is not None # 2) We need to "seed" our view of the mempool mempool_snapshot = self.nodes[0].getrawmempool(mempool_sequence=True) mempool_view = set(mempool_snapshot["txids"]) get_raw_seq = 
mempool_snapshot["mempool_sequence"] assert_equal(get_raw_seq, 6) # Snapshot may be too old compared to zmq message we read off latest while zmq_mem_seq >= get_raw_seq: sleep(2) mempool_snapshot = self.nodes[0].getrawmempool( mempool_sequence=True) mempool_view = set(mempool_snapshot["txids"]) get_raw_seq = mempool_snapshot["mempool_sequence"] # Things continue to happen in the "interim" while waiting for # snapshot results for _ in range(num_txs): txids.append(self.nodes[0].sendtoaddress( address=self.nodes[0].getnewaddress(), amount=1_000_000)) self.sync_all() self.create_conflicting_tx() self.generatetoaddress(self.nodes[0], 1, ADDRESS_ECREG_UNSPENDABLE) final_txid = self.nodes[0].sendtoaddress( address=self.nodes[0].getnewaddress(), amount=100_000) # 3) Consume ZMQ backlog until we get to "now" for the mempool snapshot while True: if zmq_mem_seq == get_raw_seq - 1: break (hash_str, label, mempool_sequence) = seq.receive_sequence() if mempool_sequence is not None: zmq_mem_seq = mempool_sequence if zmq_mem_seq > get_raw_seq: raise Exception( f"We somehow jumped mempool sequence numbers! " f"zmq_mem_seq: {zmq_mem_seq} > " f"get_raw_seq: {get_raw_seq}") # 4) Moving forward, we apply the delta to our local view # remaining txs + conflict (A, R, C) + 1 block connect + 1 final tx expected_sequence = get_raw_seq for _ in range(num_txs + 3 + 1 + 1): (hash_str, label, mempool_sequence) = seq.receive_sequence() if label == "A": assert hash_str not in mempool_view mempool_view.add(hash_str) expected_sequence = mempool_sequence + 1 elif label == "R": assert hash_str in mempool_view mempool_view.remove(hash_str) expected_sequence = mempool_sequence + 1 elif label == "C": # (Attempt to) remove all txids from known block connects block_txids = self.nodes[0].getblock(hash_str)["tx"][1:] for txid in block_txids: if txid in mempool_view: expected_sequence += 1 mempool_view.remove(txid) elif label == "D": # Not useful for mempool tracking per se continue else: raise Exception("Unexpected ZMQ sequence label!") assert_equal(self.nodes[0].getrawmempool(), [final_txid]) assert_equal( self.nodes[0].getrawmempool( mempool_sequence=True)["mempool_sequence"], expected_sequence) # 5) If you miss a zmq/mempool sequence number, go back to step (2) self.generatetoaddress(self.nodes[0], 1, ADDRESS_ECREG_UNSPENDABLE) def test_multiple_interfaces(self): # Set up two subscribers with different addresses # (note that after the reorg test, syncing would fail due to different # chain lengths on node0 and node1; for this test we only need node0, so # we can disable syncing blocks on the setup) subscribers = self.setup_zmq_test([ ("hashblock", "tcp://127.0.0.1:28334"), ("hashblock", "tcp://127.0.0.1:28335"), ], sync_blocks=False) # Generate 1 block in nodes[0] and receive all notifications self.generatetoaddress( self.nodes[0], 1, ADDRESS_ECREG_UNSPENDABLE, sync_fun=self.no_op) # Should receive the same block hash on both subscribers assert_equal(self.nodes[0].getbestblockhash(), subscribers[0].receive().hex()) assert_equal(self.nodes[0].getbestblockhash(), subscribers[1].receive().hex()) if __name__ == '__main__': ZMQTest().main() diff --git a/test/functional/mempool_limit.py b/test/functional/mempool_limit.py index a8dbb09f3..ce366b958 100755 --- a/test/functional/mempool_limit.py +++ b/test/functional/mempool_limit.py @@ -1,93 +1,90 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or 
http://www.opensource.org/licenses/mit-license.php. """Test mempool limiting and eviction together with the wallet.""" from decimal import Decimal -from test_framework.blocktools import ( - create_confirmed_utxos, - send_big_transactions, -) +from test_framework.blocktools import create_confirmed_utxos, send_big_transactions from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than, assert_raises_rpc_error, ) class MempoolLimitTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 self.extra_args = [[ "-acceptnonstdtxn=1", "-maxmempool=5", "-spendzeroconfchange=0", ]] self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): relayfee = self.nodes[0].getnetworkinfo()['relayfee'] self.log.info('Check that mempoolminfee is minrelaytxfee') assert_equal(self.nodes[0].getmempoolinfo()[ 'minrelaytxfee'], Decimal('10.00')) assert_equal(self.nodes[0].getmempoolinfo()[ 'mempoolminfee'], Decimal('10.00')) txids = [] utxo_groups = 4 utxos = create_confirmed_utxos( self, self.nodes[0], 1 + 30 * utxo_groups) self.log.info('Create a mempool tx that will be evicted') us0 = utxos.pop() inputs = [{"txid": us0["txid"], "vout": us0["vout"]}] outputs = {self.nodes[0].getnewaddress(): 100} tx = self.nodes[0].createrawtransaction(inputs, outputs) # specifically fund this tx with low fee self.nodes[0].settxfee(relayfee) txF = self.nodes[0].fundrawtransaction(tx) # return to automatic fee selection self.nodes[0].settxfee(0) txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex']) txid = self.nodes[0].sendrawtransaction(txFS['hex']) for i in range(utxo_groups): txids.append([]) txids[i] = send_big_transactions( self.nodes[0], utxos[30 * i:30 * i + 30], 30, 10 * (i + 1)) self.log.info('The tx should be evicted by now') assert txid not in self.nodes[0].getrawmempool() txdata = self.nodes[0].gettransaction(txid) # confirmation should still be 0 assert txdata['confirmations'] == 0 self.log.info('Check that mempoolminfee is larger than minrelaytxfee') assert_equal(self.nodes[0].getmempoolinfo()[ 'minrelaytxfee'], Decimal('10.00')) assert_greater_than(self.nodes[0].getmempoolinfo()[ 'mempoolminfee'], Decimal('10.00')) self.log.info('Create a mempool tx that will not pass mempoolminfee') us0 = utxos.pop() inputs = [{"txid": us0["txid"], "vout": us0["vout"]}] outputs = {self.nodes[0].getnewaddress(): 100} tx = self.nodes[0].createrawtransaction(inputs, outputs) # specifically fund this tx with a fee < mempoolminfee, but >= # minrelaytxfee txF = self.nodes[0].fundrawtransaction(tx, {'feeRate': relayfee}) txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex']) assert_raises_rpc_error(-26, "mempool min fee not met", self.nodes[0].sendrawtransaction, txFS['hex']) if __name__ == '__main__': MempoolLimitTest().main() diff --git a/test/functional/mempool_package_limits.py b/test/functional/mempool_package_limits.py index 61316ca42..d1f82b5d1 100755 --- a/test/functional/mempool_package_limits.py +++ b/test/functional/mempool_package_limits.py @@ -1,604 +1,601 @@ #!/usr/bin/env python3 # Copyright (c) 2021 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for limiting mempool and package ancestors/descendants.""" from decimal import Decimal -from test_framework.address import ( - ADDRESS_ECREG_P2SH_OP_TRUE, - SCRIPTSIG_OP_TRUE, -) +from test_framework.address import ADDRESS_ECREG_P2SH_OP_TRUE, SCRIPTSIG_OP_TRUE from test_framework.messages import XEC, CTransaction, FromHex, ToHex from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_tx from test_framework.util import assert_equal from test_framework.wallet import ( bulk_transaction, create_child_with_parents, make_chain, ) FAR_IN_THE_FUTURE = 2000000000 class MempoolPackageLimitsTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.extra_args = [[ # The packages mempool limits are no longer applied after wellington # activation. f'-wellingtonactivationtime={FAR_IN_THE_FUTURE}', ]] def run_test(self): self.log.info("Generate blocks to create UTXOs") node = self.nodes[0] self.privkeys = [node.get_deterministic_priv_key().key] self.address = node.get_deterministic_priv_key().address self.coins = [] # The last 100 coinbase transactions are premature for b in self.generatetoaddress(node, 200, self.address)[:100]: coinbase = node.getblock(blockhash=b, verbosity=2)["tx"][0] self.coins.append({ "txid": coinbase["txid"], "amount": coinbase["vout"][0]["value"], "scriptPubKey": coinbase["vout"][0]["scriptPubKey"], }) self.test_chain_limits() self.test_desc_count_limits() self.test_desc_count_limits_2() self.test_anc_count_limits() self.test_anc_count_limits_2() self.test_anc_count_limits_bushy() # The node will accept our (nonstandard) extra large OP_RETURN outputs self.restart_node(0, extra_args=["-acceptnonstdtxn=1"]) self.test_anc_size_limits() self.test_desc_size_limits() def test_chain_limits_helper(self, mempool_count, package_count): node = self.nodes[0] assert_equal(0, node.getmempoolinfo()["size"]) first_coin = self.coins.pop() spk = None txid = first_coin["txid"] chain_hex = [] chain_txns = [] value = first_coin["amount"] for i in range(mempool_count + package_count): (tx, txhex, value, spk) = make_chain( node, self.address, self.privkeys, txid, value, 0, spk) txid = tx.get_id() if i < mempool_count: node.sendrawtransaction(txhex) else: chain_hex.append(txhex) chain_txns.append(tx) testres_too_long = node.testmempoolaccept(rawtxs=chain_hex) for txres in testres_too_long: assert_equal(txres["package-error"], "package-mempool-limits") # Clear mempool and check that the package passes now self.generate(node, 1) assert all([res["allowed"] is True for res in node.testmempoolaccept(rawtxs=chain_hex)]) def test_chain_limits(self): """Create chains from mempool and package transactions that are longer than 50, but only if both in-mempool and in-package transactions are considered together. This checks that both mempool and in-package transactions are taken into account when calculating ancestors/descendant limits. """ self.log.info( "Check that in-package ancestors count for mempool ancestor limits") self.test_chain_limits_helper(mempool_count=49, package_count=2) self.test_chain_limits_helper(mempool_count=2, package_count=49) self.test_chain_limits_helper(mempool_count=26, package_count=26) def test_desc_count_limits(self): """Create an 'A' shaped package with 49 transactions in the mempool and 2 in the package: M1 ^ ^ M2a M2b . . . . 
M25a M25b ^ ^ Pa Pb The top ancestor in the package exceeds descendant limits but only if the in-mempool and in-package descendants are all considered together (49 including in-mempool descendants and 51 including both package transactions). """ node = self.nodes[0] assert_equal(0, node.getmempoolinfo()["size"]) self.log.info( "Check that in-mempool and in-package descendants are calculated properly in packages") # Top parent in mempool, M1 first_coin = self.coins.pop() # Deduct reasonable fee and make 2 outputs parent_value = (first_coin["amount"] - Decimal("200.00")) / 2 inputs = [{"txid": first_coin["txid"], "vout": 0}] outputs = [{self.address: parent_value}, {ADDRESS_ECREG_P2SH_OP_TRUE: parent_value}] rawtx = node.createrawtransaction(inputs, outputs) parent_signed = node.signrawtransactionwithkey( hexstring=rawtx, privkeys=self.privkeys) assert parent_signed["complete"] parent_tx = FromHex(CTransaction(), parent_signed["hex"]) parent_txid = parent_tx.rehash() node.sendrawtransaction(parent_signed["hex"]) package_hex = [] # Chain A spk = parent_tx.vout[0].scriptPubKey.hex() value = parent_value txid = parent_txid for i in range(25): (tx, txhex, value, spk) = make_chain( node, self.address, self.privkeys, txid, value, 0, spk) txid = tx.get_id() if i < 24: # M2a... M25a node.sendrawtransaction(txhex) else: # Pa package_hex.append(txhex) # Chain B value = parent_value - Decimal("100.00") rawtx_b = node.createrawtransaction( [{"txid": parent_txid, "vout": 1}], {self.address: value}) # M2b tx_child_b = FromHex(CTransaction(), rawtx_b) tx_child_b.vin[0].scriptSig = SCRIPTSIG_OP_TRUE pad_tx(tx_child_b) tx_child_b_hex = ToHex(tx_child_b) node.sendrawtransaction(tx_child_b_hex) spk = tx_child_b.vout[0].scriptPubKey.hex() txid = tx_child_b.rehash() for i in range(24): (tx, txhex, value, spk) = make_chain( node, self.address, self.privkeys, txid, value, 0, spk) txid = tx.get_id() if i < 23: # M3b... M25b node.sendrawtransaction(txhex) else: # Pb package_hex.append(txhex) assert_equal(49, node.getmempoolinfo()["size"]) assert_equal(2, len(package_hex)) testres_too_long = node.testmempoolaccept(rawtxs=package_hex) for txres in testres_too_long: assert_equal(txres["package-error"], "package-mempool-limits") # Clear mempool and check that the package passes now self.generate(node, 1) assert all([res["allowed"] is True for res in node.testmempoolaccept(rawtxs=package_hex)]) def test_desc_count_limits_2(self): """Create a Package with 49 transactions in mempool and 2 transactions in package: M1 ^ ^ M2 ^ . ^ . ^ . ^ M49 ^ ^ P1 ^ P2 P1 has M1 as a mempool ancestor, P2 has no in-mempool ancestors, but when combined P2 has M1 as an ancestor and M1 exceeds descendant_limits (48 in-mempool descendants + 2 in-package descendants, a total of 51 including itself). 
""" node = self.nodes[0] package_hex = [] # M1 first_coin_a = self.coins.pop() # Deduct reasonable fee and make 2 outputs parent_value = (first_coin_a["amount"] - Decimal('200.0')) / 2 inputs = [{"txid": first_coin_a["txid"], "vout": 0}] outputs = [{self.address: parent_value}, {ADDRESS_ECREG_P2SH_OP_TRUE: parent_value}] rawtx = node.createrawtransaction(inputs, outputs) parent_signed = node.signrawtransactionwithkey( hexstring=rawtx, privkeys=self.privkeys) assert parent_signed["complete"] parent_tx = FromHex(CTransaction(), parent_signed["hex"]) pad_tx(parent_tx) parent_txid = parent_tx.rehash() node.sendrawtransaction(parent_signed["hex"]) # Chain M2...M49 spk = parent_tx.vout[0].scriptPubKey.hex() value = parent_value txid = parent_txid for _ in range(48): (tx, txhex, value, spk) = make_chain( node, self.address, self.privkeys, txid, value, 0, spk) pad_tx(tx) txid = tx.hash node.sendrawtransaction(txhex) # P1 value_p1 = parent_value - Decimal('100') rawtx_p1 = node.createrawtransaction( [{"txid": parent_txid, "vout": 1}], [{self.address: value_p1}]) tx_child_p1 = FromHex(CTransaction(), rawtx_p1) tx_child_p1.vin[0].scriptSig = SCRIPTSIG_OP_TRUE pad_tx(tx_child_p1) tx_child_p1_hex = tx_child_p1.serialize().hex() package_hex.append(tx_child_p1_hex) tx_child_p1_spk = tx_child_p1.vout[0].scriptPubKey.hex() # P2 (_, tx_child_p2_hex, _, _) = make_chain(node, self.address, self.privkeys, tx_child_p1.hash, value_p1, 0, tx_child_p1_spk) package_hex.append(tx_child_p2_hex) assert_equal(49, node.getmempoolinfo()["size"]) assert_equal(2, len(package_hex)) testres = node.testmempoolaccept(rawtxs=package_hex) assert_equal(len(testres), len(package_hex)) for txres in testres: assert_equal(txres["package-error"], "package-mempool-limits") # Clear mempool and check that the package passes now self.generate(node, 1) assert all([res["allowed"] for res in node.testmempoolaccept(rawtxs=package_hex)]) def test_anc_count_limits(self): """Create a 'V' shaped chain with 49 transactions in the mempool and 3 in the package: M1a ^ M1b M2a ^ . M2b . . . . M25a M24b ^ ^ Pa Pb ^ ^ Pc The lowest descendant, Pc, exceeds ancestor limits, but only if the in-mempool and in-package ancestors are all considered together. 
""" node = self.nodes[0] assert_equal(0, node.getmempoolinfo()["size"]) package_hex = [] parents_tx = [] values = [] scripts = [] self.log.info( "Check that in-mempool and in-package ancestors are calculated " "properly in packages") # Two chains of 26 & 25 transactions for chain_length in [26, 25]: spk = None top_coin = self.coins.pop() txid = top_coin["txid"] value = top_coin["amount"] for i in range(chain_length): (tx, txhex, value, spk) = make_chain( node, self.address, self.privkeys, txid, value, 0, spk) txid = tx.get_id() if i < chain_length - 1: node.sendrawtransaction(txhex) else: # Save the last transaction for the package package_hex.append(txhex) parents_tx.append(tx) scripts.append(spk) values.append(value) # Child Pc child_hex = create_child_with_parents( node, self.address, self.privkeys, parents_tx, values, scripts) package_hex.append(child_hex) assert_equal(49, node.getmempoolinfo()["size"]) assert_equal(3, len(package_hex)) testres_too_long = node.testmempoolaccept(rawtxs=package_hex) for txres in testres_too_long: assert_equal(txres["package-error"], "package-mempool-limits") # Clear mempool and check that the package passes now self.generate(node, 1) assert all([res["allowed"] is True for res in node.testmempoolaccept(rawtxs=package_hex)]) def test_anc_count_limits_2(self): """Create a 'Y' shaped chain with 49 transactions in the mempool and 2 in the package: M1a ^ M1b M2a ^ . M2b . . . . M25a M24b ^ ^ Pc ^ Pd The lowest descendant, Pc, exceeds ancestor limits, but only if the in-mempool and in-package ancestors are all considered together. """ node = self.nodes[0] assert_equal(0, node.getmempoolinfo()["size"]) parents_tx = [] values = [] scripts = [] self.log.info( "Check that in-mempool and in-package ancestors are calculated properly in packages") # Two chains of 25 & 24 transactions for chain_length in [25, 24]: spk = None top_coin = self.coins.pop() txid = top_coin["txid"] value = top_coin["amount"] for i in range(chain_length): (tx, txhex, value, spk) = make_chain( node, self.address, self.privkeys, txid, value, 0, spk) txid = tx.get_id() node.sendrawtransaction(txhex) if i == chain_length - 1: # last 2 transactions will be the parents of Pc parents_tx.append(tx) values.append(value) scripts.append(spk) # Child Pc pc_hex = create_child_with_parents( node, self.address, self.privkeys, parents_tx, values, scripts) pc_tx = FromHex(CTransaction(), pc_hex) pc_value = sum(values) - Decimal("100.00") pc_spk = pc_tx.vout[0].scriptPubKey.hex() # Child Pd (_, pd_hex, _, _) = make_chain( node, self.address, self.privkeys, pc_tx.get_id(), pc_value, 0, pc_spk) assert_equal(49, node.getmempoolinfo()["size"]) testres_too_long = node.testmempoolaccept(rawtxs=[pc_hex, pd_hex]) for txres in testres_too_long: assert_equal(txres["package-error"], "package-mempool-limits") # Clear mempool and check that the package passes now self.generate(node, 1) assert all([res["allowed"] is True for res in node.testmempoolaccept(rawtxs=[pc_hex, pd_hex])]) def test_anc_count_limits_bushy(self): """Create a tree with 45 transactions in the mempool and 6 in the package: M1...M9 M10...M18 M19...M27 M28...M36 M37...M45 ^ ^ ^ ^ ^ (each with 9 parents) P0 P1 P2 P3 P4 ^ ^ ^ ^ ^ (5 parents) PC Where M(9i+1)...M+(9i+9) are the parents of Pi and P0, P1, P2, P3, and P4 are the parents of PC. P0... P4 individually only have 9 parents each, and PC has no in-mempool parents. But combined, PC has 50 in-mempool and in-package parents. 
""" node = self.nodes[0] assert_equal(0, node.getmempoolinfo()["size"]) package_hex = [] parent_txns = [] parent_values = [] scripts = [] # Make package transactions P0 ... P4 for _ in range(5): gp_tx = [] gp_values = [] gp_scripts = [] # Make mempool transactions M(9i+1)...M(9i+9) for _ in range(9): parent_coin = self.coins.pop() value = parent_coin["amount"] txid = parent_coin["txid"] (tx, txhex, value, spk) = make_chain( node, self.address, self.privkeys, txid, value) gp_tx.append(tx) gp_values.append(value) gp_scripts.append(spk) node.sendrawtransaction(txhex) # Package transaction Pi pi_hex = create_child_with_parents( node, self.address, self.privkeys, gp_tx, gp_values, gp_scripts) package_hex.append(pi_hex) pi_tx = FromHex(CTransaction(), pi_hex) parent_txns.append(pi_tx) parent_values.append(Decimal(pi_tx.vout[0].nValue) / XEC) scripts.append(pi_tx.vout[0].scriptPubKey.hex()) # Package transaction PC package_hex.append( create_child_with_parents(node, self.address, self.privkeys, parent_txns, parent_values, scripts)) assert_equal(45, node.getmempoolinfo()["size"]) assert_equal(6, len(package_hex)) testres = node.testmempoolaccept(rawtxs=package_hex) for txres in testres: assert_equal(txres["package-error"], "package-mempool-limits") # Clear mempool and check that the package passes now self.generate(node, 1) assert all([res["allowed"] is True for res in node.testmempoolaccept(rawtxs=package_hex)]) def test_anc_size_limits(self): """Test Case with 2 independent transactions in the mempool and a parent + child in the package, where the package parent is the child of both mempool transactions (30KB each): A B ^ ^ C ^ D The lowest descendant, D, exceeds ancestor size limits, but only if the in-mempool and in-package ancestors are all considered together. 
""" node = self.nodes[0] assert_equal(0, node.getmempoolinfo()["size"]) parents_tx = [] values = [] scripts = [] target_size = 30_000 # 10 sats/B high_fee = Decimal("3000.00") self.log.info( "Check that in-mempool and in-package ancestor size limits are calculated properly in packages") # Mempool transactions A and B for _ in range(2): spk = None top_coin = self.coins.pop() txid = top_coin["txid"] value = top_coin["amount"] (tx, _, _, _) = make_chain( node, self.address, self.privkeys, txid, value, 0, spk, high_fee) bulked_tx = bulk_transaction(tx, node, target_size, self.privkeys) node.sendrawtransaction(ToHex(bulked_tx)) parents_tx.append(bulked_tx) values.append(Decimal(bulked_tx.vout[0].nValue) / XEC) scripts.append(bulked_tx.vout[0].scriptPubKey.hex()) # Package transaction C small_pc_hex = create_child_with_parents( node, self.address, self.privkeys, parents_tx, values, scripts, high_fee) pc_tx = bulk_transaction( FromHex(CTransaction(), small_pc_hex), node, target_size, self.privkeys) pc_value = Decimal(pc_tx.vout[0].nValue) / XEC pc_spk = pc_tx.vout[0].scriptPubKey.hex() pc_hex = ToHex(pc_tx) # Package transaction D (small_pd, _, val, spk) = make_chain( node, self.address, self.privkeys, pc_tx.rehash(), pc_value, 0, pc_spk, high_fee) prevtxs = [{ "txid": pc_tx.get_id(), "vout": 0, "scriptPubKey": spk, "amount": pc_value, }] pd_tx = bulk_transaction( small_pd, node, target_size, self.privkeys, prevtxs) pd_hex = ToHex(pd_tx) assert_equal(2, node.getmempoolinfo()["size"]) testres_too_heavy = node.testmempoolaccept(rawtxs=[pc_hex, pd_hex]) for txres in testres_too_heavy: assert_equal(txres["package-error"], "package-mempool-limits") # Clear mempool and check that the package passes now self.generate(node, 1) assert all([res["allowed"] is True for res in node.testmempoolaccept(rawtxs=[pc_hex, pd_hex])]) def test_desc_size_limits(self): """Create 3 mempool transactions and 2 package transactions (25KB each): Ma ^ ^ Mb Mc ^ ^ Pd Pe The top ancestor in the package exceeds descendant size limits but only if the in-mempool and in-package descendants are all considered together. 
""" node = self.nodes[0] assert_equal(0, node.getmempoolinfo()["size"]) target_size = 21_000 # 10 sats/vB high_fee = Decimal("2100.00") self.log.info( "Check that in-mempool and in-package descendant sizes are calculated properly in packages") # Top parent in mempool, Ma first_coin = self.coins.pop() # Deduct fee and make 2 outputs parent_value = (first_coin["amount"] - high_fee) / 2 inputs = [{"txid": first_coin["txid"], "vout": 0}] outputs = [{self.address: parent_value}, {ADDRESS_ECREG_P2SH_OP_TRUE: parent_value}] rawtx = node.createrawtransaction(inputs, outputs) parent_tx = bulk_transaction( FromHex(CTransaction(), rawtx), node, target_size, self.privkeys) node.sendrawtransaction(ToHex(parent_tx)) package_hex = [] # Two legs (left and right) for j in range(2): # Mempool transaction (Mb and Mc) spk = parent_tx.vout[j].scriptPubKey.hex() value = Decimal(parent_tx.vout[j].nValue) / XEC txid = parent_tx.get_id() prevtxs = [{ "txid": txid, "vout": j, "scriptPubKey": spk, "amount": value, }] if j == 0: # normal key (tx_small, _, _, _) = make_chain( node, self.address, self.privkeys, txid, value, j, spk, high_fee) mempool_tx = bulk_transaction( tx_small, node, target_size, self.privkeys, prevtxs) else: # OP_TRUE inputs = [{"txid": txid, "vout": 1}] outputs = {self.address: value - high_fee} small_tx = FromHex( CTransaction(), node.createrawtransaction(inputs, outputs)) mempool_tx = bulk_transaction( small_tx, node, target_size, None, prevtxs) node.sendrawtransaction(ToHex(mempool_tx)) # Package transaction (Pd and Pe) spk = mempool_tx.vout[0].scriptPubKey.hex() value = Decimal(mempool_tx.vout[0].nValue) / XEC txid = mempool_tx.get_id() (tx_small, _, _, _) = make_chain( node, self.address, self.privkeys, txid, value, 0, spk, high_fee) prevtxs = [{ "txid": txid, "vout": 0, "scriptPubKey": spk, "amount": value, }] package_tx = bulk_transaction( tx_small, node, target_size, self.privkeys, prevtxs) package_hex.append(ToHex(package_tx)) assert_equal(3, node.getmempoolinfo()["size"]) assert_equal(2, len(package_hex)) testres_too_heavy = node.testmempoolaccept(rawtxs=package_hex) for txres in testres_too_heavy: assert_equal(txres["package-error"], "package-mempool-limits") # Clear mempool and check that the package passes now self.generate(node, 1) assert all([res["allowed"] is True for res in node.testmempoolaccept(rawtxs=package_hex)]) if __name__ == "__main__": MempoolPackageLimitsTest().main() diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py index cde48c3ee..43d85c3b6 100755 --- a/test/functional/mempool_packages.py +++ b/test/functional/mempool_packages.py @@ -1,373 +1,369 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test descendant package tracking code.""" from decimal import Decimal from test_framework.p2p import P2PTxInvStore from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import ( - assert_equal, - assert_raises_rpc_error, - satoshi_round, -) +from test_framework.util import assert_equal, assert_raises_rpc_error, satoshi_round # default limits MAX_ANCESTORS = 50 MAX_DESCENDANTS = 50 # custom limits for node1 MAX_ANCESTORS_CUSTOM = 5 FAR_IN_THE_FUTURE = 2000000000 class MempoolPackagesTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 common_params = [ "-maxorphantx=1000", "-deprecatedrpc=mempool_ancestors_descendants", # This test tests mempool ancestor chain limits, which are no longer # enforced after wellington, so we need to force wellington to # activate in the distant future f"-wellingtonactivationtime={FAR_IN_THE_FUTURE}", ] self.extra_args = [ common_params, common_params + [f"-limitancestorcount={MAX_ANCESTORS_CUSTOM}"]] def skip_test_if_missing_module(self): self.skip_if_no_wallet() # Build a transaction that spends parent_txid:vout # Return amount sent def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs): send_value = satoshi_round((value - fee) / num_outputs) inputs = [{'txid': parent_txid, 'vout': vout}] outputs = {} for _ in range(num_outputs): outputs[node.getnewaddress()] = send_value rawtx = node.createrawtransaction(inputs, outputs) signedtx = node.signrawtransactionwithwallet(rawtx) txid = node.sendrawtransaction(signedtx['hex']) fulltx = node.getrawtransaction(txid, 1) # make sure we didn't generate a change output assert len(fulltx['vout']) == num_outputs return (txid, send_value) def run_test(self): # Mine some blocks and have them mature. # keep track of invs peer_inv_store = self.nodes[0].add_p2p_connection(P2PTxInvStore()) self.generate(self.nodes[0], 101) utxo = self.nodes[0].listunspent(10) txid = utxo[0]['txid'] vout = utxo[0]['vout'] value = utxo[0]['amount'] assert 'ancestorcount' not in utxo[0] assert 'ancestorsize' not in utxo[0] assert 'ancestorfees' not in utxo[0] fee = Decimal("100") # MAX_ANCESTORS transactions off a confirmed tx should be fine chain = [] ancestor_size = 0 ancestor_fees = Decimal(0) for i in range(MAX_ANCESTORS): (txid, sent_value) = self.chain_transaction( self.nodes[0], txid, 0, value, fee, 1) value = sent_value chain.append(txid) # Check that listunspent ancestor{count, size, fees} yield the # correct results wallet_unspent = self.nodes[0].listunspent(minconf=0) this_unspent = next( utxo_info for utxo_info in wallet_unspent if utxo_info['txid'] == txid) assert_equal(this_unspent['ancestorcount'], i + 1) ancestor_size += self.nodes[0].getrawtransaction( txid=txid, verbose=True)['size'] assert_equal(this_unspent['ancestorsize'], ancestor_size) ancestor_fees -= self.nodes[0].gettransaction(txid=txid)['fee'] assert_equal(this_unspent['ancestorfees'], ancestor_fees) # Wait until mempool transactions have passed initial broadcast # (sent inv and received getdata) # Otherwise, getrawmempool may be inconsistent with getmempoolentry if # unbroadcast changes in between peer_inv_store.wait_for_broadcast(chain) # Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor # count and fees should look correct mempool = self.nodes[0].getrawmempool(True) assert_equal(len(mempool), MAX_ANCESTORS) descendant_count = 1 descendant_fees = 0 descendant_size = 0 assert_equal(ancestor_size, sum([mempool[tx]['size'] for tx in mempool])) ancestor_count = 
MAX_ANCESTORS assert_equal(ancestor_fees, sum([mempool[tx]['fees']['base'] for tx in mempool])) descendants = [] ancestors = list(chain) for x in reversed(chain): # Check that getmempoolentry is consistent with getrawmempool entry = self.nodes[0].getmempoolentry(x) assert_equal(entry, mempool[x]) # Check that the descendant calculations are correct assert_equal(mempool[x]['descendantcount'], descendant_count) descendant_fees += mempool[x]['fees']['base'] assert_equal( mempool[x]['fees']['modified'], mempool[x]['fees']['base']) assert_equal(mempool[x]['fees']['descendant'], descendant_fees) descendant_size += mempool[x]['size'] assert_equal(mempool[x]['descendantsize'], descendant_size) descendant_count += 1 # Check that ancestor calculations are correct assert_equal(mempool[x]['ancestorcount'], ancestor_count) assert_equal(mempool[x]['fees']['ancestor'], ancestor_fees) assert_equal(mempool[x]['ancestorsize'], ancestor_size) ancestor_size -= mempool[x]['size'] ancestor_fees -= mempool[x]['fees']['base'] ancestor_count -= 1 # Check that parent/child list is correct assert_equal(mempool[x]['spentby'], descendants[-1:]) assert_equal(mempool[x]['depends'], ancestors[-2:-1]) # Check that getmempooldescendants is correct assert_equal(sorted(descendants), sorted( self.nodes[0].getmempooldescendants(x))) # Check getmempooldescendants verbose output is correct for descendant, dinfo in self.nodes[0].getmempooldescendants( x, True).items(): assert_equal(dinfo['depends'], [ chain[chain.index(descendant) - 1]]) if dinfo['descendantcount'] > 1: assert_equal(dinfo['spentby'], [ chain[chain.index(descendant) + 1]]) else: assert_equal(dinfo['spentby'], []) descendants.append(x) # Check that getmempoolancestors is correct ancestors.remove(x) assert_equal(sorted(ancestors), sorted( self.nodes[0].getmempoolancestors(x))) # Check that getmempoolancestors verbose output is correct for ancestor, ainfo in self.nodes[0].getmempoolancestors( x, True).items(): assert_equal(ainfo['spentby'], [ chain[chain.index(ancestor) + 1]]) if ainfo['ancestorcount'] > 1: assert_equal(ainfo['depends'], [ chain[chain.index(ancestor) - 1]]) else: assert_equal(ainfo['depends'], []) # Check that getmempoolancestors/getmempooldescendants correctly handle # verbose=true v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True) assert_equal(len(v_ancestors), len(chain) - 1) for x in v_ancestors.keys(): assert_equal(mempool[x], v_ancestors[x]) assert chain[-1] not in v_ancestors.keys() v_descendants = self.nodes[0].getmempooldescendants(chain[0], True) assert_equal(len(v_descendants), len(chain) - 1) for x in v_descendants.keys(): assert_equal(mempool[x], v_descendants[x]) assert chain[0] not in v_descendants.keys() # Check that ancestor modified fees includes fee deltas from # prioritisetransaction self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=1000) mempool = self.nodes[0].getrawmempool(True) ancestor_fees = 0 for x in chain: ancestor_fees += mempool[x]['fees']['base'] assert_equal(mempool[x]['fees']['ancestor'], ancestor_fees + Decimal('10.00')) # Undo the prioritisetransaction for later tests self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=-1000) # Check that descendant modified fees includes fee deltas from # prioritisetransaction self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=1000) mempool = self.nodes[0].getrawmempool(True) descendant_fees = 0 for x in reversed(chain): descendant_fees += mempool[x]['fees']['base'] assert_equal(mempool[x]['fees']['descendant'], descendant_fees + 
Decimal('10.00')) # Adding one more transaction onto the chain should fail. assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], txid, vout, value, fee, 1) # Check that prioritising a tx before it's added to the mempool works # First clear the mempool by mining a block. self.generate(self.nodes[0], 1) assert_equal(len(self.nodes[0].getrawmempool()), 0) # Prioritise a transaction that has been mined, then add it back to the # mempool by using invalidateblock. self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=2000) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Keep node1's tip synced with node0 self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash()) # Now check that the transaction is in the mempool, with the right # modified fee mempool = self.nodes[0].getrawmempool(True) descendant_fees = 0 for x in reversed(chain): descendant_fees += mempool[x]['fees']['base'] if (x == chain[-1]): assert_equal(mempool[x]['fees']['modified'], mempool[x]['fees']['base'] + satoshi_round(20.00)) assert_equal(mempool[x]['fees']['descendant'], descendant_fees + satoshi_round(20.00)) # Check that node1's mempool is as expected (-> custom ancestor limit) mempool0 = self.nodes[0].getrawmempool(False) mempool1 = self.nodes[1].getrawmempool(False) assert_equal(len(mempool1), MAX_ANCESTORS_CUSTOM) assert set(mempool1).issubset(set(mempool0)) for tx in chain[:MAX_ANCESTORS_CUSTOM]: assert tx in mempool1 # TODO: more detailed check of node1's mempool (fees etc.) # check transaction unbroadcast info (should be false if in both # mempools) mempool = self.nodes[0].getrawmempool(True) for tx in mempool: assert_equal(mempool[tx]['unbroadcast'], False) # TODO: test ancestor size limits # Now test descendant chain limits txid = utxo[1]['txid'] value = utxo[1]['amount'] vout = utxo[1]['vout'] transaction_package = [] tx_children = [] # First create one parent tx with 10 children (txid, sent_value) = self.chain_transaction( self.nodes[0], txid, vout, value, fee, 10) parent_transaction = txid for i in range(10): transaction_package.append( {'txid': txid, 'vout': i, 'amount': sent_value}) # Sign and send up to MAX_DESCENDANTS transactions chained off the # parent tx for _ in range(MAX_DESCENDANTS - 1): utxo = transaction_package.pop(0) (txid, sent_value) = self.chain_transaction( self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10) if utxo['txid'] == parent_transaction: tx_children.append(txid) for j in range(10): transaction_package.append( {'txid': txid, 'vout': j, 'amount': sent_value}) mempool = self.nodes[0].getrawmempool(True) assert_equal(mempool[parent_transaction] ['descendantcount'], MAX_DESCENDANTS) assert_equal(sorted(mempool[parent_transaction] ['spentby']), sorted(tx_children)) for child in tx_children: assert_equal(mempool[child]['depends'], [parent_transaction]) # Sending one more chained transaction will fail utxo = transaction_package.pop(0) assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10) # TODO: check that node1's mempool is as expected # TODO: test descendant size limits # Test reorg handling # First, the basics: self.generate(self.nodes[0], 1) self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash()) self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash()) # Now test the case where node1 has a transaction T in its mempool that # depends on transactions A and B which are in a mined block, and the # block
containing A and B is disconnected, AND B is not accepted back # into node1's mempool because its ancestor count is too high. # Create 8 transactions, like so: # Tx0 -> Tx1 (vout0) # \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7 # # Mine them in the next block, then generate a new tx8 that spends # Tx1 and Tx7, and add to node1's mempool, then disconnect the # last block. # Create tx0 with 2 outputs utxo = self.nodes[0].listunspent() txid = utxo[0]['txid'] value = utxo[0]['amount'] vout = utxo[0]['vout'] send_value = satoshi_round((value - fee) / 2) inputs = [{'txid': txid, 'vout': vout}] outputs = {} for _ in range(2): outputs[self.nodes[0].getnewaddress()] = send_value rawtx = self.nodes[0].createrawtransaction(inputs, outputs) signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) txid = self.nodes[0].sendrawtransaction(signedtx['hex']) tx0_id = txid value = send_value # Create tx1 tx1_id, _ = self.chain_transaction( self.nodes[0], tx0_id, 0, value, fee, 1) # Create tx2-7 vout = 1 txid = tx0_id for _ in range(6): (txid, sent_value) = self.chain_transaction( self.nodes[0], txid, vout, value, fee, 1) vout = 0 value = sent_value # Mine these in a block self.generate(self.nodes[0], 1) # Now generate tx8, with a big fee inputs = [{'txid': tx1_id, 'vout': 0}, {'txid': txid, 'vout': 0}] outputs = {self.nodes[0].getnewaddress(): send_value + value - 4 * fee} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) txid = self.nodes[0].sendrawtransaction(signedtx['hex']) self.sync_mempools() # Now try to disconnect the tip on each node... self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash()) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.sync_blocks() if __name__ == '__main__': MempoolPackagesTest().main() diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py index 4c1dbf89c..c07982736 100755 --- a/test/functional/mining_prioritisetransaction.py +++ b/test/functional/mining_prioritisetransaction.py @@ -1,223 +1,220 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the prioritisetransaction mining RPC.""" import time -from test_framework.blocktools import ( - create_confirmed_utxos, - send_big_transactions, -) +from test_framework.blocktools import create_confirmed_utxos, send_big_transactions # FIXME: review how this test needs to be adapted w.r.t _LEGACY_MAX_BLOCK_SIZE from test_framework.cdefs import LEGACY_MAX_BLOCK_SIZE from test_framework.messages import COIN from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error class PrioritiseTransactionTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 # TODO: remove -txindex. 
Currently required for getrawtransaction call # (called by calculate_fee_from_txid) self.extra_args = [[ "-printpriority=1", "-acceptnonstdtxn=1", "-txindex" ]] * self.num_nodes self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): # Test `prioritisetransaction` required parameters assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction) assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '') assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '', 0) # Test `prioritisetransaction` invalid extra parameters assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '', 0, 0, 0) # Test `prioritisetransaction` invalid `txid` assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].prioritisetransaction, txid='foo', fee_delta=0) assert_raises_rpc_error( -8, "txid must be hexadecimal string (not 'Zd1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000')", self.nodes[0].prioritisetransaction, txid='Zd1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000', fee_delta=0) # Test `prioritisetransaction` invalid `dummy` txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000' assert_raises_rpc_error(-1, "JSON value is not a number as expected", self.nodes[0].prioritisetransaction, txid, 'foo', 0) assert_raises_rpc_error( -8, "Priority is no longer supported, dummy argument to prioritisetransaction must be 0.", self.nodes[0].prioritisetransaction, txid, 1, 0) # Test `prioritisetransaction` invalid `fee_delta` assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].prioritisetransaction, txid=txid, fee_delta='foo') self.relayfee = self.nodes[0].getnetworkinfo()['relayfee'] utxo_count = 90 utxos = create_confirmed_utxos(self, self.nodes[0], utxo_count) txids = [] # Create 3 batches of transactions at 3 different fee rate levels range_size = utxo_count // 3 for i in range(3): txids.append([]) start_range = i * range_size end_range = start_range + range_size txids[i] = send_big_transactions(self.nodes[0], utxos[start_range:end_range], end_range - start_range, 10 * (i + 1)) # Make sure that the size of each group of transactions exceeds # LEGACY_MAX_BLOCK_SIZE -- otherwise the test needs to be revised to create # more transactions. mempool = self.nodes[0].getrawmempool(True) sizes = [0, 0, 0] for i in range(3): for j in txids[i]: assert j in mempool sizes[i] += mempool[j]['size'] # Fail => raise utxo_count assert sizes[i] > LEGACY_MAX_BLOCK_SIZE # add a fee delta to something in the cheapest bucket and make sure it gets mined # also check that a different entry in the cheapest bucket is NOT mined self.nodes[0].prioritisetransaction( txid=txids[0][0], fee_delta=100 * self.nodes[0].calculate_fee_from_txid(txids[0][0])) self.generate(self.nodes[0], 1) mempool = self.nodes[0].getrawmempool() self.log.info("Assert that prioritised transaction was mined") assert txids[0][0] not in mempool assert txids[0][1] in mempool confirmed_transactions = self.nodes[0].getblock( self.nodes[0].getbestblockhash())['tx'] # Pull the highest fee-rate transaction from a block high_fee_tx = confirmed_transactions[1] # Something high-fee should have been mined! assert high_fee_tx is not None # Add a prioritisation before a tx is in the mempool (de-prioritising a # high-fee transaction so that it's now low fee). 
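# (A worked example of the conversion described in the note below, using # an assumed fee of Decimal('-10.00') rather than a value taken from this # test: int(Decimal('-10.00') * COIN) yields a negative satoshi delta that # cancels the fee the transaction already paid, after which # calculate_fee_from_txid() adds the minimum fee back on top.)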
# # NOTE WELL: gettransaction returns the fee as a negative number and # as fractional coins. However, prioritisetransaction expects a # number of satoshis to add or subtract from the actual fee. # Thus the conversion here is simply int(tx_fee*COIN) to remove all fees, and then # we add the minimum fee back. tx_fee = self.nodes[0].gettransaction(high_fee_tx)['fee'] self.nodes[0].prioritisetransaction( txid=high_fee_tx, fee_delta=int(tx_fee * COIN) + self.nodes[0].calculate_fee_from_txid(high_fee_tx)) # Add everything back to mempool self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Check to make sure our high fee rate tx is back in the mempool mempool = self.nodes[0].getrawmempool() assert high_fee_tx in mempool # Now verify the modified-high feerate transaction isn't mined before # the other high fee transactions. Keep mining until our mempool has # decreased by all the high fee size that we calculated above. while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]): self.generate(self.nodes[0], 1, sync_fun=self.no_op) # High fee transaction should not have been mined, but other high fee rate # transactions should have been. mempool = self.nodes[0].getrawmempool() self.log.info( "Assert that de-prioritised transaction is still in mempool") assert high_fee_tx in mempool for x in txids[2]: if (x != high_fee_tx): assert x not in mempool # Create a free transaction. Should be rejected. utxo_list = self.nodes[0].listunspent() assert len(utxo_list) > 0 utxo = utxo_list[0] inputs = [] outputs = {} inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]}) outputs[self.nodes[0].getnewaddress()] = utxo["amount"] raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) tx_hex = self.nodes[0].signrawtransactionwithwallet(raw_tx)["hex"] tx_id = self.nodes[0].decoderawtransaction(tx_hex)["txid"] # This will raise an exception due to min relay fee not being met assert_raises_rpc_error(-26, "min relay fee not met", self.nodes[0].sendrawtransaction, tx_hex) assert tx_id not in self.nodes[0].getrawmempool() # This is a less than 1000-byte transaction, so just set the fee # to be the minimum for a 1000-byte transaction and check that it is # accepted. self.nodes[0].prioritisetransaction( txid=tx_id, fee_delta=int(self.relayfee * COIN)) self.log.info( "Assert that prioritised free transaction is accepted to mempool") assert_equal(self.nodes[0].sendrawtransaction(tx_hex), tx_id) assert tx_id in self.nodes[0].getrawmempool() # Test that calling prioritisetransaction is sufficient to trigger # getblocktemplate to (eventually) return a new block. mock_time = int(time.time()) self.nodes[0].setmocktime(mock_time) template = self.nodes[0].getblocktemplate() self.nodes[0].prioritisetransaction( txid=tx_id, fee_delta=-int(self.relayfee * COIN)) self.nodes[0].setmocktime(mock_time + 10) new_template = self.nodes[0].getblocktemplate() assert template != new_template if __name__ == '__main__': PrioritiseTransactionTest().main() diff --git a/test/functional/p2p_inv_download.py b/test/functional/p2p_inv_download.py index aaefdf6bb..135412b12 100755 --- a/test/functional/p2p_inv_download.py +++ b/test/functional/p2p_inv_download.py @@ -1,483 +1,475 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php.
""" Test inventory download behavior """ import functools import time from test_framework.address import ADDRESS_ECREG_UNSPENDABLE -from test_framework.avatools import ( - avalanche_proof_from_hex, - gen_proof, - wait_for_proof, -) +from test_framework.avatools import avalanche_proof_from_hex, gen_proof, wait_for_proof from test_framework.key import ECKey from test_framework.messages import ( MSG_AVA_PROOF, MSG_TX, MSG_TYPE_MASK, CInv, CTransaction, FromHex, msg_avaproof, msg_inv, msg_notfound, ) from test_framework.p2p import P2PInterface, p2p_lock from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import ( - assert_equal, - assert_raises_rpc_error, - uint256_hex, -) +from test_framework.util import assert_equal, assert_raises_rpc_error, uint256_hex from test_framework.wallet_util import bytes_to_wif class TestP2PConn(P2PInterface): def __init__(self, inv_type): super().__init__() self.inv_type = inv_type self.getdata_count = 0 def on_getdata(self, message): for i in message.inv: if i.type & MSG_TYPE_MASK == self.inv_type: self.getdata_count += 1 class NetConstants: """Constants from net_processing""" def __init__(self, getdata_interval, inbound_peer_delay, overloaded_peer_delay, max_getdata_in_flight, max_peer_announcements, bypass_request_limits_permission_flags, ): self.getdata_interval = getdata_interval self.inbound_peer_delay = inbound_peer_delay self.overloaded_peer_delay = overloaded_peer_delay self.max_getdata_in_flight = max_getdata_in_flight self.max_peer_announcements = max_peer_announcements self.max_getdata_inbound_wait = self.getdata_interval + self.inbound_peer_delay self.bypass_request_limits_permission_flags = bypass_request_limits_permission_flags class TestContext: def __init__(self, inv_type, inv_name, constants): self.inv_type = inv_type self.inv_name = inv_name self.constants = constants def p2p_conn(self): return TestP2PConn(self.inv_type) PROOF_TEST_CONTEXT = TestContext( MSG_AVA_PROOF, "avalanche proof", NetConstants( getdata_interval=60, # seconds inbound_peer_delay=2, # seconds overloaded_peer_delay=2, # seconds max_getdata_in_flight=100, max_peer_announcements=5000, bypass_request_limits_permission_flags="bypass_proof_request_limits", ), ) TX_TEST_CONTEXT = TestContext( MSG_TX, "transaction", NetConstants( getdata_interval=60, # seconds inbound_peer_delay=2, # seconds overloaded_peer_delay=2, # seconds max_getdata_in_flight=100, max_peer_announcements=5000, bypass_request_limits_permission_flags="relay", ), ) # Python test constants NUM_INBOUND = 10 # Common network parameters UNCONDITIONAL_RELAY_DELAY = 2 * 60 def skip(context): def decorator(test): @functools.wraps(test) def wrapper(*args, **kwargs): # Assume the signature is test(self, context) unless context is # passed by name call_context = kwargs.get("context", args[1]) if call_context == context: return lambda *args, **kwargs: None return test(*args, **kwargs) return wrapper return decorator class InventoryDownloadTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.extra_args = [['-avaproofstakeutxodustthreshold=1000000', '-avaproofstakeutxoconfirmations=1', '-avacooldown=0']] * self.num_nodes def test_data_requests(self, context): self.log.info( "Test that we request data from all our peers, eventually") invid = 0xdeadbeef self.log.info("Announce the invid from each incoming peer to node 0") msg = msg_inv([CInv(t=context.inv_type, h=invid)]) for p in self.nodes[0].p2ps: p.send_and_ping(msg) outstanding_peer_index = [i for i in 
range(len(self.nodes[0].p2ps))] def getdata_found(peer_index): p = self.nodes[0].p2ps[peer_index] with p2p_lock: return p.last_message.get( "getdata") and p.last_message["getdata"].inv[-1].hash == invid node_0_mocktime = int(time.time()) while outstanding_peer_index: node_0_mocktime += context.constants.max_getdata_inbound_wait self.nodes[0].setmocktime(node_0_mocktime) self.wait_until(lambda: any(getdata_found(i) for i in outstanding_peer_index)) for i in outstanding_peer_index: if getdata_found(i): outstanding_peer_index.remove(i) self.nodes[0].setmocktime(0) self.log.info("All outstanding peers received a getdata") @skip(PROOF_TEST_CONTEXT) def test_inv_tx(self, context): self.log.info("Generate a transaction on node 0") tx = self.nodes[0].createrawtransaction( inputs=[{ # coinbase "txid": self.nodes[0].getblock(self.nodes[0].getblockhash(1))['tx'][0], "vout": 0 }], outputs={ADDRESS_ECREG_UNSPENDABLE: 50000000 - 250.00}, ) tx = self.nodes[0].signrawtransactionwithkey( hexstring=tx, privkeys=[self.nodes[0].get_deterministic_priv_key().key], )['hex'] ctx = FromHex(CTransaction(), tx) txid = int(ctx.rehash(), 16) self.log.info( f"Announce the transaction to all nodes from all {NUM_INBOUND} incoming peers, but never send it") msg = msg_inv([CInv(t=context.inv_type, h=txid)]) for p in self.peers: p.send_and_ping(msg) self.log.info("Put the tx in node 0's mempool") self.nodes[0].sendrawtransaction(tx) # node1 is an inbound peer for node0, so the tx relay is delayed by a # duration drawn from a Poisson distribution with a 5s average time. # In order to make sure the inv is sent we move the time 2 minutes # forward, which has the added side effect that the tx can be # unconditionally requested. with self.nodes[1].assert_debug_log([f"got inv: tx {uint256_hex(txid)} new peer=0"]): self.nodes[0].setmocktime( int(time.time()) + UNCONDITIONAL_RELAY_DELAY) # Since node 1 is connected outbound to an honest peer (node 0), it # should get the tx within a timeout.
# The timeout is the sum of # * the worst case until the tx is first requested from an inbound # peer, plus # * the first time it is re-requested from the outbound peer, plus # * 2 seconds to avoid races assert self.nodes[1].getpeerinfo()[0]['inbound'] is False max_delay = context.constants.inbound_peer_delay + \ context.constants.getdata_interval margin = 2 self.log.info( f"Tx should be received at node 1 after {max_delay + margin} seconds") self.nodes[1].setmocktime(int(time.time()) + max_delay) self.sync_mempools(timeout=margin) def test_in_flight_max(self, context): max_getdata_in_flight = context.constants.max_getdata_in_flight max_inbound_delay = context.constants.inbound_peer_delay + \ context.constants.overloaded_peer_delay self.log.info("Test that we don't load peers with more than {} getdata requests immediately".format( max_getdata_in_flight)) invids = [i for i in range(max_getdata_in_flight + 2)] p = self.nodes[0].p2ps[0] with p2p_lock: p.getdata_count = 0 mock_time = int(time.time() + 1) self.nodes[0].setmocktime(mock_time) for i in range(max_getdata_in_flight): p.send_message(msg_inv([CInv(t=context.inv_type, h=invids[i])])) p.sync_with_ping() mock_time += context.constants.inbound_peer_delay self.nodes[0].setmocktime(mock_time) p.wait_until(lambda: p.getdata_count >= max_getdata_in_flight) for i in range(max_getdata_in_flight, len(invids)): p.send_message(msg_inv([CInv(t=context.inv_type, h=invids[i])])) p.sync_with_ping() self.log.info( "No more than {} requests should be seen within {} seconds after announcement".format( max_getdata_in_flight, max_inbound_delay - 1)) self.nodes[0].setmocktime( mock_time + max_inbound_delay - 1) p.sync_with_ping() with p2p_lock: assert_equal(p.getdata_count, max_getdata_in_flight) self.log.info( "If we wait {} seconds after announcement, we should eventually get more requests".format( max_inbound_delay)) self.nodes[0].setmocktime( mock_time + max_inbound_delay) p.wait_until(lambda: p.getdata_count == len(invids)) def test_expiry_fallback(self, context): self.log.info( 'Check that expiry will select another peer for download') peer1 = self.nodes[0].add_p2p_connection(context.p2p_conn()) peer2 = self.nodes[0].add_p2p_connection(context.p2p_conn()) for p in [peer1, peer2]: p.send_message(msg_inv([CInv(t=context.inv_type, h=0xffaa)])) # One of the peers is asked for the data peer2.wait_until( lambda: sum( p.getdata_count for p in [ peer1, peer2]) == 1) with p2p_lock: peer_expiry, peer_fallback = ( peer1, peer2) if peer1.getdata_count == 1 else ( peer2, peer1) assert_equal(peer_fallback.getdata_count, 0) # Wait for request to peer_expiry to expire self.nodes[0].setmocktime( int(time.time()) + context.constants.getdata_interval + 1) peer_fallback.wait_until( lambda: peer_fallback.getdata_count >= 1) with p2p_lock: assert_equal(peer_fallback.getdata_count, 1) # reset mocktime self.restart_node(0) def test_disconnect_fallback(self, context): self.log.info( 'Check that disconnect will select another peer for download') peer1 = self.nodes[0].add_p2p_connection(context.p2p_conn()) peer2 = self.nodes[0].add_p2p_connection(context.p2p_conn()) for p in [peer1, peer2]: p.send_message(msg_inv([CInv(t=context.inv_type, h=0xffbb)])) # One of the peers is asked for the data peer2.wait_until( lambda: sum( p.getdata_count for p in [ peer1, peer2]) == 1) with p2p_lock: peer_disconnect, peer_fallback = ( peer1, peer2) if peer1.getdata_count == 1 else ( peer2, peer1) assert_equal(peer_fallback.getdata_count, 0) peer_disconnect.peer_disconnect() 
peer_disconnect.wait_for_disconnect() peer_fallback.wait_until( lambda: peer_fallback.getdata_count >= 1) with p2p_lock: assert_equal(peer_fallback.getdata_count, 1) def test_notfound_fallback(self, context): self.log.info( 'Check that notfounds will select another peer for download immediately') peer1 = self.nodes[0].add_p2p_connection(context.p2p_conn()) peer2 = self.nodes[0].add_p2p_connection(context.p2p_conn()) for p in [peer1, peer2]: p.send_message(msg_inv([CInv(t=context.inv_type, h=0xffdd)])) # One of the peers is asked for the data peer2.wait_until( lambda: sum( p.getdata_count for p in [ peer1, peer2]) == 1) with p2p_lock: peer_notfound, peer_fallback = ( peer1, peer2) if peer1.getdata_count == 1 else ( peer2, peer1) assert_equal(peer_fallback.getdata_count, 0) # Send notfound, so that fallback peer is selected peer_notfound.send_and_ping(msg_notfound( vec=[CInv(context.inv_type, 0xffdd)])) peer_fallback.wait_until( lambda: peer_fallback.getdata_count >= 1) with p2p_lock: assert_equal(peer_fallback.getdata_count, 1) def test_preferred_inv(self, context): self.log.info( 'Check that invs from preferred peers are downloaded immediately') self.restart_node( 0, extra_args=self.extra_args[0] + ['-whitelist=noban@127.0.0.1']) peer = self.nodes[0].add_p2p_connection(context.p2p_conn()) peer.send_message(msg_inv([CInv(t=context.inv_type, h=0xff00ff00)])) peer.wait_until(lambda: peer.getdata_count >= 1) with p2p_lock: assert_equal(peer.getdata_count, 1) def test_large_inv_batch(self, context): max_peer_announcements = context.constants.max_peer_announcements net_permissions = context.constants.bypass_request_limits_permission_flags self.log.info( f'Test how large inv batches are handled with {net_permissions} permission') self.restart_node( 0, extra_args=self.extra_args[0] + [f'-whitelist={net_permissions}@127.0.0.1']) peer = self.nodes[0].add_p2p_connection(context.p2p_conn()) peer.send_message(msg_inv([CInv(t=context.inv_type, h=invid) for invid in range(max_peer_announcements + 1)])) peer.wait_until(lambda: peer.getdata_count == max_peer_announcements + 1) self.log.info( f'Test how large inv batches are handled without {net_permissions} permission') self.restart_node(0) peer = self.nodes[0].add_p2p_connection(context.p2p_conn()) peer.send_message(msg_inv([CInv(t=context.inv_type, h=invid) for invid in range(max_peer_announcements + 1)])) peer.wait_until(lambda: peer.getdata_count == max_peer_announcements) peer.sync_with_ping() with p2p_lock: assert_equal(peer.getdata_count, max_peer_announcements) def test_spurious_notfound(self, context): self.log.info('Check that spurious notfound is ignored') self.nodes[0].p2ps[0].send_message( msg_notfound(vec=[CInv(context.inv_type, 1)])) @skip(TX_TEST_CONTEXT) def test_immature_download(self, context): node = self.nodes[0] # Build a proof with immature utxos privkey, immature = gen_proof(self, node) proofid_hex = uint256_hex(immature.proofid) self.restart_node(0, extra_args=self.extra_args[0] + [ "-avaproofstakeutxoconfirmations=3", f"-avaproof={immature.serialize().hex()}", f"-avamasterkey={bytes_to_wif(privkey.get_bytes())}", ]) # Add an inbound so the node proof can be registered and advertised node.add_p2p_connection(P2PInterface()) self.generate(node, 1, sync_fun=self.no_op) wait_for_proof(node, proofid_hex, expect_status="immature") peer = node.add_p2p_connection(context.p2p_conn()) peer.send_message( msg_inv([CInv(t=context.inv_type, h=immature.proofid)])) # Give enough time for the node to eventually request the proof. 
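# Even after a full getdata interval has elapsed, no request should be # issued: the proof is known to the node but still immature, so the peer's # getdata_count is expected to stay at 0 below.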
node.setmocktime(int(time.time()) + context.constants.getdata_interval + 1) peer.sync_with_ping() assert_equal(peer.getdata_count, 0) @skip(TX_TEST_CONTEXT) def test_request_invalid_once(self, context): node = self.nodes[0] privkey = ECKey() privkey.generate() # Build an invalid proof (no stake) no_stake_hex = node.buildavalancheproof( 42, 2000000000, bytes_to_wif(privkey.get_bytes()), [] ) no_stake = avalanche_proof_from_hex(no_stake_hex) assert_raises_rpc_error(-8, "The proof is invalid: no-stake", node.verifyavalancheproof, no_stake_hex) # Send the proof msg = msg_avaproof() msg.proof = no_stake node.p2ps[0].send_message(msg) # Check we get banned node.p2ps[0].wait_for_disconnect() # Now that the node knows the proof is invalid, it should not be # requested anymore node.p2ps[1].send_message( msg_inv([CInv(t=context.inv_type, h=no_stake.proofid)])) # Give enough time for the node to eventually request the proof node.setmocktime(int(time.time()) + context.constants.getdata_interval + 1) node.p2ps[1].sync_with_ping() assert all(p.getdata_count == 0 for p in node.p2ps[1:]) def run_test(self): for context in [TX_TEST_CONTEXT, PROOF_TEST_CONTEXT]: self.log.info( f"Starting tests using {context.inv_name} inventory type") # Run tests without mocktime that only need one peer-connection first, # to avoid restarting the nodes self.test_expiry_fallback(context) self.test_disconnect_fallback(context) self.test_notfound_fallback(context) self.test_preferred_inv(context) self.test_large_inv_batch(context) self.test_spurious_notfound(context) # Run each test against new bitcoind instances, as setting mocktimes has long-term effects on when # the next trickle relay event happens. for test in [self.test_in_flight_max, self.test_inv_tx, self.test_data_requests, self.test_immature_download, self.test_request_invalid_once]: self.stop_nodes() self.start_nodes() self.connect_nodes(1, 0) # Setup the p2p connections self.peers = [] for node in self.nodes: for _ in range(NUM_INBOUND): self.peers.append( node.add_p2p_connection( context.p2p_conn())) self.log.info( f"Nodes are setup with {NUM_INBOUND} incoming connections each") test(context) if __name__ == '__main__': InventoryDownloadTest().main() diff --git a/test/functional/p2p_invalid_locator.py b/test/functional/p2p_invalid_locator.py index fa4339cab..00f67a50d 100755 --- a/test/functional/p2p_invalid_locator.py +++ b/test/functional/p2p_invalid_locator.py @@ -1,53 +1,49 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test node responses to invalid locators. 
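A locator longer than MAX_LOCATOR_SZ is expected to get the peer disconnected, while a locator of exactly MAX_LOCATOR_SZ hashes still receives a normal headers or block response.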
""" -from test_framework.messages import ( - MAX_LOCATOR_SZ, - msg_getblocks, - msg_getheaders, -) +from test_framework.messages import MAX_LOCATOR_SZ, msg_getblocks, msg_getheaders from test_framework.p2p import P2PInterface from test_framework.test_framework import BitcoinTestFramework class InvalidLocatorTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 def run_test(self): # Convenience reference to the node node = self.nodes[0] # Get node out of IBD self.generatetoaddress( node, 1, node.get_deterministic_priv_key().address) self.log.info('Test max locator size') block_count = node.getblockcount() for msg in [msg_getheaders(), msg_getblocks()]: self.log.info('Wait for disconnect when sending {} hashes in locator'.format( MAX_LOCATOR_SZ + 1)) exceed_max_peer = node.add_p2p_connection(P2PInterface()) msg.locator.vHave = [int(node.getblockhash( i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ + 1), -1)] exceed_max_peer.send_message(msg) exceed_max_peer.wait_for_disconnect() self.log.info( f'Wait for response when sending {MAX_LOCATOR_SZ} hashes in locator') within_max_peer = node.add_p2p_connection(P2PInterface()) msg.locator.vHave = [int(node.getblockhash( i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ), -1)] within_max_peer.send_message(msg) if isinstance(msg, msg_getheaders): within_max_peer.wait_for_header(node.getbestblockhash()) else: within_max_peer.wait_for_block( int(node.getbestblockhash(), 16)) if __name__ == '__main__': InvalidLocatorTest().main() diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py index 42e4ffbae..a5bf60d18 100755 --- a/test/functional/p2p_invalid_tx.py +++ b/test/functional/p2p_invalid_tx.py @@ -1,213 +1,207 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test node responses to invalid transactions. In this test we connect to one node over p2p, and test tx requests. """ from data import invalid_txs from test_framework.blocktools import create_block, create_coinbase -from test_framework.messages import ( - COIN, - COutPoint, - CTransaction, - CTxIn, - CTxOut, -) +from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut from test_framework.p2p import P2PDataStore from test_framework.script import OP_TRUE, CScript from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_tx from test_framework.util import assert_equal class InvalidTxRequestTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [ ["-acceptnonstdtxn=1", ] ] self.setup_clean_chain = True def bootstrap_p2p(self, *, num_connections=1): """Add a P2P connection to the node. Helper to connect and wait for version handshake.""" for _ in range(num_connections): self.nodes[0].add_p2p_connection(P2PDataStore()) def reconnect_p2p(self, **kwargs): """Tear down and bootstrap the P2P connection to the node. The node gets disconnected several times in this test. 
This helper method reconnects the p2p and restarts the network thread.""" self.nodes[0].disconnect_p2ps() self.bootstrap_p2p(**kwargs) def run_test(self): node = self.nodes[0] # convenience reference to the node self.bootstrap_p2p() # Add one p2p connection to the node best_block = self.nodes[0].getbestblockhash() tip = int(best_block, 16) best_block_time = self.nodes[0].getblock(best_block)['time'] block_time = best_block_time + 1 self.log.info("Create a new block with an anyone-can-spend coinbase.") height = 1 block = create_block(tip, create_coinbase(height), block_time) block.solve() # Save the coinbase for later block1 = block tip = block.sha256 node.p2ps[0].send_blocks_and_test([block], node, success=True) self.log.info("Mature the block.") self.generatetoaddress(self.nodes[0], 100, self.nodes[0].get_deterministic_priv_key().address) # Iterate through a list of known invalid transaction types, ensuring each is # rejected. Some are consensus invalid and some just violate policy. for BadTxTemplate in invalid_txs.iter_all_templates(): self.log.info( "Testing invalid transaction: %s", BadTxTemplate.__name__) template = BadTxTemplate(spend_block=block1) tx = template.get_tx() node.p2ps[0].send_txs_and_test( [tx], node, success=False, expect_disconnect=template.expect_disconnect, reject_reason=template.reject_reason, ) if template.expect_disconnect: self.log.info("Reconnecting to peer") self.reconnect_p2p() # Make two p2p connections to provide the node with orphans # * p2ps[0] will send valid orphan txs (one with low fee) # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that) self.reconnect_p2p(num_connections=2) self.log.info('Test orphan transaction handling ... ') # Create a root transaction that we withhold until all dependent transactions # are sent out and in the orphan cache SCRIPT_PUB_KEY_OP_TRUE = CScript([OP_TRUE]) tx_withhold = CTransaction() tx_withhold.vin.append( CTxIn(outpoint=COutPoint(block1.vtx[0].sha256, 0))) tx_withhold.vout.append( CTxOut(nValue=50 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) pad_tx(tx_withhold) tx_withhold.calc_sha256() # Our first orphan tx with some outputs to create further orphan txs tx_orphan_1 = CTransaction() tx_orphan_1.vin.append( CTxIn(outpoint=COutPoint(tx_withhold.sha256, 0))) tx_orphan_1.vout = [ CTxOut( nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)] * 3 pad_tx(tx_orphan_1) tx_orphan_1.calc_sha256() # A valid transaction with low fee tx_orphan_2_no_fee = CTransaction() tx_orphan_2_no_fee.vin.append( CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 0))) tx_orphan_2_no_fee.vout.append( CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) pad_tx(tx_orphan_2_no_fee) # A valid transaction with sufficient fee tx_orphan_2_valid = CTransaction() tx_orphan_2_valid.vin.append( CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 1))) tx_orphan_2_valid.vout.append( CTxOut(nValue=10 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) tx_orphan_2_valid.calc_sha256() pad_tx(tx_orphan_2_valid) # An invalid transaction with negative fee tx_orphan_2_invalid = CTransaction() tx_orphan_2_invalid.vin.append( CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2))) tx_orphan_2_invalid.vout.append( CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) pad_tx(tx_orphan_2_invalid) tx_orphan_2_invalid.calc_sha256() self.log.info('Send the orphans ... 
') # Send valid orphan txs from p2ps[0] node.p2ps[0].send_txs_and_test( [tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node, success=False) # Send invalid tx from p2ps[1] node.p2ps[1].send_txs_and_test( [tx_orphan_2_invalid], node, success=False) # Mempool should be empty assert_equal(0, node.getmempoolinfo()['size']) # p2ps[1] is still connected assert_equal(2, len(node.getpeerinfo())) self.log.info('Send the withhold tx ... ') with node.assert_debug_log(expected_msgs=["bad-txns-in-belowout"]): node.p2ps[0].send_txs_and_test([tx_withhold], node, success=True) # Transactions that should end up in the mempool expected_mempool = { t.hash for t in [ tx_withhold, # The transaction that is the root for all orphans tx_orphan_1, # The orphan transaction that splits the coins # The valid transaction (with sufficient fee) tx_orphan_2_valid, ] } # Transactions that do not end up in the mempool # tx_orphan_2_no_fee, because it has too low fee (p2ps[0] is not disconnected for relaying that tx) # tx_orphan_2_invalid, because it has negative fee (p2ps[1] is # disconnected for relaying that tx) # p2ps[1] is no longer connected self.wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12) assert_equal(expected_mempool, set(node.getrawmempool())) self.log.info('Test orphan pool overflow') orphan_tx_pool = [CTransaction() for _ in range(101)] for i in range(len(orphan_tx_pool)): orphan_tx_pool[i].vin.append(CTxIn(outpoint=COutPoint(i, 333))) orphan_tx_pool[i].vout.append( CTxOut( nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) pad_tx(orphan_tx_pool[i]) with node.assert_debug_log(['orphanage overflow, removed 1 tx']): node.p2ps[0].send_txs_and_test(orphan_tx_pool, node, success=False) rejected_parent = CTransaction() rejected_parent.vin.append( CTxIn( outpoint=COutPoint( tx_orphan_2_invalid.sha256, 0))) rejected_parent.vout.append( CTxOut( nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) pad_tx(rejected_parent) rejected_parent.rehash() with node.assert_debug_log([f'not keeping orphan with rejected parents {rejected_parent.hash}']): node.p2ps[0].send_txs_and_test( [rejected_parent], node, success=False) if __name__ == '__main__': InvalidTxRequestTest().main() diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py index 001a68105..f823f48db 100755 --- a/test/functional/p2p_permissions.py +++ b/test/functional/p2p_permissions.py @@ -1,211 +1,208 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test p2p permission message. 
Test that permissions are correctly calculated and applied """ -from test_framework.address import ( - ADDRESS_ECREG_P2SH_OP_TRUE, - SCRIPTSIG_OP_TRUE, -) +from test_framework.address import ADDRESS_ECREG_P2SH_OP_TRUE, SCRIPTSIG_OP_TRUE from test_framework.messages import CTransaction, FromHex from test_framework.p2p import P2PDataStore from test_framework.test_framework import BitcoinTestFramework from test_framework.test_node import ErrorMatch from test_framework.txtools import pad_tx from test_framework.util import assert_equal, p2p_port class P2PPermissionsTests(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.setup_clean_chain = True def run_test(self): self.check_tx_relay() self.checkpermission( # default permissions (no specific permissions) ["-whitelist=127.0.0.1"], # Make sure the default values in the command line documentation # match the ones here ["relay", "noban", "mempool", "download"], ) self.checkpermission( # check without deprecatedrpc=whitelisted ["-whitelist=127.0.0.1"], # Make sure the default values in the command line documentation # match the ones here ["relay", "noban", "mempool", "download"], ) self.checkpermission( # no permission (even with forcerelay) ["-whitelist=@127.0.0.1", "-whitelistforcerelay=1"], [], ) self.checkpermission( # relay permission removed (no specific permissions) ["-whitelist=127.0.0.1", "-whitelistrelay=0"], ["noban", "mempool", "download"], ) self.checkpermission( # forcerelay and relay permission added # Legacy parameter interaction which set whitelistrelay to true # if whitelistforcerelay is true ["-whitelist=127.0.0.1", "-whitelistforcerelay"], ["forcerelay", "relay", "noban", "mempool", "download"], ) # Let's make sure permissions are merged correctly # For this, we need to use whitebind instead of bind # by modifying the configuration file. 
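The merging behaviour that the next group of checks pins down amounts to a set union over all granted flags, with forcerelay implying relay (per the 'forcerelay should activate relay' check below). A rough illustrative model, assuming nothing about the node's actual implementation:

def effective_permissions(*grants):
    """Union of comma-separated permission grants (illustrative only)."""
    perms = set()
    for grant in grants:
        perms.update(flag for flag in grant.split(",") if flag)
    if "forcerelay" in perms:
        perms.add("relay")  # the parameter interaction checked below
    return perms

# A whitebind grant merged with a -whitelist grant:
assert effective_permissions("bloomfilter,forcerelay", "noban") == {
    "bloomfilter", "forcerelay", "relay", "noban"}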
ip_port = f"127.0.0.1:{p2p_port(1)}" self.replaceinconfig( 1, "bind=127.0.0.1", f"whitebind=bloomfilter,forcerelay@{ip_port}") self.checkpermission( ["-whitelist=noban@127.0.0.1"], # Check parameter interaction forcerelay should activate relay ["noban", "bloomfilter", "forcerelay", "relay", "download"], ) self.replaceinconfig( 1, f"whitebind=bloomfilter,forcerelay@{ip_port}", "bind=127.0.0.1") self.checkpermission( # legacy whitelistrelay should be ignored ["-whitelist=noban,mempool@127.0.0.1", "-whitelistrelay"], ["noban", "mempool", "download"], ) self.checkpermission( # check without deprecatedrpc=whitelisted ["-whitelist=noban,mempool@127.0.0.1", "-whitelistrelay"], ["noban", "mempool", "download"], ) self.checkpermission( # legacy whitelistforcerelay should be ignored ["-whitelist=noban,mempool@127.0.0.1", "-whitelistforcerelay"], ["noban", "mempool", "download"], ) self.checkpermission( # missing mempool permission to be considered legacy whitelisted ["-whitelist=noban@127.0.0.1"], ["noban", "download"], ) self.checkpermission( # all permission added ["-whitelist=all@127.0.0.1"], ["forcerelay", "noban", "mempool", "bloomfilter", "relay", "download", "bypass_proof_request_limits", "addr"], ) self.checkpermission( # bypass_proof_request_limits permission ["-whitelist=bypass_proof_request_limits@127.0.0.1"], ["bypass_proof_request_limits"], ) self.stop_node(1) self.nodes[1].assert_start_raises_init_error( ["-whitelist=oopsie@127.0.0.1"], "Invalid P2P permission", match=ErrorMatch.PARTIAL_REGEX) self.nodes[1].assert_start_raises_init_error( ["-whitelist=noban@127.0.0.1:230"], "Invalid netmask specified in", match=ErrorMatch.PARTIAL_REGEX) self.nodes[1].assert_start_raises_init_error( ["-whitebind=noban@127.0.0.1/10"], "Cannot resolve -whitebind address", match=ErrorMatch.PARTIAL_REGEX) def check_tx_relay(self): block_op_true = self.nodes[0].getblock( self.generatetoaddress(self.nodes[0], 100, ADDRESS_ECREG_P2SH_OP_TRUE)[0]) self.log.debug( "Create a connection from a forcerelay peer that rebroadcasts raw txs") # A python mininode is needed to send the raw transaction directly. # If a full node was used, it could only rebroadcast via the inv-getdata # mechanism. However, even for forcerelay connections, a full node would # currently not request a txid that is already in the mempool. self.restart_node(1, extra_args=["-whitelist=forcerelay@127.0.0.1"]) p2p_rebroadcast_wallet = self.nodes[1].add_p2p_connection( P2PDataStore()) self.log.debug("Send a tx from the wallet initially") tx = FromHex(CTransaction(), self.nodes[0].createrawtransaction( inputs=[{'txid': block_op_true['tx'][0], 'vout': 0}], outputs=[{ADDRESS_ECREG_P2SH_OP_TRUE: 50}])) # push the one byte script to the stack tx.vin[0].scriptSig = SCRIPTSIG_OP_TRUE pad_tx(tx) txid = tx.rehash() self.log.debug("Wait until tx is in node[1]'s mempool") p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1]) self.log.debug( "Check that node[1] will send the tx to node[0] even though it" " is already in the mempool") self.connect_nodes(1, 0) with self.nodes[1].assert_debug_log( [f"Force relaying tx {txid} from peer=0"]): p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1]) self.wait_until(lambda: txid in self.nodes[0].getrawmempool()) self.log.debug( "Check that node[1] will not send an invalid tx to node[0]") tx.vout[0].nValue += 1 txid = tx.rehash() # Send the transaction twice. The first time, it'll be rejected by ATMP # because it conflicts with a mempool transaction. The second time, # it'll be in the recentRejects filter. 
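The comment above describes a two-step rejection. A small sketch of that flow; accept_to_mempool and the plain set standing in for the validation path and the recentRejects filter are hypothetical stand-ins, not node code:

recent_rejects = set()

def accept_to_mempool(txid, conflicts_with_mempool):
    """Illustrative: validate once, then filter resubmissions."""
    if txid in recent_rejects:
        # Second submission: dropped before validation even runs.
        return "Not relaying non-mempool transaction"
    if conflicts_with_mempool:
        recent_rejects.add(txid)
        return "txn-mempool-conflict"
    return "accepted"

assert accept_to_mempool("aa" * 32, True) == "txn-mempool-conflict"
assert accept_to_mempool("aa" * 32, True) == "Not relaying non-mempool transaction"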
p2p_rebroadcast_wallet.send_txs_and_test( [tx], self.nodes[1], success=False, reject_reason=f'{txid} from peer=0 was not accepted: ' f'txn-mempool-conflict', ) p2p_rebroadcast_wallet.send_txs_and_test( [tx], self.nodes[1], success=False, reject_reason='Not relaying non-mempool transaction ' '{} from forcerelay peer=0'.format(txid), ) def checkpermission(self, args, expectedPermissions): self.restart_node(1, args) self.connect_nodes(0, 1) peerinfo = self.nodes[1].getpeerinfo()[0] assert_equal(len(expectedPermissions), len(peerinfo['permissions'])) for p in expectedPermissions: if p not in peerinfo['permissions']: raise AssertionError( f"Expected permission {p!r} is not granted.") def replaceinconfig(self, nodeid, old, new): with open(self.nodes[nodeid].bitcoinconf, encoding="utf8") as f: newText = f.read().replace(old, new) with open(self.nodes[nodeid].bitcoinconf, 'w', encoding="utf8") as f: f.write(newText) if __name__ == '__main__': P2PPermissionsTests().main() diff --git a/test/functional/rpc_packages.py b/test/functional/rpc_packages.py index b8a9de7b0..c81e10bb1 100755 --- a/test/functional/rpc_packages.py +++ b/test/functional/rpc_packages.py @@ -1,329 +1,326 @@ #!/usr/bin/env python3 # Copyright (c) 2021 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """RPCs that handle raw transaction packages.""" import random from decimal import Decimal -from test_framework.address import ( - ADDRESS_ECREG_P2SH_OP_TRUE, - SCRIPTSIG_OP_TRUE, -) +from test_framework.address import ADDRESS_ECREG_P2SH_OP_TRUE, SCRIPTSIG_OP_TRUE from test_framework.messages import CTransaction, FromHex, ToHex from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_tx from test_framework.util import assert_equal from test_framework.wallet import ( create_child_with_parents, create_raw_chain, make_chain, ) class RPCPackagesTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True def assert_testres_equal(self, package_hex, testres_expected): """Shuffle package_hex and assert that the testmempoolaccept result matches testres_expected. This should only be used to test packages where the order does not matter. The ordering of transactions in package_hex and testres_expected must match. """ shuffled_indices = list(range(len(package_hex))) random.shuffle(shuffled_indices) shuffled_package = [package_hex[i] for i in shuffled_indices] shuffled_testres = [testres_expected[i] for i in shuffled_indices] assert_equal( shuffled_testres, self.nodes[0].testmempoolaccept(shuffled_package)) def run_test(self): self.log.info("Generate blocks to create UTXOs") node = self.nodes[0] self.privkeys = [node.get_deterministic_priv_key().key] self.address = node.get_deterministic_priv_key().address self.coins = [] # The last 100 coinbase transactions are immature for b in self.generatetoaddress(node, 300, self.address)[:100]: coinbase = node.getblock(blockhash=b, verbosity=2)["tx"][0] self.coins.append({ "txid": coinbase["txid"], "amount": coinbase["vout"][0]["value"], "scriptPubKey": coinbase["vout"][0]["scriptPubKey"], }) # Create some transactions that can be reused throughout the test. # Never submit these to mempool. 
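One note on the assert_testres_equal helper defined above, before the reusable transactions are built: applying the same index permutation to both lists keeps entry i of the shuffled package aligned with entry i of the shuffled expectations, which is what makes the direct comparison valid for order-independent packages. A self-contained illustration with placeholder data:

import random

package_hex = ["hexA", "hexB", "hexC"]  # placeholder raw tx hex strings
testres_expected = [{"txid": "A"}, {"txid": "B"}, {"txid": "C"}]

indices = list(range(len(package_hex)))
random.shuffle(indices)
shuffled_package = [package_hex[i] for i in indices]
shuffled_testres = [testres_expected[i] for i in indices]

# The same permutation was applied to both lists, so they stay aligned.
for tx_hex, res in zip(shuffled_package, shuffled_testres):
    assert tx_hex[-1] == res["txid"]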
self.independent_txns_hex = [] self.independent_txns_testres = [] for _ in range(3): coin = self.coins.pop() rawtx = node.createrawtransaction([{"txid": coin["txid"], "vout": 0}], {self.address: coin["amount"] - Decimal("100.00")}) signedtx = node.signrawtransactionwithkey( hexstring=rawtx, privkeys=self.privkeys) assert signedtx["complete"] testres = node.testmempoolaccept([signedtx["hex"]]) assert testres[0]["allowed"] self.independent_txns_hex.append(signedtx["hex"]) # testmempoolaccept returns a list of length one, avoid creating a # 2D list self.independent_txns_testres.append(testres[0]) self.independent_txns_testres_blank = [ {"txid": res["txid"], } for res in self.independent_txns_testres] self.test_independent() self.test_chain() self.test_multiple_children() self.test_multiple_parents() self.test_conflicting() def test_independent(self): self.log.info("Test multiple independent transactions in a package") node = self.nodes[0] # For independent transactions, order doesn't matter. self.assert_testres_equal( self.independent_txns_hex, self.independent_txns_testres) self.log.info( "Test an otherwise valid package with an extra garbage tx appended") garbage_tx = node.createrawtransaction( [{"txid": "00" * 32, "vout": 5}], {self.address: 1_000_000}) tx = FromHex(CTransaction(), garbage_tx) pad_tx(tx) garbage_tx = ToHex(tx) # This particular test differs from Core, because we do not test the # missing inputs separately from the signature verification for a given # transaction. Both are done in validation as part of PreChecks. # See https://reviews.bitcoinabc.org/D8203 testres_bad = node.testmempoolaccept( self.independent_txns_hex + [garbage_tx]) assert_equal( testres_bad, self.independent_txns_testres + [ {"txid": tx.get_id(), "allowed": False, "reject-reason": "missing-inputs"}]) self.log.info( "Check testmempoolaccept tells us when some transactions completed validation successfully") coin = self.coins.pop() tx_bad_sig_hex = node.createrawtransaction( [{"txid": coin["txid"], "vout": 0}], {self.address: coin["amount"] - Decimal("100.00")}) tx_bad_sig = FromHex(CTransaction(), tx_bad_sig_hex) pad_tx(tx_bad_sig) tx_bad_sig_hex = ToHex(tx_bad_sig) testres_bad_sig = node.testmempoolaccept( self.independent_txns_hex + [tx_bad_sig_hex]) # By the time the signature for the last transaction is checked, all the # other transactions have been fully validated, which is why the node # returns full validation results for all transactions here but empty # results in other cases. 
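The shape difference that last comment refers to, written out with illustrative values: a transaction whose validation was never reached gets a bare entry, while a fully validated one also carries a verdict.

# Entry for a tx whose validation was never reached (this is what
# independent_txns_testres_blank above models):
blank_entry = {"txid": "ab" * 32}

# Entry for a tx that was fully validated and rejected:
full_entry = {
    "txid": "ab" * 32,
    "allowed": False,
    "reject-reason": "missing-inputs",
}

assert set(blank_entry) < set(full_entry)  # strictly fewer keys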
assert_equal(testres_bad_sig, self.independent_txns_testres + [{ "txid": tx_bad_sig.get_id(), "allowed": False, "reject-reason": "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)" }]) self.log.info( "Check testmempoolaccept reports txns in packages that exceed max feerate") coin = self.coins.pop() tx_high_fee_raw = node.createrawtransaction( [{"txid": coin["txid"], "vout": 0}], {self.address: coin["amount"] - Decimal("999_000")}) tx_high_fee_signed = node.signrawtransactionwithkey( hexstring=tx_high_fee_raw, privkeys=self.privkeys) assert tx_high_fee_signed["complete"] tx_high_fee = FromHex(CTransaction(), tx_high_fee_signed["hex"]) testres_high_fee = node.testmempoolaccept([tx_high_fee_signed["hex"]]) assert_equal( testres_high_fee, [{"txid": tx_high_fee.get_id(), "allowed": False, "reject-reason": "max-fee-exceeded"}]) package_high_fee = [tx_high_fee_signed["hex"]] + \ self.independent_txns_hex testres_package_high_fee = node.testmempoolaccept(package_high_fee) assert_equal(testres_package_high_fee, testres_high_fee + self.independent_txns_testres_blank) def test_chain(self): node = self.nodes[0] first_coin = self.coins.pop() (chain_hex, chain_txns) = create_raw_chain( node, first_coin, self.address, self.privkeys) self.log.info( "Check that testmempoolaccept requires packages to be sorted by dependency") assert_equal( node.testmempoolaccept(rawtxs=chain_hex[::-1]), [{"txid": tx.get_id(), "package-error": "package-not-sorted"} for tx in chain_txns[::-1]]) self.log.info("Testmempoolaccept a chain of 50 transactions") testres_multiple = node.testmempoolaccept(rawtxs=chain_hex) testres_single = [] # Test accept and then submit each one individually, which should be # identical to package test accept for rawtx in chain_hex: testres = node.testmempoolaccept([rawtx]) testres_single.append(testres[0]) # Submit the transaction now so its child should have no problem # validating node.sendrawtransaction(rawtx) assert_equal(testres_single, testres_multiple) # Clean up by clearing the mempool self.generate(node, 1) def test_multiple_children(self): node = self.nodes[0] self.log.info( "Testmempoolaccept a package in which a transaction has two children within the package") first_coin = self.coins.pop() # Deduct reasonable fee and make 2 outputs value = (first_coin["amount"] - Decimal("200.00")) / 2 inputs = [{"txid": first_coin["txid"], "vout": 0}] outputs = [{self.address: value}, {ADDRESS_ECREG_P2SH_OP_TRUE: value}] rawtx = node.createrawtransaction(inputs, outputs) parent_signed = node.signrawtransactionwithkey( hexstring=rawtx, privkeys=self.privkeys) assert parent_signed["complete"] parent_tx = FromHex(CTransaction(), parent_signed["hex"]) parent_txid = parent_tx.get_id() assert node.testmempoolaccept([parent_signed["hex"]])[0]["allowed"] parent_locking_script_a = parent_tx.vout[0].scriptPubKey.hex() child_value = value - Decimal("100.00") # Child A (_, tx_child_a_hex, _, _) = make_chain( node, self.address, self.privkeys, parent_txid, value, 0, parent_locking_script_a) assert not node.testmempoolaccept([tx_child_a_hex])[0]["allowed"] # Child B rawtx_b = node.createrawtransaction( [{"txid": parent_txid, "vout": 1}], {self.address: child_value}) tx_child_b = FromHex(CTransaction(), rawtx_b) tx_child_b.vin[0].scriptSig = SCRIPTSIG_OP_TRUE pad_tx(tx_child_b) tx_child_b_hex = ToHex(tx_child_b) assert not node.testmempoolaccept([tx_child_b_hex])[0]["allowed"] self.log.info( "Testmempoolaccept with entire package, should work with children in either order") 
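The package-not-sorted error exercised in test_chain above and the "children in either order" claim checked next both come down to one ordering rule: every in-package parent must precede the transaction that spends it, while siblings may appear in any order. A hypothetical checker for that rule, simplified so that vin is just a list of parent txids:

def parents_before_children(package):
    """True iff every in-package parent appears before its spender."""
    all_ids = {tx["txid"] for tx in package}
    seen = set()
    for tx in package:
        for parent in tx["vin"]:
            if parent in all_ids and parent not in seen:
                return False
        seen.add(tx["txid"])
    return True

parent = {"txid": "p", "vin": []}
child_a = {"txid": "a", "vin": ["p"]}
child_b = {"txid": "b", "vin": ["p"]}

assert parents_before_children([parent, child_a, child_b])
assert parents_before_children([parent, child_b, child_a])  # either order
assert not parents_before_children([child_a, parent, child_b])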
testres_multiple_ab = node.testmempoolaccept( rawtxs=[parent_signed["hex"], tx_child_a_hex, tx_child_b_hex]) testres_multiple_ba = node.testmempoolaccept( rawtxs=[parent_signed["hex"], tx_child_b_hex, tx_child_a_hex]) assert all([testres["allowed"] for testres in testres_multiple_ab + testres_multiple_ba]) testres_single = [] # Test accept and then submit each one individually, which should be # identical to package testaccept for rawtx in [parent_signed["hex"], tx_child_a_hex, tx_child_b_hex]: testres = node.testmempoolaccept([rawtx]) testres_single.append(testres[0]) # Submit the transaction now so its child should have no problem # validating node.sendrawtransaction(rawtx) assert_equal(testres_single, testres_multiple_ab) def test_multiple_parents(self): node = self.nodes[0] self.log.info( "Testmempoolaccept a package in which a transaction has multiple parents within the package") for num_parents in [2, 10, 49]: # Test a package with num_parents parents and 1 child transaction. package_hex = [] parents_tx = [] values = [] parent_locking_scripts = [] for _ in range(num_parents): parent_coin = self.coins.pop() value = parent_coin["amount"] (tx, txhex, value, parent_locking_script) = make_chain( node, self.address, self.privkeys, parent_coin["txid"], value) package_hex.append(txhex) parents_tx.append(tx) values.append(value) parent_locking_scripts.append(parent_locking_script) child_hex = create_child_with_parents( node, self.address, self.privkeys, parents_tx, values, parent_locking_scripts) # Package accept should work with the parents in any order # (as long as parents come before child) for _ in range(10): random.shuffle(package_hex) testres_multiple = node.testmempoolaccept( rawtxs=package_hex + [child_hex]) assert all([testres["allowed"] for testres in testres_multiple]) testres_single = [] # Test accept and then submit each one individually, which should be # identical to package testaccept for rawtx in package_hex + [child_hex]: testres_single.append(node.testmempoolaccept([rawtx])[0]) # Submit the transaction now so its child should have no problem # validating node.sendrawtransaction(rawtx) assert_equal(testres_single, testres_multiple) def test_conflicting(self): node = self.nodes[0] prevtx = self.coins.pop() inputs = [{"txid": prevtx["txid"], "vout": 0}] output1 = { node.get_deterministic_priv_key().address: 50_000_000 - 1250} output2 = {ADDRESS_ECREG_P2SH_OP_TRUE: 50_000_000 - 1250} # tx1 and tx2 share the same inputs rawtx1 = node.createrawtransaction(inputs, output1) rawtx2 = node.createrawtransaction(inputs, output2) signedtx1 = node.signrawtransactionwithkey( hexstring=rawtx1, privkeys=self.privkeys) signedtx2 = node.signrawtransactionwithkey( hexstring=rawtx2, privkeys=self.privkeys) tx1 = FromHex(CTransaction(), signedtx1["hex"]) tx2 = FromHex(CTransaction(), signedtx2["hex"]) assert signedtx1["complete"] assert signedtx2["complete"] # Ensure tx1 and tx2 are valid by themselves assert node.testmempoolaccept([signedtx1["hex"]])[0]["allowed"] assert node.testmempoolaccept([signedtx2["hex"]])[0]["allowed"] self.log.info("Test duplicate transactions in the same package") testres = node.testmempoolaccept([signedtx1["hex"], signedtx1["hex"]]) assert_equal( testres, [ {"txid": tx1.get_id(), "package-error": "conflict-in-package"}, {"txid": tx1.get_id(), "package-error": "conflict-in-package"} ]) self.log.info("Test conflicting transactions in the same package") testres = node.testmempoolaccept([signedtx1["hex"], signedtx2["hex"]]) assert_equal( testres, [ {"txid": 
tx1.get_id(), "package-error": "conflict-in-package"}, {"txid": tx2.get_id(), "package-error": "conflict-in-package"} ]) if __name__ == "__main__": RPCPackagesTest().main() diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py index 5e21780a0..8d15a2d15 100755 --- a/test/functional/rpc_rawtransaction.py +++ b/test/functional/rpc_rawtransaction.py @@ -1,730 +1,724 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the rawtransaction RPCs. Test the following RPCs: - createrawtransaction - signrawtransactionwithwallet - sendrawtransaction - decoderawtransaction - getrawtransaction """ from collections import OrderedDict from decimal import Decimal from io import BytesIO -from test_framework.messages import ( - COutPoint, - CTransaction, - CTxIn, - CTxOut, - ToHex, -) +from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut, ToHex from test_framework.script import CScript from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_raw_tx from test_framework.util import ( assert_equal, assert_greater_than, assert_raises_rpc_error, find_vout_for_address, ) class multidict(dict): """Dictionary that allows duplicate keys. Constructed with a list of (key, value) tuples. When dumped by the json module, will output invalid json with repeated keys, eg: >>> json.dumps(multidict([(1,2),(1,2)])) '{"1": 2, "1": 2}' Used to test calls to rpc methods with repeated keys in the json object.""" def __init__(self, x): dict.__init__(self, x) self.x = x def items(self): return self.x # Create one-input, one-output, no-fee transaction: class RawTransactionsTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 3 self.extra_args = [["-txindex"], ["-txindex"], ["-txindex"]] # whitelist all peers to speed up tx relay / mempool sync for args in self.extra_args: args.append("-whitelist=noban@127.0.0.1") self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def setup_network(self): super().setup_network() self.connect_nodes(0, 2) def run_test(self): self.log.info( 'prepare some coins for multiple *rawtransaction commands') self.generate(self.nodes[2], 1) self.generate(self.nodes[0], 101) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1500000) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1000000) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5000000) self.sync_all() self.generate(self.nodes[0], 5) self.log.info( 'Test getrawtransaction on genesis block coinbase returns an error') block = self.nodes[0].getblock(self.nodes[0].getblockhash(0)) assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot']) self.log.info( 'Check parameter types and required parameters of createrawtransaction') # Test `createrawtransaction` required parameters assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction) assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, []) # Test `createrawtransaction` invalid extra parameters assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, 'foo') # Test `createrawtransaction` invalid `inputs` txid = 
'1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000' assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {}) assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {}) assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].createrawtransaction, [{}], {}) assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {}) assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", self.nodes[0].createrawtransaction, [{'txid': 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}], {}) assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {}) assert_raises_rpc_error(-8, "Invalid parameter, vout must be a number", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {}) assert_raises_rpc_error(-8, "Invalid parameter, vout cannot be negative", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {}) assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {}) # Test `createrawtransaction` invalid `outputs` address = self.nodes[0].getnewaddress() address2 = self.nodes[0].getnewaddress() assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo') # Should not throw for backwards compatibility self.nodes[0].createrawtransaction(inputs=[], outputs={}) self.nodes[0].createrawtransaction(inputs=[], outputs=[]) assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'}) assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0}) assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'}) assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1}) assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: {}".format( address), self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)])) assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: {}".format( address), self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}]) assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}]) assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")])) assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}]) assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']]) # Test `createrawtransaction` invalid `locktime` assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo') assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1) assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 
4294967296) self.log.info( 'Check that createrawtransaction accepts an array and object as outputs') tx = CTransaction() # One output tx.deserialize(BytesIO(bytes.fromhex(self.nodes[2].createrawtransaction( inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99})))) assert_equal(len(tx.vout), 1) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction( inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]), ) # Two outputs tx.deserialize(BytesIO(bytes.fromhex(self.nodes[2].createrawtransaction(inputs=[ {'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)]))))) assert_equal(len(tx.vout), 2) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[ {address: 99}, {address2: 99}]), ) # Multiple mixed outputs tx.deserialize(BytesIO(bytes.fromhex(self.nodes[2].createrawtransaction(inputs=[ {'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')]))))) assert_equal(len(tx.vout), 3) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[ {address: 99}, {address2: 99}, {'data': '99'}]), ) for type in ["legacy"]: addr = self.nodes[0].getnewaddress("", type) addrinfo = self.nodes[0].getaddressinfo(addr) pubkey = addrinfo["scriptPubKey"] self.log.info( f'sendrawtransaction with missing prevtx info ({type})') # Test `signrawtransactionwithwallet` invalid `prevtxs` inputs = [{'txid': txid, 'vout': 3, 'sequence': 1000}] outputs = {self.nodes[0].getnewaddress(): 1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1) succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx]) assert succ["complete"] assert_raises_rpc_error(-8, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [ { "txid": txid, "scriptPubKey": pubkey, "vout": 3, } ]) assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [ { "txid": txid, "scriptPubKey": pubkey, "amount": 1, } ]) assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [ { "scriptPubKey": pubkey, "vout": 3, "amount": 1, } ]) assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [ { "txid": txid, "vout": 3, "amount": 1 } ]) ######################################### # sendrawtransaction with missing input # ######################################### self.log.info('sendrawtransaction with missing input') # won't exist inputs = [ {'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout': 1}] outputs = {self.nodes[0].getnewaddress(): 4998000} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) rawtx = pad_raw_tx(rawtx) rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx) # This will raise an exception since there are missing inputs assert_raises_rpc_error(-25, "bad-txns-inputs-missingorspent", self.nodes[2].sendrawtransaction, rawtx['hex']) ##################################### # getrawtransaction with block hash # ##################################### # make a tx by sending then generate 2 blocks; block1 has the tx in it tx = self.nodes[2].sendtoaddress( self.nodes[1].getnewaddress(), 1000000) block1, block2 = self.generate(self.nodes[2], 2) self.sync_all() # We should be able to get the raw transaction by providing the correct # block gottx = self.nodes[0].getrawtransaction(tx, True, block1) 
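The assertions that follow pin down when verbose getrawtransaction reports in_active_chain. The contract can be stated as a small predicate (a hypothetical helper summarising the checks below, not framework code):

def in_active_chain_ok(gottx, blockhash_given, block_is_active):
    """Expected 'in_active_chain' behaviour of verbose getrawtransaction."""
    if not blockhash_given:
        # Without a blockhash argument the key must be absent entirely.
        return "in_active_chain" not in gottx
    # With a blockhash, the key mirrors whether that block is active.
    return gottx.get("in_active_chain") == block_is_active

assert in_active_chain_ok({"txid": "..."},
                          blockhash_given=False, block_is_active=True)
assert in_active_chain_ok({"in_active_chain": False},
                          blockhash_given=True, block_is_active=False)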
assert_equal(gottx['txid'], tx) assert_equal(gottx['in_active_chain'], True) # We should not have the 'in_active_chain' flag when we don't provide a # block gottx = self.nodes[0].getrawtransaction(tx, True) assert_equal(gottx['txid'], tx) assert 'in_active_chain' not in gottx # We should not get the tx if we provide an unrelated block assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2) # An invalid block hash should raise the correct errors assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getrawtransaction, tx, True, True) assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[0].getrawtransaction, tx, True, "foobar") assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[0].getrawtransaction, tx, True, "abcd1234") assert_raises_rpc_error( -8, "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getrawtransaction, tx, True, "ZZZ0000000000000000000000000000000000000000000000000000000000000") assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000") # Undo the blocks and check in_active_chain self.nodes[0].invalidateblock(block1) gottx = self.nodes[0].getrawtransaction( txid=tx, verbose=True, blockhash=block1) assert_equal(gottx['in_active_chain'], False) self.nodes[0].reconsiderblock(block1) assert_equal(self.nodes[0].getbestblockhash(), block2) if not self.options.descriptors: # The traditional multisig workflow does not work with descriptor # wallets so these are legacy only. # The multisig workflow with descriptor wallets uses PSBTs and is # tested elsewhere, no need to do them here. # # RAW TX MULTISIG TESTS # # # 2of2 test addr1 = self.nodes[2].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[2].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) # Tests for createmultisig and addmultisigaddress assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"]) # createmultisig can only take public keys self.nodes[0].createmultisig( 2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # addmultisigaddress can take both pubkeys and addresses so long as # they are in the wallet, which is tested here. 
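The distinction those comments draw can be restated: createmultisig takes raw public keys only, while addmultisigaddress additionally resolves wallet-owned addresses to their keys before building the redeem script. A toy resolver showing that extra step, with a plain dict standing in for the wallet (all names hypothetical):

def resolve_multisig_keys(entries, wallet_pubkeys_by_address):
    """Illustrative: map wallet addresses to pubkeys, pass pubkeys through."""
    resolved = []
    for entry in entries:
        if entry in wallet_pubkeys_by_address:  # an address the wallet owns
            resolved.append(wallet_pubkeys_by_address[entry])
        else:  # assumed to already be raw pubkey hex
            resolved.append(entry)
    return resolved

wallet = {"addr1": "02" + "aa" * 32}
assert resolve_multisig_keys(["addr1", "03" + "bb" * 32], wallet) == [
    "02" + "aa" * 32, "03" + "bb" * 32]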
assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) mSigObj = self.nodes[2].addmultisigaddress( 2, [addr1Obj['pubkey'], addr1])['address'] # use balance deltas instead of absolute values bal = self.nodes[2].getbalance() # send 1,200,000 XEC to msig adr txId = self.nodes[0].sendtoaddress(mSigObj, 1200000) self.sync_all() self.generate(self.nodes[0], 1) self.sync_all() # node2 has both keys of the 2of2 ms addr., tx should affect the # balance assert_equal(self.nodes[2].getbalance(), bal + Decimal('1200000.00')) # 2of3 test from different nodes bal = self.nodes[2].getbalance() addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr3 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) addr3Obj = self.nodes[2].getaddressinfo(addr3) mSigObj = self.nodes[2].addmultisigaddress( 2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']] )['address'] txId = self.nodes[0].sendtoaddress(mSigObj, 2200000) decTx = self.nodes[0].gettransaction(txId) rawTx = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.generate(self.nodes[0], 1) self.sync_all() # THIS IS AN INCOMPLETE FEATURE # NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND # COUNT AT BALANCE CALCULATION # for now, assume the funds of a 2of3 multisig tx are not marked as # spendable assert_equal(self.nodes[2].getbalance(), bal) txDetails = self.nodes[0].gettransaction(txId, True) rawTx = self.nodes[0].decoderawtransaction(txDetails['hex']) vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2200000.00')) bal = self.nodes[0].getbalance() inputs = [{ "txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "amount": vout['value'], }] outputs = {self.nodes[0].getnewaddress(): 2190000} rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet( rawTx, inputs) # node1 only has one key, can't comp. 
sign the tx assert_equal(rawTxPartialSigned['complete'], False) rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs) # node2 can sign the tx compl., own two of three keys assert_equal(rawTxSigned['complete'], True) self.nodes[2].sendrawtransaction(rawTxSigned['hex']) rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex']) self.sync_all() self.generate(self.nodes[0], 1) self.sync_all() assert_equal(self.nodes[0].getbalance(), bal + Decimal( '50000000.00') + Decimal('2190000.00')) # block reward + tx rawTxBlock = self.nodes[0].getblock( self.nodes[0].getbestblockhash()) # 2of2 test for combining transactions bal = self.nodes[2].getbalance() addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) self.nodes[1].addmultisigaddress( 2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] mSigObj = self.nodes[2].addmultisigaddress( 2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] mSigObjValid = self.nodes[2].getaddressinfo(mSigObj) txId = self.nodes[0].sendtoaddress(mSigObj, 2200000) decTx = self.nodes[0].gettransaction(txId) rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.generate(self.nodes[0], 1) self.sync_all() # the funds of a 2of2 multisig tx should not be marked as spendable assert_equal(self.nodes[2].getbalance(), bal) txDetails = self.nodes[0].gettransaction(txId, True) rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex']) vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2200000.00')) bal = self.nodes[0].getbalance() inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "redeemScript": mSigObjValid['hex'], "amount": vout['value']}] outputs = {self.nodes[0].getnewaddress(): 2190000} rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs) rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet( rawTx2, inputs) self.log.debug(rawTxPartialSigned1) # node1 only has one key, can't comp. sign the tx assert_equal(rawTxPartialSigned1['complete'], False) rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet( rawTx2, inputs) self.log.debug(rawTxPartialSigned2) # node2 only has one key, can't comp. 
sign the tx assert_equal(rawTxPartialSigned2['complete'], False) rawTxComb = self.nodes[2].combinerawtransaction( [rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']]) self.log.debug(rawTxComb) self.nodes[2].sendrawtransaction(rawTxComb) rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb) self.sync_all() self.generate(self.nodes[0], 1) self.sync_all() # block reward + tx assert_equal(self.nodes[0].getbalance(), bal + Decimal('50000000.00') + Decimal('2190000.00')) # Sanity checks on verbose getrawtransaction output txId = rawTx["txid"] rawTxOutput = self.nodes[0].getrawtransaction(txId, True) assert_equal(rawTxOutput["hex"], rawTxSigned["hex"]) assert_equal(rawTxOutput["txid"], txId) assert_equal(rawTxOutput["hash"], txId) assert_greater_than(rawTxOutput["size"], 300) assert_equal(rawTxOutput["version"], 0x02) assert_equal(rawTxOutput["locktime"], 0) assert_equal(len(rawTxOutput["vin"]), 1) assert_equal(len(rawTxOutput["vout"]), 1) assert_equal(rawTxOutput["blockhash"], rawTxBlock["hash"]) assert_equal(rawTxOutput["confirmations"], 3) assert_equal(rawTxOutput["time"], rawTxBlock["time"]) assert_equal(rawTxOutput["blocktime"], rawTxBlock["time"]) # Basic signrawtransaction test addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 10_000_000) self.generate(self.nodes[0], 1) self.sync_all() vout = find_vout_for_address(self.nodes[1], txid, addr) rawTx = self.nodes[1].createrawtransaction( [{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): 9_999_000}) rawTxSigned = self.nodes[1].signrawtransactionwithwallet(rawTx) txId = self.nodes[1].sendrawtransaction(rawTxSigned['hex']) self.generate(self.nodes[0], 1) self.sync_all() # getrawtransaction tests # 1. valid parameters - only supply txid assert_equal( self.nodes[0].getrawtransaction(txId), rawTxSigned['hex']) # 2. valid parameters - supply txid and 0 for non-verbose assert_equal( self.nodes[0].getrawtransaction(txId, 0), rawTxSigned['hex']) # 3. valid parameters - supply txid and False for non-verbose assert_equal(self.nodes[0].getrawtransaction(txId, False), rawTxSigned['hex']) # 4. valid parameters - supply txid and 1 for verbose. # We only check the "hex" field of the output so we don't need to # update this test every time the output format changes. assert_equal(self.nodes[0].getrawtransaction(txId, 1)["hex"], rawTxSigned['hex']) # 5. valid parameters - supply txid and True for non-verbose assert_equal(self.nodes[0].getrawtransaction(txId, True)["hex"], rawTxSigned['hex']) # 6. invalid parameters - supply txid and string "Flase" assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, "Flase") # 7. invalid parameters - supply txid and empty array assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, []) # 8. 
invalid parameters - supply txid and empty dict assert_raises_rpc_error( -1, "not a boolean", self.nodes[0].getrawtransaction, txId, {}) inputs = [ {'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'sequence': 1000}] outputs = {self.nodes[0].getnewaddress(): 1} assert_raises_rpc_error( -8, 'Invalid parameter, missing vout key', self.nodes[0].createrawtransaction, inputs, outputs) inputs[0]['vout'] = "1" assert_raises_rpc_error( -8, 'Invalid parameter, vout must be a number', self.nodes[0].createrawtransaction, inputs, outputs) inputs[0]['vout'] = -1 assert_raises_rpc_error( -8, 'Invalid parameter, vout cannot be negative', self.nodes[0].createrawtransaction, inputs, outputs) inputs[0]['vout'] = 1 rawtx = self.nodes[0].createrawtransaction(inputs, outputs) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['vin'][0]['sequence'], 1000) # 9. invalid parameters - sequence number out of range inputs[0]['sequence'] = -1 assert_raises_rpc_error( -8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) # 10. invalid parameters - sequence number out of range inputs[0]['sequence'] = 4294967296 assert_raises_rpc_error( -8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) inputs[0]['sequence'] = 4294967294 rawtx = self.nodes[0].createrawtransaction(inputs, outputs) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['vin'][0]['sequence'], 4294967294) #################################### # TRANSACTION VERSION NUMBER TESTS # #################################### # Test the minimum transaction version number that fits in a signed # 32-bit integer. # As transaction version is unsigned, this should convert to its # unsigned equivalent. tx = CTransaction() tx.nVersion = -0x80000000 rawtx = ToHex(tx) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['version'], 0x80000000) # Test the maximum transaction version number that fits in a signed # 32-bit integer. tx = CTransaction() tx.nVersion = 0x7fffffff rawtx = ToHex(tx) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['version'], 0x7fffffff) self.log.info('sendrawtransaction/testmempoolaccept with maxfeerate') # Test a transaction with a small fee. txId = self.nodes[0].sendtoaddress( self.nodes[2].getnewaddress(), 1000000) rawTx = self.nodes[0].getrawtransaction(txId, True) vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1000000.00')) self.sync_all() inputs = [{"txid": txId, "vout": vout['n']}] # Fee 10,000 satoshis, (1,000,000 - (10000 sat * 0.01 XEC/sat)) = # 999900 outputs = {self.nodes[0].getnewaddress(): Decimal("999900.00")} rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx) assert_equal(rawTxSigned['complete'], True) # Fee 10,000 satoshis, ~200 b transaction, fee rate should land around 50 sat/byte = 500 XEC/kB # Thus, testmempoolaccept should reject testres = self.nodes[2].testmempoolaccept( [rawTxSigned['hex']], 500.00)[0] assert_equal(testres['allowed'], False) assert_equal(testres['reject-reason'], 'max-fee-exceeded') # and sendrawtransaction should throw assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. 
-maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 10.00) # and the following calls should both succeed testres = self.nodes[2].testmempoolaccept( rawtxs=[rawTxSigned['hex']])[0] assert_equal(testres['allowed'], True) self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex']) # Test a transaction with a large fee. txId = self.nodes[0].sendtoaddress( self.nodes[2].getnewaddress(), 1000000) rawTx = self.nodes[0].getrawtransaction(txId, True) vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1000000.00')) self.sync_all() inputs = [{"txid": txId, "vout": vout['n']}] # Fee 2,000,000 satoshis, (1,000,000 - (2,000,000 sat * 0.01 XEC/sat)) = # 980000 outputs = {self.nodes[0].getnewaddress(): Decimal("980000.00")} rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx) assert_equal(rawTxSigned['complete'], True) # Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 200,000 XEC/kB # Thus, testmempoolaccept should reject testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0] assert_equal(testres['allowed'], False) assert_equal(testres['reject-reason'], 'max-fee-exceeded') # and sendrawtransaction should throw assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex']) # and the following calls should both succeed testres = self.nodes[2].testmempoolaccept( rawtxs=[rawTxSigned['hex']], maxfeerate='200000.00')[0] assert_equal(testres['allowed'], True) self.nodes[2].sendrawtransaction( hexstring=rawTxSigned['hex'], maxfeerate='200000.00') self.log.info( 'sendrawtransaction/testmempoolaccept with tx that is already in the chain') self.generate(self.nodes[2], 1) for node in self.nodes: testres = node.testmempoolaccept([rawTxSigned['hex']])[0] assert_equal(testres['allowed'], False) assert_equal(testres['reject-reason'], 'txn-already-known') assert_raises_rpc_error( -27, 'Transaction already in block chain', node.sendrawtransaction, rawTxSigned['hex']) ########################################## # Decoding weird scripts in transactions # ########################################## self.log.info('Decode correctly-formatted but weird transactions') tx = CTransaction() # empty self.nodes[0].decoderawtransaction(ToHex(tx)) # truncated push tx.vin.append(CTxIn(COutPoint(42, 0), b'\x4e\x00\x00')) tx.vin.append(CTxIn(COutPoint(42, 0), b'\x4c\x10TRUNC')) tx.vout.append(CTxOut(0, b'\x4e\x00\x00')) tx.vout.append(CTxOut(0, b'\x4c\x10TRUNC')) self.nodes[0].decoderawtransaction(ToHex(tx)) # giant pushes and long scripts tx.vin.append( CTxIn(COutPoint(42, 0), CScript([b'giant push' * 10000]))) tx.vout.append(CTxOut(0, CScript([b'giant push' * 10000]))) self.nodes[0].decoderawtransaction(ToHex(tx)) self.log.info('Refuse garbage after transaction') assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, f"{ToHex(tx)}00") if __name__ == '__main__': RawTransactionsTest().main() diff --git a/test/functional/rpc_scantxoutset.py b/test/functional/rpc_scantxoutset.py index a4b5cb52d..f894ce80c 100755 --- a/test/functional/rpc_scantxoutset.py +++ b/test/functional/rpc_scantxoutset.py @@ -1,233 +1,229 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
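Before the scantxoutset tests, the fee-rate arithmetic behind the maxfeerate cases in rpc_rawtransaction.py above, worked out with the figures from its own comments (1 sat = 0.01 XEC, as used there):

fee_sats = 10_000
tx_size_bytes = 200                      # approximate size from the comment
sat_per_byte = fee_sats / tx_size_bytes  # 50 sat/byte
xec_per_kb = sat_per_byte * 1000 * 0.01  # 50,000 sat/kB = 500.0 XEC/kB
assert xec_per_kb == 500.0
# The transaction's rate lands right around the 500.00 XEC/kB limit passed
# to testmempoolaccept, which is why it is rejected with 'max-fee-exceeded'.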
"""Test the scantxoutset rpc call.""" from decimal import Decimal from test_framework.messages import XEC from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error -from test_framework.wallet import ( - MiniWallet, - address_to_scriptpubkey, - getnewdestination, -) +from test_framework.wallet import MiniWallet, address_to_scriptpubkey, getnewdestination def descriptors(out): return sorted(u['desc'] for u in out['unspents']) class ScantxoutsetTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 def sendtodestination(self, destination, amount): # interpret strings as addresses, assume scriptPubKey otherwise if isinstance(destination, str): destination = address_to_scriptpubkey(destination) self.wallet.send_to( from_node=self.nodes[0], scriptPubKey=destination, amount=int( XEC * amount)) def run_test(self): self.wallet = MiniWallet(self.nodes[0]) self.wallet.rescan_utxos() self.log.info("Create UTXOs...") pubk, spk, addr = getnewdestination() self.sendtodestination(spk, 2000) # send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK # (m/0'/0'/0') self.sendtodestination( "mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc", 8000) # (m/0'/0'/1') self.sendtodestination( "mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR", 16000) # (m/0'/0'/1500') self.sendtodestination( "n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg", 32000) # (m/0'/0'/0) self.sendtodestination( "mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6", 64000) # (m/0'/0'/1) self.sendtodestination( "mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S", 128000) # (m/0'/0'/1500) self.sendtodestination( "mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC", 256000) # (m/1/1/0') self.sendtodestination( "mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7", 512000) # (m/1/1/1') self.sendtodestination( "mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA", 1024000) # (m/1/1/1500') self.sendtodestination( "mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ", 2048000) # (m/1/1/0) self.sendtodestination( "mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", 4096000) # (m/1/1/1) self.sendtodestination( "mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy", 8192000) # (m/1/1/1500) self.sendtodestination( "mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq", 16384000) self.generate(self.nodes[0], 1) scan = self.nodes[0].scantxoutset("start", []) info = self.nodes[0].gettxoutsetinfo() assert_equal(scan['success'], True) assert_equal(scan['height'], info['height']) assert_equal(scan['txouts'], info['txouts']) assert_equal(scan['bestblock'], info['bestblock']) self.log.info("Test if we have found the non HD unspent outputs.") assert_equal(self.nodes[0].scantxoutset( "start", [f"pkh({pubk.hex()})"])['total_amount'], Decimal("2000")) assert_equal(self.nodes[0].scantxoutset( "start", [f"combo({pubk.hex()})"])['total_amount'], Decimal("2000")) assert_equal(self.nodes[0].scantxoutset( "start", [f"addr({addr})"])['total_amount'], Decimal("2000")) assert_equal(self.nodes[0].scantxoutset( "start", [f"addr({addr})"])['total_amount'], Decimal("2000")) self.log.info("Test range validation.") assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": -1}]) assert_raises_rpc_error(-8, "Range should be greater or equal than 0", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [-1, 10]}]) assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]}]) assert_raises_rpc_error(-8, "Range specified as 
[begin,end] must not have begin after end", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [2, 1]}]) assert_raises_rpc_error(-8, "Range is too large", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [0, 1000001]}]) self.log.info("Test extended key derivation.") # Run various scans, and verify that the sum of the amounts of the matches corresponds to the expected subset. # Note that all amounts in the UTXO set are powers of 2 multiplied by # 1,000 XEC, so each amount uniquely identifies a subset (for example, the # 24,000 below can only be the 8,000 plus the 16,000 output). assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"])['total_amount'], Decimal("8000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"])['total_amount'], Decimal("16000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"])['total_amount'], Decimal("32000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"])['total_amount'], Decimal("64000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"])['total_amount'], Decimal("128000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"])['total_amount'], Decimal("256000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)", "range": 1499}])['total_amount'], Decimal("24000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)", "range": 1500}])['total_amount'], Decimal("56000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])['total_amount'], Decimal("192000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)", "range": 1500}])['total_amount'], Decimal("448000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"])['total_amount'], Decimal("512000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')"])['total_amount'], Decimal("1024000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)"])['total_amount'], Decimal("2048000")) assert_equal(self.nodes[0].scantxoutset("start", [
"combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])['total_amount'], Decimal("4096000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)"])['total_amount'], Decimal("8192000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)"])['total_amount'], Decimal("16384000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)"])['total_amount'], Decimal("4096000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo([abcdef88/1/2'/3/4h]tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)"])['total_amount'], Decimal("8192000")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)"])['total_amount'], Decimal("16384000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1499}])['total_amount'], Decimal("1536000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1500}])['total_amount'], Decimal("3584000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1499}])['total_amount'], Decimal("12288000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1500}])['total_amount'], Decimal("28672000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1499}])['total_amount'], Decimal("12288000")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])['total_amount'], Decimal("28672000")) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": [ 1500, 1500]}])['total_amount'], Decimal("16384000")) # Test the reported descriptors for a few matches assert_equal(descriptors(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])), ["pkh([0c5f9a1e/0'/0'/0]026dbd8b2315f296d36e6b6920b1579ca75569464875c7ebe869b536a7d9503c8c)#dzxw429x", "pkh([0c5f9a1e/0'/0'/1]033e6f25d76c00bedb3a8993c7d5739ee806397f0529b1b31dda31ef890f19a60c)#43rvceed"]) assert_equal( descriptors( self.nodes[0].scantxoutset( "start", 
["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])), ["pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8"]) assert_equal(descriptors(self.nodes[0].scantxoutset("start", [{"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])), ['pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8', 'pkh([0c5f9a1e/1/1/1500]03832901c250025da2aebae2bfb38d5c703a57ab66ad477f9c578bfbcd78abca6f)#vchwd07g', 'pkh([0c5f9a1e/1/1/1]030d820fc9e8211c4169be8530efbc632775d8286167afd178caaf1089b77daba7)#z2t3ypsa']) # Check that status and abort don't need second arg assert_equal(self.nodes[0].scantxoutset("status"), None) assert_equal(self.nodes[0].scantxoutset("abort"), False) # Check that second arg is needed for start assert_raises_rpc_error(-1, "scanobjects argument is required for the start action", self.nodes[0].scantxoutset, "start") if __name__ == '__main__': ScantxoutsetTest().main() diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py index 47018fded..072518521 100755 --- a/test/functional/wallet_abandonconflict.py +++ b/test/functional/wallet_abandonconflict.py @@ -1,242 +1,238 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the abandontransaction RPC. The abandontransaction RPC marks a transaction and all its in-wallet descendants as abandoned which allows their inputs to be respent. It can be used to replace "stuck" or evicted transactions. It only works on transactions which are not included in a block and are not currently in the mempool. It has no effect on transactions which are already abandoned. 
""" from decimal import Decimal from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import ( - assert_equal, - assert_raises_rpc_error, - satoshi_round, -) +from test_framework.util import assert_equal, assert_raises_rpc_error, satoshi_round class AbandonConflictTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.extra_args = [["-minrelaytxfee=10"], []] # whitelist peers to speed up tx relay / mempool sync for args in self.extra_args: args.append("-whitelist=noban@127.0.0.1") def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): def total_fees(*txids): total = 0 for txid in txids: # '-=' is because gettransaction(txid)['fee'] returns a negative total -= self.nodes[0].gettransaction(txid)['fee'] return satoshi_round(total) self.generate(self.nodes[1], 100) balance = self.nodes[0].getbalance() txA = self.nodes[0].sendtoaddress( self.nodes[0].getnewaddress(), Decimal("10000000")) txB = self.nodes[0].sendtoaddress( self.nodes[0].getnewaddress(), Decimal("10000000")) txC = self.nodes[0].sendtoaddress( self.nodes[0].getnewaddress(), Decimal("10000000")) self.sync_mempools() self.generate(self.nodes[1], 1) # Can not abandon non-wallet transaction assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', lambda: self.nodes[0].abandontransaction(txid='ff' * 32)) # Can not abandon confirmed transaction assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: self.nodes[0].abandontransaction(txid=txA)) newbalance = self.nodes[0].getbalance() # no more than fees lost assert balance - newbalance <= total_fees(txA, txB, txC) balance = newbalance # Disconnect nodes so node0's transactions don't get into node1's # mempool self.disconnect_nodes(0, 1) # Identify the 10btc outputs nA = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction( txA)["details"] if tx_out["amount"] == Decimal("10000000")) nB = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction( txB)["details"] if tx_out["amount"] == Decimal("10000000")) nC = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction( txC)["details"] if tx_out["amount"] == Decimal("10000000")) inputs = [] # spend 10btc outputs from txA and txB inputs.append({"txid": txA, "vout": nA}) inputs.append({"txid": txB, "vout": nB}) outputs = {} outputs[self.nodes[0].getnewaddress()] = Decimal("14999980") outputs[self.nodes[1].getnewaddress()] = Decimal("5000000") signed = self.nodes[0].signrawtransactionwithwallet( self.nodes[0].createrawtransaction(inputs, outputs)) txAB1 = self.nodes[0].sendrawtransaction(signed["hex"]) # Identify the 14,999,980 XEC output nAB = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction( txAB1)["details"] if tx_out["amount"] == Decimal("14999980")) # Create a child tx spending AB1 and C inputs = [] # Amount 14,999,980 XEC inputs.append({"txid": txAB1, "vout": nAB}) # Amount 10,000,000 XEC inputs.append({"txid": txC, "vout": nC}) outputs = {} outputs[self.nodes[0].getnewaddress()] = Decimal("24999600") signed2 = self.nodes[0].signrawtransactionwithwallet( self.nodes[0].createrawtransaction(inputs, outputs)) txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"]) # Create a child tx spending ABC2 signed3_change = Decimal("24999000") inputs = [{"txid": txABC2, "vout": 0}] outputs = {self.nodes[0].getnewaddress(): signed3_change} signed3 = self.nodes[0].signrawtransactionwithwallet( self.nodes[0].createrawtransaction(inputs, outputs)) # note tx is never directly referenced, only 
abandoned as a child of # the above self.nodes[0].sendrawtransaction(signed3["hex"]) # In-mempool txs from self should increase balance from change newbalance = self.nodes[0].getbalance() assert_equal( newbalance, balance - Decimal("30000000") + signed3_change) balance = newbalance # Restart the node with a higher min relay fee so the parent tx is no longer in mempool # TODO: redo with eviction self.restart_node(0, extra_args=["-minrelaytxfee=100"]) assert self.nodes[0].getmempoolinfo()['loaded'] # Verify txs no longer in either node's mempool assert_equal(len(self.nodes[0].getrawmempool()), 0) assert_equal(len(self.nodes[1].getrawmempool()), 0) # Transactions which are not in the mempool should only reduce wallet balance. # Transaction inputs should still be spent, but the change not yet # received. newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance - signed3_change) # Unconfirmed received funds that are not in mempool also shouldn't show # up in unconfirmed balance. Note that the transactions stored in the wallet # are not necessarily in the node's mempool. balances = self.nodes[0].getbalances()['mine'] assert_equal( balances['untrusted_pending'] + balances['trusted'], newbalance) # Unconfirmed transactions which are not in the mempool should also # not be in listunspent assert txABC2 not in [utxo["txid"] for utxo in self.nodes[0].listunspent(0)] balance = newbalance # Abandon original transaction and verify inputs are available again # including that the child tx was also abandoned self.nodes[0].abandontransaction(txAB1) newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance + Decimal("30000000")) balance = newbalance self.log.info("Check abandoned transactions in listsinceblock") listsinceblock = self.nodes[0].listsinceblock() txAB1_listsinceblock = [d for d in listsinceblock['transactions'] if d['txid'] == txAB1 and d['category'] == 'send'] for tx in txAB1_listsinceblock: assert_equal(tx['abandoned'], True) assert_equal(tx['confirmations'], 0) assert_equal(tx['trusted'], False) # Verify that even with a low min relay fee, the tx is not re-accepted # from wallet on startup once abandoned. self.restart_node(0, extra_args=["-minrelaytxfee=10"]) assert self.nodes[0].getmempoolinfo()['loaded'] assert_equal(len(self.nodes[0].getrawmempool()), 0) assert_equal(self.nodes[0].getbalance(), balance) # If the transaction is re-sent, the wallet also unabandons it. The # change should be available, and its child transaction should remain # abandoned. # NOTE: Abandoned transactions are internal to the wallet, and tracked # separately from other indices. self.nodes[0].sendrawtransaction(signed["hex"]) newbalance = self.nodes[0].getbalance() assert_equal( newbalance, balance - Decimal("20000000") + Decimal("14999980")) balance = newbalance # Send child tx again so it is no longer abandoned. self.nodes[0].sendrawtransaction(signed2["hex"]) newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance - Decimal("10000000") - Decimal("14999980") + Decimal("24999600")) balance = newbalance # Reset to a higher relay fee so that we abandon a transaction self.restart_node(0, extra_args=["-minrelaytxfee=100"]) assert self.nodes[0].getmempoolinfo()['loaded'] assert_equal(len(self.nodes[0].getrawmempool()), 0) newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance - Decimal("24999600")) balance = newbalance # Create a double spend of AB1. Spend it again from only A's # 10,000,000 XEC output. # Mine double spend from node 1.
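# Aside: the balance bookkeeping of the two re-sends above, in one place (a
# minimal sketch; the amounts are the test's own). Re-sending AB1 unabandons
# it, spending the two 10,000,000 XEC outputs and returning 14,999,980 XEC of
# change; re-sending ABC2 then spends that change plus C's 10,000,000 XEC for
# a 24,999,600 XEC output. The actual double spend of AB1 follows below.
delta_ab1 = -2 * 10_000_000 + 14_999_980
delta_abc2 = -14_999_980 - 10_000_000 + 24_999_600
assert (delta_ab1, delta_abc2) == (-5_000_020, -380)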
inputs = [] inputs.append({"txid": txA, "vout": nA}) outputs = {} outputs[self.nodes[1].getnewaddress()] = Decimal("9999900") tx = self.nodes[0].createrawtransaction(inputs, outputs) signed = self.nodes[0].signrawtransactionwithwallet(tx) self.nodes[1].sendrawtransaction(signed["hex"]) self.generate(self.nodes[1], 1, sync_fun=self.no_op) self.connect_nodes(0, 1) self.sync_blocks() # Verify that B and C's 10,000,000 XEC outputs are available for # spending again because AB1 is now conflicted newbalance = self.nodes[0].getbalance() assert_equal(newbalance, balance + Decimal("20000000")) balance = newbalance # There is currently a minor bug around this, so this check doesn't # work. See Issue #7315 # Invalidate the block with the double spend. B's 10,000,000 XEC # output should no longer be available, and C's probably should not be # either self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) newbalance = self.nodes[0].getbalance() # assert_equal(newbalance, balance - Decimal("10000000")) self.log.info( "If the balance has not declined after invalidateblock, then the out-of-mempool wallet tx that is no longer") self.log.info( "conflicted has not been resurrected, so its inputs are not yet seen as spent. See Issue #7315") self.log.info(f"{balance} -> {newbalance} ?") if __name__ == '__main__': AbandonConflictTest().main() diff --git a/test/functional/wallet_avoidreuse.py b/test/functional/wallet_avoidreuse.py index 1f60aefcf..e9f7e0ccf 100755 --- a/test/functional/wallet_avoidreuse.py +++ b/test/functional/wallet_avoidreuse.py @@ -1,472 +1,468 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the avoid_reuse and setwalletflag features.""" from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import ( - assert_approx, - assert_equal, - assert_raises_rpc_error, -) +from test_framework.util import assert_approx, assert_equal, assert_raises_rpc_error def reset_balance(node, discardaddr): '''Throw away all coins owned by the node so its balance drops to 0.''' balance = node.getbalance(avoid_reuse=False) if balance > 500000: node.sendtoaddress( address=discardaddr, amount=balance, subtractfeefromamount=True, avoid_reuse=False) def count_unspent(node): '''Count the unspent outputs for the given node and return various statistics''' r = { "total": { "count": 0, "sum": 0, }, "reused": { "count": 0, "sum": 0, }, } supports_reused = True for utxo in node.listunspent(minconf=0): r["total"]["count"] += 1 r["total"]["sum"] += utxo["amount"] if supports_reused and "reused" in utxo: if utxo["reused"]: r["reused"]["count"] += 1 r["reused"]["sum"] += utxo["amount"] else: supports_reused = False r["reused"]["supported"] = supports_reused return r def assert_unspent(node, total_count=None, total_sum=None, reused_supported=None, reused_count=None, reused_sum=None): '''Make assertions about a node's unspent output statistics''' stats = count_unspent(node) if total_count is not None: assert_equal(stats["total"]["count"], total_count) if total_sum is not None: assert_approx(stats["total"]["sum"], total_sum, 1000) if reused_supported is not None: assert_equal(stats["reused"]["supported"], reused_supported) if reused_count is not None: assert_equal(stats["reused"]["count"], reused_count) if reused_sum is not None: assert_approx(stats["reused"]["sum"], reused_sum, 0.001) def assert_balances(node, mine): '''Make assertions
about a node's getbalances output''' got = node.getbalances()["mine"] for k, v in mine.items(): assert_approx(got[k], v, 1000) class AvoidReuseTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 # This test isn't testing txn relay/timing, so set whitelist on the # peers for instant txn relay. This speeds up the test run time 2-3x. self.extra_args = [["-whitelist=noban@127.0.0.1"]] * self.num_nodes def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): '''Set up initial chain and run tests defined below''' self.test_persistence() self.test_immutable() self.generate(self.nodes[0], 110) self.test_change_remains_change(self.nodes[1]) reset_balance(self.nodes[1], self.nodes[0].getnewaddress()) self.test_sending_from_reused_address_without_avoid_reuse() reset_balance(self.nodes[1], self.nodes[0].getnewaddress()) self.test_sending_from_reused_address_fails() reset_balance(self.nodes[1], self.nodes[0].getnewaddress()) self.test_getbalances_used() reset_balance(self.nodes[1], self.nodes[0].getnewaddress()) self.test_full_destination_group_is_preferred() reset_balance(self.nodes[1], self.nodes[0].getnewaddress()) self.test_all_destination_groups_are_used() def test_persistence(self): '''Test that wallet files persist the avoid_reuse flag.''' self.log.info("Test wallet files persist avoid_reuse flag") # Configure node 1 to use avoid_reuse self.nodes[1].setwalletflag('avoid_reuse') # Flags should be node0.avoid_reuse=false, node1.avoid_reuse=true assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False) assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True) # Stop and restart node 1 self.restart_node(1) self.connect_nodes(0, 1) # Flags should still be node0.avoid_reuse=false, node1.avoid_reuse=true assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False) assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True) # Attempting to set flag to its current state should throw assert_raises_rpc_error(-8, "Wallet flag is already set to false", self.nodes[0].setwalletflag, 'avoid_reuse', False) assert_raises_rpc_error(-8, "Wallet flag is already set to true", self.nodes[1].setwalletflag, 'avoid_reuse', True) def test_immutable(self): '''Test immutable wallet flags''' self.log.info("Test immutable wallet flags") # Attempt to set the disable_private_keys flag; this should not work assert_raises_rpc_error(-8, "Wallet flag is immutable", self.nodes[1].setwalletflag, 'disable_private_keys') tempwallet = ".wallet_avoidreuse.py_test_immutable_wallet.dat" # Create a wallet with disable_private_keys set; this should work self.nodes[1].createwallet(wallet_name=tempwallet, disable_private_keys=True) w = self.nodes[1].get_wallet_rpc(tempwallet) # Attempt to unset the disable_private_keys flag; this should not work assert_raises_rpc_error(-8, "Wallet flag is immutable", w.setwalletflag, 'disable_private_keys', False) # Unload temp wallet self.nodes[1].unloadwallet(tempwallet) def test_change_remains_change(self, node): self.log.info( "Test that change doesn't turn into non-change when spent") reset_balance(node, node.getnewaddress()) addr = node.getnewaddress() txid = node.sendtoaddress(addr, 1000000) out = node.listunspent( minconf=0, query_options={ 'minimumAmount': 2000000}) assert_equal(len(out), 1) assert_equal(out[0]['txid'], txid) changeaddr = out[0]['address'] # Make sure it's starting out as change as expected assert node.getaddressinfo(changeaddr)['ischange'] for logical_tx in node.listtransactions(): assert
logical_tx.get('address') != changeaddr # Spend it reset_balance(node, node.getnewaddress()) # It should still be change assert node.getaddressinfo(changeaddr)['ischange'] for logical_tx in node.listtransactions(): assert logical_tx.get('address') != changeaddr def test_sending_from_reused_address_without_avoid_reuse(self): ''' Test the same as test_sending_from_reused_address_fails, except send the 10MM XEC with the avoid_reuse flag set to false. This means the 10MM XEC send should succeed, where it fails in test_sending_from_reused_address_fails. ''' self.log.info( "Test sending from reused address with avoid_reuse=false") fundaddr = self.nodes[1].getnewaddress() retaddr = self.nodes[0].getnewaddress() self.nodes[0].sendtoaddress(fundaddr, 10000000) self.generate(self.nodes[0], 1) # listunspent should show 1 single, unused 10MM XEC output assert_unspent( self.nodes[1], total_count=1, total_sum=10000000, reused_supported=True, reused_count=0) # getbalances should show no used, 10MM XEC trusted assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10000000}) # node 0 should not show a used entry, as it does not enable # avoid_reuse assert "used" not in self.nodes[0].getbalances()["mine"] self.nodes[1].sendtoaddress(retaddr, 5000000) self.generate(self.nodes[0], 1) # listunspent should show 1 single, unused 5MM XEC output assert_unspent( self.nodes[1], total_count=1, total_sum=5000000, reused_supported=True, reused_count=0) # getbalances should show no used, 5MM XEC trusted assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5000000}) self.nodes[0].sendtoaddress(fundaddr, 10000000) self.generate(self.nodes[0], 1) # listunspent should show 2 total outputs (5MM, 10MM XEC), one unused # (5MM), one reused (10MM) assert_unspent( self.nodes[1], total_count=2, total_sum=15000000, reused_count=1, reused_sum=10000000) # getbalances should show 10MM used, 5MM XEC trusted assert_balances( self.nodes[1], mine={ "used": 10000000, "trusted": 5000000}) self.nodes[1].sendtoaddress( address=retaddr, amount=10000000, avoid_reuse=False) # listunspent should show 1 total outputs (5MM XEC), unused assert_unspent( self.nodes[1], total_count=1, total_sum=5000000, reused_count=0) # getbalances should show no used, 5MM XEC trusted assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5000000}) # node 1 should now have about 5MM XEC left (for both cases) assert_approx(self.nodes[1].getbalance(), 5000000, 1000) assert_approx( self.nodes[1].getbalance( avoid_reuse=False), 5000000, 1000) def test_sending_from_reused_address_fails(self): ''' Test the simple case where [1] generates a new address A, then [0] sends 10MM XEC to A. [1] spends 5MM XEC from A. (leaving roughly 5MM XEC usable) [0] sends 10MM XEC to A again. [1] tries to spend 10MM XEC (fails; dirty).
[1] tries to spend 4MM XEC (succeeds; change address sufficient) ''' self.log.info("Test sending from reused address fails") fundaddr = self.nodes[1].getnewaddress(label="", address_type="legacy") retaddr = self.nodes[0].getnewaddress() self.nodes[0].sendtoaddress(fundaddr, 10000000) self.generate(self.nodes[0], 1) # listunspent should show 1 single, unused 10MM XEC output assert_unspent( self.nodes[1], total_count=1, total_sum=10000000, reused_supported=True, reused_count=0) # getbalances should show no used, 10MM XEC trusted assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10000000}) self.nodes[1].sendtoaddress(retaddr, 5000000) self.generate(self.nodes[0], 1) # listunspent should show 1 single, unused 5MM XEC output assert_unspent( self.nodes[1], total_count=1, total_sum=5000000, reused_supported=True, reused_count=0) # getbalances should show no used, 5MM XEC trusted assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5000000}) if not self.options.descriptors: # For the second send, we transmute it to a related single-key address # to make sure it's also detected as re-use # NB: this is not very useful for ABC, but we keep the new variable # name for consistency. new_fundaddr = fundaddr self.nodes[0].sendtoaddress(new_fundaddr, 10000000) self.generate(self.nodes[0], 1) # listunspent should show 2 total outputs (5MM, 10MM XEC), one unused # (5MM), one reused (10MM) assert_unspent( self.nodes[1], total_count=2, total_sum=15000000, reused_count=1, reused_sum=10000000) # getbalances should show 10MM used, 5MM XEC trusted assert_balances( self.nodes[1], mine={ "used": 10000000, "trusted": 5000000}) # node 1 should now have a balance of 5MM (no dirty) or 15MM # (including dirty) assert_approx(self.nodes[1].getbalance(), 5000000, 1000) assert_approx( self.nodes[1].getbalance( avoid_reuse=False), 15000000, 1000) assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[1].sendtoaddress, retaddr, 10000000) self.nodes[1].sendtoaddress(retaddr, 4000000) # listunspent should show 2 total outputs (1MM, 10MM XEC), one unused # (1MM), one reused (10MM) assert_unspent( self.nodes[1], total_count=2, total_sum=11000000, reused_count=1, reused_sum=10000000) # getbalances should show 10MM used, 1MM XEC trusted assert_balances( self.nodes[1], mine={ "used": 10000000, "trusted": 1000000}) # node 1 should now have about 1MM XEC left (no dirty) and 11MM # (including dirty) assert_approx(self.nodes[1].getbalance(), 1000000, 1000) assert_approx( self.nodes[1].getbalance( avoid_reuse=False), 11000000, 1000) def test_getbalances_used(self): ''' getbalances and listunspent should pick up on reused addresses immediately, even for address reusing outputs created before the first transaction was spending from that address ''' self.log.info("Test getbalances used category") # node under test should be completely empty assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0) new_addr = self.nodes[1].getnewaddress() ret_addr = self.nodes[0].getnewaddress() # send multiple transactions, reusing one address for _ in range(11): self.nodes[0].sendtoaddress(new_addr, 1000000) self.generate(self.nodes[0], 1) # send transaction that should not use all the available outputs # per the current coin selection algorithm self.nodes[1].sendtoaddress(ret_addr, 5000000) # getbalances and listunspent should show the remaining outputs # in the reused address as used/reused assert_unspent( self.nodes[1], total_count=2, total_sum=6000000, reused_count=1, reused_sum=1000000) assert_balances( self.nodes[1], mine={ 
"used": 1000000, "trusted": 5000000}) def test_full_destination_group_is_preferred(self): ''' Test the case where [1] only has 11 outputs of 1MM XEC in the same reused address and tries to send a small payment of 500,000 XEC. The wallet should use 10 outputs from the reused address as inputs and not a single 1MM XEC input, in order to join several outputs from the reused address. ''' self.log.info( "Test that full destination groups are preferred in coin selection") # Node under test should be empty assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0) new_addr = self.nodes[1].getnewaddress() ret_addr = self.nodes[0].getnewaddress() # Send 11 outputs of 1MM XEC to the same, reused address in the wallet for _ in range(11): self.nodes[0].sendtoaddress(new_addr, 1000000) self.generate(self.nodes[0], 1) # Sending a transaction that is smaller than each one of the # available outputs txid = self.nodes[1].sendtoaddress(address=ret_addr, amount=500000) inputs = self.nodes[1].getrawtransaction(txid, 1)["vin"] # The transaction should use 10 inputs exactly assert_equal(len(inputs), 10) def test_all_destination_groups_are_used(self): ''' Test the case where [1] only has 22 outputs of 1MM XEC in the same reused address and tries to send a payment of 20,5MM XEC. The wallet should use all 22 outputs from the reused address as inputs. ''' self.log.info("Test that all destination groups are used") # Node under test should be empty assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0) new_addr = self.nodes[1].getnewaddress() ret_addr = self.nodes[0].getnewaddress() # Send 22 outputs of 1MM XEC to the same, reused address in the wallet for _ in range(22): self.nodes[0].sendtoaddress(new_addr, 1000000) self.generate(self.nodes[0], 1) # Sending a transaction that needs to use the full groups # of 10 inputs but also the incomplete group of 2 inputs. txid = self.nodes[1].sendtoaddress(address=ret_addr, amount=20500000) inputs = self.nodes[1].getrawtransaction(txid, 1)["vin"] # The transaction should use 22 inputs exactly assert_equal(len(inputs), 22) if __name__ == '__main__': AvoidReuseTest().main() diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py index 0b96e07b7..4ec4fe306 100755 --- a/test/functional/wallet_balance.py +++ b/test/functional/wallet_balance.py @@ -1,344 +1,342 @@ #!/usr/bin/env python3 # Copyright (c) 2018-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the wallet balance RPC methods.""" import struct from decimal import Decimal -from test_framework.address import ( - ADDRESS_ECREG_UNSPENDABLE as ADDRESS_WATCHONLY, -) +from test_framework.address import ADDRESS_ECREG_UNSPENDABLE as ADDRESS_WATCHONLY from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error FAR_IN_THE_FUTURE = 2000000000 def create_transactions(node, address, amt, fees): # Create and sign raw transactions from node to address for amt. # Creates a transaction for each fee and returns an array # of the raw transactions. 
utxos = [u for u in node.listunspent(0) if u['spendable']] # Create transactions inputs = [] ins_total = 0 for utxo in utxos: inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]}) ins_total += utxo['amount'] if ins_total >= amt + max(fees): break # make sure there were enough UTXOs assert ins_total >= amt + max(fees) txs = [] for fee in fees: outputs = {address: amt} # prevent 0 change output if ins_total > amt + fee: outputs[node.getrawchangeaddress()] = ins_total - amt - fee raw_tx = node.createrawtransaction(inputs, outputs, 0) raw_tx = node.signrawtransactionwithwallet(raw_tx) assert_equal(raw_tx['complete'], True) txs.append(raw_tx) return txs class WalletTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.setup_clean_chain = True self.extra_args = [ # Limit mempool descendants as a hack to have wallet txs rejected # from the mempool. This will no longer work after wellington, so # move the activation into the future for this test. ['-limitdescendantcount=3', f'-wellingtonactivationtime={FAR_IN_THE_FUTURE}', ], [], ] # whitelist peers to speed up tx relay / mempool sync for args in self.extra_args: args.append("-whitelist=noban@127.0.0.1") def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): if not self.options.descriptors: # Tests legacy watchonly behavior which is not present (and does # not need to be tested) in descriptor wallets self.nodes[0].importaddress(ADDRESS_WATCHONLY) # Check that nodes don't own any UTXOs assert_equal(len(self.nodes[0].listunspent()), 0) assert_equal(len(self.nodes[1].listunspent()), 0) self.log.info("Check that only node 0 is watching an address") assert 'watchonly' in self.nodes[0].getbalances() assert 'watchonly' not in self.nodes[1].getbalances() self.log.info("Mining blocks ...") self.generate(self.nodes[0], 1) self.generate(self.nodes[1], 1) self.generatetoaddress(self.nodes[1], 101, ADDRESS_WATCHONLY) if not self.options.descriptors: # Tests legacy watchonly behavior which is not present (and does not # need to be tested) in descriptor wallets assert_equal(self.nodes[0].getbalances()['mine']['trusted'], 50000000) assert_equal(self.nodes[0].getwalletinfo()['balance'], 50000000) assert_equal(self.nodes[1].getbalances()['mine']['trusted'], 50000000) assert_equal(self.nodes[0].getbalances()['watchonly']['immature'], 5000000000) assert 'watchonly' not in self.nodes[1].getbalances() assert_equal(self.nodes[0].getbalance(), 50000000) assert_equal(self.nodes[1].getbalance(), 50000000) self.log.info("Test getbalance with different arguments") assert_equal(self.nodes[0].getbalance("*"), 50000000) assert_equal(self.nodes[0].getbalance("*", 1), 50000000) assert_equal(self.nodes[0].getbalance(minconf=1), 50000000) if not self.options.descriptors: assert_equal(self.nodes[0].getbalance(minconf=0, include_watchonly=True), 100_000_000) assert_equal(self.nodes[0].getbalance("*", 1, True), 100_000_000) else: assert_equal(self.nodes[0].getbalance(minconf=0, include_watchonly=True), 50_000_000) assert_equal(self.nodes[0].getbalance("*", 1, True), 50_000_000) assert_equal( self.nodes[1].getbalance( minconf=0, include_watchonly=True), 50000000) # Send 40,000,000 XEC from node 0 to node 1 and 60,000,000 XEC from # node 1 to node 0.
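# An aside on the getbalance signatures exercised just above (a sketch; the
# values are this test's own): the dummy first argument must be "*" when
# supplied, and include_watchonly only adds to the total on legacy wallets:
#   node.getbalance()                  # trusted balance, default minconf
#   node.getbalance("*", 1)            # same, with an explicit minconf
#   node.getbalance("*", 1, True)      # plus watchonly (legacy wallets only)
# The two sends described above follow.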
txs = create_transactions( self.nodes[0], self.nodes[1].getnewaddress(), 40000000, [Decimal('10000')]) self.nodes[0].sendrawtransaction(txs[0]['hex']) # sending on both nodes is faster than waiting for propagation self.nodes[1].sendrawtransaction(txs[0]['hex']) self.sync_all() txs = create_transactions(self.nodes[1], self.nodes[0].getnewaddress(), 60000000, [ Decimal('10000'), Decimal('20000')]) self.nodes[1].sendrawtransaction(txs[0]['hex']) # sending on both nodes is faster than waiting for propagation self.nodes[0].sendrawtransaction(txs[0]['hex']) self.sync_all() # First argument of getbalance must be set to "*" assert_raises_rpc_error(-32, "dummy first argument must be excluded or set to \"*\"", self.nodes[1].getbalance, "") self.log.info("Test balances with unconfirmed inputs") # Before `test_balances()`, we have had two nodes with a balance of 50 # each and then we: # # 1) Sent 40 from node A to node B with fee 0.01 # 2) Sent 60 from node B to node A with fee 0.01 # # Then we check the balances: # # 1) As is # 2) With transaction 2 from above with 2x the fee # # Prior to #16766, in this situation, the node would immediately report # a balance of 30 on node B as unconfirmed and trusted. # # After #16766, we show that balance as unconfirmed. # # The balance is indeed "trusted" and "confirmed" insofar as removing # the mempool transactions would return at least that much money. But # the algorithm after #16766 marks it as unconfirmed because the 'taint' # tracking of transaction trust for summing balances doesn't consider # which inputs belong to a user. In this case, the change output in # question could be "destroyed" by replacing the 1st transaction above. # # The post #16766 behavior is correct; we shouldn't be treating those # funds as confirmed. If you want to rely on that specific UTXO existing # which has given you that balance, you cannot, as a third party # spending the other input would destroy that unconfirmed balance. # # For example, if the test transactions were: # # 1) Sent 40 from node A to node B with fee 0.01 # 2) Sent 10 from node B to node A with fee 0.01 # # Then our node would report a confirmed balance of 40 + 50 - 10 = 80 # BTC, which is more than would be available if transaction 1 were # replaced.
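# Worked numbers for the scenario described above (a sketch; amounts in XEC
# as used in this test, rather than the BTC-scale figures of the original
# comment): both nodes start at 50,000,000. tx1 sends 40,000,000 A->B (fee
# 10,000); tx2 sends 60,000,000 B->A (fee 10,000) and must therefore spend
# B's unconfirmed 40,000,000 output, so B's change descends from an
# unconfirmed input and is reported as untrusted_pending rather than trusted.
start = 50_000_000
b_change = start + 40_000_000 - 60_000_000 - 10_000
assert b_change == 29_990_000  # matches balance_node1 once confirmed below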
def test_balances(*, fee_node_1=0): # getbalances expected_balances_0 = {'mine': {'immature': Decimal('0E-2'), # change from node 0's send 'trusted': Decimal('9990000'), 'untrusted_pending': Decimal('60000000.0')}, 'watchonly': {'immature': Decimal('5000000000'), 'trusted': Decimal('50000000.0'), 'untrusted_pending': Decimal('0E-2')}} expected_balances_1 = {'mine': {'immature': Decimal('0E-2'), # node 1's send had an unsafe input 'trusted': Decimal('0E-2'), # Doesn't include output of node # 0's send since it was spent 'untrusted_pending': Decimal('30000000.0') - fee_node_1}} if self.options.descriptors: del expected_balances_0["watchonly"] assert_equal(self.nodes[0].getbalances(), expected_balances_0) assert_equal(self.nodes[1].getbalances(), expected_balances_1) # getbalance without any arguments includes unconfirmed transactions, but not untrusted transactions # change from node 0's send assert_equal(self.nodes[0].getbalance(), Decimal('9990000')) # node 1's send had an unsafe input assert_equal(self.nodes[1].getbalance(), Decimal('0')) # Same with minconf=0 assert_equal( self.nodes[0].getbalance( minconf=0), Decimal('9990000')) assert_equal(self.nodes[1].getbalance(minconf=0), Decimal('0')) # getbalance with a minconf incorrectly excludes coins that have been spent more recently than the minconf blocks ago # TODO: fix getbalance tracking of coin spentness depth assert_equal(self.nodes[0].getbalance(minconf=1), Decimal('0')) assert_equal(self.nodes[1].getbalance(minconf=1), Decimal('0')) # getunconfirmedbalance # output of node 1's spend assert_equal( self.nodes[0].getunconfirmedbalance(), Decimal('60000000')) # Doesn't include output of node 0's send since it was spent assert_equal( self.nodes[1].getunconfirmedbalance(), Decimal('30000000') - fee_node_1) # getwalletinfo.unconfirmed_balance assert_equal(self.nodes[0].getwalletinfo()[ "unconfirmed_balance"], Decimal('60000000')) assert_equal( self.nodes[1].getwalletinfo()["unconfirmed_balance"], Decimal('30000000') - fee_node_1) test_balances(fee_node_1=Decimal('10000')) # In the original Core version of this test, Node 1 would've bumped # the fee by 0.01 here to resend, but this is XEC, so it has 10000 XEC # left to spend on goods and services self.sync_all() self.log.info( "Test getbalance and getbalances.mine.untrusted_pending with conflicted unconfirmed inputs") test_balances(fee_node_1=Decimal('10000')) self.generatetoaddress(self.nodes[1], 1, ADDRESS_WATCHONLY) # balances are correct after the transactions are confirmed # node 1's send plus change from node 0's send balance_node0 = Decimal('69990000') # change from node 0's send balance_node1 = Decimal('29990000') assert_equal(self.nodes[0].getbalances()[ 'mine']['trusted'], balance_node0) assert_equal(self.nodes[1].getbalances()[ 'mine']['trusted'], balance_node1) assert_equal(self.nodes[0].getbalance(), balance_node0) assert_equal(self.nodes[1].getbalance(), balance_node1) # Send total balance away from node 1 txs = create_transactions(self.nodes[1], self.nodes[0].getnewaddress( ), Decimal('29970000'), [Decimal('10000')]) self.nodes[1].sendrawtransaction(txs[0]['hex']) self.generatetoaddress(self.nodes[1], 2, ADDRESS_WATCHONLY) # getbalance with a minconf incorrectly excludes coins that have been spent more recently than the minconf blocks ago # TODO: fix getbalance tracking of coin spentness depth # getbalance with minconf=3 should still show the old balance assert_equal(self.nodes[1].getbalance(minconf=3), Decimal('0')) # getbalance with minconf=2 will show the new balance. 
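# The minconf quirk in numbers (a sketch; values from this test): node 1's
# 29,990,000 XEC balance was sent away with a 10,000 fee, leaving a 10,000
# change output that sits 2 blocks deep after the generatetoaddress above.
balance_before, sent, fee = 29_990_000, 29_970_000, 10_000
change = balance_before - sent - fee
assert change == 10_000  # visible at minconf=2, hidden at minconf=3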
assert_equal(self.nodes[1].getbalance(minconf=2), Decimal('10000')) # check mempool transactions count for wallet unconfirmed balance after # dynamically loading the wallet. before = self.nodes[1].getbalances()['mine']['untrusted_pending'] dst = self.nodes[1].getnewaddress() self.nodes[1].unloadwallet(self.default_wallet_name) self.nodes[0].sendtoaddress(dst, 100000) self.sync_all() self.nodes[1].loadwallet(self.default_wallet_name) after = self.nodes[1].getbalances()['mine']['untrusted_pending'] assert_equal(before + Decimal('100000'), after) # Create 3 more wallet txs, where the last is not accepted to the # mempool because it is the third descendant of the tx above for _ in range(3): # Set amount high enough such that all coins are spent by each tx txid = self.nodes[0].sendtoaddress( self.nodes[0].getnewaddress(), 99000000) self.log.info('Check that wallet txs not in the mempool are untrusted') assert txid not in self.nodes[0].getrawmempool() assert_equal(self.nodes[0].gettransaction(txid)['trusted'], False) assert_equal(self.nodes[0].getbalance(minconf=0), 0) self.log.info("Test replacement and reorg of non-mempool tx") tx_orig = self.nodes[0].gettransaction(txid)['hex'] # Increase fee by 1 coin tx_replace = tx_orig.replace( struct.pack(" uri-path).", node.getwalletinfo) w1, w2, w3, w4, *_ = wallets self.generatetoaddress( node, nblocks=101, address=w1.getnewaddress(), sync_fun=self.no_op) assert_equal(w1.getbalance(), 100000000) assert_equal(w2.getbalance(), 0) assert_equal(w3.getbalance(), 0) assert_equal(w4.getbalance(), 0) w1.sendtoaddress(w2.getnewaddress(), 1000000) w1.sendtoaddress(w3.getnewaddress(), 2000000) w1.sendtoaddress(w4.getnewaddress(), 3000000) self.generatetoaddress( node, nblocks=1, address=w1.getnewaddress(), sync_fun=self.no_op) assert_equal(w2.getbalance(), 1000000) assert_equal(w3.getbalance(), 2000000) assert_equal(w4.getbalance(), 3000000) batch = w1.batch([w1.getblockchaininfo.get_request(), w1.getwalletinfo.get_request()]) assert_equal(batch[0]["result"]["chain"], self.chain) assert_equal(batch[1]["result"]["walletname"], "w1") self.log.info('Check for per-wallet settxfee call') assert_equal(w1.getwalletinfo()['paytxfee'], 0) assert_equal(w2.getwalletinfo()['paytxfee'], 0) w2.settxfee(1000) assert_equal(w1.getwalletinfo()['paytxfee'], 0) assert_equal(w2.getwalletinfo()['paytxfee'], Decimal('1000.00')) self.log.info("Test dynamic wallet loading") self.restart_node(0, ['-nowallet']) assert_equal(node.listwallets(), []) assert_raises_rpc_error( -18, "No wallet is loaded. Load a wallet using loadwallet or create a new" " one with createwallet. 
(Note: A default wallet is no longer " "automatically created)", node.getwalletinfo ) self.log.info("Load first wallet") loadwallet_name = node.loadwallet(wallet_names[0]) assert_equal(loadwallet_name['name'], wallet_names[0]) assert_equal(node.listwallets(), wallet_names[0:1]) node.getwalletinfo() w1 = node.get_wallet_rpc(wallet_names[0]) w1.getwalletinfo() self.log.info("Load second wallet") loadwallet_name = node.loadwallet(wallet_names[1]) assert_equal(loadwallet_name['name'], wallet_names[1]) assert_equal(node.listwallets(), wallet_names[0:2]) assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo) w2 = node.get_wallet_rpc(wallet_names[1]) w2.getwalletinfo() self.log.info("Concurrent wallet loading") threads = [] for _ in range(3): n = node.cli if self.options.usecli else get_rpc_proxy( node.url, 1, timeout=600, coveragedir=node.coverage_dir) t = Thread( target=test_load_unload, args=( n, wallet_names[2], 20 * self.rpc_timeout)) t.start() threads.append(t) for t in threads: t.join() global got_loading_error assert_equal(got_loading_error, True) self.log.info("Load remaining wallets") for wallet_name in wallet_names[2:]: loadwallet_name = self.nodes[0].loadwallet(wallet_name) assert_equal(loadwallet_name['name'], wallet_name) assert_equal(set(self.nodes[0].listwallets()), set(wallet_names)) # Fail to load if wallet doesn't exist path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "wallets") assert_raises_rpc_error( -18, "Wallet file verification failed. Failed to load database path " "'{}'. Path does not exist.".format(path), self.nodes[0].loadwallet, 'wallets') # Fail to load duplicate wallets path = os.path.join( self.options.tmpdir, "node0", "regtest", "wallets", "w1", self.wallet_data_filename) assert_raises_rpc_error( -4, "Wallet file verification failed. Refusing to load database. Data file '{}' is already loaded.".format( path), self.nodes[0].loadwallet, wallet_names[0]) # Fail to load duplicate wallets in different ways (directory and # filepath) path = os.path.join( self.options.tmpdir, "node0", "regtest", "wallets", self.wallet_data_filename) assert_raises_rpc_error( -4, "Wallet file verification failed. Refusing to load database. Data file '{}' is already loaded.".format( path), self.nodes[0].loadwallet, self.wallet_data_filename) # Fail to load if one wallet is a copy of another assert_raises_rpc_error(-4, "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy') # Fail to load if one wallet is a copy of another. # Test this twice to make sure that we don't re-introduce # https://github.com/bitcoin/bitcoin/issues/14304 assert_raises_rpc_error(-4, "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy') # Fail to load if wallet file is a symlink if os.name != 'nt': assert_raises_rpc_error( -4, "Wallet file verification failed. Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink') # Fail to load if a directory is specified that doesn't contain a # wallet os.mkdir(wallet_dir('empty_wallet_dir')) path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "empty_wallet_dir") assert_raises_rpc_error( -18, "Wallet file verification failed. Failed to load database " "path '{}'. Data is not in recognized format.".format(path), self.nodes[0].loadwallet, 'empty_wallet_dir') self.log.info("Test dynamic wallet creation.") # Fail to create a wallet if it already exists.
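# A compact recap of the load-failure cases exercised above (a sketch; the
# probe below just restates one of the assertions): loadwallet refuses
# nonexistent paths, data files already loaded under another name, BDB files
# sharing a fileid with a loaded wallet (such as a byte-for-byte copy), and
# symlinked wallet paths:
#   assert_raises_rpc_error(
#       -4,
#       "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid",
#       node.loadwallet, 'w8_copy')
# The creation-time counterpart follows below.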
path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "w2") assert_raises_rpc_error( -4, f"Failed to create database path '{path}'. Database already exists.", self.nodes[0].createwallet, 'w2') # Successfully create a wallet with a new name loadwallet_name = self.nodes[0].createwallet('w9') assert_equal(loadwallet_name['name'], 'w9') w9 = node.get_wallet_rpc('w9') assert_equal(w9.getwalletinfo()['walletname'], 'w9') assert 'w9' in self.nodes[0].listwallets() # Successfully create a wallet using a full path new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir') new_wallet_name = os.path.join(new_wallet_dir, 'w10') loadwallet_name = self.nodes[0].createwallet(new_wallet_name) assert_equal(loadwallet_name['name'], new_wallet_name) w10 = node.get_wallet_rpc(new_wallet_name) assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name) assert new_wallet_name in self.nodes[0].listwallets() self.log.info("Test dynamic wallet unloading") # Test `unloadwallet` errors assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].unloadwallet) assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", self.nodes[0].unloadwallet, "dummy") assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", node.get_wallet_rpc("dummy").unloadwallet) assert_raises_rpc_error(-8, "Cannot unload the requested wallet", w1.unloadwallet, "w2") # Successfully unload the specified wallet name self.nodes[0].unloadwallet("w1") assert 'w1' not in self.nodes[0].listwallets() # Successfully unload the wallet referenced by the request endpoint # Also ensure unload works during walletpassphrase timeout w2.encryptwallet('test') w2.walletpassphrase('test', 1) w2.unloadwallet() time.sleep(1.1) assert 'w2' not in self.nodes[0].listwallets() # Successfully unload all wallets for wallet_name in self.nodes[0].listwallets(): self.nodes[0].unloadwallet(wallet_name) assert_equal(self.nodes[0].listwallets(), []) assert_raises_rpc_error( -18, "No wallet is loaded. Load a wallet using loadwallet or create a new" " one with createwallet.
(Note: A default wallet is no longer " "automatically created)", self.nodes[0].getwalletinfo ) # Successfully load a previously unloaded wallet self.nodes[0].loadwallet('w1') assert_equal(self.nodes[0].listwallets(), ['w1']) assert_equal(w1.getwalletinfo()['walletname'], 'w1') assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), [self.default_wallet_name, os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy', 'w9']) # Test backing up and restoring wallets self.log.info("Test wallet backup") self.restart_node(0, ['-nowallet']) for wallet_name in wallet_names: self.nodes[0].loadwallet(wallet_name) for wallet_name in wallet_names: rpc = self.nodes[0].get_wallet_rpc(wallet_name) addr = rpc.getnewaddress() backup = os.path.join(self.options.tmpdir, 'backup.dat') rpc.backupwallet(backup) self.nodes[0].unloadwallet(wallet_name) shutil.copyfile(empty_wallet, wallet_file(wallet_name)) self.nodes[0].loadwallet(wallet_name) assert_equal(rpc.getaddressinfo(addr)['ismine'], False) self.nodes[0].unloadwallet(wallet_name) shutil.copyfile(backup, wallet_file(wallet_name)) self.nodes[0].loadwallet(wallet_name) assert_equal(rpc.getaddressinfo(addr)['ismine'], True) # Test .walletlock file is closed self.start_node(1) wallet = os.path.join(self.options.tmpdir, 'my_wallet') self.nodes[0].createwallet(wallet) assert_raises_rpc_error(-4, "Error initializing wallet database environment", self.nodes[1].loadwallet, wallet) self.nodes[0].unloadwallet(wallet) self.nodes[1].loadwallet(wallet) if __name__ == '__main__': MultiWalletTest().main()
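# Aside: the backup/restore round-trip above in miniature (a sketch; `rpc`
# is a wallet RPC handle and `wallet_file(name)` is the helper mapping a
# wallet name to its on-disk path, defined earlier in this test):
#   backup = os.path.join(tmpdir, 'backup.dat')
#   rpc.backupwallet(backup)         # snapshot containing addr's key
#   # overwrite the wallet file with an empty one, reload
#   #   -> getaddressinfo(addr)['ismine'] is False
#   # copy the backup back over the wallet file, reload
#   #   -> getaddressinfo(addr)['ismine'] is True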