diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py index fcc4caacf..c16a424c3 100644 --- a/test/functional/mempool_reorg.py +++ b/test/functional/mempool_reorg.py @@ -1,155 +1,154 @@ # Copyright (c) 2014-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mempool re-org scenarios. Test re-org scenarios with a mempool that contains transactions that spend (directly or indirectly) coinbase transactions. """ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error from test_framework.wallet import MiniWallet class MempoolCoinbaseTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.extra_args = [ # immediate tx relay [ "-whitelist=noban@127.0.0.1", ], [], ] def run_test(self): wallet = MiniWallet(self.nodes[0]) # Start with a 200 block chain assert_equal(self.nodes[0].getblockcount(), 200) self.log.info("Add 4 coinbase utxos to the miniwallet") # Block 76 contains the first spendable coinbase txs. first_block = 76 - wallet.rescan_utxos() # Three scenarios for re-orging coinbase spends in the memory pool: # 1. Direct coinbase spend : spend_1 # 2. Indirect (coinbase spend in chain, child in mempool) : spend_2 and spend_2_1 # 3. Indirect (coinbase and child both in chain) : spend_3 and spend_3_1 # Use invalidateblock to make all of the above coinbase spends invalid (immature coinbase), # and make sure the mempool code behaves correctly. 
b = [self.nodes[0].getblockhash(n) for n in range(first_block, first_block + 4)] coinbase_txids = [self.nodes[0].getblock(h)["tx"][0] for h in b] utxo_1 = wallet.get_utxo(txid=coinbase_txids[1]) utxo_2 = wallet.get_utxo(txid=coinbase_txids[2]) utxo_3 = wallet.get_utxo(txid=coinbase_txids[3]) self.log.info( "Create three transactions spending from coinbase utxos: spend_1, spend_2," " spend_3" ) spend_1 = wallet.create_self_transfer(utxo_to_spend=utxo_1) spend_2 = wallet.create_self_transfer(utxo_to_spend=utxo_2) spend_3 = wallet.create_self_transfer(utxo_to_spend=utxo_3) self.log.info( "Create another transaction which is time-locked to two blocks in the" " future" ) utxo = wallet.get_utxo(txid=coinbase_txids[0]) timelock_tx = wallet.create_self_transfer( utxo_to_spend=utxo, locktime=self.nodes[0].getblockcount() + 2, )["hex"] self.log.info("Check that the time-locked transaction is too immature to spend") assert_raises_rpc_error( -26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx ) self.log.info("Broadcast and mine spend_2 and spend_3") wallet.sendrawtransaction(from_node=self.nodes[0], tx_hex=spend_2["hex"]) wallet.sendrawtransaction(from_node=self.nodes[0], tx_hex=spend_3["hex"]) self.log.info("Generate a block") self.generate(self.nodes[0], 1) self.log.info( "Check that time-locked transaction is still too immature to spend" ) assert_raises_rpc_error( -26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx ) self.log.info("Create spend_2_1 and spend_3_1") spend_2_1 = wallet.create_self_transfer(utxo_to_spend=spend_2["new_utxo"]) spend_3_1 = wallet.create_self_transfer(utxo_to_spend=spend_3["new_utxo"]) self.log.info("Broadcast and mine spend_3_1") spend_3_1_id = self.nodes[0].sendrawtransaction(spend_3_1["hex"]) self.log.info("Generate a block") last_block = self.generate(self.nodes[0], 1) # generate() implicitly syncs blocks, so that peer 1 gets the block # before timelock_tx. 
# Otherwise, peer 1 would put the timelock_tx in m_recent_rejects self.log.info("The time-locked transaction can now be spent") timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx) self.log.info("Add spend_1 and spend_2_1 to the mempool") spend_1_id = self.nodes[0].sendrawtransaction(spend_1["hex"]) spend_2_1_id = self.nodes[0].sendrawtransaction(spend_2_1["hex"]) assert_equal( set(self.nodes[0].getrawmempool()), {spend_1_id, spend_2_1_id, timelock_tx_id}, ) self.sync_all() # save acceptance heights of 2 of the txs to later test that they are # preserved across reorgs spend_1_height = self.nodes[0].getmempoolentry(spend_1_id)["height"] spend_2_1_height = self.nodes[0].getmempoolentry(spend_2_1_id)["height"] self.log.info("invalidate the last block") for node in self.nodes: node.invalidateblock(last_block[0]) self.log.info( "The time-locked transaction is now too immature and has been removed from" " the mempool" ) self.log.info( "spend_3_1 has been re-orged out of the chain and is back in the mempool" ) assert_equal( set(self.nodes[0].getrawmempool()), {spend_1_id, spend_2_1_id, spend_3_1_id} ) # now ensure that the acceptance height of the two txs was preserved # across reorgs (and is not the same as the current tip height) tip_height = self.nodes[0].getblockchaininfo()["blocks"] assert spend_1_height != tip_height assert spend_2_1_height != tip_height assert_equal( spend_1_height, self.nodes[0].getmempoolentry(spend_1_id)["height"] ) assert_equal( spend_2_1_height, self.nodes[0].getmempoolentry(spend_2_1_id)["height"] ) # The new resurrected tx should just have height equal to current tip # height assert_equal(tip_height, self.nodes[0].getmempoolentry(spend_3_1_id)["height"]) self.log.info( "Use invalidateblock to re-org back and make all those coinbase spends" " immature/invalid" ) b = self.nodes[0].getblockhash(first_block + 100) for node in self.nodes: node.invalidateblock(b) self.log.info("Check that the mempool is empty") 
assert_equal(set(self.nodes[0].getrawmempool()), set()) if __name__ == "__main__": MempoolCoinbaseTest().main() diff --git a/test/functional/mempool_spend_coinbase.py b/test/functional/mempool_spend_coinbase.py index 112dd1553..d2a9ab571 100644 --- a/test/functional/mempool_spend_coinbase.py +++ b/test/functional/mempool_spend_coinbase.py @@ -1,67 +1,66 @@ # Copyright (c) 2014-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test spending coinbase transactions. The coinbase transaction in block N can appear in block N+100... so is valid in the mempool when the best block height is N+99. This test makes sure coinbase spends that will be mature in the next block are accepted into the memory pool, but less mature coinbase spends are NOT. """ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error from test_framework.wallet import MiniWallet class MempoolSpendCoinbaseTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 def run_test(self): wallet = MiniWallet(self.nodes[0]) # Invalidate two blocks, so that miniwallet has access to a coin that # will mature in the next block chain_height = 198 self.nodes[0].invalidateblock(self.nodes[0].getblockhash(chain_height + 1)) assert_equal(chain_height, self.nodes[0].getblockcount()) - wallet.rescan_utxos() # Coinbase at height chain_height-100+1 ok in mempool, should # get mined. Coinbase at height chain_height-100+2 is # too immature to spend. 
def coinbase_txid(h): return self.nodes[0].getblock(self.nodes[0].getblockhash(h))["tx"][0] utxo_mature = wallet.get_utxo(txid=coinbase_txid(chain_height - 100 + 1)) utxo_immature = wallet.get_utxo(txid=coinbase_txid(chain_height - 100 + 2)) spend_mature_id = wallet.send_self_transfer( from_node=self.nodes[0], utxo_to_spend=utxo_mature )["txid"] # other coinbase should be too immature to spend immature_tx = wallet.create_self_transfer(utxo_to_spend=utxo_immature) assert_raises_rpc_error( -26, "bad-txns-premature-spend-of-coinbase", lambda: self.nodes[0].sendrawtransaction(immature_tx["hex"]), ) # mempool should have just the mature one assert_equal(self.nodes[0].getrawmempool(), [spend_mature_id]) # mine a block, mature one should get confirmed self.generate(self.nodes[0], 1) assert_equal(set(self.nodes[0].getrawmempool()), set()) # ... and now previously immature can be spent: spend_new_id = self.nodes[0].sendrawtransaction(immature_tx["hex"]) assert_equal(self.nodes[0].getrawmempool(), [spend_new_id]) if __name__ == "__main__": MempoolSpendCoinbaseTest().main() diff --git a/test/functional/p2p_blocksonly.py b/test/functional/p2p_blocksonly.py index b1c6d0cce..b7dcbe5f5 100644 --- a/test/functional/p2p_blocksonly.py +++ b/test/functional/p2p_blocksonly.py @@ -1,152 +1,149 @@ # Copyright (c) 2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test p2p blocksonly mode & block-relay-only connections.""" import time from test_framework.messages import MSG_TX, CInv, msg_inv, msg_tx from test_framework.p2p import P2PInterface, P2PTxInvStore from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal from test_framework.wallet import MiniWallet class P2PBlocksOnly(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [["-blocksonly"]] def run_test(self): self.miniwallet = MiniWallet(self.nodes[0]) - # Add enough mature utxos to the wallet, so that all txs spend - # confirmed coins - self.miniwallet.rescan_utxos() self.blocksonly_mode_tests() self.blocks_relay_conn_tests() def blocksonly_mode_tests(self): self.log.info("Tests with node running in -blocksonly mode") assert_equal(self.nodes[0].getnetworkinfo()["localrelay"], False) self.nodes[0].add_p2p_connection(P2PInterface()) tx, txid, tx_hex = self.check_p2p_tx_violation() self.log.info("Check that tx invs also violate the protocol") self.nodes[0].add_p2p_connection(P2PInterface()) with self.nodes[0].assert_debug_log( [ "transaction" " (0000000000000000000000000000000000000000000000000000000000001234)" " inv sent in violation of protocol, disconnecting peer" ] ): self.nodes[0].p2ps[0].send_message(msg_inv([CInv(t=MSG_TX, h=0x1234)])) self.nodes[0].p2ps[0].wait_for_disconnect() del self.nodes[0].p2ps[0] self.log.info( "Check that txs from rpc are not rejected and relayed to other peers" ) tx_relay_peer = self.nodes[0].add_p2p_connection(P2PInterface()) assert_equal(self.nodes[0].getpeerinfo()[0]["relaytxes"], True) assert_equal(self.nodes[0].testmempoolaccept([tx_hex])[0]["allowed"], True) with self.nodes[0].assert_debug_log([f"received getdata for: tx {txid} peer"]): self.nodes[0].sendrawtransaction(tx_hex) tx_relay_peer.wait_for_tx(txid) assert_equal(self.nodes[0].getmempoolinfo()["size"], 1) self.log.info("Restarting node 0 with relay permission and blocksonly") 
self.restart_node( 0, [ "-persistmempool=0", "-whitelist=relay@127.0.0.1", "-blocksonly", ], ) assert_equal(self.nodes[0].getrawmempool(), []) first_peer = self.nodes[0].add_p2p_connection(P2PInterface()) second_peer = self.nodes[0].add_p2p_connection(P2PInterface()) peer_1_info = self.nodes[0].getpeerinfo()[0] assert_equal(peer_1_info["permissions"], ["relay"]) peer_2_info = self.nodes[0].getpeerinfo()[1] assert_equal(peer_2_info["permissions"], ["relay"]) assert_equal(self.nodes[0].testmempoolaccept([tx_hex])[0]["allowed"], True) self.log.info( "Check that the tx from first_peer with relay-permission is " "relayed to others (ie.second_peer)" ) with self.nodes[0].assert_debug_log(["received getdata"]): # Note that normally, first_peer would never send us transactions # since we're a blocksonly node. By activating blocksonly, we # explicitly tell our peers that they should not send us # transactions, and Bitcoin ABC respects that choice and will not # send transactions. # But if, for some reason, first_peer decides to relay transactions # to us anyway, we should relay them to second_peer since we gave # relay permission to first_peer. # See https://github.com/bitcoin/bitcoin/issues/19943 for details. 
first_peer.send_message(msg_tx(tx)) self.log.info( "Check that the peer with relay-permission is still connected" " after sending the transaction" ) assert_equal(first_peer.is_connected, True) second_peer.wait_for_tx(txid) assert_equal(self.nodes[0].getmempoolinfo()["size"], 1) self.log.info("Relay-permission peer's transaction is accepted and relayed") self.nodes[0].disconnect_p2ps() self.generate(self.nodes[0], 1) def blocks_relay_conn_tests(self): self.log.info( "Tests with node in normal mode with block-relay-only connections" ) # disables blocks only mode self.restart_node(0, ["-noblocksonly"]) assert_equal(self.nodes[0].getnetworkinfo()["localrelay"], True) # Ensure we disconnect if a block-relay-only connection sends us a # transaction self.nodes[0].add_outbound_p2p_connection( P2PInterface(), p2p_idx=0, connection_type="block-relay-only" ) assert_equal(self.nodes[0].getpeerinfo()[0]["relaytxes"], False) _, txid, tx_hex = self.check_p2p_tx_violation() self.log.info("Check that txs from RPC are not sent to blockrelay connection") conn = self.nodes[0].add_outbound_p2p_connection( P2PTxInvStore(), p2p_idx=1, connection_type="block-relay-only" ) self.nodes[0].sendrawtransaction(tx_hex) # Bump time forward to ensure m_next_inv_send_time timer pops self.nodes[0].setmocktime(int(time.time()) + 60) conn.sync_with_ping() assert int(txid, 16) not in conn.get_invs() def check_p2p_tx_violation(self): self.log.info("Check that txs from P2P are rejected and result in disconnect") spendtx = self.miniwallet.create_self_transfer() with self.nodes[0].assert_debug_log( ["transaction sent in violation of protocol peer=0"] ): self.nodes[0].p2ps[0].send_message(msg_tx(spendtx["tx"])) self.nodes[0].p2ps[0].wait_for_disconnect() assert_equal(self.nodes[0].getmempoolinfo()["size"], 0) # Remove the disconnected peer del self.nodes[0].p2ps[0] return spendtx["tx"], spendtx["txid"], spendtx["hex"] if __name__ == "__main__": P2PBlocksOnly().main() diff --git 
a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py index 13e3c6a41..836ece802 100644 --- a/test/functional/p2p_filter.py +++ b/test/functional/p2p_filter.py @@ -1,315 +1,314 @@ # Copyright (c) 2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ Test BIP 37 """ from test_framework.messages import ( COIN, MAX_BLOOM_FILTER_SIZE, MAX_BLOOM_HASH_FUNCS, MSG_BLOCK, MSG_FILTERED_BLOCK, CInv, msg_filteradd, msg_filterclear, msg_filterload, msg_getdata, msg_mempool, msg_version, ) from test_framework.p2p import ( P2P_SERVICES, P2P_SUBVERSION, P2P_VERSION, P2PInterface, p2p_lock, ) from test_framework.script import MAX_SCRIPT_ELEMENT_SIZE from test_framework.test_framework import BitcoinTestFramework from test_framework.wallet import MiniWallet, getnewdestination class P2PBloomFilter(P2PInterface): # This is a P2SH watch-only wallet watch_script_pubkey = bytes.fromhex( "a914ffffffffffffffffffffffffffffffffffffffff87" ) # The initial filter (n=10, fp=0.000001) with just the above scriptPubKey # added watch_filter_init = msg_filterload( data=( b"@\x00\x08\x00\x80\x00\x00 \x00\xc0\x00 \x04\x00\x08$\x00\x04\x80\x00\x00" b" \x00\x00\x00\x00\x80\x00\x00@\x00\x02@ \x00" ), nHashFuncs=19, nTweak=0, nFlags=1, ) def __init__(self): super().__init__() self._tx_received = False self._merkleblock_received = False def on_inv(self, message): want = msg_getdata() for i in message.inv: # inv messages can only contain TX or BLOCK, so translate BLOCK to # FILTERED_BLOCK if i.type == MSG_BLOCK: want.inv.append(CInv(MSG_FILTERED_BLOCK, i.hash)) else: want.inv.append(i) if len(want.inv): self.send_message(want) def on_merkleblock(self, message): self._merkleblock_received = True def on_tx(self, message): self._tx_received = True @property def tx_received(self): with p2p_lock: return self._tx_received @tx_received.setter def tx_received(self, value): with p2p_lock: 
self._tx_received = value @property def merkleblock_received(self): with p2p_lock: return self._merkleblock_received @merkleblock_received.setter def merkleblock_received(self, value): with p2p_lock: self._merkleblock_received = value class FilterTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [ [ "-peerbloomfilters", "-whitelist=noban@127.0.0.1", # immediate tx relay ] ] def generatetoscriptpubkey(self, scriptpubkey): """Helper to generate a single block to the given scriptPubKey.""" return self.generatetodescriptor( self.nodes[0], 1, f"raw({scriptpubkey.hex()})" )[0] def test_size_limits(self, filter_peer): self.log.info("Check that too large filter is rejected") with self.nodes[0].assert_debug_log(["Misbehaving"]): filter_peer.send_and_ping( msg_filterload(data=b"\xbb" * (MAX_BLOOM_FILTER_SIZE + 1)) ) self.log.info("Check that max size filter is accepted") with self.nodes[0].assert_debug_log([""], unexpected_msgs=["Misbehaving"]): filter_peer.send_and_ping( msg_filterload(data=b"\xbb" * (MAX_BLOOM_FILTER_SIZE)) ) filter_peer.send_and_ping(msg_filterclear()) self.log.info("Check that filter with too many hash functions is rejected") with self.nodes[0].assert_debug_log(["Misbehaving"]): filter_peer.send_and_ping( msg_filterload(data=b"\xaa", nHashFuncs=MAX_BLOOM_HASH_FUNCS + 1) ) self.log.info("Check that filter with max hash functions is accepted") with self.nodes[0].assert_debug_log([""], unexpected_msgs=["Misbehaving"]): filter_peer.send_and_ping( msg_filterload(data=b"\xaa", nHashFuncs=MAX_BLOOM_HASH_FUNCS) ) # Don't send filterclear until next two filteradd checks are done self.log.info( "Check that max size data element to add to the filter is accepted" ) with self.nodes[0].assert_debug_log([""], unexpected_msgs=["Misbehaving"]): filter_peer.send_and_ping( msg_filteradd(data=b"\xcc" * (MAX_SCRIPT_ELEMENT_SIZE)) ) self.log.info( "Check that too large data element to add to the filter is rejected" ) with 
self.nodes[0].assert_debug_log(["Misbehaving"]): filter_peer.send_and_ping( msg_filteradd(data=b"\xcc" * (MAX_SCRIPT_ELEMENT_SIZE + 1)) ) filter_peer.send_and_ping(msg_filterclear()) def test_msg_mempool(self): self.log.info( "Check that a node with bloom filters enabled services p2p mempool messages" ) filter_peer = P2PBloomFilter() self.log.debug("Create a tx relevant to the peer before connecting") txid, _ = self.wallet.send_to( from_node=self.nodes[0], scriptPubKey=filter_peer.watch_script_pubkey, amount=9 * COIN, ) self.log.debug( "Send a mempool msg after connecting and check that the tx is received" ) self.nodes[0].add_p2p_connection(filter_peer) filter_peer.send_and_ping(filter_peer.watch_filter_init) filter_peer.send_message(msg_mempool()) filter_peer.wait_for_tx(txid) def test_frelay_false(self, filter_peer): self.log.info( "Check that a node with fRelay set to false does not receive invs until the" " filter is set" ) filter_peer.tx_received = False self.wallet.send_to( from_node=self.nodes[0], scriptPubKey=filter_peer.watch_script_pubkey, amount=9 * COIN, ) # Sync to make sure the reason filter_peer doesn't receive the tx is # not p2p delays filter_peer.sync_with_ping() assert not filter_peer.tx_received # Clear the mempool so that this transaction does not impact subsequent # tests self.generate(self.nodes[0], 1) def test_filter(self, filter_peer): # Set the bloomfilter using filterload filter_peer.send_and_ping(filter_peer.watch_filter_init) # If fRelay is not already True, sending filterload sets it to True assert self.nodes[0].getpeerinfo()[0]["relaytxes"] self.log.info( "Check that we receive merkleblock and tx if the filter matches a tx in a" " block" ) block_hash = self.generatetoscriptpubkey(filter_peer.watch_script_pubkey) txid = self.nodes[0].getblock(block_hash)["tx"][0] filter_peer.wait_for_merkleblock(block_hash) filter_peer.wait_for_tx(txid) self.log.info( "Check that we only receive a merkleblock if the filter does not match a tx" " in a 
block" ) filter_peer.tx_received = False block_hash = self.generatetoscriptpubkey(getnewdestination()[1]) filter_peer.wait_for_merkleblock(block_hash) assert not filter_peer.tx_received self.log.info( "Check that we not receive a tx if the filter does not match a mempool tx" ) filter_peer.merkleblock_received = False filter_peer.tx_received = False self.wallet.send_to( from_node=self.nodes[0], scriptPubKey=getnewdestination()[1], amount=7 * COIN, ) filter_peer.sync_with_ping() assert not filter_peer.merkleblock_received assert not filter_peer.tx_received self.log.info("Check that we receive a tx if the filter matches a mempool tx") filter_peer.merkleblock_received = False txid, _ = self.wallet.send_to( from_node=self.nodes[0], scriptPubKey=filter_peer.watch_script_pubkey, amount=9 * COIN, ) filter_peer.wait_for_tx(txid) assert not filter_peer.merkleblock_received self.log.info("Check that after deleting filter all txs get relayed again") filter_peer.send_and_ping(msg_filterclear()) for _ in range(5): txid, _ = self.wallet.send_to( from_node=self.nodes[0], scriptPubKey=getnewdestination()[1], amount=7 * COIN, ) filter_peer.wait_for_tx(txid) self.log.info( "Check that request for filtered blocks is ignored if no filter is set" ) filter_peer.merkleblock_received = False filter_peer.tx_received = False with self.nodes[0].assert_debug_log(expected_msgs=["received getdata"]): block_hash = self.generatetoscriptpubkey(getnewdestination()[1]) filter_peer.wait_for_inv([CInv(MSG_BLOCK, int(block_hash, 16))]) filter_peer.sync_with_ping() assert not filter_peer.merkleblock_received assert not filter_peer.tx_received self.log.info( 'Check that sending "filteradd" if no filter is set is treated as ' "misbehavior" ) with self.nodes[0].assert_debug_log(["Misbehaving"]): filter_peer.send_and_ping(msg_filteradd(data=b"letsmisbehave")) self.log.info( "Check that division-by-zero remote crash bug [CVE-2013-5700] is fixed" ) filter_peer.send_and_ping(msg_filterload(data=b"", 
nHashFuncs=1)) filter_peer.send_and_ping(msg_filteradd(data=b"letstrytocrashthisnode")) self.nodes[0].disconnect_p2ps() def run_test(self): self.wallet = MiniWallet(self.nodes[0]) - self.wallet.rescan_utxos() filter_peer = self.nodes[0].add_p2p_connection(P2PBloomFilter()) self.log.info("Test filter size limits") self.test_size_limits(filter_peer) self.log.info("Test BIP 37 for a node with fRelay = True (default)") self.test_filter(filter_peer) self.nodes[0].disconnect_p2ps() self.log.info("Test BIP 37 for a node with fRelay = False") # Add peer but do not send version yet filter_peer_without_nrelay = self.nodes[0].add_p2p_connection( P2PBloomFilter(), send_version=False, wait_for_verack=False ) # Send version with relay=False version_without_fRelay = msg_version() version_without_fRelay.nVersion = P2P_VERSION version_without_fRelay.strSubVer = P2P_SUBVERSION version_without_fRelay.nServices = P2P_SERVICES version_without_fRelay.relay = 0 filter_peer_without_nrelay.send_message(version_without_fRelay) filter_peer_without_nrelay.wait_for_verack() assert not self.nodes[0].getpeerinfo()[0]["relaytxes"] self.test_frelay_false(filter_peer_without_nrelay) self.test_filter(filter_peer_without_nrelay) self.test_msg_mempool() if __name__ == "__main__": FilterTest().main() diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py index a8c75d3fd..1f296d33f 100644 --- a/test/functional/rpc_blockchain.py +++ b/test/functional/rpc_blockchain.py @@ -1,559 +1,558 @@ # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test RPCs related to blockchainstate. Test the following RPCs: - getblockchaininfo - getchaintxstats - gettxoutsetinfo - getblockheader - getdifficulty - getnetworkhashps - waitforblockheight - getblock - getblockhash - getbestblockhash - verifychain Tests correspond to code in rpc/blockchain.cpp. 
""" import http.client import os import subprocess from decimal import Decimal from test_framework.address import ADDRESS_ECREG_P2SH_OP_TRUE from test_framework.blocktools import ( MAX_FUTURE_BLOCK_TIME, TIME_GENESIS_BLOCK, create_block, create_coinbase, ) from test_framework.messages import CBlockHeader, FromHex, msg_block from test_framework.p2p import P2PInterface from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than, assert_greater_than_or_equal, assert_is_hash_string, assert_is_hex_string, assert_raises, assert_raises_rpc_error, get_datadir_path, ) from test_framework.wallet import MiniWallet # blocks mined HEIGHT = 200 # ten-minute steps TIME_RANGE_STEP = 600 TIME_RANGE_MTP = TIME_GENESIS_BLOCK + (HEIGHT - 6) * TIME_RANGE_STEP TIME_RANGE_TIP = TIME_GENESIS_BLOCK + (HEIGHT - 1) * TIME_RANGE_STEP TIME_RANGE_END = TIME_GENESIS_BLOCK + HEIGHT * TIME_RANGE_STEP class BlockchainTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 self.supports_cli = False def run_test(self): self.mine_chain() self._test_max_future_block_time() # Set extra args with pruning after rescan is complete self.restart_node(0, extra_args=["-stopatheight=207", "-prune=1"]) self._test_getblockchaininfo() self._test_getchaintxstats() self._test_gettxoutsetinfo() self._test_getblockheader() self._test_getdifficulty() self._test_getnetworkhashps() self._test_stopatheight() self._test_waitforblockheight() if self.is_wallet_compiled(): self._test_getblock() self._test_getblock_txfee() assert self.nodes[0].verifychain(4, 0) def mine_chain(self): self.log.info( f"Generate {HEIGHT} blocks after the genesis block in ten-minute steps" ) for t in range(TIME_GENESIS_BLOCK, TIME_RANGE_END, TIME_RANGE_STEP): self.nodes[0].setmocktime(t) self.generatetoaddress(self.nodes[0], 1, ADDRESS_ECREG_P2SH_OP_TRUE) assert_equal(self.nodes[0].getblockchaininfo()["blocks"], HEIGHT) def 
_test_max_future_block_time(self): self.stop_node(0) self.log.info( "A block tip of more than MAX_FUTURE_BLOCK_TIME in the future raises an" " error" ) self.nodes[0].assert_start_raises_init_error( extra_args=[f"-mocktime={TIME_RANGE_TIP - MAX_FUTURE_BLOCK_TIME - 1}"], expected_msg=( ": The block database contains a block which appears to be from the" " future. This may be due to your computer's date and time being set" " incorrectly. Only rebuild the block database if you are sure that" f" your computer's date and time are correct.{os.linesep}Please restart" " with -reindex or -reindex-chainstate to recover." ), ) self.log.info("A block tip of MAX_FUTURE_BLOCK_TIME in the future is fine") self.start_node( 0, extra_args=[f"-mocktime={TIME_RANGE_TIP - MAX_FUTURE_BLOCK_TIME}"] ) def _test_getblockchaininfo(self): self.log.info("Test getblockchaininfo") keys = [ "bestblockhash", "blocks", "chain", "chainwork", "difficulty", "headers", "initialblockdownload", "mediantime", "pruned", "size_on_disk", "time", "verificationprogress", "warnings", ] res = self.nodes[0].getblockchaininfo() assert_equal(res["time"], TIME_RANGE_END - TIME_RANGE_STEP) assert_equal(res["mediantime"], TIME_RANGE_MTP) # result should have these additional pruning keys if manual pruning is # enabled assert_equal( sorted(res.keys()), sorted(["pruneheight", "automatic_pruning"] + keys) ) # size_on_disk should be > 0 assert_greater_than(res["size_on_disk"], 0) # pruneheight should be greater or equal to 0 assert_greater_than_or_equal(res["pruneheight"], 0) # check other pruning fields given that prune=1 assert res["pruned"] assert not res["automatic_pruning"] self.restart_node(0, ["-stopatheight=207"]) res = self.nodes[0].getblockchaininfo() # should have exact keys assert_equal(sorted(res.keys()), keys) self.restart_node(0, ["-stopatheight=207", "-prune=550"]) res = self.nodes[0].getblockchaininfo() # result should have these additional pruning keys if prune=550 assert_equal( sorted(res.keys()), 
sorted(["pruneheight", "automatic_pruning", "prune_target_size"] + keys), ) # check related fields assert res["pruned"] assert_equal(res["pruneheight"], 0) assert res["automatic_pruning"] assert_equal(res["prune_target_size"], 576716800) assert_greater_than(res["size_on_disk"], 0) def _test_getchaintxstats(self): self.log.info("Test getchaintxstats") # Test `getchaintxstats` invalid extra parameters assert_raises_rpc_error( -1, "getchaintxstats", self.nodes[0].getchaintxstats, 0, "", 0 ) # Test `getchaintxstats` invalid `nblocks` assert_raises_rpc_error( -1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, "", ) assert_raises_rpc_error( -8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1, ) assert_raises_rpc_error( -8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount(), ) # Test `getchaintxstats` invalid `blockhash` assert_raises_rpc_error( -1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0, ) assert_raises_rpc_error( -8, "blockhash must be of length 64 (not 1, for '0')", self.nodes[0].getchaintxstats, blockhash="0", ) assert_raises_rpc_error( -8, "blockhash must be hexadecimal string (not" " 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getchaintxstats, blockhash=( "ZZZ0000000000000000000000000000000000000000000000000000000000000" ), ) assert_raises_rpc_error( -5, "Block not found", self.nodes[0].getchaintxstats, blockhash=( "0000000000000000000000000000000000000000000000000000000000000000" ), ) blockhash = self.nodes[0].getblockhash(HEIGHT) self.nodes[0].invalidateblock(blockhash) assert_raises_rpc_error( -8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash, ) self.nodes[0].reconsiderblock(blockhash) chaintxstats = self.nodes[0].getchaintxstats(nblocks=1) # 200 txs plus genesis tx 
assert_equal(chaintxstats["txcount"], HEIGHT + 1) # tx rate should be 1 per 10 minutes, or 1/600 # we have to round because of binary math assert_equal(round(chaintxstats["txrate"] * TIME_RANGE_STEP, 10), Decimal(1)) b1_hash = self.nodes[0].getblockhash(1) b1 = self.nodes[0].getblock(b1_hash) b200_hash = self.nodes[0].getblockhash(HEIGHT) b200 = self.nodes[0].getblock(b200_hash) time_diff = b200["mediantime"] - b1["mediantime"] chaintxstats = self.nodes[0].getchaintxstats() assert_equal(chaintxstats["time"], b200["time"]) assert_equal(chaintxstats["txcount"], HEIGHT + 1) assert_equal(chaintxstats["window_final_block_hash"], b200_hash) assert_equal(chaintxstats["window_final_block_height"], HEIGHT) assert_equal(chaintxstats["window_block_count"], HEIGHT - 1) assert_equal(chaintxstats["window_tx_count"], HEIGHT - 1) assert_equal(chaintxstats["window_interval"], time_diff) assert_equal(round(chaintxstats["txrate"] * time_diff, 10), Decimal(HEIGHT - 1)) chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash) assert_equal(chaintxstats["time"], b1["time"]) assert_equal(chaintxstats["txcount"], 2) assert_equal(chaintxstats["window_final_block_hash"], b1_hash) assert_equal(chaintxstats["window_final_block_height"], 1) assert_equal(chaintxstats["window_block_count"], 0) assert "window_tx_count" not in chaintxstats assert "window_interval" not in chaintxstats assert "txrate" not in chaintxstats def _test_gettxoutsetinfo(self): node = self.nodes[0] res = node.gettxoutsetinfo() assert_equal(res["total_amount"], Decimal("8725000000.00")) assert_equal(res["transactions"], HEIGHT) assert_equal(res["height"], HEIGHT) assert_equal(res["txouts"], HEIGHT) assert_equal(res["bogosize"], 14600), assert_equal(res["bestblock"], node.getblockhash(HEIGHT)) size = res["disk_size"] assert size > 6400 assert size < 64000 assert_equal(len(res["bestblock"]), 64) assert_equal(len(res["hash_serialized"]), 64) self.log.info( "Test gettxoutsetinfo works for blockchain with just the genesis 
block" ) b1hash = node.getblockhash(1) node.invalidateblock(b1hash) res2 = node.gettxoutsetinfo() assert_equal(res2["transactions"], 0) assert_equal(res2["total_amount"], Decimal("0")) assert_equal(res2["height"], 0) assert_equal(res2["txouts"], 0) assert_equal(res2["bogosize"], 0), assert_equal(res2["bestblock"], node.getblockhash(0)) assert_equal(len(res2["hash_serialized"]), 64) self.log.info( "Test gettxoutsetinfo returns the same result after invalidate/reconsider" " block" ) node.reconsiderblock(b1hash) res3 = node.gettxoutsetinfo() # The field 'disk_size' is non-deterministic and can thus not be # compared between res and res3. Everything else should be the same. del res["disk_size"], res3["disk_size"] assert_equal(res, res3) self.log.info("Test gettxoutsetinfo hash_type option") # Adding hash_type 'hash_serialized', which is the default, should not # change the result. res4 = node.gettxoutsetinfo(hash_type="hash_serialized") del res4["disk_size"] assert_equal(res, res4) # hash_type none should not return a UTXO set hash. res5 = node.gettxoutsetinfo(hash_type="none") assert "hash_serialized" not in res5 # hash_type muhash should return a different UTXO set hash. res6 = node.gettxoutsetinfo(hash_type="muhash") assert "muhash" in res6 assert res["hash_serialized"] != res6["muhash"] # muhash should not be returned unless requested. 
for r in [res, res2, res3, res4, res5]: assert "muhash" not in r # Unknown hash_type raises an error assert_raises_rpc_error( -8, "foohash is not a valid hash_type", node.gettxoutsetinfo, "foohash" ) def _test_getblockheader(self): self.log.info("Test getblockheader") node = self.nodes[0] assert_raises_rpc_error( -8, "hash must be of length 64 (not 8, for 'nonsense')", node.getblockheader, "nonsense", ) assert_raises_rpc_error( -8, "hash must be hexadecimal string (not" " 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", node.getblockheader, "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844", ) assert_raises_rpc_error( -5, "Block not found", node.getblockheader, "0cf7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844", ) besthash = node.getbestblockhash() secondbesthash = node.getblockhash(HEIGHT - 1) header = node.getblockheader(blockhash=besthash) assert_equal(header["hash"], besthash) assert_equal(header["height"], HEIGHT) assert_equal(header["confirmations"], 1) assert_equal(header["previousblockhash"], secondbesthash) assert_is_hex_string(header["chainwork"]) assert_equal(header["nTx"], 1) assert_is_hash_string(header["hash"]) assert_is_hash_string(header["previousblockhash"]) assert_is_hash_string(header["merkleroot"]) assert_is_hash_string(header["bits"], length=None) assert isinstance(header["time"], int) assert_equal(header["mediantime"], TIME_RANGE_MTP) assert isinstance(header["nonce"], int) assert isinstance(header["version"], int) assert isinstance(int(header["versionHex"], 16), int) assert isinstance(header["difficulty"], Decimal) # Test with verbose=False, which should return the header as hex. 
header_hex = node.getblockheader(blockhash=besthash, verbose=False) assert_is_hex_string(header_hex) header = FromHex(CBlockHeader(), header_hex) header.calc_sha256() assert_equal(header.hash, besthash) assert "previousblockhash" not in node.getblockheader(node.getblockhash(0)) assert "nextblockhash" not in node.getblockheader(node.getbestblockhash()) def _test_getdifficulty(self): self.log.info("Test getdifficulty") difficulty = self.nodes[0].getdifficulty() # 1 hash in 2 should be valid, so difficulty should be 1/2**31 # binary => decimal => binary math is why we do this check assert abs(difficulty * 2**31 - 1) < 0.0001 def _test_getnetworkhashps(self): self.log.info("Test getnetworkhashps") hashes_per_second = self.nodes[0].getnetworkhashps() # This should be 2 hashes every 10 minutes or 1/300 assert abs(hashes_per_second * 300 - 1) < 0.0001 def _test_stopatheight(self): self.log.info("Test stopping at height") assert_equal(self.nodes[0].getblockcount(), HEIGHT) self.generatetoaddress(self.nodes[0], 6, ADDRESS_ECREG_P2SH_OP_TRUE) assert_equal(self.nodes[0].getblockcount(), HEIGHT + 6) self.log.debug("Node should not stop at this height") assert_raises( subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3) ) try: self.generatetoaddress( self.nodes[0], 1, ADDRESS_ECREG_P2SH_OP_TRUE, sync_fun=self.no_op ) except (ConnectionError, http.client.BadStatusLine): pass # The node already shut down before response self.log.debug("Node should stop at this height...") self.nodes[0].wait_until_stopped() self.start_node(0) assert_equal(self.nodes[0].getblockcount(), HEIGHT + 7) def _test_waitforblockheight(self): self.log.info("Test waitforblockheight") node = self.nodes[0] peer = node.add_p2p_connection(P2PInterface()) current_height = node.getblock(node.getbestblockhash())["height"] # Create a fork somewhere below our current height, invalidate the tip # of that fork, and then ensure that waitforblockheight still # works as expected. 
# # (Previously this was broken based on setting # `rpc/blockchain.cpp:latestblock` incorrectly.) # # choose something vaguely near our tip fork_height = current_height - 100 fork_hash = node.getblockhash(fork_height) fork_block = node.getblock(fork_hash) def solve_and_send_block(prevhash, height, time): b = create_block(prevhash, create_coinbase(height), time) b.solve() peer.send_and_ping(msg_block(b)) return b b1 = solve_and_send_block( int(fork_hash, 16), fork_height + 1, fork_block["time"] + 1 ) b2 = solve_and_send_block(b1.sha256, fork_height + 1, b1.nTime + 1) node.invalidateblock(b2.hash) def assert_waitforheight(height, timeout=2): assert_equal( node.waitforblockheight(height=height, timeout=timeout)["height"], current_height, ) assert_waitforheight(0) assert_waitforheight(current_height - 1) assert_waitforheight(current_height) assert_waitforheight(current_height + 1) def _test_getblock(self): # Checks for getblock verbose outputs node = self.nodes[0] blockcount = node.getblockcount() blockhash = node.getblockhash(blockcount - 1) nextblockhash = node.getblockhash(blockcount) blockinfo = node.getblock(blockhash, 2) blockheaderinfo = node.getblockheader(blockhash, True) assert_equal(blockinfo["hash"], blockhash) assert_equal(blockinfo["confirmations"], 2) assert_equal(blockinfo["height"], blockheaderinfo["height"]) assert_equal(blockinfo["versionHex"], blockheaderinfo["versionHex"]) assert_equal(blockinfo["version"], blockheaderinfo["version"]) assert_equal(blockinfo["size"], 181) assert_equal(blockinfo["merkleroot"], blockheaderinfo["merkleroot"]) # Verify transaction data by check the hex values for tx in blockinfo["tx"]: rawtransaction = node.getrawtransaction( txid=tx["txid"], verbose=True, blockhash=blockhash ) assert_equal(tx["hex"], rawtransaction["hex"]) assert_equal(blockinfo["time"], blockheaderinfo["time"]) assert_equal(blockinfo["mediantime"], blockheaderinfo["mediantime"]) assert_equal(blockinfo["nonce"], blockheaderinfo["nonce"]) 
assert_equal(blockinfo["bits"], blockheaderinfo["bits"]) assert_equal(blockinfo["difficulty"], blockheaderinfo["difficulty"]) assert_equal(blockinfo["chainwork"], blockheaderinfo["chainwork"]) assert_equal( blockinfo["previousblockhash"], blockheaderinfo["previousblockhash"] ) assert_equal(blockinfo["nextblockhash"], nextblockhash) assert_equal(blockinfo["nextblockhash"], blockheaderinfo["nextblockhash"]) assert "previousblockhash" not in node.getblock(node.getblockhash(0)) assert "nextblockhash" not in node.getblock(node.getbestblockhash()) def _test_getblock_txfee(self): node = self.nodes[0] miniwallet = MiniWallet(node) - miniwallet.rescan_utxos() fee_per_byte = Decimal("0.1") fee_per_kb = 1000 * fee_per_byte txid = miniwallet.send_self_transfer(fee_rate=fee_per_kb, from_node=node)[ "txid" ] blockhash = self.generate(node, 1)[0] self.log.info("Test getblock with verbosity 1 only includes the txid") block = node.getblock(blockhash, 1) assert_equal(block["tx"][1], txid) self.log.info("Test getblock with verbosity 2 includes expected fee") block = node.getblock(blockhash, 2) tx = block["tx"][1] assert_equal(tx["fee"], tx["size"] * fee_per_byte) self.log.info( "Test getblock with verbosity 2 still works with pruned Undo data" ) datadir = get_datadir_path(self.options.tmpdir, 0) def move_block_file(old, new): old_path = os.path.join(datadir, self.chain, "blocks", old) new_path = os.path.join(datadir, self.chain, "blocks", new) os.rename(old_path, new_path) # Move instead of deleting so we can restore chain state afterwards move_block_file("rev00000.dat", "rev_wrong") block = node.getblock(blockhash, 2) assert "fee" not in block["tx"][1] # Restore chain state move_block_file("rev_wrong", "rev00000.dat") if __name__ == "__main__": BlockchainTest().main() diff --git a/test/functional/rpc_scantxoutset.py b/test/functional/rpc_scantxoutset.py index a53d1df6a..79f78633f 100644 --- a/test/functional/rpc_scantxoutset.py +++ b/test/functional/rpc_scantxoutset.py @@ -1,465 
+1,464 @@ # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the scantxoutset rpc call.""" from decimal import Decimal from test_framework.messages import XEC from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error from test_framework.wallet import MiniWallet, address_to_scriptpubkey, getnewdestination def descriptors(out): return sorted(u["desc"] for u in out["unspents"]) class ScantxoutsetTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 def sendtodestination(self, destination, amount): # interpret strings as addresses, assume scriptPubKey otherwise if isinstance(destination, str): destination = address_to_scriptpubkey(destination) self.wallet.send_to( from_node=self.nodes[0], scriptPubKey=destination, amount=int(XEC * amount) ) def run_test(self): self.wallet = MiniWallet(self.nodes[0]) - self.wallet.rescan_utxos() self.log.info("Create UTXOs...") pubk, spk, addr = getnewdestination() self.sendtodestination(spk, 2000) # send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK # (m/0'/0'/0') self.sendtodestination("mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc", 8000) # (m/0'/0'/1') self.sendtodestination("mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR", 16000) # (m/0'/0'/1500') self.sendtodestination("n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg", 32000) # (m/0'/0'/0) self.sendtodestination("mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6", 64000) # (m/0'/0'/1) self.sendtodestination("mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S", 128000) # (m/0'/0'/1500) self.sendtodestination("mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC", 256000) # (m/1/1/0') self.sendtodestination("mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7", 512000) # (m/1/1/1') self.sendtodestination("mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA", 1024000) # (m/1/1/1500') 
self.sendtodestination("mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ", 2048000) # (m/1/1/0) self.sendtodestination("mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", 4096000) # (m/1/1/1) self.sendtodestination("mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy", 8192000) # (m/1/1/1500) self.sendtodestination("mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq", 16384000) self.generate(self.nodes[0], 1) scan = self.nodes[0].scantxoutset("start", []) info = self.nodes[0].gettxoutsetinfo() assert_equal(scan["success"], True) assert_equal(scan["height"], info["height"]) assert_equal(scan["txouts"], info["txouts"]) assert_equal(scan["bestblock"], info["bestblock"]) self.log.info("Test if we have found the non HD unspent outputs.") assert_equal( self.nodes[0].scantxoutset("start", [f"pkh({pubk.hex()})"])["total_amount"], Decimal("2000"), ) assert_equal( self.nodes[0].scantxoutset("start", [f"combo({pubk.hex()})"])[ "total_amount" ], Decimal("2000"), ) assert_equal( self.nodes[0].scantxoutset("start", [f"addr({addr})"])["total_amount"], Decimal("2000"), ) assert_equal( self.nodes[0].scantxoutset("start", [f"addr({addr})"])["total_amount"], Decimal("2000"), ) self.log.info("Test range validation.") assert_raises_rpc_error( -8, "End of range is too high", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": -1}], ) assert_raises_rpc_error( -8, "Range should be greater or equal than 0", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [-1, 10]}], ) assert_raises_rpc_error( -8, "End of range is too high", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]}], ) assert_raises_rpc_error( -8, "Range specified as [begin,end] must not have begin after end", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [2, 1]}], ) assert_raises_rpc_error( -8, "Range is too large", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [0, 1000001]}], ) self.log.info("Test extended key derivation.") # Run various scans, and verify that the sum of 
the amounts of the matches corresponds to the expected subset. # Note that all amounts in the UTXO set are powers of 2 multiplied by # 0.001 BTC, so each amounts uniquely identifies a subset. assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)" ], )["total_amount"], Decimal("8000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)" ], )["total_amount"], Decimal("16000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')" ], )["total_amount"], Decimal("32000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)" ], )["total_amount"], Decimal("64000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)" ], )["total_amount"], Decimal("128000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)" ], )["total_amount"], Decimal("256000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)", "range": 1499, } ], )["total_amount"], Decimal("24000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)", 
"range": 1500, } ], )["total_amount"], Decimal("56000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499, } ], )["total_amount"], Decimal("192000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)", "range": 1500, } ], )["total_amount"], Decimal("448000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')" ], )["total_amount"], Decimal("0512000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')" ], )["total_amount"], Decimal("1024000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)" ], )["total_amount"], Decimal("2048000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)" ], )["total_amount"], Decimal("4096000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)" ], )["total_amount"], Decimal("8192000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)" ], )["total_amount"], Decimal("16384000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ 
"combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)" ], )["total_amount"], Decimal("4096000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo([abcdef88/1/2'/3/4h]tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)" ], )["total_amount"], Decimal("8192000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)" ], )["total_amount"], Decimal("16384000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1499, } ], )["total_amount"], Decimal("1536000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1500, } ], )["total_amount"], Decimal("3584000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1499, } ], )["total_amount"], Decimal("12288000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1500, } ], )["total_amount"], Decimal("28672000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1499, } ], )["total_amount"], Decimal("12288000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": 
"combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500, } ], )["total_amount"], Decimal("28672000"), ) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": [1500, 1500], } ], )["total_amount"], Decimal("16384000"), ) # Test the reported descriptors for a few matches assert_equal( descriptors( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499, } ], ) ), [ "pkh([0c5f9a1e/0'/0'/0]026dbd8b2315f296d36e6b6920b1579ca75569464875c7ebe869b536a7d9503c8c)#dzxw429x", "pkh([0c5f9a1e/0'/0'/1]033e6f25d76c00bedb3a8993c7d5739ee806397f0529b1b31dda31ef890f19a60c)#43rvceed", ], ) assert_equal( descriptors( self.nodes[0].scantxoutset( "start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)" ], ) ), [ "pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8" ], ) assert_equal( descriptors( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500, } ], ) ), [ "pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8", "pkh([0c5f9a1e/1/1/1500]03832901c250025da2aebae2bfb38d5c703a57ab66ad477f9c578bfbcd78abca6f)#vchwd07g", "pkh([0c5f9a1e/1/1/1]030d820fc9e8211c4169be8530efbc632775d8286167afd178caaf1089b77daba7)#z2t3ypsa", ], ) # Check that status and abort don't need second arg assert_equal(self.nodes[0].scantxoutset("status"), None) assert_equal(self.nodes[0].scantxoutset("abort"), False) # Check that second arg is needed for start 
assert_raises_rpc_error( -1, "scanobjects argument is required for the start action", self.nodes[0].scantxoutset, "start", ) if __name__ == "__main__": ScantxoutsetTest().main() diff --git a/test/functional/setup_scripts/chronik-client_blocktxs_and_tx_and_rawtx.py b/test/functional/setup_scripts/chronik-client_blocktxs_and_tx_and_rawtx.py index ae827880b..fd6aeb532 100644 --- a/test/functional/setup_scripts/chronik-client_blocktxs_and_tx_and_rawtx.py +++ b/test/functional/setup_scripts/chronik-client_blocktxs_and_tx_and_rawtx.py @@ -1,58 +1,57 @@ # Copyright (c) 2024 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ Setup script to exercise the chronik-client js library blockTxs(), tx(), and rawTx() functions """ import pathmagic # noqa from ipc import send_ipc_message from setup_framework import SetupFramework from test_framework.util import assert_equal from test_framework.wallet import MiniWallet class ChronikClient_Block_Setup(SetupFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [["-chronik"]] self.ipc_timeout = 10 def skip_test_if_missing_module(self): self.skip_if_no_chronik() def run_test(self): # Init node = self.nodes[0] wallet = MiniWallet(node) - wallet.rescan_utxos() yield True self.log.info("Step 1: Initialized regtest chain") assert_equal(node.getblockcount(), 200) yield True self.log.info("Step 2: Broadcast ten txs") txs_and_rawtxs = {} for x in range(10): # Make the fee rate vary to have txs with varying amounts tx = wallet.send_self_transfer(from_node=node, fee_rate=(x + 1) * 1000) txs_and_rawtxs[tx["txid"]] = tx["hex"] send_ipc_message({"txs_and_rawtxs": txs_and_rawtxs}) assert_equal(node.getblockcount(), 200) yield True self.log.info("Step 3: Mine a block with these txs") self.generate(node, 1) assert_equal(node.getblockcount(), 201) yield True self.log.info("Step 4: Park the last block") 
node.parkblock(node.getbestblockhash()) assert_equal(node.getblockcount(), 200) yield True if __name__ == "__main__": ChronikClient_Block_Setup().main() diff --git a/test/functional/test_framework/wallet.py b/test/functional/test_framework/wallet.py index 55f51911b..d80e9f87e 100644 --- a/test/functional/test_framework/wallet.py +++ b/test/functional/test_framework/wallet.py @@ -1,321 +1,328 @@ # Copyright (c) 2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """A limited-functionality wallet, which may replace a real wallet in tests""" from copy import deepcopy from decimal import Decimal from typing import Any, Optional from test_framework.address import ( ADDRESS_ECREG_P2SH_OP_TRUE, SCRIPTSIG_OP_TRUE, base58_to_byte, key_to_p2pkh, ) from test_framework.key import ECKey from test_framework.messages import ( XEC, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex, ) from test_framework.script import ( OP_CHECKSIG, OP_DUP, OP_EQUAL, OP_EQUALVERIFY, OP_HASH160, CScript, hash160, ) from test_framework.txtools import pad_tx from test_framework.util import ( assert_equal, assert_greater_than_or_equal, satoshi_round, ) DEFAULT_FEE = Decimal("100.00") class MiniWallet: def __init__(self, test_node): self._test_node = test_node self._utxos = [] self._address = ADDRESS_ECREG_P2SH_OP_TRUE self._scriptPubKey = bytes.fromhex( self._test_node.validateaddress(self._address)["scriptPubKey"] ) + # When the pre-mined test framework chain is used, it contains coinbase + # outputs to the MiniWallet's default address in blocks 76-100 + # (see method BitcoinTestFramework._initialize_chain()) + # The MiniWallet needs to rescan_utxos() in order to account + # for those mature UTXOs, so that all txs spend confirmed coins + self.rescan_utxos() + def _create_utxo(self, *, txid, vout, value, height): return {"txid": txid, "vout": vout, "value": value, "height": height} 
    def rescan_utxos(self):
        """Drop all utxos and rescan the utxo set"""
        self._utxos = []
        # scantxoutset finds every confirmed UTXO paying to this wallet's
        # scriptPubKey; mempool-only outputs are not part of the UTXO set
        # and are therefore dropped here.
        res = self._test_node.scantxoutset(
            action="start", scanobjects=[f"raw({self._scriptPubKey.hex()})"]
        )
        assert_equal(True, res["success"])
        for utxo in res["unspents"]:
            self._utxos.append(
                self._create_utxo(
                    txid=utxo["txid"],
                    vout=utxo["vout"],
                    value=utxo["amount"],
                    height=utxo["height"],
                )
            )

    def scan_tx(self, tx):
        """Scan the tx and adjust the internal list of owned utxos.

        `tx` is a decoded transaction dict (decoderawtransaction format):
        inputs spending our utxos are removed from the internal list, and
        outputs paying to our scriptPubKey are added to it.
        """
        for spent in tx["vin"]:
            # Mark spent. This may happen when the caller has ownership of a
            # utxo that remained in this wallet. For example, by passing
            # mark_as_spent=False to get_utxo or by using an utxo returned by a
            # create_self_transfer* call.
            try:
                self.get_utxo(txid=spent["txid"], vout=spent["vout"])
            except StopIteration:
                # Input does not spend one of our utxos; nothing to remove.
                pass
        for out in tx["vout"]:
            if out["scriptPubKey"]["hex"] == self._scriptPubKey.hex():
                # height=0: the tx is unconfirmed at this point; rescan_utxos
                # (e.g. via generate) refreshes the height once it is mined.
                self._utxos.append(
                    self._create_utxo(
                        txid=tx["txid"], vout=out["n"], value=out["value"], height=0
                    )
                )

    def generate(self, num_blocks, **kwargs):
        """Generate blocks with coinbase outputs to the internal address,
        and call rescan_utxos"""
        blocks = self._test_node.generatetodescriptor(
            num_blocks, f"raw({self._scriptPubKey.hex()})", **kwargs
        )
        # Calling rescan_utxos here makes sure that after a generate the utxo
        # set is in a clean state. For example, the wallet will update
        # - if the caller consumed utxos, but never used them
        # - if the caller sent a transaction that is not mined
        # - after block re-orgs
        # - the utxo height for mined mempool txs
        # - However, the wallet will not consider remaining mempool txs
        self.rescan_utxos()
        return blocks

    def get_scriptPubKey(self):
        """Return the wallet's scriptPubKey as bytes."""
        return self._scriptPubKey

    def get_utxo(self, *, txid: str = "", vout: Optional[int] = None):
        """
        Returns a utxo and marks it as spent (pops it from the internal list)

        Args:
        txid: get the first utxo we find from a specific transaction

        Raises StopIteration if no matching utxo is found.
        """
        # Put the largest utxo last
        self._utxos = sorted(self._utxos, key=lambda k: (k["value"], -k["height"]))
        if txid:
            utxo_filter: Any = filter(lambda utxo: txid == utxo["txid"], self._utxos)
        else:
            # By default the largest utxo
            utxo_filter = reversed(self._utxos)
        if vout is not None:
            utxo_filter = filter(lambda utxo: vout == utxo["vout"], utxo_filter)
        # next() raises StopIteration when the filter is exhausted; scan_tx
        # relies on that to detect inputs that don't spend our utxos.
        index = self._utxos.index(next(utxo_filter))
        return self._utxos.pop(index)

    def send_self_transfer(self, *, from_node, **kwargs):
        """Create and send a tx with the specified fee_rate. Fee may be exact or
        at most one satoshi higher than needed.

        Keyword args are forwarded to create_self_transfer. Returns the dict
        produced by create_self_transfer ("txid", "hex", "tx", "new_utxo").
        """
        tx = self.create_self_transfer(**kwargs)
        self.sendrawtransaction(from_node=from_node, tx_hex=tx["hex"])
        return tx

    def send_to(self, *, from_node, scriptPubKey, amount, fee=1000):
        """
        Create and send a tx with an output to a given scriptPubKey/amount,
        plus a change output to our internal address. To keep things simple, a
        fixed fee given in Satoshi is used.

        Note that this method fails if there is no single internal utxo
        available that can cover the cost for the amount and the fixed fee
        (the utxo with the largest value is taken).

        Returns a tuple (txid, n) referring to the created external utxo
        outpoint.
        """
        # fee_rate=0: the fixed `fee` below is deducted from the change
        # output instead.
        tx = self.create_self_transfer(fee_rate=0)["tx"]
        assert_greater_than_or_equal(tx.vout[0].nValue, amount + fee)
        # change output -> MiniWallet
        tx.vout[0].nValue -= amount + fee
        # arbitrary output -> to be returned
        tx.vout.append(CTxOut(amount, scriptPubKey))
        txid = self.sendrawtransaction(from_node=from_node, tx_hex=tx.serialize().hex())
        return txid, len(tx.vout) - 1

    def create_self_transfer(
        self, *, fee_rate=Decimal("3000.00"), utxo_to_spend=None, locktime=0
    ):
        """Create and return a tx with the specified fee_rate. Fee may be exact
        or at most one satoshi higher than needed.

        If utxo_to_spend is None, the largest internal utxo is used.
        Returns a dict with keys "txid", "hex", "tx" (CTransaction) and
        "new_utxo" (the change output, not yet added to the internal list).
        """
        utxo_to_spend = utxo_to_spend or self.get_utxo()
        # The size will be enforced by pad_tx()
        size = 100
        send_value = satoshi_round(
            utxo_to_spend["value"] - fee_rate * (Decimal(size) / 1000)
        )
        assert send_value > 0
        tx = CTransaction()
        tx.vin = [
            CTxIn(COutPoint(int(utxo_to_spend["txid"], 16), utxo_to_spend["vout"]))
        ]
        tx.vout = [CTxOut(int(send_value * XEC), self._scriptPubKey)]
        tx.nLockTime = locktime
        # Anyone-can-spend input: the wallet's P2SH output is unlocked by
        # OP_TRUE, so no signing is required.
        tx.vin[0].scriptSig = SCRIPTSIG_OP_TRUE
        pad_tx(tx, size)
        tx_hex = tx.serialize().hex()
        assert_equal(len(tx.serialize()), size)
        new_utxo = self._create_utxo(
            txid=tx.rehash(), vout=0, value=send_value, height=0
        )
        return {"txid": new_utxo["txid"], "hex": tx_hex, "tx": tx, "new_utxo": new_utxo}

    def sendrawtransaction(self, *, from_node, tx_hex):
        """Broadcast tx_hex via from_node and update the internal utxo list.

        Returns the txid reported by the node. Raises (via the RPC) if the
        node rejects the transaction, in which case no utxos are updated.
        """
        txid = from_node.sendrawtransaction(tx_hex)
        self.scan_tx(from_node.decoderawtransaction(tx_hex))
        return txid


def getnewdestination():
    """Generate a random destination and return the corresponding public key,
    scriptPubKey and address. Can be used when a random destination is needed,
    but no compiled wallet is available (e.g. as replacement to the
    getnewaddress/getaddressinfo RPCs)."""
    key = ECKey()
    key.generate()
    pubkey = key.get_pubkey().get_bytes()
    # Standard P2PKH output script for the generated key.
    scriptpubkey = CScript(
        [OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG]
    )
    return pubkey, scriptpubkey, key_to_p2pkh(pubkey)


def address_to_scriptpubkey(address):
    """Converts a given address to the corresponding output script
    (scriptPubKey)."""
    payload, version = base58_to_byte(address)
    if version == 111:
        # testnet pubkey hash
        return CScript([OP_DUP, OP_HASH160, payload, OP_EQUALVERIFY, OP_CHECKSIG])
    elif version == 196:
        # testnet script hash
        return CScript([OP_HASH160, payload, OP_EQUAL])
    # TODO: also support other address formats
    else:
        assert False


def make_chain(
    node,
    address,
    privkeys,
    parent_txid,
    parent_value,
    n=0,
    parent_locking_script=None,
    fee=DEFAULT_FEE,
):
    """Build a transaction that spends parent_txid.vout[n] and produces one
    output with amount = parent_value with a fee deducted.

    Return tuple (CTransaction object, raw hex, nValue, scriptPubKey of the
    output created).
    """
    inputs = [{"txid": parent_txid, "vout": n}]
    my_value = parent_value - fee
    outputs = {address: my_value}
    rawtx = node.createrawtransaction(inputs, outputs)
    # prevtxs is only needed by signrawtransactionwithkey when the parent tx
    # is not known to the node (e.g. still unbroadcast).
    prevtxs = (
        [
            {
                "txid": parent_txid,
                "vout": n,
                "scriptPubKey": parent_locking_script,
                "amount": parent_value,
            }
        ]
        if parent_locking_script
        else None
    )
    signedtx = node.signrawtransactionwithkey(
        hexstring=rawtx, privkeys=privkeys, prevtxs=prevtxs
    )
    assert signedtx["complete"]
    tx = FromHex(CTransaction(), signedtx["hex"])
    return (tx, signedtx["hex"], my_value, tx.vout[0].scriptPubKey.hex())


def create_child_with_parents(
    node, address, privkeys, parents_tx, values, locking_scripts, fee=DEFAULT_FEE
):
    """Creates a transaction that spends the first output of each parent in
    parents_tx.

    `values` and `locking_scripts` are parallel lists giving each parent's
    vout[0] amount and scriptPubKey. Returns the signed raw tx hex.
    """
    num_parents = len(parents_tx)
    total_value = sum(values)
    inputs = [{"txid": tx.get_id(), "vout": 0} for tx in parents_tx]
    outputs = {address: total_value - fee}
    rawtx_child = node.createrawtransaction(inputs, outputs)
    prevtxs = []
    for i in range(num_parents):
        prevtxs.append(
            {
                "txid": parents_tx[i].get_id(),
                "vout": 0,
                "scriptPubKey": locking_scripts[i],
                "amount": values[i],
            }
        )
    signedtx_child = node.signrawtransactionwithkey(
        hexstring=rawtx_child, privkeys=privkeys, prevtxs=prevtxs
    )
    assert signedtx_child["complete"]
    return signedtx_child["hex"]


def create_raw_chain(node, first_coin, address, privkeys, chain_length=50):
    """Helper function: create a "chain" of chain_length transactions.

    The nth transaction in the chain is a child of the n-1th transaction and
    parent of the n+1th transaction.

    Returns a tuple (chain_hex, chain_txns) of parallel lists: the signed raw
    hex strings and the corresponding CTransaction objects, in chain order.
    """
    parent_locking_script = None
    txid = first_coin["txid"]
    chain_hex = []
    chain_txns = []
    value = first_coin["amount"]

    for _ in range(chain_length):
        # Each iteration spends vout[0] of the previous tx; make_chain
        # deducts DEFAULT_FEE, so `value` shrinks link by link.
        (tx, txhex, value, parent_locking_script) = make_chain(
            node, address, privkeys, txid, value, 0, parent_locking_script
        )
        txid = tx.get_id()
        chain_hex.append(txhex)
        chain_txns.append(tx)

    return (chain_hex, chain_txns)


def bulk_transaction(
    tx: CTransaction, node, target_size: int, privkeys=None, prevtxs=None
) -> CTransaction:
    """Return a padded and signed transaction. The original transaction is left
    unaltered.

    If privkeys is not specified, it is assumed that the transaction has an
    anyone-can-spend output as unique output.
    """
    tx_heavy = deepcopy(tx)
    pad_tx(tx_heavy, target_size)
    assert_greater_than_or_equal(tx_heavy.billable_size(), target_size)
    if privkeys is not None:
        # Padding invalidates existing signatures, so re-sign.
        signed_tx = node.signrawtransactionwithkey(ToHex(tx_heavy), privkeys, prevtxs)
        return FromHex(CTransaction(), signed_tx["hex"])
    # OP_TRUE
    tx_heavy.vin[0].scriptSig = SCRIPTSIG_OP_TRUE
    return tx_heavy