diff --git a/test/functional/interface_bitcoin_cli.py b/test/functional/interface_bitcoin_cli.py index 4795ee877..140e24ef6 100755 --- a/test/functional/interface_bitcoin_cli.py +++ b/test/functional/interface_bitcoin_cli.py @@ -1,333 +1,334 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test bitcoin-cli""" from decimal import Decimal + from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_process_error, assert_raises_rpc_error, get_auth_cookie, ) # The block reward of coinbaseoutput.nValue (50) BTC/block matures after # COINBASE_MATURITY (100) blocks. Therefore, after mining 101 blocks we expect # node 0 to have a balance of (BLOCKS - COINBASE_MATURITY) * 50 BTC/block. BLOCKS = 101 BALANCE = (BLOCKS - 100) * 50000000 JSON_PARSING_ERROR = 'error: Error parsing JSON: foo' BLOCKS_VALUE_OF_ZERO = 'error: the first argument (number of blocks to generate, default: 1) must be an integer value greater than zero' TOO_MANY_ARGS = 'error: too many arguments (maximum 2 for nblocks and maxtries)' WALLET_NOT_LOADED = 'Requested wallet does not exist or is not loaded' WALLET_NOT_SPECIFIED = 'Wallet file not specified' class TestBitcoinCli(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def skip_test_if_missing_module(self): self.skip_if_no_cli() def run_test(self): """Main test logic""" self.nodes[0].generate(BLOCKS) self.log.info( "Compare responses from getblockchaininfo RPC and `bitcoin-cli getblockchaininfo`") cli_response = self.nodes[0].cli.getblockchaininfo() rpc_response = self.nodes[0].getblockchaininfo() assert_equal(cli_response, rpc_response) user, password = get_auth_cookie(self.nodes[0].datadir, self.chain) self.log.info("Test -stdinrpcpass option") assert_equal(BLOCKS, self.nodes[0].cli( '-rpcuser={}'.format(user), '-stdinrpcpass', input=password).getblockcount()) assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli( '-rpcuser={}'.format(user), '-stdinrpcpass', input="foo").echo) self.log.info("Test -stdin and -stdinrpcpass") assert_equal(["foo", "bar"], self.nodes[0].cli('-rpcuser={}'.format(user), '-stdin', '-stdinrpcpass', input=password + "\nfoo\nbar").echo()) assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli( '-rpcuser={}'.format(user), '-stdin', '-stdinrpcpass', input="foo").echo) self.log.info("Test connecting to a non-existing server") assert_raises_process_error( 1, "Could not connect to the server", self.nodes[0].cli('-rpcport=1').echo) self.log.info("Test connecting with non-existing RPC cookie file") assert_raises_process_error(1, "Could not locate RPC credentials", self.nodes[0].cli( '-rpccookiefile=does-not-exist', '-rpcpassword=').echo) self.log.info("Test -getinfo with arguments fails") assert_raises_process_error( 1, "-getinfo takes no arguments", self.nodes[0].cli('-getinfo').help) self.log.info( "Test -getinfo returns expected network and blockchain info") if self.is_wallet_compiled(): self.nodes[0].encryptwallet(password) cli_get_info = self.nodes[0].cli().send_cli('-getinfo') network_info = self.nodes[0].getnetworkinfo() blockchain_info = self.nodes[0].getblockchaininfo() assert_equal(cli_get_info['version'], network_info['version']) assert_equal(cli_get_info['blocks'], blockchain_info['blocks']) 
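As a side note on the constants at the top of this file: the balance expectation is just maturity arithmetic. A minimal standalone sketch (the SUBSIDY name is introduced here for illustration; 50000000 simply restates the per-block reward unit already used above):

COINBASE_MATURITY = 100                       # blocks before a coinbase output becomes spendable
BLOCKS = 101                                  # blocks mined at the start of run_test()
SUBSIDY = 50000000                            # block reward in this test's unit, as used above
BALANCE = (BLOCKS - COINBASE_MATURITY) * SUBSIDY
assert BALANCE == 50000000                    # only one coinbase has matured after 101 blocks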
assert_equal(cli_get_info['headers'], blockchain_info['headers']) assert_equal(cli_get_info['timeoffset'], network_info['timeoffset']) assert_equal( cli_get_info['connections'], { 'in': network_info['connections_in'], 'out': network_info['connections_out'], 'total': network_info['connections'] } ) assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy']) assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty']) assert_equal(cli_get_info['chain'], blockchain_info['chain']) if self.is_wallet_compiled(): self.log.info( "Test -getinfo and bitcoin-cli getwalletinfo return expected wallet info") assert_equal(cli_get_info['balance'], BALANCE) assert 'balances' not in cli_get_info.keys() wallet_info = self.nodes[0].getwalletinfo() assert_equal( cli_get_info['keypoolsize'], wallet_info['keypoolsize']) assert_equal( cli_get_info['unlocked_until'], wallet_info['unlocked_until']) assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee']) assert_equal(cli_get_info['relayfee'], network_info['relayfee']) assert_equal(self.nodes[0].cli.getwalletinfo(), wallet_info) # Setup to test -getinfo, -generate, and -rpcwallet= with multiple # wallets. wallets = ['', 'Encrypted', 'secret'] amounts = [ BALANCE + Decimal('9999995.50'), Decimal(9000000), Decimal(31000000)] self.nodes[0].createwallet(wallet_name=wallets[1]) self.nodes[0].createwallet(wallet_name=wallets[2]) w1 = self.nodes[0].get_wallet_rpc(wallets[0]) w2 = self.nodes[0].get_wallet_rpc(wallets[1]) w3 = self.nodes[0].get_wallet_rpc(wallets[2]) rpcwallet2 = '-rpcwallet={}'.format(wallets[1]) rpcwallet3 = '-rpcwallet={}'.format(wallets[2]) w1.walletpassphrase(password, self.rpc_timeout) w2.encryptwallet(password) w1.sendtoaddress(w2.getnewaddress(), amounts[1]) w1.sendtoaddress(w3.getnewaddress(), amounts[2]) # Mine a block to confirm; adds a block reward (50 BTC) to the # default wallet. self.nodes[0].generate(1) self.log.info( "Test -getinfo with multiple wallets and -rpcwallet returns specified wallet balance") for i in range(len(wallets)): cli_get_info = self.nodes[0].cli( '-getinfo', '-rpcwallet={}'.format(wallets[i])).send_cli() assert 'balances' not in cli_get_info.keys() assert_equal(cli_get_info['balance'], amounts[i]) self.log.info( "Test -getinfo with multiple wallets and " "-rpcwallet=non-existing-wallet returns no balances") cli_get_info_keys = self.nodes[0].cli( '-getinfo', '-rpcwallet=does-not-exist').send_cli().keys() assert 'balance' not in cli_get_info_keys assert 'balances' not in cli_get_info_keys self.log.info( "Test -getinfo with multiple wallets returns all loaded " "wallet names and balances") assert_equal(set(self.nodes[0].listwallets()), set(wallets)) cli_get_info = self.nodes[0].cli('-getinfo').send_cli() assert 'balance' not in cli_get_info.keys() assert_equal(cli_get_info['balances'], {k: v for k, v in zip(wallets, amounts)}) # Unload the default wallet and re-verify. 
self.nodes[0].unloadwallet(wallets[0]) assert wallets[0] not in self.nodes[0].listwallets() cli_get_info = self.nodes[0].cli('-getinfo').send_cli() assert 'balance' not in cli_get_info.keys() assert_equal(cli_get_info['balances'], {k: v for k, v in zip(wallets[1:], amounts[1:])}) self.log.info( "Test -getinfo after unloading all wallets except a " "non-default one returns its balance") self.nodes[0].unloadwallet(wallets[2]) assert_equal(self.nodes[0].listwallets(), [wallets[1]]) cli_get_info = self.nodes[0].cli('-getinfo').send_cli() assert 'balances' not in cli_get_info.keys() assert_equal(cli_get_info['balance'], amounts[1]) self.log.info( "Test -getinfo with -rpcwallet=remaining-non-default-wallet" " returns only its balance") cli_get_info = self.nodes[0].cli('-getinfo', rpcwallet2).send_cli() assert 'balances' not in cli_get_info.keys() assert_equal(cli_get_info['balance'], amounts[1]) self.log.info( "Test -getinfo with -rpcwallet=unloaded wallet returns" " no balances") cli_get_info = self.nodes[0].cli('-getinfo', rpcwallet3).send_cli() assert 'balance' not in cli_get_info_keys assert 'balances' not in cli_get_info_keys # Test bitcoin-cli -generate. n1 = 3 n2 = 4 w2.walletpassphrase(password, self.rpc_timeout) blocks = self.nodes[0].getblockcount() self.log.info('Test -generate with no args') generate = self.nodes[0].cli('-generate').send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), 1) assert_equal(self.nodes[0].getblockcount(), blocks + 1) self.log.info('Test -generate with bad args') assert_raises_process_error( 1, JSON_PARSING_ERROR, self.nodes[0].cli( '-generate', 'foo').echo) assert_raises_process_error( 1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli( '-generate', 0).echo) assert_raises_process_error( 1, TOO_MANY_ARGS, self.nodes[0].cli( '-generate', 1, 2, 3).echo) self.log.info('Test -generate with nblocks') generate = self.nodes[0].cli('-generate', n1).send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), n1) assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1) self.log.info('Test -generate with nblocks and maxtries') generate = self.nodes[0].cli('-generate', n2, 1000000).send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), n2) assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1 + n2) self.log.info('Test -generate -rpcwallet in single-wallet mode') generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), 1) assert_equal(self.nodes[0].getblockcount(), blocks + 2 + n1 + n2) self.log.info( 'Test -generate -rpcwallet=unloaded wallet raises RPC error') assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate').echo) assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 'foo').echo) assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 0).echo) assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 1, 2, 3).echo) # Test bitcoin-cli -generate with -rpcwallet in multiwallet mode. 
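A minimal sketch of the shape checks applied to each -generate invocation above; the values are hypothetical, and only the key set and list length are what the assertions verify:

result = {"address": "<hypothetical new address>",
          "blocks": ["<hash 1>", "<hash 2>", "<hash 3>"]}   # stand-in for the CLI's JSON output
requested = 3
assert set(result.keys()) == {"address", "blocks"}          # exactly these two keys
assert len(result["blocks"]) == requested                   # one block hash per block mined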
self.nodes[0].loadwallet(wallets[2]) n3 = 4 n4 = 10 blocks = self.nodes[0].getblockcount() self.log.info('Test -generate -rpcwallet with no args') generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), 1) assert_equal(self.nodes[0].getblockcount(), blocks + 1) self.log.info('Test -generate -rpcwallet with bad args') assert_raises_process_error( 1, JSON_PARSING_ERROR, self.nodes[0].cli( rpcwallet2, '-generate', 'foo').echo) assert_raises_process_error( 1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli( rpcwallet2, '-generate', 0).echo) assert_raises_process_error( 1, TOO_MANY_ARGS, self.nodes[0].cli( rpcwallet2, '-generate', 1, 2, 3).echo) self.log.info('Test -generate -rpcwallet with nblocks') generate = self.nodes[0].cli( rpcwallet2, '-generate', n3).send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), n3) assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3) self.log.info( 'Test -generate -rpcwallet with nblocks and maxtries') generate = self.nodes[0].cli( rpcwallet2, '-generate', n4, 1000000).send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), n4) assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3 + n4) self.log.info( 'Test -generate without -rpcwallet in multiwallet mode raises RPC error') assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate').echo) assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 'foo').echo) assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 0).echo) assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 1, 2, 3).echo) else: self.log.info( "*** Wallet not compiled; cli getwalletinfo and -getinfo wallet tests skipped") # maintain block parity with the wallet_compiled conditional branch self.nodes[0].generate(25) self.log.info("Test -version with node stopped") self.stop_node(0) cli_response = self.nodes[0].cli().send_cli('-version') assert "{} RPC client version".format( self.config['environment']['PACKAGE_NAME']) in cli_response self.log.info( "Test -rpcwait option successfully waits for RPC connection") # Start node without RPC connection. self.nodes[0].start() # ensure cookie file is available to avoid race condition self.nodes[0].wait_for_cookie_credentials() blocks = self.nodes[0].cli('-rpcwait').send_cli('getblockcount') self.nodes[0].wait_for_rpc_connection() assert_equal(blocks, BLOCKS + 25) if __name__ == '__main__': TestBitcoinCli().main() diff --git a/test/functional/interface_rest.py b/test/functional/interface_rest.py index 81796cd0d..989ea869a 100755 --- a/test/functional/interface_rest.py +++ b/test/functional/interface_rest.py @@ -1,407 +1,406 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
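A worked check of the "block parity" comment in the bitcoin-cli test above: the per-step counts below are read off the generate calls in the wallet branch, and their sum explains why the non-wallet branch mines 25 blocks:

n1, n2, n3, n4 = 3, 4, 4, 10
wallet_branch_blocks = sum([
    1,    # confirmation block after funding the extra wallets
    1,    # -generate with no args
    n1,   # -generate with nblocks
    n2,   # -generate with nblocks and maxtries
    1,    # -generate -rpcwallet in single-wallet mode
    1,    # -generate -rpcwallet with no args (multiwallet mode)
    n3,   # -generate -rpcwallet with nblocks
    n4,   # -generate -rpcwallet with nblocks and maxtries
])
assert wallet_branch_blocks == 25    # matches generate(25) in the else branch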
"""Test the REST API.""" import binascii +import http.client +import json +import urllib.parse from decimal import Decimal from enum import Enum -import http.client from io import BytesIO -import json from struct import pack, unpack -import urllib.parse +from test_framework.messages import BLOCK_HEADER_SIZE from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than, assert_greater_than_or_equal, hex_str_to_bytes, ) -from test_framework.messages import BLOCK_HEADER_SIZE - class ReqType(Enum): JSON = 1 BIN = 2 HEX = 3 class RetType(Enum): OBJ = 1 BYTES = 2 JSON = 3 def filter_output_indices_by_value(vouts, value): for vout in vouts: if vout['value'] == value: yield vout['n'] class RESTTest (BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [["-rest"], []] self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def test_rest_request(self, uri, http_method='GET', req_type=ReqType.JSON, body='', status=200, ret_type=RetType.JSON): rest_uri = '/rest' + uri if req_type == ReqType.JSON: rest_uri += '.json' elif req_type == ReqType.BIN: rest_uri += '.bin' elif req_type == ReqType.HEX: rest_uri += '.hex' conn = http.client.HTTPConnection(self.url.hostname, self.url.port) self.log.debug('{} {} {}'.format(http_method, rest_uri, body)) if http_method == 'GET': conn.request('GET', rest_uri) elif http_method == 'POST': conn.request('POST', rest_uri, body) resp = conn.getresponse() assert_equal(resp.status, status) if ret_type == RetType.OBJ: return resp elif ret_type == RetType.BYTES: return resp.read() elif ret_type == RetType.JSON: return json.loads(resp.read().decode('utf-8'), parse_float=Decimal) def run_test(self): self.url = urllib.parse.urlparse(self.nodes[0].url) self.log.info("Mine blocks and send Bitcoin Cash to node 1") # Random address so node1's balance doesn't increase not_related_address = "2MxqoHEdNQTyYeX1mHcbrrpzgojbosTpCvJ" self.nodes[0].generate(1) self.sync_all() self.nodes[1].generatetoaddress(100, not_related_address) self.sync_all() assert_equal(self.nodes[0].getbalance(), 50000000) txid = self.nodes[0].sendtoaddress( self.nodes[1].getnewaddress(), 100000) self.sync_all() self.log.info("Test the /tx URI") json_obj = self.test_rest_request("/tx/{}".format(txid)) assert_equal(json_obj['txid'], txid) # Check hex format response hex_response = self.test_rest_request( "/tx/{}".format(txid), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than_or_equal(int(hex_response.getheader('content-length')), json_obj['size'] * 2) # Get the vin to later check for utxo (should be spent by then) spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # Get n of 0.1 outpoint n, = filter_output_indices_by_value( json_obj['vout'], Decimal('100000')) spending = (txid, n) self.log.info("Query an unspent TXO using the /getutxos URI") self.nodes[1].generatetoaddress(1, not_related_address) self.sync_all() bb_hash = self.nodes[0].getbestblockhash() assert_equal(self.nodes[1].getbalance(), Decimal("100000")) # Check chainTip response json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is one utxo assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['utxos'][0]['value'], Decimal('100000')) self.log.info("Query a spent TXO using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) # Check 
chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is no utxo in the response because this outpoint has # been spent assert_equal(len(json_obj['utxos']), 0) # Check bitmap assert_equal(json_obj['bitmap'], "0") self.log.info("Query two TXOs using the /getutxos URI") json_obj = self.test_rest_request( "/getutxos/{}-{}/{}-{}".format(*(spending + spent))) assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['bitmap'], "10") self.log.info( "Query the TXOs using the /getutxos URI with a binary response") bin_request = b'\x01\x02' for txid, n in [spending, spent]: bin_request += hex_str_to_bytes(txid) bin_request += pack("i", n) bin_response = self.test_rest_request( "/getutxos", http_method='POST', req_type=ReqType.BIN, body=bin_request, ret_type=RetType.BYTES) output = BytesIO(bin_response) chain_height, = unpack("1650 bytes) tx.vin[0].scriptSig = CScript([b'a' * 1648]) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-size'}], rawtxs=[tx.serialize().hex()], ) tx = FromHex(CTransaction(), raw_tx_reference) output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript( [OP_HASH160, hash160(b'burn'), OP_EQUAL])) # Use enough outputs to make the tx too large for our policy num_scripts = 100000 // len(output_p2sh_burn.serialize()) tx.vout = [output_p2sh_burn] * num_scripts self.check_mempool_result( result_expected=[ {'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size'}], rawtxs=[ToHex(tx)], ) tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0] = output_p2sh_burn # Make output smaller, such that it is dust for our policy tx.vout[0].nValue -= 1 self.check_mempool_result( result_expected=[ {'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}], rawtxs=[ToHex(tx)], ) tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff']) tx.vout = [tx.vout[0]] * 2 self.check_mempool_result( result_expected=[ {'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'multi-op-return'}], rawtxs=[ToHex(tx)], ) self.log.info('A timelocked transaction') tx = FromHex(CTransaction(), raw_tx_reference) # Should be non-max, so locktime is not ignored tx.vin[0].nSequence -= 1 tx.nLockTime = node.getblockcount() + 1 self.check_mempool_result( result_expected=[ {'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-nonfinal'}], rawtxs=[ToHex(tx)], ) self.log.info('A transaction that is locked by BIP68 sequence logic') tx = FromHex(CTransaction(), raw_tx_reference) # We could include it in the second block mined from now, but not the # very next one tx.vin[0].nSequence = 2 # Can skip re-signing the tx because of early rejection self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-BIP68-final'}], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) if __name__ == '__main__': MempoolAcceptanceTest().main() diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py index f7d953758..72cab1c5c 100755 --- a/test/functional/mempool_persist.py +++ b/test/functional/mempool_persist.py @@ -1,203 +1,203 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mempool persistence. By default, bitcoind will dump mempool on shutdown and then reload it on startup. 
This can be overridden with the -persistmempool=0 command line option. Test is as follows: - start node0, node1 and node2. node1 has -persistmempool=0 - create 5 transactions on node2 to its own address. Note that these are not sent to node0 or node1 addresses because we don't want them to be saved in the wallet. - check that node0 and node1 have 5 transactions in their mempools - shutdown all nodes. - startup node0. Verify that it still has 5 transactions in its mempool. Shutdown node0. This tests that by default the mempool is persistent. - startup node1. Verify that its mempool is empty. Shutdown node1. This tests that with -persistmempool=0, the mempool is not dumped to disk when the node is shut down. - Restart node0 with -persistmempool=0. Verify that its mempool is empty. Shutdown node0. This tests that with -persistmempool=0, the mempool is not loaded from disk on start up. - Restart node0 with -persistmempool. Verify that it has 5 transactions in its mempool. This tests that -persistmempool=0 does not overwrite a previously valid mempool stored on disk. - Remove node0 mempool.dat and verify savemempool RPC recreates it and verify that node1 can load it and has 5 transactions in its mempool. - Verify that savemempool throws when the RPC is called if node1 can't write to disk. """ -from decimal import Decimal import os import time +from decimal import Decimal from test_framework.p2p import P2PTxInvStore from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than_or_equal, assert_raises_rpc_error, connect_nodes, disconnect_nodes, ) class MempoolPersistTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 3 self.extra_args = [[], ["-persistmempool=0"], []] def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): self.log.debug("Send 5 transactions from node2 (to its own address)") tx_creation_time_lower = int(time.time()) for _ in range(5): last_txid = self.nodes[2].sendtoaddress( self.nodes[2].getnewaddress(), Decimal("10")) node2_balance = self.nodes[2].getbalance() self.sync_all() tx_creation_time_higher = int(time.time()) self.log.debug( "Verify that node0 and node1 have 5 transactions in their mempools") assert_equal(len(self.nodes[0].getrawmempool()), 5) assert_equal(len(self.nodes[1].getrawmempool()), 5) self.log.debug("Prioritize a transaction on node0") fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees'] assert_equal(fees['base'], fees['modified']) self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000) fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees'] assert_equal(fees['base'] + Decimal('10.0'), fees['modified']) tx_creation_time = self.nodes[0].getmempoolentry(txid=last_txid)[ 'time'] assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower) assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time) # disconnect nodes & make a txn that remains in the unbroadcast set. disconnect_nodes(self.nodes[0], self.nodes[1]) assert(len(self.nodes[0].getpeerinfo()) == 0) assert(len(self.nodes[0].p2ps) == 0) self.nodes[0].sendtoaddress( self.nodes[2].getnewaddress(), Decimal("12000000")) connect_nodes(self.nodes[0], self.nodes[2]) self.log.debug("Stop-start the nodes. Verify that node0 has the " "transactions in its mempool and node1 does not. 
" "Verify that node2 calculates its balance correctly " "after loading wallet transactions.") self.stop_nodes() # Give this one a head-start, so we can be "extra-sure" that it didn't # load anything later # Also don't store the mempool, to keep the datadir clean self.start_node(1, extra_args=["-persistmempool=0"]) self.start_node(0) self.start_node(2) # start_node is blocking on the mempool being loaded assert self.nodes[0].getmempoolinfo()["loaded"] assert self.nodes[2].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[0].getrawmempool()), 6) assert_equal(len(self.nodes[2].getrawmempool()), 5) # The others have loaded their mempool. If node_1 loaded anything, we'd # probably notice by now: assert_equal(len(self.nodes[1].getrawmempool()), 0) self.log.debug('Verify prioritization is loaded correctly') fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees'] assert_equal(fees['base'] + Decimal('10.00'), fees['modified']) self.log.debug('Verify time is loaded correctly') assert_equal( tx_creation_time, self.nodes[0].getmempoolentry( txid=last_txid)['time']) # Verify accounting of mempool transactions after restart is correct # Flush mempool to wallet self.nodes[2].syncwithvalidationinterfacequeue() assert_equal(node2_balance, self.nodes[2].getbalance()) # start node0 with wallet disabled so wallet transactions don't get # resubmitted self.log.debug( "Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.") self.stop_nodes() self.start_node(0, extra_args=["-persistmempool=0", "-disablewallet"]) assert self.nodes[0].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[0].getrawmempool()), 0) self.log.debug( "Stop-start node0. Verify that it has the transactions in its mempool.") self.stop_nodes() self.start_node(0) assert self.nodes[0].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[0].getrawmempool()), 6) mempooldat0 = os.path.join( self.nodes[0].datadir, self.chain, 'mempool.dat') mempooldat1 = os.path.join( self.nodes[1].datadir, self.chain, 'mempool.dat') self.log.debug( "Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it") os.remove(mempooldat0) self.nodes[0].savemempool() assert os.path.isfile(mempooldat0) self.log.debug( "Stop nodes, make node1 use mempool.dat from node0. Verify it has 6 transactions") os.rename(mempooldat0, mempooldat1) self.stop_nodes() self.start_node(1, extra_args=[]) assert self.nodes[1].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[1].getrawmempool()), 6) self.log.debug( "Prevent bitcoind from writing mempool.dat to disk. 
Verify that `savemempool` fails") # to test the exception we are creating a tmp folder called mempool.dat.new # which is an implementation detail that could change and break this # test mempooldotnew1 = mempooldat1 + '.new' os.mkdir(mempooldotnew1) assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool) os.rmdir(mempooldotnew1) self.test_persist_unbroadcast() def test_persist_unbroadcast(self): node0 = self.nodes[0] self.start_node(0) # clear out mempool node0.generate(1) # ensure node0 doesn't have any connections # make a transaction that will remain in the unbroadcast set assert(len(node0.getpeerinfo()) == 0) assert(len(node0.p2ps) == 0) node0.sendtoaddress(self.nodes[1].getnewaddress(), Decimal("12")) # shutdown, then startup with wallet disabled self.stop_nodes() self.start_node(0, extra_args=["-disablewallet"]) # check that txn gets broadcast due to unbroadcast logic conn = node0.add_p2p_connection(P2PTxInvStore()) # 15 min + 1 for buffer node0.mockscheduler(16 * 60) self.wait_until(lambda: len(conn.get_invs()) == 1) if __name__ == '__main__': MempoolPersistTest().main() diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py index edcf4e838..0060f6d35 100755 --- a/test/functional/mempool_reorg.py +++ b/test/functional/mempool_reorg.py @@ -1,126 +1,126 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mempool re-org scenarios. Test re-org scenarios with a mempool that contains transactions that spend (directly or indirectly) coinbase transactions. """ -from test_framework.test_framework import BitcoinTestFramework from test_framework.blocktools import create_raw_transaction +from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error class MempoolCoinbaseTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.extra_args = [ # immediate tx relay ['-whitelist=noban@127.0.0.1', ], [] ] def skip_test_if_missing_module(self): self.skip_if_no_wallet() alert_filename = None # Set by setup_network def run_test(self): # Start with a 200 block chain assert_equal(self.nodes[0].getblockcount(), 200) # Mine four blocks. After this, nodes[0] blocks # 101, 102, and 103 are spend-able. new_blocks = self.nodes[1].generate(4) self.sync_all() node0_address = self.nodes[0].getnewaddress() node1_address = self.nodes[1].getnewaddress() # Three scenarios for re-orging coinbase spends in the memory pool: # 1. Direct coinbase spend : spend_101 # 2. Indirect (coinbase spend in chain, child in mempool) : spend_102 and spend_102_1 # 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1 # Use invalidateblock to make all of the above coinbase spends invalid (immature coinbase), # and make sure the mempool code behaves correctly.
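A hedged sketch of the invalidateblock-driven re-org pattern this test relies on; reorg_last_blocks is a hypothetical helper and node stands in for a framework node object:

def reorg_last_blocks(node, n):
    """Roll back the last n blocks by invalidating the oldest of them."""
    height = node.getblockcount()
    fork_hash = node.getblockhash(height - n + 1)
    node.invalidateblock(fork_hash)    # disconnects that block and every block built on it
    return fork_hash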
b = [self.nodes[0].getblockhash(n) for n in range(101, 105)] coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b] spend_101_raw = create_raw_transaction( self.nodes[0], coinbase_txids[1], node1_address, amount=49990000) spend_102_raw = create_raw_transaction( self.nodes[0], coinbase_txids[2], node0_address, amount=49990000) spend_103_raw = create_raw_transaction( self.nodes[0], coinbase_txids[3], node0_address, amount=49990000) # Create a transaction which is time-locked to two blocks in the future timelock_tx = self.nodes[0].createrawtransaction( inputs=[{ "txid": coinbase_txids[0], "vout": 0 }], outputs={node0_address: 49990000}, locktime=self.nodes[0].getblockcount() + 2, ) timelock_tx = self.nodes[0].signrawtransactionwithwallet(timelock_tx)[ "hex"] # This will raise an exception because the timelock transaction is too # immature to spend assert_raises_rpc_error(-26, "bad-txns-nonfinal", self.nodes[0].sendrawtransaction, timelock_tx) # Broadcast and mine spend_102 and 103: spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw) spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw) self.nodes[0].generate(1) # Time-locked transaction is still too immature to spend assert_raises_rpc_error(-26, 'bad-txns-nonfinal', self.nodes[0].sendrawtransaction, timelock_tx) # Create 102_1 and 103_1: spend_102_1_raw = create_raw_transaction( self.nodes[0], spend_102_id, node1_address, amount=49980000) spend_103_1_raw = create_raw_transaction( self.nodes[0], spend_103_id, node1_address, amount=49980000) # Broadcast and mine 103_1: spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw) last_block = self.nodes[0].generate(1) # Sync blocks, so that peer 1 gets the block before timelock_tx # Otherwise, peer 1 would put the timelock_tx in recentRejects self.sync_all() # Time-locked transaction can now be spent timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx) # ... now put spend_101 and spend_102_1 in memory pools: spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw) spend_102_1_id = self.nodes[0].sendrawtransaction(spend_102_1_raw) self.sync_all() assert_equal(set(self.nodes[0].getrawmempool()), { spend_101_id, spend_102_1_id, timelock_tx_id}) for node in self.nodes: node.invalidateblock(last_block[0]) # Time-locked transaction is now too immature and has been removed from the mempool # spend_103_1 has been re-orged out of the chain and is back in the # mempool assert_equal(set(self.nodes[0].getrawmempool()), { spend_101_id, spend_102_1_id, spend_103_1_id}) # Use invalidateblock to re-org back and make all those coinbase spends # immature/invalid: for node in self.nodes: node.invalidateblock(new_blocks[0]) self.sync_all() # mempool should be empty. assert_equal(set(self.nodes[0].getrawmempool()), set()) if __name__ == '__main__': MempoolCoinbaseTest().main() diff --git a/test/functional/mempool_resurrect.py b/test/functional/mempool_resurrect.py index ac7625be1..1c91e3a44 100755 --- a/test/functional/mempool_resurrect.py +++ b/test/functional/mempool_resurrect.py @@ -1,75 +1,75 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
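A hedged helper sketch of the height-to-coinbase-txid lookup pattern used by the re-org test above and by the resurrection and coinbase-spend tests below; the helper name is hypothetical and node stands in for a framework node:

def coinbase_txids_for_heights(node, first, last):
    block_hashes = [node.getblockhash(h) for h in range(first, last + 1)]
    return [node.getblock(bh)['tx'][0] for bh in block_hashes]    # tx[0] of a block is its coinbase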
"""Test resurrection of mined transactions when the blockchain is re-organized.""" -from test_framework.test_framework import BitcoinTestFramework from test_framework.blocktools import create_raw_transaction +from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal class MempoolCoinbaseTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): node0_address = self.nodes[0].getnewaddress() # Spend block 1/2/3's coinbase transactions # Mine a block. # Create three more transactions, spending the spends # Mine another block. # ... make sure all the transactions are confirmed # Invalidate both blocks # ... make sure all the transactions are put back in the mempool # Mine a new block # ... make sure all the transactions are confirmed again. b = [self.nodes[0].getblockhash(n) for n in range(1, 4)] coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b] spends1_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49990000) for txid in coinbase_txids] spends1_id = [self.nodes[0].sendrawtransaction(tx) for tx in spends1_raw] blocks = [] blocks.extend(self.nodes[0].generate(1)) spends2_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49980000) for txid in spends1_id] spends2_id = [self.nodes[0].sendrawtransaction(tx) for tx in spends2_raw] blocks.extend(self.nodes[0].generate(1)) # mempool should be empty, all txns confirmed assert_equal(set(self.nodes[0].getrawmempool()), set()) for txid in spends1_id + spends2_id: tx = self.nodes[0].gettransaction(txid) assert tx["confirmations"] > 0 # Use invalidateblock to re-org back for node in self.nodes: node.invalidateblock(blocks[0]) # All txns should be back in mempool with 0 confirmations assert_equal( set(self.nodes[0].getrawmempool()), set(spends1_id + spends2_id)) for txid in spends1_id + spends2_id: tx = self.nodes[0].gettransaction(txid) assert tx["confirmations"] == 0 # Generate another block, they should all get mined self.nodes[0].generate(1) # mempool should be empty, all txns confirmed assert_equal(set(self.nodes[0].getrawmempool()), set()) for txid in spends1_id + spends2_id: tx = self.nodes[0].gettransaction(txid) assert tx["confirmations"] > 0 if __name__ == '__main__': MempoolCoinbaseTest().main() diff --git a/test/functional/mempool_spend_coinbase.py b/test/functional/mempool_spend_coinbase.py index fa6a1dae2..6d0bb0711 100755 --- a/test/functional/mempool_spend_coinbase.py +++ b/test/functional/mempool_spend_coinbase.py @@ -1,59 +1,59 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test spending coinbase transactions. The coinbase transaction in block N can appear in block N+100... so is valid in the mempool when the best block height is N+99. This test makes sure coinbase spends that will be mature in the next block are accepted into the memory pool, but less mature coinbase spends are NOT. 
""" -from test_framework.test_framework import BitcoinTestFramework from test_framework.blocktools import create_raw_transaction +from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error class MempoolSpendCoinbaseTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): chain_height = self.nodes[0].getblockcount() assert_equal(chain_height, 200) node0_address = self.nodes[0].getnewaddress() # Coinbase at height chain_height-100+1 ok in mempool, should # get mined. Coinbase at height chain_height-100+2 is # is too immature to spend. b = [self.nodes[0].getblockhash(n) for n in range(101, 103)] coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b] spends_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49990000) for txid in coinbase_txids] spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0]) # coinbase at height 102 should be too immature to spend assert_raises_rpc_error(-26, "bad-txns-premature-spend-of-coinbase", self.nodes[0].sendrawtransaction, spends_raw[1]) # mempool should have just spend_101: assert_equal(self.nodes[0].getrawmempool(), [spend_101_id]) # mine a block, spend_101 should get confirmed self.nodes[0].generate(1) assert_equal(set(self.nodes[0].getrawmempool()), set()) # ... and now height 102 can be spent: spend_102_id = self.nodes[0].sendrawtransaction(spends_raw[1]) assert_equal(self.nodes[0].getrawmempool(), [spend_102_id]) if __name__ == '__main__': MempoolSpendCoinbaseTest().main() diff --git a/test/functional/mempool_unbroadcast.py b/test/functional/mempool_unbroadcast.py index 711bf803f..4cbe7d125 100755 --- a/test/functional/mempool_unbroadcast.py +++ b/test/functional/mempool_unbroadcast.py @@ -1,124 +1,120 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test that the mempool ensures transaction delivery by periodically sending to peers until a GETDATA is received.""" import time from test_framework.blocktools import create_confirmed_utxos from test_framework.p2p import P2PTxInvStore from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import ( - assert_equal, - connect_nodes, - disconnect_nodes, -) +from test_framework.util import assert_equal, connect_nodes, disconnect_nodes # 15 minutes in seconds MAX_INITIAL_BROADCAST_DELAY = 15 * 60 class MempoolUnbroadcastTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): self.test_broadcast() self.test_txn_removal() def test_broadcast(self): self.log.info( "Test that mempool reattempts delivery of locally submitted transaction") node = self.nodes[0] min_relay_fee = node.getnetworkinfo()["relayfee"] create_confirmed_utxos(node, 10) disconnect_nodes(node, self.nodes[1]) self.log.info("Generate transactions that only node 0 knows about") # generate a wallet txn addr = node.getnewaddress() wallet_tx_hsh = node.sendtoaddress(addr, 100) utxos = node.listunspent() # generate a txn using sendrawtransaction us0 = utxos.pop() inputs = [{"txid": us0["txid"], "vout": us0["vout"]}] outputs = {addr: 100} tx = node.createrawtransaction(inputs, outputs) node.settxfee(min_relay_fee) txF = node.fundrawtransaction(tx) txFS = node.signrawtransactionwithwallet(txF["hex"]) rpc_tx_hsh = node.sendrawtransaction(txFS["hex"]) # check transactions are in unbroadcast using rpc mempoolinfo = self.nodes[0].getmempoolinfo() assert_equal(mempoolinfo['unbroadcastcount'], 2) mempool = self.nodes[0].getrawmempool(True) for tx in mempool: assert_equal(mempool[tx]['unbroadcast'], True) # check that second node doesn't have these two txns mempool = self.nodes[1].getrawmempool() assert rpc_tx_hsh not in mempool assert wallet_tx_hsh not in mempool # ensure that unbroadcast txs are persisted to mempool.dat self.restart_node(0) self.log.info("Reconnect nodes & check if they are sent to node 1") connect_nodes(node, self.nodes[1]) # fast forward into the future & ensure that the second node has the # txns node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY) self.sync_mempools(timeout=30) mempool = self.nodes[1].getrawmempool() assert rpc_tx_hsh in mempool assert wallet_tx_hsh in mempool # check that transactions are no longer in first node's unbroadcast set mempool = self.nodes[0].getrawmempool(True) for tx in mempool: assert_equal(mempool[tx]['unbroadcast'], False) self.log.info( "Add another connection & ensure transactions aren't broadcast again") conn = node.add_p2p_connection(P2PTxInvStore()) node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY) # allow sufficient time for possibility of broadcast time.sleep(2) assert_equal(len(conn.get_invs()), 0) disconnect_nodes(node, self.nodes[1]) node.disconnect_p2ps() def test_txn_removal(self): self.log.info( "Test that transactions removed from mempool are removed from unbroadcast set") node = self.nodes[0] # since the node doesn't have any connections, it will not receive # any GETDATAs & thus the transaction will remain in the unbroadcast # set. 
addr = node.getnewaddress() txhsh = node.sendtoaddress(addr, 100) # check transaction was removed from unbroadcast set due to presence in # a block removal_reason = "Removed {} from set of unbroadcast txns before " \ "confirmation that txn was sent out".format(txhsh) with node.assert_debug_log([removal_reason]): node.generate(1) if __name__ == "__main__": MempoolUnbroadcastTest().main() diff --git a/test/functional/mempool_updatefromblock.py b/test/functional/mempool_updatefromblock.py index fee326a45..a625bf893 100755 --- a/test/functional/mempool_updatefromblock.py +++ b/test/functional/mempool_updatefromblock.py @@ -1,154 +1,154 @@ #!/usr/bin/env python3 # Copyright (c) 2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mempool descendants/ancestors information update. Test mempool update of transaction descendants/ancestors information (count, size) when transactions have been re-added from a disconnected block to the mempool. """ import time - from decimal import Decimal + from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal class MempoolUpdateFromBlockTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [ ['-limitdescendantsize=5000', '-limitancestorsize=5000']] def skip_test_if_missing_module(self): self.skip_if_no_wallet() def transaction_graph_test(self, size, n_tx_to_mine=None, start_input_txid='', end_address='', fee=Decimal(1000)): """Create an acyclic tournament (a type of directed graph) of transactions and use it for testing. Keyword arguments: size -- the order N of the tournament which is equal to the number of the created transactions n_tx_to_mine -- the number of transaction that should be mined into a block If all of the N created transactions tx[0]..tx[N-1] reside in the mempool, the following holds: the tx[K] transaction: - has N-K descendants (including this one), and - has K+1 ancestors (including this one) More details: https://en.wikipedia.org/wiki/Tournament_(graph_theory) """ if not start_input_txid: start_input_txid = self.nodes[0].getblock( self.nodes[0].getblockhash(1))['tx'][0] if not end_address: end_address = self.nodes[0].getnewaddress() first_block_hash = '' tx_id = [] tx_size = [] self.log.info('Creating {} transactions...'.format(size)) for i in range(0, size): self.log.debug('Preparing transaction #{}...'.format(i)) # Prepare inputs. if i == 0: inputs = [{'txid': start_input_txid, 'vout': 0}] inputs_value = self.nodes[0].gettxout( start_input_txid, 0)['value'] else: inputs = [] inputs_value = 0 for j, tx in enumerate(tx_id[0:i]): # Transaction tx[K] is a child of each of previous # transactions tx[0]..tx[K-1] at their output K-1. vout = i - j - 1 inputs.append({'txid': tx_id[j], 'vout': vout}) inputs_value += self.nodes[0].gettxout(tx, vout)['value'] self.log.debug('inputs={}'.format(inputs)) self.log.debug('inputs_value={}'.format(inputs_value)) # Prepare outputs. tx_count = i + 1 if tx_count < size: # Transaction tx[K] is an ancestor of each of subsequent # transactions tx[K+1]..tx[N-1]. 
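A self-contained check of the tournament bookkeeping described in the docstring above: with N transactions where every tx[j] feeds each later transaction, tx[k] ends up with k+1 ancestors and N-k descendants (both counts include tx[k] itself):

N = 5
parents = {k: set(range(k)) for k in range(N)}             # tx[k] spends an output of every earlier tx
children = {k: set(range(k + 1, N)) for k in range(N)}     # and funds every later tx
for k in range(N):
    assert len(parents[k]) + 1 == k + 1                    # ancestor count, including tx[k]
    assert len(children[k]) + 1 == N - k                   # descendant count, including tx[k]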
n_outputs = size - tx_count output_value = ( (inputs_value - fee) / Decimal(n_outputs)).quantize( Decimal('0.01')) outputs = {} for _ in range(n_outputs): outputs[self.nodes[0].getnewaddress()] = output_value else: output_value = ( inputs_value - fee).quantize( Decimal('0.01')) outputs = {end_address: output_value} self.log.debug('output_value={}'.format(output_value)) self.log.debug('outputs={}'.format(outputs)) # Create a new transaction. unsigned_raw_tx = self.nodes[0].createrawtransaction( inputs, outputs) signed_raw_tx = self.nodes[0].signrawtransactionwithwallet( unsigned_raw_tx) tx_id.append( self.nodes[0].sendrawtransaction( signed_raw_tx['hex'])) tx_size.append(self.nodes[0].getmempoolentry(tx_id[-1])['size']) if tx_count in n_tx_to_mine: # The created transactions are mined into blocks by batches. self.log.info('The batch of {} transactions has been accepted' ' into the mempool.'.format(len(self.nodes[0].getrawmempool()))) block_hash = self.nodes[0].generate(1)[0] if not first_block_hash: first_block_hash = block_hash assert_equal(len(self.nodes[0].getrawmempool()), 0) self.log.info( 'All of the transactions from the current batch have been' ' mined into a block.') elif tx_count == size: # At the end all of the mined blocks are invalidated, and all of the created # transactions should be re-added from disconnected blocks to # the mempool. self.log.info('The last batch of {} transactions has been' ' accepted into the mempool.'.format(len(self.nodes[0].getrawmempool()))) start = time.time() self.nodes[0].invalidateblock(first_block_hash) end = time.time() assert_equal(len(self.nodes[0].getrawmempool()), size) self.log.info( 'All of the recently mined transactions have been re-added' ' into the mempool in {} seconds.'.format(end - start)) self.log.info( 'Checking descendants/ancestors properties of all of the' ' in-mempool transactions...') for k, tx in enumerate(tx_id): self.log.debug('Check transaction #{}.'.format(k)) entry = self.nodes[0].getmempoolentry(tx) assert_equal(entry['descendantcount'], size - k) assert_equal(entry['descendantsize'], sum(tx_size[k:size])) assert_equal(entry['ancestorcount'], k + 1) assert_equal(entry['ancestorsize'], sum(tx_size[0:(k + 1)])) def run_test(self): # Use batch size limited by DEFAULT_ANCESTOR_LIMIT = 50 to not fire # "too many unconfirmed parents" error. self.transaction_graph_test(size=200, n_tx_to_mine=[50, 100, 150]) if __name__ == '__main__': MempoolUpdateFromBlockTest().main() diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py index 6b4501119..b4336229d 100755 --- a/test/functional/mining_basic.py +++ b/test/functional/mining_basic.py @@ -1,283 +1,274 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
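A quick check of the batching driven by n_tx_to_mine in the update-from-block test above: with size=200 and batch boundaries at 50/100/150, each mined block takes 50 transactions and the final 50 remain in the mempool until every block is invalidated again:

size = 200
n_tx_to_mine = [50, 100, 150]
boundaries = [0] + n_tx_to_mine + [size]
batch_sizes = [b - a for a, b in zip(boundaries[:-1], boundaries[1:])]
assert batch_sizes == [50, 50, 50, 50]   # each batch stays within the 50-ancestor default limit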
"""Test mining RPCs - getmininginfo - getblocktemplate proposal mode - submitblock""" import copy from decimal import Decimal -from test_framework.blocktools import ( - create_coinbase, - TIME_GENESIS_BLOCK, -) -from test_framework.messages import ( - CBlock, - CBlockHeader, - BLOCK_HEADER_SIZE, -) -from test_framework.p2p import ( - P2PDataStore, -) +from test_framework.blocktools import TIME_GENESIS_BLOCK, create_coinbase +from test_framework.messages import BLOCK_HEADER_SIZE, CBlock, CBlockHeader +from test_framework.p2p import P2PDataStore from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, connect_nodes, ) def assert_template(node, block, expect, rehash=True): if rehash: block.hashMerkleRoot = block.calc_merkle_root() rsp = node.getblocktemplate( template_request={ 'data': block.serialize().hex(), 'mode': 'proposal'}) assert_equal(rsp, expect) class MiningTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.setup_clean_chain = True self.supports_cli = False def mine_chain(self): self.log.info('Create some old blocks') node = self.nodes[0] address = node.get_deterministic_priv_key().address for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600): node.setmocktime(t) node.generatetoaddress(1, address) mining_info = node.getmininginfo() assert_equal(mining_info['blocks'], 200) assert_equal(mining_info['currentblocktx'], 0) assert_equal(mining_info['currentblocksize'], 1000) self.restart_node(0) connect_nodes(self.nodes[0], self.nodes[1]) def run_test(self): self.mine_chain() node = self.nodes[0] def assert_submitblock(block, result_str_1, result_str_2=None): block.solve() result_str_2 = result_str_2 or 'duplicate-invalid' assert_equal(result_str_1, node.submitblock( hexdata=block.serialize().hex())) assert_equal(result_str_2, node.submitblock( hexdata=block.serialize().hex())) self.log.info('getmininginfo') mining_info = node.getmininginfo() assert_equal(mining_info['blocks'], 200) assert_equal(mining_info['chain'], self.chain) assert 'currentblocktx' not in mining_info assert 'currentblocksize' not in mining_info assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10')) assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334')) assert_equal(mining_info['pooledtx'], 0) # Mine a block to leave initial block download node.generatetoaddress(1, node.get_deterministic_priv_key().address) tmpl = node.getblocktemplate() self.log.info("getblocktemplate: Test capability advertised") assert 'proposal' in tmpl['capabilities'] next_height = int(tmpl["height"]) coinbase_tx = create_coinbase(height=next_height) # sequence numbers must not be max for nLockTime to have effect coinbase_tx.vin[0].nSequence = 2 ** 32 - 2 coinbase_tx.rehash() block = CBlock() block.nVersion = tmpl["version"] block.hashPrevBlock = int(tmpl["previousblockhash"], 16) block.nTime = tmpl["curtime"] block.nBits = int(tmpl["bits"], 16) block.nNonce = 0 block.vtx = [coinbase_tx] self.log.info("getblocktemplate: Test valid block") assert_template(node, block, None) self.log.info("submitblock: Test block decode failure") assert_raises_rpc_error(-22, "Block decode failed", node.submitblock, block.serialize()[:-15].hex()) self.log.info( "getblocktemplate: Test bad input hash for coinbase transaction") bad_block = copy.deepcopy(block) bad_block.vtx[0].vin[0].prevout.hash += 1 bad_block.vtx[0].rehash() assert_template(node, bad_block, 'bad-cb-missing') self.log.info("submitblock: 
Test invalid coinbase transaction") assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, bad_block.serialize().hex()) self.log.info("getblocktemplate: Test truncated final transaction") assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, { 'data': block.serialize()[:-1].hex(), 'mode': 'proposal'}) self.log.info("getblocktemplate: Test duplicate transaction") bad_block = copy.deepcopy(block) bad_block.vtx.append(bad_block.vtx[0]) assert_template(node, bad_block, 'bad-txns-duplicate') assert_submitblock(bad_block, 'bad-txns-duplicate', 'bad-txns-duplicate') self.log.info("getblocktemplate: Test invalid transaction") bad_block = copy.deepcopy(block) bad_tx = copy.deepcopy(bad_block.vtx[0]) bad_tx.vin[0].prevout.hash = 255 bad_tx.rehash() bad_block.vtx.append(bad_tx) assert_template(node, bad_block, 'bad-txns-inputs-missingorspent') assert_submitblock(bad_block, 'bad-txns-inputs-missingorspent') self.log.info("getblocktemplate: Test nonfinal transaction") bad_block = copy.deepcopy(block) bad_block.vtx[0].nLockTime = 2 ** 32 - 1 bad_block.vtx[0].rehash() assert_template(node, bad_block, 'bad-txns-nonfinal') assert_submitblock(bad_block, 'bad-txns-nonfinal') self.log.info("getblocktemplate: Test bad tx count") # The tx count is immediately after the block header bad_block_sn = bytearray(block.serialize()) assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 1) bad_block_sn[BLOCK_HEADER_SIZE] += 1 assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, { 'data': bad_block_sn.hex(), 'mode': 'proposal'}) self.log.info("getblocktemplate: Test bad bits") bad_block = copy.deepcopy(block) bad_block.nBits = 469762303 # impossible in the real world assert_template(node, bad_block, 'bad-diffbits') self.log.info("getblocktemplate: Test bad merkle root") bad_block = copy.deepcopy(block) bad_block.hashMerkleRoot += 1 assert_template(node, bad_block, 'bad-txnmrklroot', False) assert_submitblock(bad_block, 'bad-txnmrklroot', 'bad-txnmrklroot') self.log.info("getblocktemplate: Test bad timestamps") bad_block = copy.deepcopy(block) bad_block.nTime = 2 ** 31 - 1 assert_template(node, bad_block, 'time-too-new') assert_submitblock(bad_block, 'time-too-new', 'time-too-new') bad_block.nTime = 0 assert_template(node, bad_block, 'time-too-old') assert_submitblock(bad_block, 'time-too-old', 'time-too-old') self.log.info("getblocktemplate: Test not best block") bad_block = copy.deepcopy(block) bad_block.hashPrevBlock = 123 assert_template(node, bad_block, 'inconclusive-not-best-prevblk') assert_submitblock(bad_block, 'prev-blk-not-found', 'prev-blk-not-found') self.log.info('submitheader tests') assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='xx' * BLOCK_HEADER_SIZE)) assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='ff' * (BLOCK_HEADER_SIZE - 2))) assert_raises_rpc_error(-25, 'Must submit previous header', lambda: node.submitheader(hexdata=super(CBlock, bad_block).serialize().hex())) block.nTime += 1 block.solve() def chain_tip(b_hash, *, status='headers-only', branchlen=1): return {'hash': b_hash, 'height': 202, 'branchlen': branchlen, 'status': status} assert chain_tip(block.hash) not in node.getchaintips() node.submitheader(hexdata=block.serialize().hex()) assert chain_tip(block.hash) in node.getchaintips() # Noop node.submitheader(hexdata=CBlockHeader(block).serialize().hex()) assert chain_tip(block.hash) in node.getchaintips() bad_block_root = 
copy.deepcopy(block) bad_block_root.hashMerkleRoot += 2 bad_block_root.solve() assert chain_tip(bad_block_root.hash) not in node.getchaintips() node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) assert chain_tip(bad_block_root.hash) in node.getchaintips() # Should still reject invalid blocks, even if we have the header: assert_equal(node.submitblock( hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot') assert_equal(node.submitblock( hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot') assert chain_tip(bad_block_root.hash) in node.getchaintips() # We know the header for this invalid block, so should just return # early without error: node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) assert chain_tip(bad_block_root.hash) in node.getchaintips() bad_block_lock = copy.deepcopy(block) bad_block_lock.vtx[0].nLockTime = 2**32 - 1 bad_block_lock.vtx[0].rehash() bad_block_lock.hashMerkleRoot = bad_block_lock.calc_merkle_root() bad_block_lock.solve() assert_equal(node.submitblock( hexdata=bad_block_lock.serialize().hex()), 'bad-txns-nonfinal') assert_equal(node.submitblock( hexdata=bad_block_lock.serialize().hex()), 'duplicate-invalid') # Build a "good" block on top of the submitted bad block bad_block2 = copy.deepcopy(block) bad_block2.hashPrevBlock = bad_block_lock.sha256 bad_block2.solve() assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader( hexdata=CBlockHeader(bad_block2).serialize().hex())) # Should reject invalid header right away bad_block_time = copy.deepcopy(block) bad_block_time.nTime = 1 bad_block_time.solve() assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader( hexdata=CBlockHeader(bad_block_time).serialize().hex())) # Should ask for the block from a p2p node, if it announces the header # as well: node.add_p2p_connection(P2PDataStore()) # Drop the first getheaders node.p2p.wait_for_getheaders(timeout=5) node.p2p.send_blocks_and_test(blocks=[block], node=node) # Must be active now: assert chain_tip(block.hash, status='active', branchlen=0) in node.getchaintips() # Building a few blocks should give the same results node.generatetoaddress(10, node.get_deterministic_priv_key().address) assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader( hexdata=CBlockHeader(bad_block_time).serialize().hex())) assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader( hexdata=CBlockHeader(bad_block2).serialize().hex())) node.submitheader(hexdata=CBlockHeader(block).serialize().hex()) node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) # valid assert_equal(node.submitblock( hexdata=block.serialize().hex()), 'duplicate') # Sanity check that maxtries supports large integers node.generatetoaddress( 1, node.get_deterministic_priv_key().address, pow( 2, 32)) if __name__ == '__main__': MiningTest().main() diff --git a/test/functional/mining_getblocktemplate_longpoll.py b/test/functional/mining_getblocktemplate_longpoll.py index 4a9fda3f1..004a9b575 100755 --- a/test/functional/mining_getblocktemplate_longpoll.py +++ b/test/functional/mining_getblocktemplate_longpoll.py @@ -1,92 +1,92 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test longpolling with getblocktemplate.""" -from decimal import Decimal import threading +from decimal import Decimal from test_framework.test_framework import BitcoinTestFramework from test_framework.util import get_rpc_proxy, random_transaction class LongpollThread(threading.Thread): def __init__(self, node): threading.Thread.__init__(self) # query current longpollid templat = node.getblocktemplate() self.longpollid = templat['longpollid'] # create a new connection to the node, we can't use the same # connection from two threads self.node = get_rpc_proxy( node.url, 1, timeout=600, coveragedir=node.coverage_dir) def run(self): self.node.getblocktemplate({'longpollid': self.longpollid}) class GetBlockTemplateLPTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): self.log.info( "Warning: this test will take about 70 seconds in the best case. Be patient.") self.nodes[0].generate(10) templat = self.nodes[0].getblocktemplate() longpollid = templat['longpollid'] # longpollid should not change between successive invocations if # nothing else happens templat2 = self.nodes[0].getblocktemplate() assert templat2['longpollid'] == longpollid # Test 1: test that the longpoll waits if we do nothing thr = LongpollThread(self.nodes[0]) thr.start() # check that thread still lives # wait 5 seconds or until thread exits thr.join(5) assert thr.is_alive() # Test 2: test that longpoll will terminate if another node generates a block # generate a block on another node self.nodes[1].generate(1) # check that thread will exit now that a new block has been found # wait 5 seconds or until thread exits thr.join(5) assert not thr.is_alive() # Test 3: test that longpoll will terminate if we generate a block # ourselves thr = LongpollThread(self.nodes[0]) thr.start() # generate a block on our own node self.nodes[0].generate(1) # wait 5 seconds or until thread exits thr.join(5) assert not thr.is_alive() # Test 4: test that introducing a new transaction into the mempool will # terminate the longpoll thr = LongpollThread(self.nodes[0]) thr.start() # generate a random transaction and submit it min_relay_fee = self.nodes[0].getnetworkinfo()["relayfee"] # min_relay_fee is fee per 1000 bytes, which should be more than # enough. (txid, txhex, fee) = random_transaction(self.nodes, Decimal("1100000"), min_relay_fee, Decimal("1000"), 20) # after one minute, every 10 seconds the mempool is probed, so in 80 # seconds it should have returned thr.join(60 + 20) assert not thr.is_alive() if __name__ == '__main__': GetBlockTemplateLPTest().main() diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py index fe590d3f9..21ab7bb9b 100755 --- a/test/functional/mining_prioritisetransaction.py +++ b/test/functional/mining_prioritisetransaction.py @@ -1,222 +1,223 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test the prioritisetransaction mining RPC.""" import time from test_framework.blocktools import ( create_confirmed_utxos, send_big_transactions, ) + # FIXME: review how this test needs to be adapted w.r.t _LEGACY_MAX_BLOCK_SIZE from test_framework.cdefs import LEGACY_MAX_BLOCK_SIZE from test_framework.messages import COIN from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error class PrioritiseTransactionTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 # TODO: remove -txindex. Currently required for getrawtransaction call # (called by calculate_fee_from_txid) self.extra_args = [[ "-printpriority=1", "-acceptnonstdtxn=1", "-txindex" ]] * self.num_nodes self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): # Test `prioritisetransaction` required parameters assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction) assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '') assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '', 0) # Test `prioritisetransaction` invalid extra parameters assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '', 0, 0, 0) # Test `prioritisetransaction` invalid `txid` assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].prioritisetransaction, txid='foo', fee_delta=0) assert_raises_rpc_error( -8, "txid must be hexadecimal string (not 'Zd1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000')", self.nodes[0].prioritisetransaction, txid='Zd1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000', fee_delta=0) # Test `prioritisetransaction` invalid `dummy` txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000' assert_raises_rpc_error(-1, "JSON value is not a number as expected", self.nodes[0].prioritisetransaction, txid, 'foo', 0) assert_raises_rpc_error( -8, "Priority is no longer supported, dummy argument to prioritisetransaction must be 0.", self.nodes[0].prioritisetransaction, txid, 1, 0) # Test `prioritisetransaction` invalid `fee_delta` assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].prioritisetransaction, txid=txid, fee_delta='foo') self.relayfee = self.nodes[0].getnetworkinfo()['relayfee'] utxo_count = 90 utxos = create_confirmed_utxos(self.nodes[0], utxo_count) txids = [] # Create 3 batches of transactions at 3 different fee rate levels range_size = utxo_count // 3 for i in range(3): txids.append([]) start_range = i * range_size end_range = start_range + range_size txids[i] = send_big_transactions(self.nodes[0], utxos[start_range:end_range], end_range - start_range, 10 * (i + 1)) # Make sure that the size of each group of transactions exceeds # LEGACY_MAX_BLOCK_SIZE -- otherwise the test needs to be revised to create # more transactions. 
mempool = self.nodes[0].getrawmempool(True) sizes = [0, 0, 0] for i in range(3): for j in txids[i]: assert j in mempool sizes[i] += mempool[j]['size'] # If this fails, raise utxo_count assert sizes[i] > LEGACY_MAX_BLOCK_SIZE # add a fee delta to something in the cheapest bucket and make sure it gets mined # also check that a different entry in the cheapest bucket is NOT mined self.nodes[0].prioritisetransaction( txid=txids[0][0], fee_delta=100 * self.nodes[0].calculate_fee_from_txid(txids[0][0])) self.nodes[0].generate(1) mempool = self.nodes[0].getrawmempool() self.log.info("Assert that prioritised transaction was mined") assert txids[0][0] not in mempool assert txids[0][1] in mempool confirmed_transactions = self.nodes[0].getblock( self.nodes[0].getbestblockhash())['tx'] # Pull the highest fee-rate transaction from a block high_fee_tx = confirmed_transactions[1] # Something high-fee should have been mined! assert high_fee_tx is not None # Add a prioritisation before a tx is in the mempool (de-prioritising a # high-fee transaction so that it's now low fee). # # NOTE WELL: gettransaction returns the fee as a negative number and # as fractional coins. However, prioritisetransaction expects a # number of satoshis to add or subtract from the actual fee. # Thus the conversion here is simply int(tx_fee*COIN) to remove all fees, and then # we add the minimum fee back. tx_fee = self.nodes[0].gettransaction(high_fee_tx)['fee'] self.nodes[0].prioritisetransaction( txid=high_fee_tx, fee_delta=int(tx_fee * COIN) + self.nodes[0].calculate_fee_from_txid(high_fee_tx)) # Add everything back to mempool self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Check to make sure our high fee rate tx is back in the mempool mempool = self.nodes[0].getrawmempool() assert high_fee_tx in mempool # Now verify the modified-high feerate transaction isn't mined before # the other high fee transactions. Keep mining until our mempool has # decreased by the total size of the high-fee transactions that we calculated above. while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]): self.nodes[0].generate(1) # High fee transaction should not have been mined, but other high fee rate # transactions should have been. mempool = self.nodes[0].getrawmempool() self.log.info( "Assert that de-prioritised transaction is still in mempool") assert high_fee_tx in mempool for x in txids[2]: if (x != high_fee_tx): assert x not in mempool # Create a free transaction. Should be rejected. utxo_list = self.nodes[0].listunspent() assert len(utxo_list) > 0 utxo = utxo_list[0] inputs = [] outputs = {} inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]}) outputs[self.nodes[0].getnewaddress()] = utxo["amount"] raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) tx_hex = self.nodes[0].signrawtransactionwithwallet(raw_tx)["hex"] tx_id = self.nodes[0].decoderawtransaction(tx_hex)["txid"] # This will raise an exception due to min relay fee not being met assert_raises_rpc_error(-26, "min relay fee not met", self.nodes[0].sendrawtransaction, tx_hex) assert tx_id not in self.nodes[0].getrawmempool() # This is a less than 1000-byte transaction, so just set the fee # to be the minimum for a 1000-byte transaction and check that it is # accepted. 
self.nodes[0].prioritisetransaction( txid=tx_id, fee_delta=int(self.relayfee * COIN)) self.log.info( "Assert that prioritised free transaction is accepted to mempool") assert_equal(self.nodes[0].sendrawtransaction(tx_hex), tx_id) assert tx_id in self.nodes[0].getrawmempool() # Test that calling prioritisetransaction is sufficient to trigger # getblocktemplate to (eventually) return a new block. mock_time = int(time.time()) self.nodes[0].setmocktime(mock_time) template = self.nodes[0].getblocktemplate() self.nodes[0].prioritisetransaction( txid=tx_id, fee_delta=-int(self.relayfee * COIN)) self.nodes[0].setmocktime(mock_time + 10) new_template = self.nodes[0].getblocktemplate() assert template != new_template if __name__ == '__main__': PrioritiseTransactionTest().main()
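For reference on the longpoll contract that LongpollThread in mining_getblocktemplate_longpoll.py exercises: the second getblocktemplate call carries the longpollid and blocks until the node considers the current template stale. The sketch below restates that pattern outside the test framework; the rpc proxy argument and the helper name wait_for_new_template are hypothetical and not part of the patch.

import threading

def wait_for_new_template(rpc, timeout=600):
    # Hypothetical helper, not part of the patch. `rpc` is assumed to be any
    # proxy object exposing the getblocktemplate RPC.
    # The first call returns immediately and includes an opaque longpollid.
    template = rpc.getblocktemplate()
    longpollid = template['longpollid']

    result = {}

    def poll():
        # Sending the longpollid back makes getblocktemplate block until the
        # chain tip changes or the node decides the template is stale.
        result['template'] = rpc.getblocktemplate({'longpollid': longpollid})

    thr = threading.Thread(target=poll)
    thr.start()
    thr.join(timeout)
    return result.get('template')

In the test itself, the thr.join() timeouts combined with thr.is_alive() are what assert whether this blocking call has or has not returned yet.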
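The de-prioritisation step in mining_prioritisetransaction.py (the NOTE WELL comment) converts a wallet-reported fee into a satoshi delta, and the sign conventions are easy to get backwards. The following standalone sketch walks through that arithmetic with made-up numbers; only the meaning of COIN (10**8 satoshis per coin) is taken from test_framework.messages, everything else is illustrative.

from decimal import Decimal

COIN = 100000000  # satoshis per coin, as defined in test_framework.messages

# gettransaction reports the fee as a negative, fractional coin amount.
wallet_fee = Decimal('-0.00005000')   # hypothetical fee reported by the wallet
paid_sats = int(-wallet_fee * COIN)   # 5000 satoshis actually paid

min_fee_sats = 1000                   # hypothetical minimum fee for this tx

# prioritisetransaction takes a delta in satoshis: int(wallet_fee * COIN) is
# negative, so it cancels out the fee already paid, and adding the minimum
# fee back makes the miner treat the tx as paying exactly the minimum fee.
fee_delta = int(wallet_fee * COIN) + min_fee_sats
assert fee_delta == min_fee_sats - paid_sats  # -4000 in this example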