diff --git a/test/functional/feature_abortnode.py b/test/functional/feature_abortnode.py index 18f7b263e..b044ed40f 100755 --- a/test/functional/feature_abortnode.py +++ b/test/functional/feature_abortnode.py @@ -1,51 +1,51 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test that bitcoind aborts if it can't disconnect a block. - Start a single node and generate 3 blocks. - Delete the undo data. - Mine a fork that requires disconnecting the tip. - Verify that bitcoind calls AbortNode. """ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import wait_until, get_datadir_path, connect_nodes import os class AbortNodeTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [["-noparkdeepreorg"], []] def setup_network(self): self.setup_nodes() # We'll connect the nodes later def run_test(self): self.nodes[0].generate(3) datadir = get_datadir_path(self.options.tmpdir, 0) # Deleting the undo file will result in reorg failure - os.unlink(os.path.join(datadir, 'regtest', 'blocks', 'rev00000.dat')) + os.unlink(os.path.join(datadir, self.chain, 'blocks', 'rev00000.dat')) # Connecting to a node with a more-work chain will trigger a reorg # attempt. self.nodes[1].generate(3) with self.nodes[0].assert_debug_log(["Failed to disconnect block"]): connect_nodes(self.nodes[0], self.nodes[1]) self.nodes[1].generate(1) # Check that node0 aborted self.log.info("Waiting for crash") wait_until(lambda: self.nodes[0].is_node_stopped(), timeout=60) self.log.info("Node crashed - now verifying restart fails") self.nodes[0].assert_start_raises_init_error() if __name__ == '__main__': AbortNodeTest().main() diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py index a7e8351d3..a42a7220e 100755 --- a/test/functional/feature_config_args.py +++ b/test/functional/feature_config_args.py @@ -1,191 +1,191 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php.
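The recurring change throughout this diff is mechanical: every hard-coded 'regtest' path component becomes self.chain, so the tests keep working if the framework ever runs them on another chain. A minimal sketch of the attribute the tests assume (hypothetical placement; it is presumably set alongside BitcoinTestFramework in test_framework/test_framework.py):

    class BitcoinTestFramework:
        def __init__(self):
            # Chain name used to build datadir paths such as
            # <datadir>/<self.chain>/blocks; 'regtest' is the default,
            # but the tests below no longer hard-code it.
            self.chain = 'regtest'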
"""Test various command line arguments and configuration file parameters.""" import os from test_framework.test_framework import BitcoinTestFramework class ConfArgsTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 self.supports_cli = False def test_config_file_parser(self): # Assume node is stopped inc_conf_file_path = os.path.join( self.nodes[0].datadir, 'include.conf') with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf: conf.write('includeconf={}\n'.format(inc_conf_file_path)) self.nodes[0].assert_start_raises_init_error( expected_msg='Error: Error parsing command line arguments: Invalid parameter -dash_cli', extra_args=['-dash_cli=1'], ) with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('dash_conf=1\n') with self.nodes[0].assert_debug_log(expected_msgs=['Ignoring unknown configuration value dash_conf']): self.start_node(0) self.stop_node(0) with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('-dash=1\n') self.nodes[0].assert_start_raises_init_error( expected_msg='Error: Error reading configuration file: parse error on line 1: -dash=1, options in configuration file must be specified without leading -') if self.is_wallet_compiled(): with open(inc_conf_file_path, 'w', encoding='utf8') as conf: conf.write("wallet=foo\n") self.nodes[0].assert_start_raises_init_error( - expected_msg='Error: Config setting for -wallet only applied on regtest network when in [regtest] section.') + expected_msg='Error: Config setting for -wallet only applied on {} network when in [{}] section.'.format(self.chain, self.chain)) with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('regtest=0\n') # mainnet conf.write('acceptnonstdtxn=1\n') self.nodes[0].assert_start_raises_init_error( expected_msg='Error: acceptnonstdtxn is not currently supported for main chain') with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('nono\n') self.nodes[0].assert_start_raises_init_error( expected_msg='Error: Error reading configuration file: parse error on line 1: nono, if you intended to specify a negated option, use nono=1 instead') with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('server=1\nrpcuser=someuser\nrpcpassword=some#pass') self.nodes[0].assert_start_raises_init_error( expected_msg='Error: Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided') with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('server=1\nrpcuser=someuser\nmain.rpcpassword=some#pass') self.nodes[0].assert_start_raises_init_error( expected_msg='Error: Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided') with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write( 'server=1\nrpcuser=someuser\n[main]\nrpcpassword=some#pass') self.nodes[0].assert_start_raises_init_error( expected_msg='Error: Error reading configuration file: parse error on line 4, using # in rpcpassword can be ambiguous and should be avoided') inc_conf_file2_path = os.path.join( self.nodes[0].datadir, 'include2.conf') with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf: conf.write('includeconf={}\n'.format(inc_conf_file2_path)) with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('testnot.datadir=1\n') with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf: 
conf.write('[testnet]\n') self.restart_node(0) self.nodes[0].stop_node( expected_stderr='Warning: ' + inc_conf_file_path + ':1 Section [testnot] is not recognized.' + os.linesep + inc_conf_file2_path + ':1 Section [testnet] is not recognized.') with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('') # clear with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf: conf.write('') # clear def test_log_buffer(self): with self.nodes[0].assert_debug_log(expected_msgs=['Warning: parsed potentially confusing double-negative -connect=0\n']): self.start_node(0, extra_args=['-noconnect=0']) self.stop_node(0) def test_args_log(self): self.log.info('Test config args logging') with self.nodes[0].assert_debug_log( expected_msgs=[ 'Command-line arg: addnode="some.node"', 'Command-line arg: rpcauth=****', 'Command-line arg: rpcbind=****', 'Command-line arg: rpcpassword=****', 'Command-line arg: rpcuser=****', 'Command-line arg: torpassword=****', - 'Config file arg: regtest="1"', - 'Config file arg: [regtest] server="1"', + 'Config file arg: {}="1"'.format(self.chain), + 'Config file arg: [{}] server="1"'.format(self.chain), ], unexpected_msgs=[ 'alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0', '127.1.1.1', 'secret-rpcuser', 'secret-torpassword', ]): self.start_node(0, extra_args=[ '-addnode=some.node', '-rpcauth=alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0', '-rpcbind=127.1.1.1', '-rpcpassword=', '-rpcuser=secret-rpcuser', '-torpassword=secret-torpassword', ]) self.stop_node(0) def run_test(self): self.stop_node(0) self.test_log_buffer() self.test_args_log() self.test_config_file_parser() # Remove the -datadir argument so it doesn't override the config file self.nodes[0].remove_default_args(["-datadir"]) default_data_dir = self.nodes[0].datadir new_data_dir = os.path.join(default_data_dir, 'newdatadir') new_data_dir_2 = os.path.join(default_data_dir, 'newdatadir2') # Check that using -datadir argument on non-existent directory fails self.nodes[0].datadir = new_data_dir self.nodes[0].assert_start_raises_init_error( ['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.') # Check that using non-existent datadir in conf file fails conf_file = os.path.join(default_data_dir, "bitcoin.conf") - # datadir needs to be set before [regtest] section + # datadir needs to be set before [chain] section conf_file_contents = open(conf_file, encoding='utf8').read() with open(conf_file, 'w', encoding='utf8') as f: f.write("datadir=" + new_data_dir + "\n") f.write(conf_file_contents) self.nodes[0].assert_start_raises_init_error( ['-conf=' + conf_file], 'Error: Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.') # Create the directory and ensure the config file now works os.mkdir(new_data_dir) self.start_node(0, ['-conf=' + conf_file, '-wallet=w1']) self.stop_node(0) - assert os.path.exists(os.path.join(new_data_dir, 'regtest', 'blocks')) + assert os.path.exists(os.path.join(new_data_dir, self.chain, 'blocks')) if self.is_wallet_compiled(): assert os.path.exists(os.path.join( - new_data_dir, 'regtest', 'wallets', 'w1')) + new_data_dir, self.chain, 'wallets', 'w1')) # Ensure command line argument overrides datadir in conf os.mkdir(new_data_dir_2) self.nodes[0].datadir = new_data_dir_2 self.start_node(0, ['-datadir=' + new_data_dir_2, '-conf=' + conf_file, '-wallet=w2']) assert os.path.exists( os.path.join( new_data_dir_2, - 'regtest', + self.chain, 'blocks')) if self.is_wallet_compiled(): assert 
os.path.exists( os.path.join( new_data_dir_2, - 'regtest', + self.chain, 'wallets', 'w2')) if __name__ == '__main__': ConfArgsTest().main() diff --git a/test/functional/feature_filelock.py b/test/functional/feature_filelock.py index 3a93cb5a6..8b8f0b244 100755 --- a/test/functional/feature_filelock.py +++ b/test/functional/feature_filelock.py @@ -1,47 +1,47 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Check that it's not possible to start a second bitcoind instance using the same datadir or wallet.""" import os from test_framework.test_framework import BitcoinTestFramework from test_framework.test_node import ErrorMatch class FilelockTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 def setup_network(self): self.add_nodes(self.num_nodes, extra_args=None) self.nodes[0].start([]) self.nodes[0].wait_for_rpc_connection() def run_test(self): - datadir = os.path.join(self.nodes[0].datadir, 'regtest') + datadir = os.path.join(self.nodes[0].datadir, self.chain) self.log.info("Using datadir {}".format(datadir)) self.log.info( "Check that we can't start a second bitcoind instance using the same datadir") expected_msg = "Error: Cannot obtain a lock on data directory {0}. {1} is probably already running.".format( datadir, self.config['environment']['PACKAGE_NAME']) self.nodes[1].assert_start_raises_init_error(extra_args=[ '-datadir={}'.format(self.nodes[0].datadir), '-noserver'], expected_msg=expected_msg) if self.is_wallet_compiled(): wallet_dir = os.path.join(datadir, 'wallets') self.log.info( "Check that we can't start a second bitcoind instance using the same wallet") expected_msg = "Error: Error initializing wallet database environment" self.nodes[1].assert_start_raises_init_error( extra_args=[ '-walletdir={}'.format(wallet_dir), '-noserver'], expected_msg=expected_msg, match=ErrorMatch.PARTIAL_REGEX) if __name__ == '__main__': FilelockTest().main() diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py index 9bb658002..162f9cb03 100755 --- a/test/functional/feature_loadblock.py +++ b/test/functional/feature_loadblock.py @@ -1,87 +1,87 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test loadblock option Test the option to start a node with the option loadblock which loads a serialized blockchain from a file (usually called bootstrap.dat). To generate that file this test uses the helper scripts available in contrib/linearize. 
""" import os import subprocess import sys import tempfile import urllib from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal class LoadblockTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.supports_cli = False def run_test(self): self.nodes[1].setnetworkactive(state=False) self.nodes[0].generate(100) # Parsing the url of our node to get settings for config file data_dir = self.nodes[0].datadir node_url = urllib.parse.urlparse(self.nodes[0].url) cfg_file = os.path.join(data_dir, "linearize.cfg") bootstrap_file = os.path.join(self.options.tmpdir, "bootstrap.dat") genesis_block = self.nodes[0].getblockhash(0) - blocks_dir = os.path.join(data_dir, "regtest", "blocks") + blocks_dir = os.path.join(data_dir, self.chain, "blocks") hash_list = tempfile.NamedTemporaryFile(dir=data_dir, mode='w', delete=False, encoding="utf-8") self.log.info("Create linearization config file") with open(cfg_file, "a", encoding="utf-8") as cfg: cfg.write("datadir={}\n".format(data_dir)) cfg.write("rpcuser={}\n".format(node_url.username)) cfg.write("rpcpassword={}\n".format(node_url.password)) cfg.write("port={}\n".format(node_url.port)) cfg.write("host={}\n".format(node_url.hostname)) cfg.write("output_file={}\n".format(bootstrap_file)) cfg.write("max_height=100\n") cfg.write("netmagic=fabfb5da\n") cfg.write("input={}\n".format(blocks_dir)) cfg.write("genesis={}\n".format(genesis_block)) cfg.write("hashlist={}\n".format(hash_list.name)) base_dir = self.config["environment"]["SRCDIR"] linearize_dir = os.path.join(base_dir, "contrib", "linearize") self.log.info("Run linearization of block hashes") linearize_hashes_file = os.path.join( linearize_dir, "linearize-hashes.py") subprocess.run([sys.executable, linearize_hashes_file, cfg_file], stdout=hash_list, check=True) self.log.info("Run linearization of block data") linearize_data_file = os.path.join(linearize_dir, "linearize-data.py") subprocess.run([sys.executable, linearize_data_file, cfg_file], check=True) self.log.info("Restart second, unsynced node with bootstrap file") self.stop_node(1) self.start_node(1, ["-loadblock=" + bootstrap_file]) # start_node is blocking on all block files being imported assert_equal(self.nodes[1].getblockcount(), 100) assert_equal(self.nodes[1].getblockchaininfo()['blocks'], 100) assert_equal( self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash()) if __name__ == '__main__': LoadblockTest().main() diff --git a/test/functional/feature_logging.py b/test/functional/feature_logging.py index 45e657f49..674035df6 100755 --- a/test/functional/feature_logging.py +++ b/test/functional/feature_logging.py @@ -1,77 +1,77 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test debug logging.""" import os from test_framework.test_framework import BitcoinTestFramework from test_framework.test_node import ErrorMatch class LoggingTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True def relative_log_path(self, name): - return os.path.join(self.nodes[0].datadir, "regtest", name) + return os.path.join(self.nodes[0].datadir, self.chain, name) def run_test(self): # test default log file name default_log_path = self.relative_log_path("debug.log") assert os.path.isfile(default_log_path) # test alternative log file name in datadir self.restart_node(0, ["-debuglogfile=foo.log"]) assert os.path.isfile(self.relative_log_path("foo.log")) # test alternative log file name outside datadir tempname = os.path.join(self.options.tmpdir, "foo.log") self.restart_node(0, ["-debuglogfile={}".format(tempname)]) assert os.path.isfile(tempname) # check that invalid log (relative) will cause error invdir = self.relative_log_path("foo") invalidname = os.path.join("foo", "foo.log") self.stop_node(0) exp_stderr = r"Error: Could not open debug log file \S+$" self.nodes[0].assert_start_raises_init_error( ["-debuglogfile={}".format(invalidname)], exp_stderr, match=ErrorMatch.FULL_REGEX) assert not os.path.isfile(os.path.join(invdir, "foo.log")) # check that invalid log (relative) works after path exists self.stop_node(0) os.mkdir(invdir) self.start_node(0, ["-debuglogfile={}".format(invalidname)]) assert os.path.isfile(os.path.join(invdir, "foo.log")) # check that invalid log (absolute) will cause error self.stop_node(0) invdir = os.path.join(self.options.tmpdir, "foo") invalidname = os.path.join(invdir, "foo.log") self.nodes[0].assert_start_raises_init_error( ["-debuglogfile={}".format(invalidname)], exp_stderr, match=ErrorMatch.FULL_REGEX) assert not os.path.isfile(os.path.join(invdir, "foo.log")) # check that invalid log (absolute) works after path exists self.stop_node(0) os.mkdir(invdir) self.start_node(0, ["-debuglogfile={}".format(invalidname)]) assert os.path.isfile(os.path.join(invdir, "foo.log")) # check that -nodebuglogfile disables logging self.stop_node(0) os.unlink(default_log_path) assert not os.path.isfile(default_log_path) self.start_node(0, ["-nodebuglogfile"]) assert not os.path.isfile(default_log_path) # just sanity check no crash here self.stop_node(0) self.start_node(0, ["-debuglogfile={}".format(os.devnull)]) if __name__ == '__main__': LoggingTest().main() diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py index 8b5aace34..c42154343 100755 --- a/test/functional/feature_pruning.py +++ b/test/functional/feature_pruning.py @@ -1,524 +1,524 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the pruning code. WARNING: This test uses 4GB of disk space. 
This test takes 30 mins or more (up to 2 hours) """ import os from test_framework.blocktools import create_coinbase from test_framework.messages import CBlock, ToHex from test_framework.script import CScript, OP_RETURN, OP_NOP from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than, assert_raises_rpc_error, connect_nodes, disconnect_nodes, wait_until, ) # Rescans start at the earliest block up to 2 hours before a key timestamp, so # the manual prune RPC avoids pruning blocks in the same window to be # compatible with pruning based on key creation time. TIMESTAMP_WINDOW = 2 * 60 * 60 def mine_large_blocks(node, n): # Make a large scriptPubKey for the coinbase transaction. This is OP_RETURN # followed by 950k of OP_NOP. This would be non-standard in a non-coinbase # transaction but is consensus valid. # Set the nTime if this is the first time this function has been called. # A static variable ensures that time is monotonically increasing, so each # block created has a different time and therefore a unique blockhash. if "nTime" not in mine_large_blocks.__dict__: mine_large_blocks.nTime = 0 # Get the block parameters for the first block big_script = CScript([OP_RETURN] + [OP_NOP] * 950000) best_block = node.getblock(node.getbestblockhash()) height = int(best_block["height"]) + 1 mine_large_blocks.nTime = max( mine_large_blocks.nTime, int(best_block["time"])) + 1 previousblockhash = int(best_block["hash"], 16) for _ in range(n): # Build the coinbase transaction (with large scriptPubKey) coinbase_tx = create_coinbase(height) coinbase_tx.vin[0].nSequence = 2 ** 32 - 1 coinbase_tx.vout[0].scriptPubKey = big_script coinbase_tx.rehash() # Build the block block = CBlock() block.nVersion = best_block["version"] block.hashPrevBlock = previousblockhash block.nTime = mine_large_blocks.nTime block.nBits = int('207fffff', 16) block.nNonce = 0 block.vtx = [coinbase_tx] block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Submit to the node node.submitblock(ToHex(block)) previousblockhash = block.sha256 height += 1 mine_large_blocks.nTime += 1 def calc_usage(blockdir): return sum(os.path.getsize(blockdir + f) for f in os.listdir(blockdir) if os.path.isfile(os.path.join(blockdir, f))) / (1024. * 1024.) class PruneTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 6 self.supports_cli = False # Create nodes 0 and 1 to mine. # Create node 2 to test pruning.
self.full_node_default_args = ["-maxreceivebuffer=20000", "-blockmaxsize=999000", "-checkblocks=5", "-noparkdeepreorg", "-maxreorgdepth=-1"] # Create nodes 3 and 4 to test manual pruning (they will be re-started with manual pruning later) # Create node 5 to test wallet in prune mode, but do not connect self.extra_args = [self.full_node_default_args, self.full_node_default_args, ["-maxreceivebuffer=20000", "-prune=550", "-noparkdeepreorg", "-maxreorgdepth=-1"], ["-maxreceivebuffer=20000", "-blockmaxsize=999000", "-noparkdeepreorg", "-maxreorgdepth=-1"], ["-maxreceivebuffer=20000", "-blockmaxsize=999000", "-noparkdeepreorg", "-maxreorgdepth=-1"], ["-prune=550"]] self.rpc_timeout = 120 def skip_test_if_missing_module(self): self.skip_if_no_wallet() def setup_network(self): self.setup_nodes() self.prunedir = os.path.join( - self.nodes[2].datadir, 'regtest', 'blocks', '') + self.nodes[2].datadir, self.chain, 'blocks', '') connect_nodes(self.nodes[0], self.nodes[1]) connect_nodes(self.nodes[1], self.nodes[2]) connect_nodes(self.nodes[0], self.nodes[2]) connect_nodes(self.nodes[0], self.nodes[3]) connect_nodes(self.nodes[0], self.nodes[4]) self.sync_blocks(self.nodes[0:5]) def setup_nodes(self): self.add_nodes(self.num_nodes, self.extra_args) self.start_nodes() for n in self.nodes: n.importprivkey( privkey=n.get_deterministic_priv_key().key, label='coinbase', rescan=False) def create_big_chain(self): # Start by creating some coinbases we can spend later self.nodes[1].generate(200) self.sync_blocks(self.nodes[0:2]) self.nodes[0].generate(150) # Then mine enough full blocks to create more than 550MiB of data mine_large_blocks(self.nodes[0], 645) self.sync_blocks(self.nodes[0:5]) def test_height_min(self): assert os.path.isfile(os.path.join( self.prunedir, "blk00000.dat")), "blk00000.dat is missing, pruning too early" self.log.info("Success") self.log.info("Though we're already using more than 550MiB, current usage: {}".format( calc_usage(self.prunedir))) self.log.info( "Mining 25 more blocks should cause the first block file to be pruned") # Pruning doesn't run until we're allocating another chunk, 20 full # blocks past the height cutoff will ensure this mine_large_blocks(self.nodes[0], 25) # Wait for blk00000.dat to be pruned wait_until(lambda: not os.path.isfile( os.path.join(self.prunedir, "blk00000.dat")), timeout=30) self.log.info("Success") usage = calc_usage(self.prunedir) self.log.info("Usage should be below target: {}".format(usage)) assert_greater_than(550, usage) def create_chain_with_staleblocks(self): # Create stale blocks in manageable-sized chunks self.log.info( "Mine 24 (stale) blocks on Node 1, followed by 25 (main chain) block reorg from Node 0, for 12 rounds") for j in range(12): # Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain # Node 2 stays connected, so it hears about the stale blocks and # then reorgs when node0 reconnects disconnect_nodes(self.nodes[0], self.nodes[1]) disconnect_nodes(self.nodes[0], self.nodes[2]) # Mine 24 blocks in node 1 mine_large_blocks(self.nodes[1], 24) # Reorg back with 25 block chain from node 0 mine_large_blocks(self.nodes[0], 25) # Create connections in this order so both nodes can see the reorg # at the same time connect_nodes(self.nodes[0], self.nodes[1]) connect_nodes(self.nodes[0], self.nodes[2]) self.sync_blocks(self.nodes[0:3]) self.log.info("Usage can be over target because of high stale rate: {}".format( calc_usage(self.prunedir))) def reorg_test(self): # Node 1 will mine a 300
block chain starting 287 blocks back from Node # 0 and Node 2's tip. This will cause Node 2 to do a reorg requiring # 288 blocks of undo data to the reorg_test chain. height = self.nodes[1].getblockcount() self.log.info("Current block height: {}".format(height)) self.forkheight = height - 287 self.forkhash = self.nodes[1].getblockhash(self.forkheight) self.log.info("Invalidating block {} at height {}".format( self.forkhash, self.forkheight)) self.nodes[1].invalidateblock(self.forkhash) # We've now switched to our previously mined 24-block fork on node 1, but that's not what we want. # So invalidate that fork as well, until we're on the same chain as # node 0/2 (but at an ancestor 288 blocks ago) mainchainhash = self.nodes[0].getblockhash(self.forkheight - 1) curhash = self.nodes[1].getblockhash(self.forkheight - 1) while curhash != mainchainhash: self.nodes[1].invalidateblock(curhash) curhash = self.nodes[1].getblockhash(self.forkheight - 1) assert self.nodes[1].getblockcount() == self.forkheight - 1 self.log.info("New best height: {}".format( self.nodes[1].getblockcount())) # Disconnect node1 and generate the new chain disconnect_nodes(self.nodes[0], self.nodes[1]) disconnect_nodes(self.nodes[1], self.nodes[2]) self.log.info("Generating new longer chain of 300 more blocks") self.nodes[1].generate(300) self.log.info("Reconnect nodes") connect_nodes(self.nodes[0], self.nodes[1]) connect_nodes(self.nodes[1], self.nodes[2]) self.sync_blocks(self.nodes[0:3], timeout=120) self.log.info("Verify height on node 2: {}".format( self.nodes[2].getblockcount())) self.log.info("Usage possibly still high because of stale blocks in block files: {}".format( calc_usage(self.prunedir))) self.log.info( "Mine 220 more large blocks so we have requisite history") mine_large_blocks(self.nodes[0], 220) self.sync_blocks(self.nodes[0:3], timeout=120) usage = calc_usage(self.prunedir) self.log.info("Usage should be below target: {}".format(usage)) assert_greater_than(550, usage) def reorg_back(self): # Verify that a block on the old main chain fork has been pruned away assert_raises_rpc_error( -1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash) with self.nodes[2].assert_debug_log(expected_msgs=['block verification stopping at height', '(pruning, no data)']): self.nodes[2].verifychain(checklevel=4, nblocks=0) self.log.info( "Will need to redownload block {}".format(self.forkheight)) # Verify that we have enough history to reorg back to the fork point. # Although this is more than 288 blocks, because this chain was written # more recently and only its other 299 small and 220 large blocks are in # the block files after it, it is expected to still be retained. self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight)) first_reorg_height = self.nodes[2].getblockcount() curchainhash = self.nodes[2].getblockhash(self.mainchainheight) self.nodes[2].invalidateblock(curchainhash) goalbestheight = self.mainchainheight goalbesthash = self.mainchainhash2 # As of 0.10 the current block download logic is not able to reorg to # the original chain created in create_chain_with_staleblocks because # it doesn't know of any peer that's on that chain from which to # redownload its missing blocks. Invalidate the reorg_test chain in # node 0 as well; it can successfully switch to the original chain # because it has all the block data. However it must mine enough blocks # to have a more-work chain than the reorg_test chain in order to # trigger node 2's block download logic.
At this point node 2 is within # 288 blocks of the fork point so it will preserve its ability to # reorg. if self.nodes[2].getblockcount() < self.mainchainheight: blocks_to_mine = first_reorg_height + 1 - self.mainchainheight self.log.info( "Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed: {}".format( blocks_to_mine)) self.nodes[0].invalidateblock(curchainhash) assert_equal(self.nodes[0].getblockcount(), self.mainchainheight) assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2) goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1] goalbestheight = first_reorg_height + 1 self.log.info( "Verify node 2 reorged back to the main chain, some blocks of which it had to redownload") # Wait for Node 2 to reorg to proper height wait_until(lambda: self.nodes[2].getblockcount( ) >= goalbestheight, timeout=900) assert_equal(self.nodes[2].getbestblockhash(), goalbesthash) # Verify we can now have the data for a block previously pruned assert_equal(self.nodes[2].getblock( self.forkhash)["height"], self.forkheight) def manual_test(self, node_number, use_timestamp): # at this point, node has 995 blocks and has not yet run in prune mode self.start_node(node_number) node = self.nodes[node_number] assert_equal(node.getblockcount(), 995) assert_raises_rpc_error(-1, "not in prune mode", node.pruneblockchain, 500) # now re-start in manual pruning mode self.stop_node(node_number) self.start_node(node_number, extra_args=["-prune=1"]) node = self.nodes[node_number] assert_equal(node.getblockcount(), 995) def height(index): if use_timestamp: return node.getblockheader(node.getblockhash(index))[ "time"] + TIMESTAMP_WINDOW else: return index def prune(index): ret = node.pruneblockchain(height=height(index)) assert_equal(ret, node.getblockchaininfo()['pruneheight']) def has_block(index): return os.path.isfile(os.path.join( - self.nodes[node_number].datadir, "regtest", "blocks", "blk{:05}.dat".format(index))) + self.nodes[node_number].datadir, self.chain, "blocks", "blk{:05}.dat".format(index))) # should not prune because chain tip of node 3 (995) < PruneAfterHeight # (1000) assert_raises_rpc_error( -1, "Blockchain is too short for pruning", node.pruneblockchain, height(500)) # Save block transaction count before pruning, assert value block1_details = node.getblock(node.getblockhash(1)) assert_equal(block1_details["nTx"], len(block1_details["tx"])) # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight) node.generate(6) assert_equal(node.getblockchaininfo()["blocks"], 1001) # Pruned block should still know the number of transactions assert_equal(node.getblockheader(node.getblockhash(1)) ["nTx"], block1_details["nTx"]) # negative heights should raise an exception assert_raises_rpc_error(-8, "Negative", node.pruneblockchain, -10) # height=100 too low to prune first block file so this is a no-op prune(100) assert has_block( 0), "blk00000.dat is missing when should still be there" # Does nothing node.pruneblockchain(height(0)) assert has_block( 0), "blk00000.dat is missing when should still be there" # height=500 should prune first file prune(500) assert not has_block( 0), "blk00000.dat is still there, should be pruned by now" assert has_block( 1), "blk00001.dat is missing when should still be there" # height=650 should prune second file prune(650) assert not has_block( 1), "blk00001.dat is still there, should be pruned by now" # height=1000 should not prune anything more, because tip-288 is in # blk00002.dat. 
prune(1000) assert has_block( 2), "blk00002.dat is missing when should still be there" # advance the tip so blk00002.dat and blk00003.dat can be pruned (the # last 288 blocks should now be in blk00004.dat) node.generate(288) prune(1000) assert not has_block( 2), "blk00002.dat is still there, should be pruned by now" assert not has_block( 3), "blk00003.dat is still there, should be pruned by now" # stop node, start back up with auto-prune at 550 MiB, make sure still # runs self.stop_node(node_number) self.start_node(node_number, extra_args=["-prune=550"]) self.log.info("Success") def wallet_test(self): # check that the pruning node's wallet is still in good shape self.log.info("Stop and start pruning node to trigger wallet rescan") self.stop_node(2) self.start_node( 2, extra_args=["-prune=550", "-noparkdeepreorg", "-maxreorgdepth=-1"]) self.log.info("Success") # check that wallet loads successfully when restarting a pruned node after IBD. # this was reported to fail in #7494. self.log.info("Syncing node 5 to test wallet") connect_nodes(self.nodes[0], self.nodes[5]) nds = [self.nodes[0], self.nodes[5]] self.sync_blocks(nds, wait=5, timeout=300) # Stop and start to trigger rescan self.stop_node(5) self.start_node( 5, extra_args=["-prune=550", "-noparkdeepreorg", "-maxreorgdepth=-1"]) self.log.info("Success") def run_test(self): self.log.info("Warning! This test requires 4GB of disk space") self.log.info("Mining a big blockchain of 995 blocks") self.create_big_chain() # Chain diagram key: # * blocks on main chain # +,&,$,@ blocks on other forks # X invalidated block # N1 Node 1 # # Start by mining a simple chain that all nodes have # N0=N1=N2 **...*(995) # stop manual-pruning node with 995 blocks self.stop_node(3) self.stop_node(4) self.log.info( "Check that we haven't started pruning yet because we're below PruneAfterHeight") self.test_height_min() # Extend this chain past the PruneAfterHeight # N0=N1=N2 **...*(1020) self.log.info( "Check that we'll exceed disk space target if we have a very high stale block rate") self.create_chain_with_staleblocks() # Disconnect N0 # And mine a 24 block chain on N1 and a separate 25 block chain on N0 # N1=N2 **...*+...+(1044) # N0 **...**...**(1045) # # reconnect nodes causing reorg on N1 and N2 # N1=N2 **...*(1020) *...**(1045) # \ # +...+(1044) # # repeat this process until you have 12 stale forks hanging off the # main chain on N1 and N2 # N0 *************************...***************************(1320) # # N1=N2 **...*(1020) *...**(1045) *.. ..**(1295) *...**(1320) # \ \ \ # +...+(1044) &.. $...$(1319) # Save some current chain state for later use self.mainchainheight = self.nodes[2].getblockcount() # 1320 self.mainchainhash2 = self.nodes[2].getblockhash(self.mainchainheight) self.log.info("Check that we can survive a 288 block reorg still") self.reorg_test() # (1033, ) # Now create a 288 block reorg by mining a longer chain on N1 # First disconnect N1 # Then invalidate 1033 on main chain and 1032 on fork so height is 1032 on main chain # N1 **...*(1020) **...**(1032)X.. # \ # ++...+(1031)X.. # # Now mine 300 more blocks on N1 # N1 **...*(1020) **...**(1032) @@...@(1332) # \ \ # \ X... # \ \ # ++...+(1031)X.. .. # # Reconnect nodes and mine 220 more blocks on N1 # N1 **...*(1020) **...**(1032) @@...@@@(1552) # \ \ # \ X... # \ \ # ++...+(1031)X.. .. # # N2 **...*(1020) **...**(1032) @@...@@@(1552) # \ \ # \ *...**(1320) # \ \ # ++...++(1044) ..
# # N0 ********************(1032) @@...@@@(1552) # \ # *...**(1320) self.log.info( "Test that we can rerequest a block we previously pruned if needed for a reorg") self.reorg_back() # Verify that N2 still has block 1033 on current chain (@), but not on main chain (*) # Invalidate 1033 on current chain (@) on N2 and we should be able to reorg to # original main chain (*), but will require redownload of some blocks # In order to have a peer we think we can download from, must also perform this invalidation # on N0 and mine a new longest chain to trigger. # Final result: # N0 ********************(1032) **...****(1553) # \ # X@...@@@(1552) # # N2 **...*(1020) **...**(1032) **...****(1553) # \ \ # \ X@...@@@(1552) # \ # +.. # # N1 doesn't change because 1033 on main chain (*) is invalid self.log.info("Test manual pruning with block indices") self.manual_test(3, use_timestamp=False) self.log.info("Test manual pruning with timestamps") self.manual_test(4, use_timestamp=True) self.log.info("Test wallet re-scan") self.wallet_test() self.log.info("Done") if __name__ == '__main__': PruneTest().main() diff --git a/test/functional/interface_rpc.py b/test/functional/interface_rpc.py index 03f31dc23..fb21d300e 100755 --- a/test/functional/interface_rpc.py +++ b/test/functional/interface_rpc.py @@ -1,85 +1,85 @@ #!/usr/bin/env python3 # Copyright (c) 2018-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Tests some generic aspects of the RPC interface.""" import os from test_framework.authproxy import JSONRPCException from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_greater_than_or_equal def expect_http_status(expected_http_status, expected_rpc_code, fcn, *args): try: fcn(*args) raise AssertionError( "Expected RPC error {}, got none".format(expected_rpc_code)) except JSONRPCException as exc: assert_equal(exc.error["code"], expected_rpc_code) assert_equal(exc.http_status, expected_http_status) class RPCInterfaceTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.supports_cli = False def test_getrpcinfo(self): self.log.info("Testing getrpcinfo...") info = self.nodes[0].getrpcinfo() assert_equal(len(info['active_commands']), 1) command = info['active_commands'][0] assert_equal(command['method'], 'getrpcinfo') assert_greater_than_or_equal(command['duration'], 0) assert_equal( info['logpath'], os.path.join( self.nodes[0].datadir, - 'regtest', + self.chain, 'debug.log')) def test_batch_request(self): self.log.info("Testing basic JSON-RPC batch request...") results = self.nodes[0].batch([ # A basic request that will work fine. {"method": "getblockcount", "id": 1}, # Request that will fail. The whole batch request should still # work fine. {"method": "invalidmethod", "id": 2}, # Another call that should succeed. 
{"method": "getbestblockhash", "id": 3}, ]) result_by_id = {} for res in results: result_by_id[res["id"]] = res assert_equal(result_by_id[1]['error'], None) assert_equal(result_by_id[1]['result'], 0) assert_equal(result_by_id[2]['error']['code'], -32601) assert_equal(result_by_id[2]['result'], None) assert_equal(result_by_id[3]['error'], None) assert result_by_id[3]['result'] is not None def test_http_status_codes(self): self.log.info("Testing HTTP status codes for JSON-RPC requests...") expect_http_status(404, -32601, self.nodes[0].invalidmethod) expect_http_status(500, -8, self.nodes[0].getblockhash, 42) def run_test(self): self.test_getrpcinfo() self.test_batch_request() self.test_http_status_codes() if __name__ == '__main__': RPCInterfaceTest().main() diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py index 512f52d8f..337599149 100755 --- a/test/functional/mempool_persist.py +++ b/test/functional/mempool_persist.py @@ -1,165 +1,165 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mempool persistence. By default, bitcoind will dump mempool on shutdown and then reload it on startup. This can be overridden with the -persistmempool=0 command line option. Test is as follows: - start node0, node1 and node2. node1 has -persistmempool=0 - create 5 transactions on node2 to its own address. Note that these are not sent to node0 or node1 addresses because we don't want them to be saved in the wallet. - check that node0 and node1 have 5 transactions in their mempools - shutdown all nodes. - startup node0. Verify that it still has 5 transactions in its mempool. Shutdown node0. This tests that by default the mempool is persistent. - startup node1. Verify that its mempool is empty. Shutdown node1. This tests that with -persistmempool=0, the mempool is not dumped to disk when the node is shut down. - Restart node0 with -persistmempool=0. Verify that its mempool is empty. Shutdown node0. This tests that with -persistmempool=0, the mempool is not loaded from disk on start up. - Restart node0 with -persistmempool. Verify that it has 5 transactions in its mempool. This tests that -persistmempool=0 does not overwrite a previously valid mempool stored on disk. - Remove node0 mempool.dat and verify savemempool RPC recreates it and verify that node1 can load it and has 5 transactions in its mempool. - Verify that savemempool throws when the RPC is called if node1 can't write to disk. 
""" from decimal import Decimal import os import time from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than_or_equal, assert_raises_rpc_error, ) class MempoolPersistTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 3 self.extra_args = [[], ["-persistmempool=0"], []] def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): self.log.debug("Send 5 transactions from node2 (to its own address)") tx_creation_time_lower = int(time.time()) for i in range(5): last_txid = self.nodes[2].sendtoaddress( self.nodes[2].getnewaddress(), Decimal("10")) node2_balance = self.nodes[2].getbalance() self.sync_all() tx_creation_time_higher = int(time.time()) self.log.debug( "Verify that node0 and node1 have 5 transactions in their mempools") assert_equal(len(self.nodes[0].getrawmempool()), 5) assert_equal(len(self.nodes[1].getrawmempool()), 5) self.log.debug("Prioritize a transaction on node0") fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees'] assert_equal(fees['base'], fees['modified']) self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000) fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees'] assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified']) tx_creation_time = self.nodes[0].getmempoolentry(txid=last_txid)[ 'time'] assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower) assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time) self.log.debug("Stop-start the nodes. Verify that node0 has the " "transactions in its mempool and node1 does not. " "Verify that node2 calculates its balance correctly " "after loading wallet transactions.") self.stop_nodes() # Give this one a head-start, so we can be "extra-sure" that it didn't # load anything later # Also don't store the mempool, to keep the datadir clean self.start_node(1, extra_args=["-persistmempool=0"]) self.start_node(0) self.start_node(2) # start_node is blocking on the mempool being loaded assert self.nodes[0].getmempoolinfo()["loaded"] assert self.nodes[2].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[0].getrawmempool()), 5) assert_equal(len(self.nodes[2].getrawmempool()), 5) # The others have loaded their mempool. If node_1 loaded anything, we'd # probably notice by now: assert_equal(len(self.nodes[1].getrawmempool()), 0) self.log.debug('Verify prioritization is loaded correctly') fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees'] assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified']) self.log.debug('Verify time is loaded correctly') assert_equal( tx_creation_time, self.nodes[0].getmempoolentry( txid=last_txid)['time']) # Verify accounting of mempool transactions after restart is correct # Flush mempool to wallet self.nodes[2].syncwithvalidationinterfacequeue() assert_equal(node2_balance, self.nodes[2].getbalance()) self.log.debug( "Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.") self.stop_nodes() self.start_node(0, extra_args=["-persistmempool=0"]) assert self.nodes[0].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[0].getrawmempool()), 0) self.log.debug( "Stop-start node0. 
Verify that it has the transactions in its mempool.") self.stop_nodes() self.start_node(0) assert self.nodes[0].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[0].getrawmempool()), 5) mempooldat0 = os.path.join( - self.nodes[0].datadir, 'regtest', 'mempool.dat') + self.nodes[0].datadir, self.chain, 'mempool.dat') mempooldat1 = os.path.join( - self.nodes[1].datadir, 'regtest', 'mempool.dat') + self.nodes[1].datadir, self.chain, 'mempool.dat') self.log.debug( "Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it") os.remove(mempooldat0) self.nodes[0].savemempool() assert os.path.isfile(mempooldat0) self.log.debug( "Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions") os.rename(mempooldat0, mempooldat1) self.stop_nodes() self.start_node(1, extra_args=[]) assert self.nodes[1].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[1].getrawmempool()), 5) self.log.debug( "Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails") # To test the exception we create a tmp folder called mempool.dat.new; # this relies on an implementation detail that could change and break # this test mempooldotnew1 = mempooldat1 + '.new' os.mkdir(mempooldotnew1) assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool) os.rmdir(mempooldotnew1) if __name__ == '__main__': MempoolPersistTest().main() diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py index c88d0691e..c0b9038e8 100755 --- a/test/functional/mining_basic.py +++ b/test/functional/mining_basic.py @@ -1,300 +1,300 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php.
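The savemempool failure case above leans on bitcoind's write-then-rename dump: the node writes mempool.dat.new and renames it over mempool.dat, so pre-creating a directory with that name makes the dump fail (the test itself notes this is an implementation detail). A sketch of the same atomic-write pattern (hypothetical helper, not the node's actual code):

    import os

    def atomic_write(path, data):
        tmp = path + '.new'
        with open(tmp, 'wb') as f:  # raises if tmp already exists as a directory
            f.write(data)
        os.rename(tmp, path)  # swap the finished file into place in one step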
"""Test mining RPCs - getmininginfo - getblocktemplate proposal mode - submitblock""" import copy from decimal import Decimal from test_framework.blocktools import ( create_coinbase, TIME_GENESIS_BLOCK, ) from test_framework.messages import ( CBlock, CBlockHeader, BLOCK_HEADER_SIZE, ) from test_framework.mininode import ( P2PDataStore, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, connect_nodes, ) from test_framework.script import CScriptNum def assert_template(node, block, expect, rehash=True): if rehash: block.hashMerkleRoot = block.calc_merkle_root() rsp = node.getblocktemplate( template_request={ 'data': block.serialize().hex(), 'mode': 'proposal'}) assert_equal(rsp, expect) class MiningTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.setup_clean_chain = True self.supports_cli = False def mine_chain(self): self.log.info('Create some old blocks') node = self.nodes[0] address = node.get_deterministic_priv_key().address for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600): node.setmocktime(t) node.generatetoaddress(1, address) mining_info = node.getmininginfo() assert_equal(mining_info['blocks'], 200) assert_equal(mining_info['currentblocktx'], 0) assert_equal(mining_info['currentblocksize'], 1000) self.restart_node(0) connect_nodes(self.nodes[0], self.nodes[1]) def run_test(self): self.mine_chain() node = self.nodes[0] def assert_submitblock(block, result_str_1, result_str_2=None): block.solve() result_str_2 = result_str_2 or 'duplicate-invalid' assert_equal(result_str_1, node.submitblock( hexdata=block.serialize().hex())) assert_equal(result_str_2, node.submitblock( hexdata=block.serialize().hex())) self.log.info('getmininginfo') mining_info = node.getmininginfo() assert_equal(mining_info['blocks'], 200) - assert_equal(mining_info['chain'], 'regtest') + assert_equal(mining_info['chain'], self.chain) assert 'currentblocktx' not in mining_info assert 'currentblocksize' not in mining_info assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10')) assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334')) assert_equal(mining_info['pooledtx'], 0) # Mine a block to leave initial block download node.generatetoaddress(1, node.get_deterministic_priv_key().address) tmpl = node.getblocktemplate() self.log.info("getblocktemplate: Test capability advertised") assert 'proposal' in tmpl['capabilities'] next_height = int(tmpl["height"]) coinbase_tx = create_coinbase(height=next_height) # sequence numbers must not be max for nLockTime to have effect coinbase_tx.vin[0].nSequence = 2 ** 32 - 2 coinbase_tx.rehash() # round-trip the encoded bip34 block height commitment assert_equal( CScriptNum.decode( coinbase_tx.vin[0].scriptSig), next_height) # round-trip negative and multi-byte CScriptNums to catch python # regression assert_equal( CScriptNum.decode( CScriptNum.encode( CScriptNum(1500))), 1500) assert_equal(CScriptNum.decode( CScriptNum.encode(CScriptNum(-1500))), -1500) assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(-1))), -1) block = CBlock() block.nVersion = tmpl["version"] block.hashPrevBlock = int(tmpl["previousblockhash"], 16) block.nTime = tmpl["curtime"] block.nBits = int(tmpl["bits"], 16) block.nNonce = 0 block.vtx = [coinbase_tx] self.log.info("getblocktemplate: Test valid block") assert_template(node, block, None) self.log.info("submitblock: Test block decode failure") assert_raises_rpc_error(-22, "Block 
decode failed", node.submitblock, block.serialize()[:-15].hex()) self.log.info( "getblocktemplate: Test bad input hash for coinbase transaction") bad_block = copy.deepcopy(block) bad_block.vtx[0].vin[0].prevout.hash += 1 bad_block.vtx[0].rehash() assert_template(node, bad_block, 'bad-cb-missing') self.log.info("submitblock: Test invalid coinbase transaction") assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, bad_block.serialize().hex()) self.log.info("getblocktemplate: Test truncated final transaction") assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, { 'data': block.serialize()[:-1].hex(), 'mode': 'proposal'}) self.log.info("getblocktemplate: Test duplicate transaction") bad_block = copy.deepcopy(block) bad_block.vtx.append(bad_block.vtx[0]) assert_template(node, bad_block, 'bad-txns-duplicate') assert_submitblock(bad_block, 'bad-txns-duplicate', 'bad-txns-duplicate') self.log.info("getblocktemplate: Test invalid transaction") bad_block = copy.deepcopy(block) bad_tx = copy.deepcopy(bad_block.vtx[0]) bad_tx.vin[0].prevout.hash = 255 bad_tx.rehash() bad_block.vtx.append(bad_tx) assert_template(node, bad_block, 'bad-txns-inputs-missingorspent') assert_submitblock(bad_block, 'bad-txns-inputs-missingorspent') self.log.info("getblocktemplate: Test nonfinal transaction") bad_block = copy.deepcopy(block) bad_block.vtx[0].nLockTime = 2 ** 32 - 1 bad_block.vtx[0].rehash() assert_template(node, bad_block, 'bad-txns-nonfinal') assert_submitblock(bad_block, 'bad-txns-nonfinal') self.log.info("getblocktemplate: Test bad tx count") # The tx count is immediately after the block header bad_block_sn = bytearray(block.serialize()) assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 1) bad_block_sn[BLOCK_HEADER_SIZE] += 1 assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, { 'data': bad_block_sn.hex(), 'mode': 'proposal'}) self.log.info("getblocktemplate: Test bad bits") bad_block = copy.deepcopy(block) bad_block.nBits = 469762303 # impossible in the real world assert_template(node, bad_block, 'bad-diffbits') self.log.info("getblocktemplate: Test bad merkle root") bad_block = copy.deepcopy(block) bad_block.hashMerkleRoot += 1 assert_template(node, bad_block, 'bad-txnmrklroot', False) assert_submitblock(bad_block, 'bad-txnmrklroot', 'bad-txnmrklroot') self.log.info("getblocktemplate: Test bad timestamps") bad_block = copy.deepcopy(block) bad_block.nTime = 2 ** 31 - 1 assert_template(node, bad_block, 'time-too-new') assert_submitblock(bad_block, 'time-too-new', 'time-too-new') bad_block.nTime = 0 assert_template(node, bad_block, 'time-too-old') assert_submitblock(bad_block, 'time-too-old', 'time-too-old') self.log.info("getblocktemplate: Test not best block") bad_block = copy.deepcopy(block) bad_block.hashPrevBlock = 123 assert_template(node, bad_block, 'inconclusive-not-best-prevblk') assert_submitblock(bad_block, 'prev-blk-not-found', 'prev-blk-not-found') self.log.info('submitheader tests') assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='xx' * BLOCK_HEADER_SIZE)) assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='ff' * (BLOCK_HEADER_SIZE - 2))) assert_raises_rpc_error(-25, 'Must submit previous header', lambda: node.submitheader(hexdata=super(CBlock, bad_block).serialize().hex())) block.nTime += 1 block.solve() def chain_tip(b_hash, *, status='headers-only', branchlen=1): return {'hash': b_hash, 'height': 202, 'branchlen': branchlen, 
'status': status} assert chain_tip(block.hash) not in node.getchaintips() node.submitheader(hexdata=block.serialize().hex()) assert chain_tip(block.hash) in node.getchaintips() # Noop node.submitheader(hexdata=CBlockHeader(block).serialize().hex()) assert chain_tip(block.hash) in node.getchaintips() bad_block_root = copy.deepcopy(block) bad_block_root.hashMerkleRoot += 2 bad_block_root.solve() assert chain_tip(bad_block_root.hash) not in node.getchaintips() node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) assert chain_tip(bad_block_root.hash) in node.getchaintips() # Should still reject invalid blocks, even if we have the header: assert_equal(node.submitblock( hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot') assert_equal(node.submitblock( hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot') assert chain_tip(bad_block_root.hash) in node.getchaintips() # We know the header for this invalid block, so should just return # early without error: node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) assert chain_tip(bad_block_root.hash) in node.getchaintips() bad_block_lock = copy.deepcopy(block) bad_block_lock.vtx[0].nLockTime = 2**32 - 1 bad_block_lock.vtx[0].rehash() bad_block_lock.hashMerkleRoot = bad_block_lock.calc_merkle_root() bad_block_lock.solve() assert_equal(node.submitblock( hexdata=bad_block_lock.serialize().hex()), 'bad-txns-nonfinal') assert_equal(node.submitblock( hexdata=bad_block_lock.serialize().hex()), 'duplicate-invalid') # Build a "good" block on top of the submitted bad block bad_block2 = copy.deepcopy(block) bad_block2.hashPrevBlock = bad_block_lock.sha256 bad_block2.solve() assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader( hexdata=CBlockHeader(bad_block2).serialize().hex())) # Should reject invalid header right away bad_block_time = copy.deepcopy(block) bad_block_time.nTime = 1 bad_block_time.solve() assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader( hexdata=CBlockHeader(bad_block_time).serialize().hex())) # Should ask for the block from a p2p node, if they announce the header # as well: node.add_p2p_connection(P2PDataStore()) # Drop the first getheaders node.p2p.wait_for_getheaders(timeout=5) node.p2p.send_blocks_and_test(blocks=[block], node=node) # Must be active now: assert chain_tip(block.hash, status='active', branchlen=0) in node.getchaintips() # Building a few blocks should give the same results node.generatetoaddress(10, node.get_deterministic_priv_key().address) assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader( hexdata=CBlockHeader(bad_block_time).serialize().hex())) assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader( hexdata=CBlockHeader(bad_block2).serialize().hex())) node.submitheader(hexdata=CBlockHeader(block).serialize().hex()) node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) # valid assert_equal(node.submitblock( hexdata=block.serialize().hex()), 'duplicate') # Sanity check that maxtries supports large integers node.generatetoaddress( 1, node.get_deterministic_priv_key().address, pow( 2, 32)) if __name__ == '__main__': MiningTest().main() diff --git a/test/functional/rpc_dumptxoutset.py b/test/functional/rpc_dumptxoutset.py index 39ef975ca..b7068bbf4 100755 --- a/test/functional/rpc_dumptxoutset.py +++ b/test/functional/rpc_dumptxoutset.py @@ -1,53 +1,53 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin Core developers # Distributed under the MIT software license, see the 
accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the generation of UTXO snapshots using `dumptxoutset`. """ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error import hashlib from pathlib import Path class DumptxoutsetTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): """Test a trivial usage of the dumptxoutset RPC command.""" node = self.nodes[0] mocktime = node.getblockheader(node.getblockhash(0))['time'] + 1 node.setmocktime(mocktime) node.generate(100) FILENAME = 'txoutset.dat' out = node.dumptxoutset(FILENAME) - expected_path = Path(node.datadir) / 'regtest' / FILENAME + expected_path = Path(node.datadir) / self.chain / FILENAME assert expected_path.is_file() assert_equal(out['coins_written'], 100) assert_equal(out['base_height'], 100) assert_equal(out['path'], str(expected_path)) # Blockhash should be deterministic based on mocked time. assert_equal( out['base_hash'], '65d0aec2439aae14373c153f596fb90a87b643d9bff3e65f250aa8f055e6816b') with open(str(expected_path), 'rb') as f: digest = hashlib.sha256(f.read()).hexdigest() # UTXO snapshot hash should be deterministic based on mocked time. assert_equal( digest, '05957e146e38153d84e9294999cc24f0dcdb9902c4834b32c79ae8e8985babea') # Specifying a path to an existing file will fail. assert_raises_rpc_error( -8, '{} already exists'.format(FILENAME), node.dumptxoutset, FILENAME) if __name__ == '__main__': DumptxoutsetTest().main() diff --git a/test/functional/rpc_scantxoutset.py b/test/functional/rpc_scantxoutset.py index 73b9fe7d7..a0b14fe17 100755 --- a/test/functional/rpc_scantxoutset.py +++ b/test/functional/rpc_scantxoutset.py @@ -1,231 +1,231 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
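The scantxoutset test below passes scan objects either as bare descriptor strings or as {"desc": ..., "range": ...} dicts, and deliberately mixes the ' and h suffixes, which are interchangeable markers for hardened derivation. A short sketch of the two equivalent range forms (hypothetical descriptor; this assumes the upstream convention that a bare integer n means indices 0 through n):

    node = self.nodes[0]
    desc = "combo(tpubD6NzV.../0h/*)"  # placeholder descriptor
    node.scantxoutset("start", [{"desc": desc, "range": 1499}])       # indices 0..1499
    node.scantxoutset("start", [{"desc": desc, "range": [0, 1499]}])  # same span, explicit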
"""Test the scantxoutset rpc call.""" import os import shutil from decimal import Decimal from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error def descriptors(out): return sorted(u['desc'] for u in out['unspents']) class ScantxoutsetTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): self.log.info("Mining blocks...") self.nodes[0].generate(110) addr = self.nodes[0].getnewaddress("") pubkey = self.nodes[0].getaddressinfo(addr)['pubkey'] self.nodes[0].sendtoaddress(addr, 0.002) # send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK # (m/0'/0'/0') self.nodes[0].sendtoaddress( "mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc", 0.008) # (m/0'/0'/1') self.nodes[0].sendtoaddress( "mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR", 0.016) # (m/0'/0'/1500') self.nodes[0].sendtoaddress( "n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg", 0.032) # (m/0'/0'/0) self.nodes[0].sendtoaddress( "mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6", 0.064) # (m/0'/0'/1) self.nodes[0].sendtoaddress( "mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S", 0.128) # (m/0'/0'/1500) self.nodes[0].sendtoaddress( "mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC", 0.256) # (m/1/1/0') self.nodes[0].sendtoaddress( "mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7", 0.512) # (m/1/1/1') self.nodes[0].sendtoaddress( "mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA", 1.024) # (m/1/1/1500') self.nodes[0].sendtoaddress( "mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ", 2.048) # (m/1/1/0) self.nodes[0].sendtoaddress( "mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", 4.096) # (m/1/1/1) self.nodes[0].sendtoaddress( "mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy", 8.192) # (m/1/1/1500) self.nodes[0].sendtoaddress( "mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq", 16.384) self.nodes[0].generate(1) self.log.info("Stop node, remove wallet, mine again some blocks...") self.stop_node(0) shutil.rmtree(os.path.join( - self.nodes[0].datadir, "regtest", 'wallets')) + self.nodes[0].datadir, self.chain, 'wallets')) self.start_node(0) self.nodes[0].generate(110) scan = self.nodes[0].scantxoutset("start", []) info = self.nodes[0].gettxoutsetinfo() assert_equal(scan['success'], True) assert_equal(scan['height'], info['height']) assert_equal(scan['txouts'], info['txouts']) assert_equal(scan['bestblock'], info['bestblock']) self.restart_node(0, ['-nowallet']) self.log.info("Test if we have found the non HD unspent outputs.") assert_equal(self.nodes[0].scantxoutset( "start", ["pkh(" + pubkey + ")"])['total_amount'], Decimal("0.002")) assert_equal(self.nodes[0].scantxoutset( "start", ["combo(" + pubkey + ")"])['total_amount'], Decimal("0.002")) assert_equal(self.nodes[0].scantxoutset( "start", ["addr(" + addr + ")"])['total_amount'], Decimal("0.002")) assert_equal(self.nodes[0].scantxoutset( "start", ["addr(" + addr + ")"])['total_amount'], Decimal("0.002")) self.log.info("Test range validation.") assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": -1}]) assert_raises_rpc_error(-8, "Range should be greater or equal than 0", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [-1, 10]}]) assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]}]) assert_raises_rpc_error(-8, "Range specified as [begin,end] must not have begin after 
end", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [2, 1]}]) assert_raises_rpc_error(-8, "Range is too large", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [0, 1000001]}]) self.log.info("Test extended key derivation.") # Run various scans, and verify that the sum of the amounts of the matches corresponds to the expected subset. # Note that all amounts in the UTXO set are powers of 2 multiplied by # 0.001 BTC, so each amounts uniquely identifies a subset. assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"])['total_amount'], Decimal("0.008")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"])['total_amount'], Decimal("0.016")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"])['total_amount'], Decimal("0.032")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"])['total_amount'], Decimal("0.064")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"])['total_amount'], Decimal("0.128")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"])['total_amount'], Decimal("0.256")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)", "range": 1499}])['total_amount'], Decimal("0.024")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)", "range": 1500}])['total_amount'], Decimal("0.056")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])['total_amount'], Decimal("0.192")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)", "range": 1500}])['total_amount'], Decimal("0.448")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"])['total_amount'], Decimal("0.512")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')"])['total_amount'], Decimal("1.024")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)"])['total_amount'], Decimal("2.048")) assert_equal(self.nodes[0].scantxoutset("start", [ 
"combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])['total_amount'], Decimal("4.096")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)"])['total_amount'], Decimal("8.192")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)"])['total_amount'], Decimal("16.384")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)"])['total_amount'], Decimal("4.096")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo([abcdef88/1/2'/3/4h]tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)"])['total_amount'], Decimal("8.192")) assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)"])['total_amount'], Decimal("16.384")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1499}])['total_amount'], Decimal("1.536")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1500}])['total_amount'], Decimal("3.584")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288")) assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672")) assert_equal( self.nodes[0].scantxoutset( "start", [ { "desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": [ 1500, 1500]}])['total_amount'], Decimal("16.384")) # Test the reported descriptors for a few matches assert_equal(descriptors(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])), ["pkh([0c5f9a1e/0'/0'/0]026dbd8b2315f296d36e6b6920b1579ca75569464875c7ebe869b536a7d9503c8c)#dzxw429x", "pkh([0c5f9a1e/0'/0'/1]033e6f25d76c00bedb3a8993c7d5739ee806397f0529b1b31dda31ef890f19a60c)#43rvceed"]) assert_equal( descriptors( self.nodes[0].scantxoutset( "start", 
["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])), ["pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8"]) assert_equal(descriptors(self.nodes[0].scantxoutset("start", [{"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])), ['pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8', 'pkh([0c5f9a1e/1/1/1500]03832901c250025da2aebae2bfb38d5c703a57ab66ad477f9c578bfbcd78abca6f)#vchwd07g', 'pkh([0c5f9a1e/1/1/1]030d820fc9e8211c4169be8530efbc632775d8286167afd178caaf1089b77daba7)#z2t3ypsa']) # Check that status and abort don't need second arg assert_equal(self.nodes[0].scantxoutset("status"), None) assert_equal(self.nodes[0].scantxoutset("abort"), False) # Check that second arg is needed for start assert_raises_rpc_error(-1, "scanobjects argument is required for the start action", self.nodes[0].scantxoutset, "start") if __name__ == '__main__': ScantxoutsetTest().main() diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 6746efe94..8fe476ddb 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -1,686 +1,686 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Base class for RPC testing.""" import argparse import configparser from enum import Enum import logging import os import pdb import random import shutil import sys import tempfile import time from .authproxy import JSONRPCException from . import coverage from .test_node import TestNode from .mininode import NetworkThread from .util import ( assert_equal, check_json_precision, connect_nodes, disconnect_nodes, get_datadir_path, initialize_datadir, MAX_NODES, p2p_port, PortSeed, rpc_port, sync_blocks, sync_mempools, ) class TestStatus(Enum): PASSED = 1 FAILED = 2 SKIPPED = 3 TEST_EXIT_PASSED = 0 TEST_EXIT_FAILED = 1 TEST_EXIT_SKIPPED = 77 # Timestamp is Dec. 1st, 2019 at 00:00:00 TIMESTAMP_IN_THE_PAST = 1575158400 TMPDIR_PREFIX = "bitcoin_func_test_" class SkipTest(Exception): """This exception is raised to skip a test""" def __init__(self, message): self.message = message class BitcoinTestMetaClass(type): """Metaclass for BitcoinTestFramework. Ensures that any attempt to register a subclass of `BitcoinTestFramework` adheres to a standard whereby the subclass overrides `set_test_params` and `run_test` but DOES NOT override either `__init__` or `main`. If any of those standards are violated, a ``TypeError`` is raised.""" def __new__(cls, clsname, bases, dct): if not clsname == 'BitcoinTestFramework': if not ('run_test' in dct and 'set_test_params' in dct): raise TypeError("BitcoinTestFramework subclasses must override " "'run_test' and 'set_test_params'") if '__init__' in dct or 'main' in dct: raise TypeError("BitcoinTestFramework subclasses may not override " "'__init__' or 'main'") return super().__new__(cls, clsname, bases, dct) class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): """Base class for a bitcoin test script. Individual bitcoin test scripts should subclass this class and override the set_test_params() and run_test() methods. 
Individual tests can also override the following methods to customize the test setup: - add_options() - setup_chain() - setup_network() - setup_nodes() The __init__() and main() methods should not be overridden. This class also contains various public and private helper methods.""" def __init__(self): """Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method""" self.chain = 'regtest' self.setup_clean_chain = False self.nodes = [] self.network_thread = None # Wait for up to 60 seconds for the RPC server to respond self.rpc_timeout = 60 self.supports_cli = True self.bind_to_localhost_only = True # We run parse_args before set_test_params for tests that need to # know the parser options during setup. self.parse_args() self.set_test_params() def main(self): """Main function. This should not be overridden by the subclass test scripts.""" assert hasattr( self, "num_nodes"), "Test must set self.num_nodes in set_test_params()" try: self.setup() self.run_test() except JSONRPCException: self.log.exception("JSONRPC error") self.success = TestStatus.FAILED except SkipTest as e: self.log.warning("Test Skipped: {}".format(e.message)) self.success = TestStatus.SKIPPED except AssertionError: self.log.exception("Assertion failed") self.success = TestStatus.FAILED except KeyError: self.log.exception("Key error") self.success = TestStatus.FAILED except Exception: self.log.exception("Unexpected exception caught during testing") self.success = TestStatus.FAILED except KeyboardInterrupt: self.log.warning("Exiting after keyboard interrupt") self.success = TestStatus.FAILED finally: exit_code = self.shutdown() sys.exit(exit_code) def parse_args(self): parser = argparse.ArgumentParser(usage="%(prog)s [options]") parser.add_argument("--nocleanup", dest="nocleanup", default=False, action="store_true", help="Leave bitcoinds and test.* datadir on exit or error") parser.add_argument("--noshutdown", dest="noshutdown", default=False, action="store_true", help="Don't stop bitcoinds after the test execution") parser.add_argument("--cachedir", dest="cachedir", default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"), help="Directory for caching pregenerated datadirs (default: %(default)s)") parser.add_argument("--tmpdir", dest="tmpdir", help="Root directory for datadirs") parser.add_argument("-l", "--loglevel", dest="loglevel", default="INFO", help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console.
Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.") parser.add_argument("--tracerpc", dest="trace_rpc", default=False, action="store_true", help="Print out all RPC calls as they are made") parser.add_argument("--portseed", dest="port_seed", default=os.getpid(), type=int, help="The seed to use for assigning port numbers (default: current process id)") parser.add_argument("--coveragedir", dest="coveragedir", help="Write tested RPC commands into this directory") parser.add_argument("--configfile", dest="configfile", default=os.path.abspath(os.path.dirname(os.path.realpath( __file__)) + "/../../config.ini"), help="Location of the test framework config file (default: %(default)s)") parser.add_argument("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true", help="Attach a python debugger if test fails") parser.add_argument("--usecli", dest="usecli", default=False, action="store_true", help="use bitcoin-cli instead of RPC for all commands") parser.add_argument("--perf", dest="perf", default=False, action="store_true", help="profile running nodes with perf for the duration of the test") parser.add_argument("--valgrind", dest="valgrind", default=False, action="store_true", help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown, valgrind 3.14 or later required") parser.add_argument("--randomseed", type=int, help="set a random seed for deterministically reproducing a previous test run") parser.add_argument("--with-axionactivation", dest="axionactivation", default=False, action="store_true", help="Activate axion update on timestamp {}".format(TIMESTAMP_IN_THE_PAST)) self.add_options(parser) self.options = parser.parse_args() def setup(self): """Call this method to start up the test framework object with options set.""" PortSeed.n = self.options.port_seed check_json_precision() self.options.cachedir = os.path.abspath(self.options.cachedir) config = configparser.ConfigParser() config.read_file(open(self.options.configfile, encoding='utf-8')) self.config = config self.options.bitcoind = os.getenv( "BITCOIND", default=config["environment"]["BUILDDIR"] + '/src/bitcoind' + config["environment"]["EXEEXT"]) self.options.bitcoincli = os.getenv( "BITCOINCLI", default=config["environment"]["BUILDDIR"] + '/src/bitcoin-cli' + config["environment"]["EXEEXT"]) self.options.emulator = config["environment"]["EMULATOR"] or None os.environ['PATH'] = config['environment']['BUILDDIR'] + os.pathsep + \ config['environment']['BUILDDIR'] + os.path.sep + "qt" + os.pathsep + \ os.environ['PATH'] # Set up temp directory and start logging if self.options.tmpdir: self.options.tmpdir = os.path.abspath(self.options.tmpdir) os.makedirs(self.options.tmpdir, exist_ok=False) else: self.options.tmpdir = tempfile.mkdtemp(prefix=TMPDIR_PREFIX) self._start_logging() # Seed the PRNG. Note that test runs are reproducible if and only if # a single thread accesses the PRNG. For more information, see # https://docs.python.org/3/library/random.html#notes-on-reproducibility. # The network thread shouldn't access random. If we need to change the # network thread to access randomness, it should instantiate its own # random.Random object. 
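# As a sketch of what the note above prescribes (hypothetical, not
# part of this change), a thread that did need randomness would hold
# its own PRNG instance instead of touching the module-level one:
#
#     rng = random.Random(self.options.randomseed)
#     delay = rng.uniform(0.0, 0.1)
#
# keeping module-level random.* calls confined to this main thread so
# a single seed reproduces the whole run.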
seed = self.options.randomseed if seed is None: seed = random.randrange(sys.maxsize) else: self.log.debug("User supplied random seed {}".format(seed)) random.seed(seed) self.log.debug("PRNG seed is: {}".format(seed)) self.log.debug('Setting up network thread') self.network_thread = NetworkThread() self.network_thread.start() if self.options.usecli: if not self.supports_cli: raise SkipTest( "--usecli specified but test does not support using CLI") self.skip_if_no_cli() self.skip_test_if_missing_module() self.setup_chain() self.setup_network() self.success = TestStatus.PASSED def shutdown(self): """Call this method to shut down the test framework object.""" if self.success == TestStatus.FAILED and self.options.pdbonfailure: print("Testcase failed. Attaching python debugger. Enter ? for help") pdb.set_trace() self.log.debug('Closing down network thread') self.network_thread.close() if not self.options.noshutdown: self.log.info("Stopping nodes") if self.nodes: self.stop_nodes() else: for node in self.nodes: node.cleanup_on_exit = False self.log.info( "Note: bitcoinds were not stopped and may still be running") should_clean_up = ( not self.options.nocleanup and not self.options.noshutdown and self.success != TestStatus.FAILED and not self.options.perf ) if should_clean_up: self.log.info("Cleaning up {} on exit".format(self.options.tmpdir)) cleanup_tree_on_exit = True elif self.options.perf: self.log.warning( "Not cleaning up dir {} due to perf data".format( self.options.tmpdir)) cleanup_tree_on_exit = False else: self.log.warning( "Not cleaning up dir {}".format(self.options.tmpdir)) cleanup_tree_on_exit = False if self.success == TestStatus.PASSED: self.log.info("Tests successful") exit_code = TEST_EXIT_PASSED elif self.success == TestStatus.SKIPPED: self.log.info("Test skipped") exit_code = TEST_EXIT_SKIPPED else: self.log.error( "Test failed. Test logging available at {}/test_framework.log".format(self.options.tmpdir)) self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath( os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir)) exit_code = TEST_EXIT_FAILED # Logging.shutdown will not remove stream- and filehandlers, so we must # do it explicitly. Handlers are removed so the next test run can apply # different log handler settings. # See: https://docs.python.org/3/library/logging.html#logging.shutdown for h in list(self.log.handlers): h.flush() h.close() self.log.removeHandler(h) rpc_logger = logging.getLogger("BitcoinRPC") for h in list(rpc_logger.handlers): h.flush() rpc_logger.removeHandler(h) if cleanup_tree_on_exit: shutil.rmtree(self.options.tmpdir) self.nodes.clear() return exit_code # Methods to override in subclass test scripts. def set_test_params(self): """Tests must override this method to change default values for number of nodes, topology, etc""" raise NotImplementedError def add_options(self, parser): """Override this method to add command-line options to the test""" pass def skip_test_if_missing_module(self): """Override this method to skip a test if a module is not compiled""" pass def setup_chain(self): """Override this method to customize blockchain setup""" self.log.info("Initializing test directory " + self.options.tmpdir) if self.setup_clean_chain: self._initialize_chain_clean() else: self._initialize_chain() def setup_network(self): """Override this method to customize test network topology""" self.setup_nodes() # Connect the nodes as a "chain".
This allows us # to split the network between nodes 1 and 2 to get # two halves that can work on competing chains. # # Topology looks like this: # node0 <-- node1 <-- node2 <-- node3 # # If all nodes are in IBD (clean chain from genesis), node0 is assumed to be the source of blocks (miner). To # ensure block propagation, all nodes will establish outgoing connections toward node0. # See fPreferredDownload in net_processing. # # If further outbound connections are needed, they can be added at the beginning of the test with e.g. # connect_nodes(self.nodes[1], 2) for i in range(self.num_nodes - 1): connect_nodes(self.nodes[i + 1], self.nodes[i]) self.sync_all() def setup_nodes(self): """Override this method to customize test node setup""" extra_args = None if hasattr(self, "extra_args"): extra_args = self.extra_args self.add_nodes(self.num_nodes, extra_args) self.start_nodes() self.import_deterministic_coinbase_privkeys() if not self.setup_clean_chain: for n in self.nodes: assert_equal(n.getblockchaininfo()["blocks"], 199) # To ensure that all nodes are out of IBD, the most recent block # must have a timestamp not too old (see IsInitialBlockDownload()). self.log.debug('Generate a block with current time') block_hash = self.nodes[0].generate(1)[0] block = self.nodes[0].getblock(blockhash=block_hash, verbosity=0) for n in self.nodes: n.submitblock(block) chain_info = n.getblockchaininfo() assert_equal(chain_info["blocks"], 200) assert_equal(chain_info["initialblockdownload"], False) def import_deterministic_coinbase_privkeys(self): for n in self.nodes: try: n.getwalletinfo() except JSONRPCException as e: assert str(e).startswith('Method not found') continue n.importprivkey( privkey=n.get_deterministic_priv_key().key, label='coinbase') def run_test(self): """Tests must override this method to define test logic""" raise NotImplementedError # Public helper methods. These can be accessed by the subclass test # scripts. def add_nodes(self, num_nodes, extra_args=None, *, host=None, binary=None): """Instantiate TestNode objects. 
Should only be called once after the nodes have been specified in set_test_params().""" if self.bind_to_localhost_only: extra_confs = [["bind=127.0.0.1"]] * num_nodes else: extra_confs = [[]] * num_nodes if extra_args is None: extra_args = [[]] * num_nodes if binary is None: binary = [self.options.bitcoind] * num_nodes assert_equal(len(extra_confs), num_nodes) assert_equal(len(extra_args), num_nodes) assert_equal(len(binary), num_nodes) for i in range(num_nodes): self.nodes.append(TestNode( i, get_datadir_path(self.options.tmpdir, i), chain=self.chain, host=host, rpc_port=rpc_port(i), p2p_port=p2p_port(i), timewait=self.rpc_timeout, bitcoind=binary[i], bitcoin_cli=self.options.bitcoincli, coverage_dir=self.options.coveragedir, cwd=self.options.tmpdir, extra_conf=extra_confs[i], extra_args=extra_args[i], use_cli=self.options.usecli, emulator=self.options.emulator, start_perf=self.options.perf, use_valgrind=self.options.valgrind, )) if self.options.axionactivation: self.nodes[i].extend_default_args( ["-axionactivationtime={}".format(TIMESTAMP_IN_THE_PAST)]) def start_node(self, i, *args, **kwargs): """Start a bitcoind""" node = self.nodes[i] node.start(*args, **kwargs) node.wait_for_rpc_connection() if self.options.coveragedir is not None: coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc) def start_nodes(self, extra_args=None, *args, **kwargs): """Start multiple bitcoinds""" if extra_args is None: extra_args = [None] * self.num_nodes assert_equal(len(extra_args), self.num_nodes) try: for i, node in enumerate(self.nodes): node.start(extra_args[i], *args, **kwargs) for node in self.nodes: node.wait_for_rpc_connection() except BaseException: # If one node failed to start, stop the others self.stop_nodes() raise if self.options.coveragedir is not None: for node in self.nodes: coverage.write_all_rpc_commands( self.options.coveragedir, node.rpc) def stop_node(self, i, expected_stderr='', wait=0): """Stop a bitcoind test node""" self.nodes[i].stop_node(expected_stderr, wait=wait) self.nodes[i].wait_until_stopped() def stop_nodes(self, wait=0): """Stop multiple bitcoind test nodes""" for node in self.nodes: # Issue RPC to stop nodes node.stop_node(wait=wait) for node in self.nodes: # Wait for nodes to stop node.wait_until_stopped() def restart_node(self, i, extra_args=None): """Stop and start a test node""" self.stop_node(i) self.start_node(i, extra_args) def wait_for_node_exit(self, i, timeout): self.nodes[i].process.wait(timeout) def split_network(self): """ Split the network of four nodes into nodes 0/1 and 2/3. """ disconnect_nodes(self.nodes[1], self.nodes[2]) disconnect_nodes(self.nodes[2], self.nodes[1]) self.sync_all(self.nodes[:2]) self.sync_all(self.nodes[2:]) def join_network(self): """ Join the (previously split) network halves together. """ connect_nodes(self.nodes[1], self.nodes[2]) self.sync_all() def sync_blocks(self, nodes=None, **kwargs): sync_blocks(nodes or self.nodes, **kwargs) def sync_mempools(self, nodes=None, **kwargs): sync_mempools(nodes or self.nodes, **kwargs) def sync_all(self, nodes=None, **kwargs): self.sync_blocks(nodes, **kwargs) self.sync_mempools(nodes, **kwargs) # Private helper methods. These should not be accessed by the subclass # test scripts. 
def _start_logging(self): # Add logger and logging handlers self.log = logging.getLogger('TestFramework') self.log.setLevel(logging.DEBUG) # Create file handler to log all messages fh = logging.FileHandler( self.options.tmpdir + '/test_framework.log', encoding='utf-8') fh.setLevel(logging.DEBUG) # Create console handler to log messages to stdout. By default this # logs only error messages, but can be configured with --loglevel. ch = logging.StreamHandler(sys.stdout) # User can provide log level as a number or string (eg DEBUG). loglevel # was caught as a string, so try to convert it to an int ll = int(self.options.loglevel) if self.options.loglevel.isdigit( ) else self.options.loglevel.upper() ch.setLevel(ll) # Format logs the same as bitcoind's debug.log with microprecision (so # log files can be concatenated and sorted) formatter = logging.Formatter( fmt='%(asctime)s.%(msecs)03d000Z %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%dT%H:%M:%S') formatter.converter = time.gmtime fh.setFormatter(formatter) ch.setFormatter(formatter) # add the handlers to the logger self.log.addHandler(fh) self.log.addHandler(ch) if self.options.trace_rpc: rpc_logger = logging.getLogger("BitcoinRPC") rpc_logger.setLevel(logging.DEBUG) rpc_handler = logging.StreamHandler(sys.stdout) rpc_handler.setLevel(logging.DEBUG) rpc_logger.addHandler(rpc_handler) def _initialize_chain(self): """Initialize a pre-mined blockchain for use by the test. Create a cache of a 199-block-long chain. Afterward, create num_nodes copies from the cache.""" # Use node 0 to create the cache for all other nodes CACHE_NODE_ID = 0 cache_node_dir = get_datadir_path(self.options.cachedir, CACHE_NODE_ID) assert self.num_nodes <= MAX_NODES if not os.path.isdir(cache_node_dir): self.log.debug( "Creating cache directory {}".format(cache_node_dir)) initialize_datadir( self.options.cachedir, CACHE_NODE_ID, self.chain) self.nodes.append( TestNode( CACHE_NODE_ID, cache_node_dir, chain=self.chain, extra_conf=["bind=127.0.0.1"], extra_args=['-disablewallet'], host=None, rpc_port=rpc_port(CACHE_NODE_ID), p2p_port=p2p_port(CACHE_NODE_ID), timewait=self.rpc_timeout, bitcoind=self.options.bitcoind, bitcoin_cli=self.options.bitcoincli, coverage_dir=None, cwd=self.options.tmpdir, emulator=self.options.emulator, )) if self.options.axionactivation: self.nodes[CACHE_NODE_ID].extend_default_args( ["-axionactivationtime={}".format(TIMESTAMP_IN_THE_PAST)]) self.start_node(CACHE_NODE_ID) cache_node = self.nodes[CACHE_NODE_ID] # Wait for RPC connections to be ready cache_node.wait_for_rpc_connection() # Set a time in the past, so that blocks don't end up in the future cache_node.setmocktime( cache_node.getblockheader( cache_node.getbestblockhash())['time']) # Create a 199-block-long chain; each of the first 4 nodes # gets 25 mature blocks and 25 immature. # The 4th node gets only 24 immature blocks so that the very last # block in the cache does not age too much (have an old tip age). # This is needed so that we are out of IBD when the test starts, # see the tip age check in IsInitialBlockDownload().
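# Arithmetic for the loop below: seven rounds of 25 blocks plus a
# final round of 24 gives 7 * 25 + 24 = 199. The mining address
# cycles through the 4 deterministic keys via i % 4, so keys 0-2
# mine 50 blocks each and key 3 mines 49.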
for i in range(8): cache_node.generatetoaddress( nblocks=25 if i != 7 else 24, address=TestNode.PRIV_KEYS[i % 4].address, ) assert_equal(cache_node.getblockchaininfo()["blocks"], 199) # Shut it down, and clean up cache directories: self.stop_nodes() self.nodes = [] def cache_path(*paths): - return os.path.join(cache_node_dir, "regtest", *paths) + return os.path.join(cache_node_dir, self.chain, *paths) # Remove empty wallets dir os.rmdir(cache_path('wallets')) for entry in os.listdir(cache_path()): # Only keep chainstate and blocks folder if entry not in ['chainstate', 'blocks']: os.remove(cache_path(entry)) for i in range(self.num_nodes): self.log.debug( "Copy cache directory {} to node {}".format( cache_node_dir, i)) to_dir = get_datadir_path(self.options.tmpdir, i) shutil.copytree(cache_node_dir, to_dir) # Overwrite port/rpcport in bitcoin.conf initialize_datadir(self.options.tmpdir, i, self.chain) def _initialize_chain_clean(self): """Initialize empty blockchain for use by the test. Create an empty blockchain and num_nodes wallets. Useful if a test case wants complete control over initialization.""" for i in range(self.num_nodes): initialize_datadir(self.options.tmpdir, i, self.chain) def skip_if_no_py3_zmq(self): """Attempt to import the zmq package and skip the test if the import fails.""" try: import zmq # noqa except ImportError: raise SkipTest("python3-zmq module not available.") def skip_if_no_bitcoind_zmq(self): """Skip the running test if bitcoind has not been compiled with zmq support.""" if not self.is_zmq_compiled(): raise SkipTest("bitcoind has not been built with zmq enabled.") def skip_if_no_wallet(self): """Skip the running test if wallet has not been compiled.""" if not self.is_wallet_compiled(): raise SkipTest("wallet has not been compiled.") def skip_if_no_wallet_tool(self): """Skip the running test if bitcoin-wallet has not been compiled.""" if not self.is_wallet_tool_compiled(): raise SkipTest("bitcoin-wallet has not been compiled") def skip_if_no_cli(self): """Skip the running test if bitcoin-cli has not been compiled.""" if not self.is_cli_compiled(): raise SkipTest("bitcoin-cli has not been compiled.") def is_cli_compiled(self): """Checks whether bitcoin-cli was compiled.""" return self.config["components"].getboolean("ENABLE_CLI") def is_wallet_compiled(self): """Checks whether the wallet module was compiled.""" return self.config["components"].getboolean("ENABLE_WALLET") def is_wallet_tool_compiled(self): """Checks whether bitcoin-wallet was compiled.""" return self.config["components"].getboolean("ENABLE_WALLET_TOOL") def is_zmq_compiled(self): """Checks whether the zmq module was compiled.""" return self.config["components"].getboolean("ENABLE_ZMQ") diff --git a/test/functional/tool_wallet.py b/test/functional/tool_wallet.py index db6ba1c9e..06daf2d4f 100755 --- a/test/functional/tool_wallet.py +++ b/test/functional/tool_wallet.py @@ -1,257 +1,257 @@ #!/usr/bin/env python3 # Copyright (c) 2018-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test bitcoin-wallet.""" import hashlib import os import stat import subprocess import textwrap from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal BUFFER_SIZE = 16 * 1024 class ToolWalletTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.rpc_timeout = 120 def skip_test_if_missing_module(self): self.skip_if_no_wallet() self.skip_if_no_wallet_tool() def bitcoin_wallet_process(self, *args): binary = self.config["environment"]["BUILDDIR"] + \ '/src/bitcoin-wallet' + self.config["environment"]["EXEEXT"] args = ['-datadir={}'.format(self.nodes[0].datadir), - '-regtest'] + list(args) + '-chain={}'.format(self.chain)] + list(args) command_line = [binary] + args if self.config["environment"]["EMULATOR"]: command_line = [ self.config["environment"]["EMULATOR"]] + command_line return subprocess.Popen(command_line, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) def assert_raises_tool_error(self, error, *args): p = self.bitcoin_wallet_process(*args) stdout, stderr = p.communicate() assert_equal(p.poll(), 1) assert_equal(stdout, '') assert_equal(stderr.strip(), error) def assert_tool_output(self, output, *args): p = self.bitcoin_wallet_process(*args) stdout, stderr = p.communicate() assert_equal(stderr, '') assert_equal(stdout, output) assert_equal(p.poll(), 0) def wallet_shasum(self): h = hashlib.sha1() mv = memoryview(bytearray(BUFFER_SIZE)) with open(self.wallet_path, 'rb', buffering=0) as f: for n in iter(lambda: f.readinto(mv), 0): h.update(mv[:n]) return h.hexdigest() def wallet_timestamp(self): return os.path.getmtime(self.wallet_path) def wallet_permissions(self): return oct(os.lstat(self.wallet_path).st_mode)[-3:] def log_wallet_timestamp_comparison(self, old, new): result = 'unchanged' if new == old else 'increased!' self.log.debug('Wallet file timestamp {}'.format(result)) def test_invalid_tool_commands_and_args(self): self.log.info( 'Testing that various invalid commands raise with specific error messages') self.assert_raises_tool_error('Invalid command: foo', 'foo') # `bitcoin-wallet help` raises an error. Use `bitcoin-wallet -help`. self.assert_raises_tool_error('Invalid command: help', 'help') self.assert_raises_tool_error( 'Error: two methods provided (info and create). Only one method should be provided.', 'info', 'create') self.assert_raises_tool_error( 'Error parsing command line arguments: Invalid parameter -foo', '-foo') self.assert_raises_tool_error( 'Error loading wallet.dat. Is wallet being used by another process?', '-wallet=wallet.dat', 'info', ) self.assert_raises_tool_error( 'Error: no wallet file at nonexistent.dat', '-wallet=nonexistent.dat', 'info') def test_tool_wallet_info(self): # Stop the node to close the wallet to call the info command. self.stop_node(0) self.log.info('Calling wallet tool info, testing output') # # TODO: Wallet tool info should work with wallet file permissions set to # read-only without raising: # "Error loading wallet.dat. Is wallet being used by another process?" # The following lines should be uncommented and the tests still succeed: # # self.log.debug('Setting wallet file permissions to 400 (read-only)') # os.chmod(self.wallet_path, stat.S_IRUSR) # assert self.wallet_permissions() in ['400', '666'] # Sanity check. 666 because Appveyor. 
# shasum_before = self.wallet_shasum() timestamp_before = self.wallet_timestamp() self.log.debug( 'Wallet file timestamp before calling info: {}'.format(timestamp_before)) out = textwrap.dedent('''\ Wallet info =========== Encrypted: no HD (hd seed available): yes Keypool Size: 2 Transactions: 0 Address Book: 1 ''') self.assert_tool_output(out, '-wallet=wallet.dat', 'info') timestamp_after = self.wallet_timestamp() self.log.debug( 'Wallet file timestamp after calling info: {}'.format(timestamp_after)) self.log_wallet_timestamp_comparison(timestamp_before, timestamp_after) self.log.debug( 'Setting wallet file permissions back to 600 (read/write)') os.chmod(self.wallet_path, stat.S_IRUSR | stat.S_IWUSR) # Sanity check. 666 because Appveyor. assert self.wallet_permissions() in ['600', '666'] # # TODO: Wallet tool info should not write to the wallet file. # The following lines should be uncommented and the tests still succeed: # # assert_equal(timestamp_before, timestamp_after) # shasum_after = self.wallet_shasum() # assert_equal(shasum_before, shasum_after) # self.log.debug('Wallet file shasum unchanged\n') def test_tool_wallet_info_after_transaction(self): """ Mutate the wallet with a transaction to verify that the info command output changes accordingly. """ self.start_node(0) self.log.info('Generating transaction to mutate wallet') self.nodes[0].generate(1) self.stop_node(0) self.log.info( 'Calling wallet tool info after generating a transaction, testing output') shasum_before = self.wallet_shasum() timestamp_before = self.wallet_timestamp() self.log.debug( 'Wallet file timestamp before calling info: {}'.format(timestamp_before)) out = textwrap.dedent('''\ Wallet info =========== Encrypted: no HD (hd seed available): yes Keypool Size: 2 Transactions: 1 Address Book: 1 ''') self.assert_tool_output(out, '-wallet=wallet.dat', 'info') shasum_after = self.wallet_shasum() timestamp_after = self.wallet_timestamp() self.log.debug( 'Wallet file timestamp after calling info: {}'.format(timestamp_after)) self.log_wallet_timestamp_comparison(timestamp_before, timestamp_after) # # TODO: Wallet tool info should not write to the wallet file. # This assertion should be uncommented and succeed: # assert_equal(timestamp_before, timestamp_after) assert_equal(shasum_before, shasum_after) self.log.debug('Wallet file shasum unchanged\n') def test_tool_wallet_create_on_existing_wallet(self): self.log.info( 'Calling wallet tool create on an existing wallet, testing output') shasum_before = self.wallet_shasum() timestamp_before = self.wallet_timestamp() self.log.debug( 'Wallet file timestamp before calling create: {}'.format(timestamp_before)) out = textwrap.dedent('''\ Topping up keypool... 
Wallet info =========== Encrypted: no HD (hd seed available): yes Keypool Size: 2000 Transactions: 0 Address Book: 0 ''') self.assert_tool_output(out, '-wallet=foo', 'create') shasum_after = self.wallet_shasum() timestamp_after = self.wallet_timestamp() self.log.debug( 'Wallet file timestamp after calling create: {}'.format(timestamp_after)) self.log_wallet_timestamp_comparison(timestamp_before, timestamp_after) assert_equal(timestamp_before, timestamp_after) assert_equal(shasum_before, shasum_after) self.log.debug('Wallet file shasum unchanged\n') def test_getwalletinfo_on_different_wallet(self): self.log.info('Starting node with arg -wallet=foo') self.start_node(0, ['-wallet=foo']) self.log.info( 'Calling getwalletinfo on a different wallet ("foo"), testing output') shasum_before = self.wallet_shasum() timestamp_before = self.wallet_timestamp() self.log.debug( 'Wallet file timestamp before calling getwalletinfo: {}'.format(timestamp_before)) out = self.nodes[0].getwalletinfo() self.stop_node(0) shasum_after = self.wallet_shasum() timestamp_after = self.wallet_timestamp() self.log.debug( 'Wallet file timestamp after calling getwalletinfo: {}'.format(timestamp_after)) assert_equal(0, out['txcount']) assert_equal(1000, out['keypoolsize']) assert_equal(1000, out['keypoolsize_hd_internal']) assert_equal(True, 'hdseedid' in out) self.log_wallet_timestamp_comparison(timestamp_before, timestamp_after) assert_equal(timestamp_before, timestamp_after) assert_equal(shasum_after, shasum_before) self.log.debug('Wallet file shasum unchanged\n') def test_salvage(self): # TODO: Check salvage actually salvages and doesn't break things. # https://github.com/bitcoin/bitcoin/issues/7463 self.log.info('Check salvage') self.start_node(0, ['-wallet=salvage']) self.stop_node(0) self.assert_tool_output('', '-wallet=salvage', 'salvage') def run_test(self): self.wallet_path = os.path.join( - self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat') + self.nodes[0].datadir, self.chain, 'wallets', 'wallet.dat') self.test_invalid_tool_commands_and_args() # Warning: The following tests are order-dependent. self.test_tool_wallet_info() self.test_tool_wallet_info_after_transaction() self.test_tool_wallet_create_on_existing_wallet() self.test_getwalletinfo_on_different_wallet() self.test_salvage() if __name__ == '__main__': ToolWalletTest().main() diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py index fec54b6b2..26a755d77 100755 --- a/test/functional/wallet_backup.py +++ b/test/functional/wallet_backup.py @@ -1,241 +1,249 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the wallet backup features. Test case is: 4 nodes. 1 2 and 3 send transactions between each other, fourth node is a miner. 1 2 3 each mine a block to start, then Miner creates 100 blocks so 1 2 3 each have 50 mature coins to spend. Then 5 iterations of 1/2/3 sending coins amongst themselves to get transactions in the wallets, and the miner mining one block. Wallets are backed up using dumpwallet/backupwallet. Then 5 more iterations of transactions and mining a block. Miner then generates 101 more blocks, so any transaction fees paid mature. Sanity check: Sum(1,2,3,4 balances) == 114*50 1/2/3 are shutdown, and their wallets erased. Then restore using wallet.dat backup. And confirm 1/2/3/4 balances are same as before. 
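(Arithmetic behind the sanity check: 3 + 100 blocks during setup, 10 during the transaction rounds, and 101 at the end give 214 blocks in total; with a 100-block coinbase maturity, 214 - 100 = 114 coinbases are spendable, hence 114 * 50 = 5700.)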
Shutdown again, restore using importwallet, and confirm again balances are correct. """ from decimal import Decimal import os from random import randint import shutil from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, connect_nodes, ) class WalletBackupTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 4 self.setup_clean_chain = True # nodes 1, 2,3 are spenders, let's give them a keypool=100 # whitelist all peers to speed up tx relay / mempool sync self.extra_args = [ ["-keypool=100", "-whitelist=127.0.0.1"], ["-keypool=100", "-whitelist=127.0.0.1"], ["-keypool=100", "-whitelist=127.0.0.1"], ["-whitelist=127.0.0.1"] ] self.rpc_timeout = 120 def skip_test_if_missing_module(self): self.skip_if_no_wallet() def setup_network(self): self.setup_nodes() connect_nodes(self.nodes[0], self.nodes[3]) connect_nodes(self.nodes[1], self.nodes[3]) connect_nodes(self.nodes[2], self.nodes[3]) connect_nodes(self.nodes[2], self.nodes[0]) self.sync_all() def one_send(self, from_node, to_address): if (randint(1, 2) == 1): amount = Decimal(randint(1, 10)) / Decimal(10) self.nodes[from_node].sendtoaddress(to_address, amount) def do_one_round(self): a0 = self.nodes[0].getnewaddress() a1 = self.nodes[1].getnewaddress() a2 = self.nodes[2].getnewaddress() self.one_send(0, a1) self.one_send(0, a2) self.one_send(1, a0) self.one_send(1, a2) self.one_send(2, a0) self.one_send(2, a1) # Have the miner (node3) mine a block. # Must sync mempools before mining. self.sync_mempools() self.nodes[3].generate(1) self.sync_blocks() # As above, this mirrors the original bash test. def start_three(self): self.start_node(0) self.start_node(1) self.start_node(2) connect_nodes(self.nodes[0], self.nodes[3]) connect_nodes(self.nodes[1], self.nodes[3]) connect_nodes(self.nodes[2], self.nodes[3]) connect_nodes(self.nodes[2], self.nodes[0]) def stop_three(self): self.stop_node(0) self.stop_node(1) self.stop_node(2) def erase_three(self): os.remove(os.path.join( - self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat')) + self.nodes[0].datadir, self.chain, 'wallets', 'wallet.dat')) os.remove(os.path.join( - self.nodes[1].datadir, 'regtest', 'wallets', 'wallet.dat')) + self.nodes[1].datadir, self.chain, 'wallets', 'wallet.dat')) os.remove(os.path.join( - self.nodes[2].datadir, 'regtest', 'wallets', 'wallet.dat')) + self.nodes[2].datadir, self.chain, 'wallets', 'wallet.dat')) def run_test(self): self.log.info("Generating initial blockchain") self.nodes[0].generate(1) self.sync_blocks() self.nodes[1].generate(1) self.sync_blocks() self.nodes[2].generate(1) self.sync_blocks() self.nodes[3].generate(100) self.sync_blocks() assert_equal(self.nodes[0].getbalance(), 50) assert_equal(self.nodes[1].getbalance(), 50) assert_equal(self.nodes[2].getbalance(), 50) assert_equal(self.nodes[3].getbalance(), 0) self.log.info("Creating transactions") # Five rounds of sending each other transactions. 
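# Each do_one_round() call makes up to six random sends among nodes
# 0-2 (each send fires with probability 1/2), syncs mempools, and has
# the miner (node3) produce one block, so each 5-round loop adds
# exactly 5 blocks no matter how many sends fire.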
for i in range(5): self.do_one_round() self.log.info("Backing up") self.nodes[0].backupwallet(os.path.join( self.nodes[0].datadir, 'wallet.bak')) self.nodes[0].dumpwallet(os.path.join( self.nodes[0].datadir, 'wallet.dump')) self.nodes[1].backupwallet(os.path.join( self.nodes[1].datadir, 'wallet.bak')) self.nodes[1].dumpwallet(os.path.join( self.nodes[1].datadir, 'wallet.dump')) self.nodes[2].backupwallet(os.path.join( self.nodes[2].datadir, 'wallet.bak')) self.nodes[2].dumpwallet(os.path.join( self.nodes[2].datadir, 'wallet.dump')) self.log.info("More transactions") for i in range(5): self.do_one_round() # Generate 101 more blocks, so any fees paid mature self.nodes[3].generate(101) self.sync_all() balance0 = self.nodes[0].getbalance() balance1 = self.nodes[1].getbalance() balance2 = self.nodes[2].getbalance() balance3 = self.nodes[3].getbalance() total = balance0 + balance1 + balance2 + balance3 # At this point, there are 214 blocks (103 for setup, then 10 rounds, then 101.) # 114 are mature, so the sum of all wallets should be 114 * 50 = 5700. assert_equal(total, 5700) ## # Test restoring spender wallets from backups ## self.log.info("Restoring using wallet.dat") self.stop_three() self.erase_three() # Start node2 with no chain - shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'blocks')) + shutil.rmtree( + os.path.join( + self.nodes[2].datadir, + self.chain, + 'blocks')) shutil.rmtree(os.path.join( - self.nodes[2].datadir, 'regtest', 'chainstate')) + self.nodes[2].datadir, self.chain, 'chainstate')) # Restore wallets from backup shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join( - self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat')) + self.nodes[0].datadir, self.chain, 'wallets', 'wallet.dat')) shutil.copyfile(os.path.join(self.nodes[1].datadir, 'wallet.bak'), os.path.join( - self.nodes[1].datadir, 'regtest', 'wallets', 'wallet.dat')) + self.nodes[1].datadir, self.chain, 'wallets', 'wallet.dat')) shutil.copyfile(os.path.join(self.nodes[2].datadir, 'wallet.bak'), os.path.join( - self.nodes[2].datadir, 'regtest', 'wallets', 'wallet.dat')) + self.nodes[2].datadir, self.chain, 'wallets', 'wallet.dat')) self.log.info("Re-starting nodes") self.start_three() self.sync_blocks() assert_equal(self.nodes[0].getbalance(), balance0) assert_equal(self.nodes[1].getbalance(), balance1) assert_equal(self.nodes[2].getbalance(), balance2) self.log.info("Restoring using dumped wallet") self.stop_three() self.erase_three() # start node2 with no chain - shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'blocks')) + shutil.rmtree( + os.path.join( + self.nodes[2].datadir, + self.chain, + 'blocks')) shutil.rmtree(os.path.join( - self.nodes[2].datadir, 'regtest', 'chainstate')) + self.nodes[2].datadir, self.chain, 'chainstate')) self.start_three() assert_equal(self.nodes[0].getbalance(), 0) assert_equal(self.nodes[1].getbalance(), 0) assert_equal(self.nodes[2].getbalance(), 0) self.nodes[0].importwallet(os.path.join( self.nodes[0].datadir, 'wallet.dump')) self.nodes[1].importwallet(os.path.join( self.nodes[1].datadir, 'wallet.dump')) self.nodes[2].importwallet(os.path.join( self.nodes[2].datadir, 'wallet.dump')) self.sync_blocks() assert_equal(self.nodes[0].getbalance(), balance0) assert_equal(self.nodes[1].getbalance(), balance1) assert_equal(self.nodes[2].getbalance(), balance2) # Backup to source wallet file must fail sourcePaths = [ os.path.join(self.nodes[0].datadir, - 'regtest', 'wallets', 'wallet.dat'), - os.path.join(self.nodes[0].datadir, 
'regtest', + self.chain, 'wallets', 'wallet.dat'), + os.path.join(self.nodes[0].datadir, self.chain, '.', 'wallets', 'wallet.dat'), - os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', ''), - os.path.join(self.nodes[0].datadir, 'regtest', 'wallets')] + os.path.join(self.nodes[0].datadir, self.chain, 'wallets', ''), + os.path.join(self.nodes[0].datadir, self.chain, 'wallets')] for sourcePath in sourcePaths: assert_raises_rpc_error(-4, "backup failed", self.nodes[0].backupwallet, sourcePath) if __name__ == '__main__': WalletBackupTest().main() diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py index a1fb6752b..8fb486892 100755 --- a/test/functional/wallet_hd.py +++ b/test/functional/wallet_hd.py @@ -1,189 +1,197 @@ #!/usr/bin/env python3 # Copyright (c) 2016-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test Hierarchical Deterministic wallet function.""" import os import shutil from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, connect_nodes, assert_raises_rpc_error ) class WalletHDTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [[], ['-keypool=0']] self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): # Make sure we use hd, keep masterkeyid masterkeyid = self.nodes[1].getwalletinfo()['hdseedid'] assert_equal(len(masterkeyid), 40) # create an internal key change_addr = self.nodes[1].getrawchangeaddress() change_addrV = self.nodes[1].getaddressinfo(change_addr) # first internal child key assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") # Import a non-HD private key in the HD wallet non_hd_add = self.nodes[0].getnewaddress() self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add)) # This should be enough to keep the master key and the non-HD key self.nodes[1].backupwallet( os.path.join(self.nodes[1].datadir, "hd.bak")) # self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump")) # Derive some HD addresses and remember the last # Also send funds to each address self.nodes[0].generate(101) hd_add = None NUM_HD_ADDS = 10 for i in range(NUM_HD_ADDS): hd_add = self.nodes[1].getnewaddress() hd_info = self.nodes[1].getaddressinfo(hd_add) assert_equal(hd_info["hdkeypath"], "m/0'/0'/" + str(i) + "'") assert_equal(hd_info["hdseedid"], masterkeyid) self.nodes[0].sendtoaddress(hd_add, 1) self.nodes[0].generate(1) self.nodes[0].sendtoaddress(non_hd_add, 1) self.nodes[0].generate(1) # create an internal key (again) change_addr = self.nodes[1].getrawchangeaddress() change_addrV = self.nodes[1].getaddressinfo(change_addr) # second internal child key assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") self.sync_all() assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) self.log.info("Restore backup ...") self.stop_node(1) # we need to delete the complete regtest directory # otherwise node1 would auto-recover all funds and flag the keypool keys # as used - shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks")) + shutil.rmtree( + os.path.join( + self.nodes[1].datadir, + self.chain, + "blocks")) shutil.rmtree(os.path.join( - self.nodes[1].datadir, "regtest", "chainstate")) + self.nodes[1].datadir, self.chain, "chainstate")) shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join( - self.nodes[1].datadir,
"regtest", "wallets", "wallet.dat")) + self.nodes[1].datadir, self.chain, "wallets", "wallet.dat")) self.start_node(1) # Assert that derivation is deterministic hd_add_2 = None for i in range(NUM_HD_ADDS): hd_add_2 = self.nodes[1].getnewaddress() hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2) assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(i) + "'") assert_equal(hd_info_2["hdseedid"], masterkeyid) assert_equal(hd_add, hd_add_2) connect_nodes(self.nodes[0], self.nodes[1]) self.sync_all() # Needs rescan self.stop_node(1) self.start_node(1, extra_args=self.extra_args[1] + ['-rescan']) assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) # Try a RPC based rescan self.stop_node(1) - shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks")) + shutil.rmtree( + os.path.join( + self.nodes[1].datadir, + self.chain, + "blocks")) shutil.rmtree(os.path.join( - self.nodes[1].datadir, "regtest", "chainstate")) + self.nodes[1].datadir, self.chain, "chainstate")) shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join( - self.nodes[1].datadir, "regtest", "wallets", "wallet.dat")) + self.nodes[1].datadir, self.chain, "wallets", "wallet.dat")) self.start_node(1, extra_args=self.extra_args[1]) connect_nodes(self.nodes[0], self.nodes[1]) self.sync_all() # Wallet automatically scans blocks older than key on startup assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) out = self.nodes[1].rescanblockchain(0, 1) assert_equal(out['start_height'], 0) assert_equal(out['stop_height'], 1) out = self.nodes[1].rescanblockchain(2, 4) assert_equal(out['start_height'], 2) assert_equal(out['stop_height'], 4) out = self.nodes[1].rescanblockchain(3) assert_equal(out['start_height'], 3) assert_equal(out['stop_height'], self.nodes[1].getblockcount()) out = self.nodes[1].rescanblockchain() assert_equal(out['start_height'], 0) assert_equal(out['stop_height'], self.nodes[1].getblockcount()) assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) # send a tx and make sure its using the internal chain for the # changeoutput txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1) outs = self.nodes[1].decoderawtransaction( self.nodes[1].gettransaction(txid)['hex'])['vout'] keypath = "" for out in outs: if out['value'] != 1: keypath = self.nodes[1].getaddressinfo( out['scriptPubKey']['addresses'][0])['hdkeypath'] assert_equal(keypath[0:7], "m/0'/1'") # Generate a new HD seed on node 1 and make sure it is set orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid'] self.nodes[1].sethdseed() new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid'] assert orig_masterkeyid != new_masterkeyid addr = self.nodes[1].getnewaddress() # Make sure the new address is the first from the keypool assert_equal(self.nodes[1].getaddressinfo( addr)['hdkeypath'], 'm/0\'/0\'/0\'') self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key # Set a new HD seed on node 1 without flushing the keypool new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress()) orig_masterkeyid = new_masterkeyid self.nodes[1].sethdseed(False, new_seed) new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid'] assert orig_masterkeyid != new_masterkeyid addr = self.nodes[1].getnewaddress() assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo( addr)['hdseedid']) # Make sure the new address continues previous keypool assert_equal(self.nodes[1].getaddressinfo( addr)['hdkeypath'], 'm/0\'/0\'/1\'') # Check that the next address is from the new seed self.nodes[1].keypoolrefill(1) next_addr = 
self.nodes[1].getnewaddress() assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo( next_addr)['hdseedid']) # Make sure the new address is not from the previous keypool assert_equal(self.nodes[1].getaddressinfo( next_addr)['hdkeypath'], 'm/0\'/0\'/0\'') assert next_addr != addr # Sethdseed parameter validity assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0) assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif") assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool") assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True) assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed) assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress())) if __name__ == '__main__': WalletHDTest().main() diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py index 43aeef8c9..f1945ba27 100755 --- a/test/functional/wallet_keypool_topup.py +++ b/test/functional/wallet_keypool_topup.py @@ -1,74 +1,74 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test HD Wallet keypool restore function. Two nodes. Node1 is under test. Node0 is providing transactions and generating blocks. - Start node1, shut down and back up the wallet. - Generate 110 keys (enough to drain the keypool). Store key 90 (in the initial keypool) and key 110 (beyond the initial keypool). Send funds to key 90 and key 110. - Stop node1, clear the datadir, move wallet file back into the datadir and restart node1. - Connect node1 to node0.
Verify that they sync and node1 receives its funds.""" import os import shutil from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, connect_nodes, ) class KeypoolRestoreTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [[], ['-keypool=100']] def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): wallet_path = os.path.join( - self.nodes[1].datadir, "regtest", "wallets", "wallet.dat") + self.nodes[1].datadir, self.chain, "wallets", "wallet.dat") wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak") self.nodes[0].generate(101) self.log.info("Make backup of wallet") self.stop_node(1) shutil.copyfile(wallet_path, wallet_backup_path) self.start_node(1, self.extra_args[1]) connect_nodes(self.nodes[0], self.nodes[1]) self.log.info("Generate keys for wallet") for _ in range(90): addr_oldpool = self.nodes[1].getnewaddress() for _ in range(20): addr_extpool = self.nodes[1].getnewaddress() self.log.info("Send funds to wallet") self.nodes[0].sendtoaddress(addr_oldpool, 10) self.nodes[0].generate(1) self.nodes[0].sendtoaddress(addr_extpool, 5) self.nodes[0].generate(1) self.sync_blocks() self.log.info("Restart node with wallet backup") self.stop_node(1) shutil.copyfile(wallet_backup_path, wallet_path) self.start_node(1, self.extra_args[1]) connect_nodes(self.nodes[0], self.nodes[1]) self.sync_all() self.log.info("Verify keypool is restored and balance is correct") assert_equal(self.nodes[1].getbalance(), 15) assert_equal(self.nodes[1].listtransactions() [0]['category'], "receive") # Check that we have marked all keys up to the used keypool key as used assert_equal(self.nodes[1].getaddressinfo( self.nodes[1].getnewaddress())['hdkeypath'], "m/0'/0'/110'") if __name__ == '__main__': KeypoolRestoreTest().main() diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py index 0b8b8e49b..dfb8df145 100755 --- a/test/functional/wallet_multiwallet.py +++ b/test/functional/wallet_multiwallet.py @@ -1,402 +1,402 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test multiwallet. 
Verify that a bitcoind node can load multiple wallet files """ import os import shutil import time from test_framework.test_framework import BitcoinTestFramework from test_framework.test_node import ErrorMatch from test_framework.util import ( assert_equal, assert_raises_rpc_error, ) FEATURE_LATEST = 200300 class MultiWalletTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 def skip_test_if_missing_module(self): self.skip_if_no_wallet() def add_options(self, parser): parser.add_argument( '--data_wallets_dir', default=os.path.join( os.path.dirname( os.path.realpath(__file__)), 'data/wallets/'), help='Test data with wallet directories (default: %(default)s)', ) def run_test(self): node = self.nodes[0] - def data_dir(*p): return os.path.join(node.datadir, 'regtest', *p) + def data_dir(*p): return os.path.join(node.datadir, self.chain, *p) def wallet_dir(*p): return data_dir('wallets', *p) def wallet(name): return node.get_wallet_rpc(name) def wallet_file(name): if os.path.isdir(wallet_dir(name)): return wallet_dir(name, "wallet.dat") return wallet_dir(name) assert_equal(self.nodes[0].listwalletdir(), {'wallets': [{'name': ''}]}) # check wallet.dat is created self.stop_nodes() assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True) # create symlink to verify wallet directory path can be referenced # through symlink if os.name != 'nt': os.mkdir(wallet_dir('w7')) os.symlink('w7', wallet_dir('w7_symlink')) # rename wallet.dat to make sure plain wallet file paths (as opposed to # directory paths) can be loaded os.rename(wallet_dir("wallet.dat"), wallet_dir("w8")) # create another dummy wallet for use in testing backups later self.start_node(0, []) self.stop_nodes() empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat') os.rename(wallet_dir("wallet.dat"), empty_wallet) # restart node with a mix of wallet names: # w1, w2, w3 - to verify new wallets created when non-existing paths specified # w - to verify wallet name matching works when one wallet path is prefix of another # sub/w5 - to verify relative wallet path is created correctly # extern/w6 - to verify absolute wallet path is created correctly # w7_symlink - to verify symlinked wallet path is initialized correctly # w8 - to verify existing wallet file is loaded correctly # '' - to verify default wallet file is created correctly wallet_names = ['w1', 'w2', 'w3', 'w', 'sub/w5', os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', ''] if os.name == 'nt': wallet_names.remove('w7_symlink') extra_args = ['-wallet={}'.format(n) for n in wallet_names] self.start_node(0, extra_args) assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), [ '', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8']) assert_equal(set(node.listwallets()), set(wallet_names)) # check that all requested wallets were created self.stop_node(0) for wallet_name in wallet_names: assert_equal(os.path.isfile(wallet_file(wallet_name)), True) # should not initialize if wallet path can't be created exp_stderr = "boost::filesystem::create_directory:" self.nodes[0].assert_start_raises_init_error( ['-wallet=wallet.dat/bad'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX) self.nodes[0].assert_start_raises_init_error( ['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" does not exist') self.nodes[0].assert_start_raises_init_error( ['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" is a relative path', cwd=data_dir()) 
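The wallet_file() helper above mirrors how bitcoind resolves a -wallet argument: the name may point at a directory that contains a wallet.dat, or directly at a wallet file. A standalone sketch of that resolution logic, under the same layout assumptions (resolve_wallet_file is a hypothetical name, not part of the framework):

```python
import os

def resolve_wallet_file(wallets_root, name):
    # A -wallet name that refers to a directory means the database
    # lives at <name>/wallet.dat; otherwise the name is the file itself.
    path = os.path.join(wallets_root, name)
    if os.path.isdir(path):
        return os.path.join(path, "wallet.dat")
    return path
```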
self.nodes[0].assert_start_raises_init_error( ['-walletdir=debug.log'], 'Error: Specified -walletdir "debug.log" is not a directory', cwd=data_dir()) # should not initialize if there are duplicate wallets self.nodes[0].assert_start_raises_init_error( ['-wallet=w1', '-wallet=w1'], 'Error: Error loading wallet w1. Duplicate -wallet filename specified.') # should not initialize if one wallet is a copy of another shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy')) exp_stderr = r"BerkeleyBatch: Can't open database w8_copy \(duplicates fileid \w+ from w8\)" self.nodes[0].assert_start_raises_init_error( ['-wallet=w8', '-wallet=w8_copy'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX) # should not initialize if wallet file is a symlink if os.name != 'nt': os.symlink('w8', wallet_dir('w8_symlink')) self.nodes[0].assert_start_raises_init_error( ['-wallet=w8_symlink'], r'Error: Invalid -wallet path \'w8_symlink\'\. .*', match=ErrorMatch.FULL_REGEX) # should not initialize if the specified walletdir does not exist self.nodes[0].assert_start_raises_init_error( ['-walletdir=bad'], 'Error: Specified -walletdir "bad" does not exist') # should not initialize if the specified walletdir is not a directory not_a_dir = wallet_dir('notadir') open(not_a_dir, 'a', encoding="utf8").close() self.nodes[0].assert_start_raises_init_error( ['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory') self.log.info("Do not allow -zapwallettxes with multiwallet") self.nodes[0].assert_start_raises_init_error( [ '-zapwallettxes', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file") self.nodes[0].assert_start_raises_init_error( [ '-zapwallettxes=1', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file") self.nodes[0].assert_start_raises_init_error( [ '-zapwallettxes=2', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file") # if wallets/ doesn't exist, datadir should be the default wallet dir wallet_dir2 = data_dir('walletdir') os.rename(wallet_dir(), wallet_dir2) self.start_node(0, ['-wallet=w4', '-wallet=w5']) assert_equal(set(node.listwallets()), {"w4", "w5"}) w5 = wallet("w5") node.generatetoaddress(nblocks=1, address=w5.getnewaddress()) # now if wallets/ exists again, but the rootdir is specified as the # walletdir, w4 and w5 should still be loaded os.rename(wallet_dir2, wallet_dir()) self.restart_node(0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()]) assert_equal(set(node.listwallets()), {"w4", "w5"}) w5 = wallet("w5") w5_info = w5.getwalletinfo() assert_equal(w5_info['immature_balance'], 50) competing_wallet_dir = os.path.join( self.options.tmpdir, 'competing_walletdir') os.mkdir(competing_wallet_dir) self.restart_node(0, ['-walletdir=' + competing_wallet_dir]) exp_stderr = r"Error: Error initializing wallet database environment \"\S+competing_walletdir\"!" 
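The three -zapwallettxes cases above differ only in the flag's value, so they could be driven by a single loop. An illustrative refactor under that observation (not part of this patch; it would live inside run_test()):

```python
for zap_arg in ['-zapwallettxes', '-zapwallettxes=1', '-zapwallettxes=2']:
    # Every variant must refuse to start when more than one wallet is loaded.
    self.nodes[0].assert_start_raises_init_error(
        [zap_arg, '-wallet=w1', '-wallet=w2'],
        "Error: -zapwallettxes is only allowed with a single wallet file")
```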
self.nodes[1].assert_start_raises_init_error( ['-walletdir=' + competing_wallet_dir], exp_stderr, match=ErrorMatch.PARTIAL_REGEX) self.restart_node(0, extra_args) assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), [ '', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy']) wallets = [wallet(w) for w in wallet_names] wallet_bad = wallet("bad") # check wallet names and balances node.generatetoaddress(nblocks=1, address=wallets[0].getnewaddress()) for wallet_name, wallet in zip(wallet_names, wallets): info = wallet.getwalletinfo() assert_equal(info['immature_balance'], 50 if wallet is wallets[0] else 0) assert_equal(info['walletname'], wallet_name) # accessing invalid wallet fails assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", wallet_bad.getwalletinfo) # accessing wallet RPC without using wallet endpoint fails assert_raises_rpc_error(-19, "Wallet file not specified (must request wallet RPC through /wallet/ uri-path).", node.getwalletinfo) w1, w2, w3, w4, *_ = wallets node.generatetoaddress(nblocks=101, address=w1.getnewaddress()) assert_equal(w1.getbalance(), 100) assert_equal(w2.getbalance(), 0) assert_equal(w3.getbalance(), 0) assert_equal(w4.getbalance(), 0) w1.sendtoaddress(w2.getnewaddress(), 1) w1.sendtoaddress(w3.getnewaddress(), 2) w1.sendtoaddress(w4.getnewaddress(), 3) node.generatetoaddress(nblocks=1, address=w1.getnewaddress()) assert_equal(w2.getbalance(), 1) assert_equal(w3.getbalance(), 2) assert_equal(w4.getbalance(), 3) batch = w1.batch([w1.getblockchaininfo.get_request(), w1.getwalletinfo.get_request()]) - assert_equal(batch[0]["result"]["chain"], "regtest") + assert_equal(batch[0]["result"]["chain"], self.chain) assert_equal(batch[1]["result"]["walletname"], "w1") self.log.info('Check for per-wallet settxfee call') assert_equal(w1.getwalletinfo()['paytxfee'], 0) assert_equal(w2.getwalletinfo()['paytxfee'], 0) w2.settxfee(4.0) assert_equal(w1.getwalletinfo()['paytxfee'], 0) assert_equal(w2.getwalletinfo()['paytxfee'], 4.0) self.log.info("Test dynamic wallet loading") self.restart_node(0, ['-nowallet']) assert_equal(node.listwallets(), []) assert_raises_rpc_error(-32601, "Method not found", node.getwalletinfo) self.log.info("Load first wallet") loadwallet_name = node.loadwallet(wallet_names[0]) assert_equal(loadwallet_name['name'], wallet_names[0]) assert_equal(node.listwallets(), wallet_names[0:1]) node.getwalletinfo() w1 = node.get_wallet_rpc(wallet_names[0]) w1.getwalletinfo() self.log.info("Load second wallet") loadwallet_name = node.loadwallet(wallet_names[1]) assert_equal(loadwallet_name['name'], wallet_names[1]) assert_equal(node.listwallets(), wallet_names[0:2]) assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo) w2 = node.get_wallet_rpc(wallet_names[1]) w2.getwalletinfo() self.log.info("Load remaining wallets") for wallet_name in wallet_names[2:]: loadwallet_name = self.nodes[0].loadwallet(wallet_name) assert_equal(loadwallet_name['name'], wallet_name) assert_equal(set(self.nodes[0].listwallets()), set(wallet_names)) # Fail to load if wallet doesn't exist assert_raises_rpc_error(-18, 'Wallet wallets not found.', self.nodes[0].loadwallet, 'wallets') # Fail to load duplicate wallets assert_raises_rpc_error( -4, 'Wallet file verification failed. Error loading wallet w1. 
Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0]) # Fail to load duplicate wallets in different ways (directory and # filepath) assert_raises_rpc_error( -4, "Wallet file verification failed. Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat') # Fail to load if one wallet is a copy of another assert_raises_rpc_error(-1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy') # Fail to load if one wallet is a copy of another. # Test this twice to make sure that we don't re-introduce # https://github.com/bitcoin/bitcoin/issues/14304 assert_raises_rpc_error(-1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy') # Fail to load if wallet file is a symlink if os.name != 'nt': assert_raises_rpc_error( -4, "Wallet file verification failed. Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink') # Fail to load if a directory is specified that doesn't contain a # wallet os.mkdir(wallet_dir('empty_wallet_dir')) assert_raises_rpc_error(-18, "Directory empty_wallet_dir does not contain a wallet.dat file", self.nodes[0].loadwallet, 'empty_wallet_dir') self.log.info("Test dynamic wallet creation.") # Fail to create a wallet if it already exists. assert_raises_rpc_error(-4, "Wallet w2 already exists.", self.nodes[0].createwallet, 'w2') # Successfully create a wallet with a new name loadwallet_name = self.nodes[0].createwallet('w9') assert_equal(loadwallet_name['name'], 'w9') w9 = node.get_wallet_rpc('w9') assert_equal(w9.getwalletinfo()['walletname'], 'w9') assert 'w9' in self.nodes[0].listwallets() # Successfully create a wallet using a full path new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir') new_wallet_name = os.path.join(new_wallet_dir, 'w10') loadwallet_name = self.nodes[0].createwallet(new_wallet_name) assert_equal(loadwallet_name['name'], new_wallet_name) w10 = node.get_wallet_rpc(new_wallet_name) assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name) assert new_wallet_name in self.nodes[0].listwallets() self.log.info("Test dynamic wallet unloading") # Test `unloadwallet` errors assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].unloadwallet) assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", self.nodes[0].unloadwallet, "dummy") assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", node.get_wallet_rpc("dummy").unloadwallet) assert_raises_rpc_error(-8, "Cannot unload the requested wallet", w1.unloadwallet, "w2") # Successfully unload the specified wallet name self.nodes[0].unloadwallet("w1") assert 'w1' not in self.nodes[0].listwallets() # Successfully unload the wallet referenced by the request endpoint # Also ensure unload works during walletpassphrase timeout w2.encryptwallet('test') w2.walletpassphrase('test', 1) w2.unloadwallet() time.sleep(1.1) assert 'w2' not in self.nodes[0].listwallets() # Successfully unload all wallets for wallet_name in self.nodes[0].listwallets(): self.nodes[0].unloadwallet(wallet_name) assert_equal(self.nodes[0].listwallets(), []) assert_raises_rpc_error(-32601, "Method not found (wallet method is disabled because no wallet is loaded)", self.nodes[0].getwalletinfo) # Successfully load a previously unloaded wallet self.nodes[0].loadwallet('w1') assert_equal(self.nodes[0].listwallets(), ['w1']) assert_equal(w1.getwalletinfo()['walletname'], 'w1')
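The load/unload checks above rely on wallet RPCs being routed through the /wallet/<name> URI path, which is what node.get_wallet_rpc() wraps. A minimal sketch of the same call made by hand over JSON-RPC (the helper name, host, and credentials are placeholders; real values come from the node's rpcport and rpcauth settings):

```python
import base64
import json
import urllib.request

def wallet_rpc_call(host, auth, wallet_name, method, params=None):
    # Wallet RPCs are dispatched on the /wallet/<name> endpoint; without
    # it, a multiwallet node returns "Wallet file not specified" (-19).
    payload = json.dumps(
        {"id": 1, "method": method, "params": params or []}).encode()
    request = urllib.request.Request(
        "http://{}/wallet/{}".format(host, wallet_name),
        data=payload,
        headers={
            "Content-Type": "application/json",
            "Authorization": "Basic "
            + base64.b64encode(auth.encode()).decode(),
        })
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read())["result"]

# Hypothetical usage:
# info = wallet_rpc_call("127.0.0.1:18443", "user:pass", "w1", "getwalletinfo")
```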
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), [ '', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy', 'w9']) # Test backing up and restoring wallets self.log.info("Test wallet backup") self.restart_node(0, ['-nowallet']) for wallet_name in wallet_names: self.nodes[0].loadwallet(wallet_name) for wallet_name in wallet_names: rpc = self.nodes[0].get_wallet_rpc(wallet_name) addr = rpc.getnewaddress() backup = os.path.join(self.options.tmpdir, 'backup.dat') rpc.backupwallet(backup) self.nodes[0].unloadwallet(wallet_name) shutil.copyfile(empty_wallet, wallet_file(wallet_name)) self.nodes[0].loadwallet(wallet_name) assert_equal(rpc.getaddressinfo(addr)['ismine'], False) self.nodes[0].unloadwallet(wallet_name) shutil.copyfile(backup, wallet_file(wallet_name)) self.nodes[0].loadwallet(wallet_name) assert_equal(rpc.getaddressinfo(addr)['ismine'], True) # Test .walletlock file is closed self.start_node(1) wallet = os.path.join(self.options.tmpdir, 'my_wallet') self.nodes[0].createwallet(wallet) assert_raises_rpc_error(-4, "Error initializing wallet database environment", self.nodes[1].loadwallet, wallet) self.nodes[0].unloadwallet(wallet) self.nodes[1].loadwallet(wallet) if __name__ == '__main__': MultiWalletTest().main() diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py index 04925ae80..0bde1171b 100755 --- a/test/functional/wallet_reorgsrestore.py +++ b/test/functional/wallet_reorgsrestore.py @@ -1,128 +1,128 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test tx status in case of reorgs while the wallet is shut down. Wallet txn status relies on block connection/disconnection for its accuracy. In case of reorgs happening while the wallet is shut down, block updates are not received. At wallet loading, we check against the chain whether confirmed txns are still in the chain, and change their status if the block in which they were included has been disconnected.
""" from decimal import Decimal import os import shutil from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, connect_nodes, disconnect_nodes, ) class ReorgsRestoreTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 3 self.extra_args = [["-noparkdeepreorg"]] * self.num_nodes def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): # Send a tx from which to conflict outputs later txid_conflict_from = self.nodes[0].sendtoaddress( self.nodes[0].getnewaddress(), Decimal("10")) self.nodes[0].generate(1) self.sync_blocks() # Disconnect node1 from others to reorg its chain later disconnect_nodes(self.nodes[0], self.nodes[1]) disconnect_nodes(self.nodes[1], self.nodes[2]) connect_nodes(self.nodes[0], self.nodes[2]) # Send a tx to be unconfirmed later txid = self.nodes[0].sendtoaddress( self.nodes[0].getnewaddress(), Decimal("10")) tx = self.nodes[0].gettransaction(txid) self.nodes[0].generate(4) tx_before_reorg = self.nodes[0].gettransaction(txid) assert_equal(tx_before_reorg["confirmations"], 4) # Disconnect node0 from node2 to broadcast a conflict on their # respective chains disconnect_nodes(self.nodes[0], self.nodes[2]) nA = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction( txid_conflict_from)["details"] if tx_out["amount"] == Decimal("10")) inputs = [] inputs.append({"txid": txid_conflict_from, "vout": nA}) outputs_1 = {} outputs_2 = {} # Create a conflicted tx broadcast on node0 chain and conflicting tx # broadcast on node1 chain. Both spend from txid_conflict_from outputs_1[self.nodes[0].getnewaddress()] = Decimal("9.99998") outputs_2[self.nodes[0].getnewaddress()] = Decimal("9.99998") conflicted = self.nodes[0].signrawtransactionwithwallet( self.nodes[0].createrawtransaction(inputs, outputs_1)) conflicting = self.nodes[0].signrawtransactionwithwallet( self.nodes[0].createrawtransaction(inputs, outputs_2)) conflicted_txid = self.nodes[0].sendrawtransaction(conflicted["hex"]) self.nodes[0].generate(1) conflicting_txid = self.nodes[2].sendrawtransaction(conflicting["hex"]) self.nodes[2].generate(9) # Reconnect node0 and node2 and check that conflicted_txid is # effectively conflicted connect_nodes(self.nodes[0], self.nodes[2]) self.sync_blocks([self.nodes[0], self.nodes[2]]) conflicted = self.nodes[0].gettransaction(conflicted_txid) conflicting = self.nodes[0].gettransaction(conflicting_txid) assert_equal(conflicted["confirmations"], -9) assert_equal(conflicted["walletconflicts"][0], conflicting["txid"]) # Node0 wallet is shutdown self.stop_node(0) self.start_node(0) # The block chain re-orgs and the tx is included in a different block self.nodes[1].generate(9) self.nodes[1].sendrawtransaction(tx["hex"]) self.nodes[1].generate(1) self.nodes[1].sendrawtransaction(conflicted["hex"]) self.nodes[1].generate(1) # Node0 wallet file is loaded on longest sync'ed node1 self.stop_node(1) self.nodes[0].backupwallet( os.path.join( self.nodes[0].datadir, 'wallet.bak')) shutil.copyfile( os.path.join( self.nodes[0].datadir, 'wallet.bak'), os.path.join( self.nodes[1].datadir, - 'regtest', + self.chain, 'wallet.dat')) self.start_node(1) tx_after_reorg = self.nodes[1].gettransaction(txid) # Check that normal confirmed tx is confirmed again but with different # blockhash assert_equal(tx_after_reorg["confirmations"], 2) assert(tx_before_reorg["blockhash"] != tx_after_reorg["blockhash"]) conflicted_after_reorg = self.nodes[1].gettransaction(conflicted_txid) # Check that conflicted tx is confirmed 
again, with a blockhash different # from that of the previously conflicting tx assert_equal(conflicted_after_reorg["confirmations"], 1) assert(conflicting["blockhash"] != conflicted_after_reorg["blockhash"]) if __name__ == '__main__': ReorgsRestoreTest().main()
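The change applied across all of these files is the same mechanical substitution: every hardcoded 'regtest' path component becomes self.chain, which the framework sets from the network under test, so the tests no longer assume which chain they run on. A minimal sketch of the resulting path pattern (chain_dir is a hypothetical helper, not framework API):

```python
import os

def chain_dir(datadir, chain, *parts):
    # Build a path under the node's chain-specific data directory,
    # e.g. <datadir>/regtest/wallets/wallet.dat when chain == 'regtest'.
    return os.path.join(datadir, chain, *parts)

# Inside a BitcoinTestFramework subclass, the paths in this patch would read:
# wallet_path = chain_dir(self.nodes[1].datadir, self.chain,
#                         "wallets", "wallet.dat")
```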