diff --git a/test/functional/abc_mining_basic.py b/test/functional/abc_mining_basic.py
index d605aeeb2..dc0a5bd55 100755
--- a/test/functional/abc_mining_basic.py
+++ b/test/functional/abc_mining_basic.py
@@ -1,157 +1,153 @@
 #!/usr/bin/env python3
 # Copyright (c) 2020 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """
 Tests for Bitcoin ABC mining RPCs
 """
 
 from decimal import Decimal
 
 from test_framework.cdefs import (
     BLOCK_MAXBYTES_MAXSIGCHECKS_RATIO,
     DEFAULT_MAX_BLOCK_SIZE,
 )
 from test_framework.messages import XEC
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_greater_than_or_equal,
-    connect_nodes,
-)
+from test_framework.util import assert_equal, assert_greater_than_or_equal
 
 AXION_ACTIVATION_TIME = 2000000600
 MINER_FUND_ADDR = 'ecregtest:pqnqv9lt7e5vjyp0w88zf2af0l92l8rxdgz0wv9ltl'
 MINER_FUND_LEGACY_ADDR = '2MviGxxFciGeWTgkUgYgjqehWt18c4ZsShd'
 
 
 class AbcMiningRPCTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 3
         self.extra_args = [[], [
             '-enableminerfund',
             '-axionactivationtime={}'.format(AXION_ACTIVATION_TIME),
         ], [
             '-enableminerfund',
             '-usecashaddr=0',
             '-axionactivationtime={}'.format(AXION_ACTIVATION_TIME),
         ]]
 
     def setup_network(self):
         self.setup_nodes()
 
         # Connect node0 to all other nodes so getblocktemplate will return results
         # (getblocktemplate has a sanity check that ensures it's connected to a network)
         # Since the other nodes are mining blocks "in the future" compared to node0,
         # node0 will not broadcast blocks between the other nodes.
         for n in range(1, len(self.nodes)):
-            connect_nodes(self.nodes[0], self.nodes[n])
+            self.connect_nodes(0, n)
 
     def run_for_node(self, node, expectedMinerFundAddress):
         address = node.get_deterministic_priv_key().address
 
         # Assert the results of getblocktemplate have expected values. Keys not
         # in 'expected' are not checked.
         def assert_getblocktemplate(expected):
             # Always test these values in addition to those passed in
             expected = {**expected, **{
                 'sigoplimit': DEFAULT_MAX_BLOCK_SIZE // BLOCK_MAXBYTES_MAXSIGCHECKS_RATIO,
             }}
 
             blockTemplate = node.getblocktemplate()
             for key, value in expected.items():
                 assert_equal(blockTemplate[key], value)
 
         # Move block time to just before axion activation
         node.setmocktime(AXION_ACTIVATION_TIME)
         node.generatetoaddress(5, address)
 
         # Before axion activation, the miner fund list is empty
         assert_getblocktemplate({
             'coinbasetxn': {
                 'minerfund': {
                     'addresses': [],
                     'minimumvalue': 0,
                 },
             },
         })
 
         # Move MTP forward to axion activation
         node.generatetoaddress(1, address)
         assert_equal(
             node.getblockchaininfo()['mediantime'],
             AXION_ACTIVATION_TIME)
 
         def get_best_coinbase():
             return node.getblock(node.getbestblockhash(), 2)['tx'][0]
 
         coinbase = get_best_coinbase()
         assert_equal(len(coinbase['vout']), 1)
         block_reward = coinbase['vout'][0]['value']
 
         # We don't need to test all fields in getblocktemplate since many of
         # them are covered in mining_basic.py
         assert_equal(node.getmempoolinfo()['size'], 0)
         assert_getblocktemplate({
             'coinbasetxn': {
                 # We expect to start seeing the miner fund addresses since the
                 # next block will start enforcing them.
                 'minerfund': {
                     'addresses': [expectedMinerFundAddress],
                     'minimumvalue': block_reward * 8 // 100 * XEC,
                 },
             },
             # Although the coinbase value need not necessarily be the same as
             # the last block due to halvings and fees, we know this to be true
             # since we are not crossing a halving boundary and there are no
             # transactions in the mempool.
             'coinbasevalue': block_reward * XEC,
             'mintime': AXION_ACTIVATION_TIME + 1,
         })
 
         # First block with the new rules
         node.generatetoaddress(1, address)
 
         # We expect the coinbase to have multiple outputs now
         coinbase = get_best_coinbase()
         assert_greater_than_or_equal(len(coinbase['vout']), 2)
         total = Decimal()
         for o in coinbase['vout']:
             total += o['value']
 
         assert_equal(total, block_reward)
         assert_getblocktemplate({
             'coinbasetxn': {
                 'minerfund': {
                     'addresses': [expectedMinerFundAddress],
                     'minimumvalue': block_reward * 8 // 100 * XEC,
                 },
             },
             # Again, we assume the coinbase value is the same as prior blocks.
             'coinbasevalue': block_reward * XEC,
             'mintime': AXION_ACTIVATION_TIME + 1,
         })
 
         # Move MTP forward
         node.setmocktime(AXION_ACTIVATION_TIME + 1)
         node.generatetoaddress(6, address)
         assert_getblocktemplate({
             'coinbasetxn': {
                 'minerfund': {
                     'addresses': [expectedMinerFundAddress],
                     'minimumvalue': block_reward * 8 // 100 * XEC,
                 },
             },
             'coinbasevalue': block_reward * XEC,
             'mintime': AXION_ACTIVATION_TIME + 2,
         })
 
     def run_test(self):
         # node0 is for connectivity only and is not mined on (see
         # setup_network)
         self.run_for_node(self.nodes[1], MINER_FUND_ADDR)
         self.run_for_node(self.nodes[2], MINER_FUND_LEGACY_ADDR)
 
 
 if __name__ == '__main__':
     AbcMiningRPCTest().main()
diff --git a/test/functional/example_test.py b/test/functional/example_test.py
index 0031ab505..e8425c228 100755
--- a/test/functional/example_test.py
+++ b/test/functional/example_test.py
@@ -1,224 +1,224 @@
 #!/usr/bin/env python3
 # Copyright (c) 2017-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """An example functional test
 
 The module-level docstring should include a high-level description of
 what the test is doing. It's the first thing people see when they open
 the file and should give the reader information about *what* the test
 is testing and *how* it's being tested
 """
 # Imports should be in PEP8 ordering (std library first, then third party
 # libraries then local imports).
 from collections import defaultdict
 
 # Avoid wildcard * imports if possible
 from test_framework.blocktools import create_block, create_coinbase
 from test_framework.messages import MSG_BLOCK, CInv, msg_block, msg_getdata
 from test_framework.p2p import P2PInterface, p2p_lock
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes
+from test_framework.util import assert_equal
 
 # P2PInterface is a class containing callbacks to be executed when a P2P
 # message is received from the node-under-test. Subclass P2PInterface and
 # override the on_*() methods if you need custom behaviour.
 
 
 class BaseNode(P2PInterface):
     def __init__(self):
         """Initialize the P2PInterface
 
         Used to initialize custom properties for the Node that aren't
         included by default in the base class. Be aware that the P2PInterface
         base class already stores a counter for each P2P message type and the
         last received message of each type, which should be sufficient for the
         needs of most tests.
 
         Call super().__init__() first for standard initialization and then
         initialize custom properties."""
         super().__init__()
         # Stores a dictionary of all blocks received
         self.block_receive_map = defaultdict(int)
 
     def on_block(self, message):
         """Override the standard on_block callback
 
         Store the hash of a received block in the dictionary."""
         message.block.calc_sha256()
         self.block_receive_map[message.block.sha256] += 1
 
     def on_inv(self, message):
         """Override the standard on_inv callback"""
         pass
 
 
 def custom_function():
     """Do some custom behaviour
 
     If this function is more generally useful for other tests, consider
     moving it to a module in test_framework."""
     # self.log.info("running custom_function")  # Oops! Can't run self.log
     # outside the BitcoinTestFramework
     pass
 
 
 class ExampleTest(BitcoinTestFramework):
     # Each functional test is a subclass of the BitcoinTestFramework class.
 
     # Override the set_test_params(), skip_test_if_missing_module(), add_options(), setup_chain(), setup_network()
     # and setup_nodes() methods to customize the test setup as required.
 
     def set_test_params(self):
         """Override test parameters for your individual test.
 
         This method must be overridden and num_nodes must be exlicitly set."""
         self.setup_clean_chain = True
         self.num_nodes = 3
         # Use self.extra_args to change command-line arguments for the nodes
         self.extra_args = [[], ["-logips"], []]
 
         # self.log.info("I've finished set_test_params")  # Oops! Can't run
         # self.log before run_test()
 
     # Use skip_test_if_missing_module() to skip the test if your test requires certain modules to be present.
     # This test uses generate which requires wallet to be compiled
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     # Use add_options() to add specific command-line options for your test.
     # In practice this is not used very much, since the tests are mostly written
     # to be run in automated environments without command-line options.
     # def add_options()
     #     pass
 
     # Use setup_chain() to customize the node data directories. In practice
     # this is not used very much since the default behaviour is almost always
     # fine
     # def setup_chain():
     #     pass
 
     def setup_network(self):
         """Setup the test network topology
 
         Often you won't need to override this, since the standard network topology
         (linear: node0 <-> node1 <-> node2 <-> ...) is fine for most tests.
 
         If you do override this method, remember to start the nodes, assign
         them to self.nodes, connect them and then sync."""
 
         self.setup_nodes()
 
         # In this test, we're not connecting node2 to node0 or node1. Calls to
         # sync_all() should not include node2, since we're not expecting it to
         # sync.
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_all(self.nodes[0:2])
 
     # Use setup_nodes() to customize the node start behaviour (for example if
     # you don't want to start all nodes at the start of the test).
     # def setup_nodes():
     #     pass
 
     def custom_method(self):
         """Do some custom behaviour for this test
 
         Define it in a method here because you're going to use it repeatedly.
         If you think it's useful in general, consider moving it to the base
         BitcoinTestFramework class so other tests can use it."""
 
         self.log.info("Running custom_method")
 
     def run_test(self):
         """Main test logic"""
 
         # Create P2P connections will wait for a verack to make sure the
         # connection is fully up
         peer_messaging = self.nodes[0].add_p2p_connection(BaseNode())
 
         # Generating a block on one of the nodes will get us out of IBD
         blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
         self.sync_all(self.nodes[0:2])
 
         # Notice above how we called an RPC by calling a method with the same
         # name on the node object. Notice also how we used a keyword argument
         # to specify a named RPC argument. Neither of those are defined on the
         # node object. Instead there's some __getattr__() magic going on under
         # the covers to dispatch unrecognised attribute calls to the RPC
         # interface.
 
         # Logs are nice. Do plenty of them. They can be used in place of comments for
         # breaking the test into sub-sections.
         self.log.info("Starting test!")
 
         self.log.info("Calling a custom function")
         custom_function()
 
         self.log.info("Calling a custom method")
         self.custom_method()
 
         self.log.info("Create some blocks")
         self.tip = int(self.nodes[0].getbestblockhash(), 16)
         self.block_time = self.nodes[0].getblock(
             self.nodes[0].getbestblockhash())['time'] + 1
 
         height = self.nodes[0].getblockcount()
 
         for _ in range(10):
             # Use the blocktools functionality to manually build a block.
             # Calling the generate() rpc is easier, but this allows us to exactly
             # control the blocks and transactions.
             block = create_block(
                 self.tip, create_coinbase(
                     height + 1), self.block_time)
             block.solve()
             block_message = msg_block(block)
             # Send message is used to send a P2P message to the node over our
             # P2PInterface
             peer_messaging.send_message(block_message)
             self.tip = block.sha256
             blocks.append(self.tip)
             self.block_time += 1
             height += 1
 
         self.log.info(
             "Wait for node1 to reach current tip (height 11) using RPC")
         self.nodes[1].waitforblockheight(11)
 
         self.log.info("Connect node2 and node1")
-        connect_nodes(self.nodes[1], self.nodes[2])
+        self.connect_nodes(1, 2)
 
         self.log.info("Wait for node2 to receive all the blocks from node1")
         self.sync_all()
 
         self.log.info("Add P2P connection to node2")
         self.nodes[0].disconnect_p2ps()
 
         peer_receiving = self.nodes[2].add_p2p_connection(BaseNode())
 
         self.log.info("Test that node2 propagates all the blocks to us")
 
         getdata_request = msg_getdata()
         for block in blocks:
             getdata_request.inv.append(CInv(MSG_BLOCK, block))
         peer_receiving.send_message(getdata_request)
 
         # wait_until() will loop until a predicate condition is met. Use it to test properties of the
         # P2PInterface objects.
         peer_receiving.wait_until(
             lambda: sorted(blocks) == sorted(list(
                 peer_receiving.block_receive_map.keys())),
             timeout=5)
 
         self.log.info("Check that each block was received only once")
         # The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving
         # messages. The test thread should acquire the global lock before accessing any P2PConnection data to avoid locking
         # and synchronization issues. Note p2p.wait_until() acquires this
         # global lock internally when testing the predicate.
         with p2p_lock:
             for block in peer_receiving.block_receive_map.values():
                 assert_equal(block, 1)
 
 
 if __name__ == '__main__':
     ExampleTest().main()
diff --git a/test/functional/feature_abortnode.py b/test/functional/feature_abortnode.py
index ae920c2b1..feb653110 100755
--- a/test/functional/feature_abortnode.py
+++ b/test/functional/feature_abortnode.py
@@ -1,52 +1,52 @@
 #!/usr/bin/env python3
 # Copyright (c) 2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test bitcoind aborts if can't disconnect a block.
 
 - Start a single node and generate 3 blocks.
 - Delete the undo data.
 - Mine a fork that requires disconnecting the tip.
 - Verify that bitcoind AbortNode's.
 """
 
 import os
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import connect_nodes, get_datadir_path
+from test_framework.util import get_datadir_path
 
 
 class AbortNodeTest(BitcoinTestFramework):
 
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 2
         self.extra_args = [["-noparkdeepreorg"], []]
 
     def setup_network(self):
         self.setup_nodes()
         # We'll connect the nodes later
 
     def run_test(self):
         self.nodes[0].generate(3)
         datadir = get_datadir_path(self.options.tmpdir, 0)
 
         # Deleting the undo file will result in reorg failure
         os.unlink(os.path.join(datadir, self.chain, 'blocks', 'rev00000.dat'))
 
         # Connecting to a node with a more work chain will trigger a reorg
         # attempt.
         self.nodes[1].generate(3)
         with self.nodes[0].assert_debug_log(["Failed to disconnect block"]):
-            connect_nodes(self.nodes[0], self.nodes[1])
+            self.connect_nodes(0, 1)
             self.nodes[1].generate(1)
 
             # Check that node0 aborted
             self.log.info("Waiting for crash")
             self.nodes[0].wait_until_stopped(timeout=200)
         self.log.info("Node crashed - now verifying restart fails")
         self.nodes[0].assert_start_raises_init_error()
 
 
 if __name__ == '__main__':
     AbortNodeTest().main()
diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py
index 52da535ef..9e7881d78 100755
--- a/test/functional/feature_bip68_sequence.py
+++ b/test/functional/feature_bip68_sequence.py
@@ -1,506 +1,504 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test BIP68 implementation."""
 
 import time
 
 from test_framework.blocktools import create_block, create_coinbase
 from test_framework.messages import (
     XEC,
     COutPoint,
     CTransaction,
     CTxIn,
     CTxOut,
     FromHex,
     ToHex,
 )
 from test_framework.script import CScript
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.txtools import pad_tx
 from test_framework.util import (
     assert_equal,
     assert_greater_than,
     assert_raises_rpc_error,
-    connect_nodes,
-    disconnect_nodes,
     satoshi_round,
 )
 
 SEQUENCE_LOCKTIME_DISABLE_FLAG = (1 << 31)
 # this means use time (0 means height)
 SEQUENCE_LOCKTIME_TYPE_FLAG = (1 << 22)
 # this is a bit-shift
 SEQUENCE_LOCKTIME_GRANULARITY = 9
 SEQUENCE_LOCKTIME_MASK = 0x0000ffff
 
 # RPC error for non-BIP68 final transactions
 NOT_FINAL_ERROR = "non-BIP68-final"
 
 
 class BIP68Test(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.extra_args = [
             [
                 "-noparkdeepreorg",
                 "-maxreorgdepth=-1",
                 "-acceptnonstdtxn=1",
                 # bump because mocktime might cause a disconnect otherwise
                 "-peertimeout=9999",
             ],
             [
                 "-acceptnonstdtxn=0",
                 "-maxreorgdepth=-1"
             ]
         ]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
 
         # Generate some coins
         self.nodes[0].generate(110)
 
         self.log.info("Running test disable flag")
         self.test_disable_flag()
 
         self.log.info("Running test sequence-lock-confirmed-inputs")
         self.test_sequence_lock_confirmed_inputs()
 
         self.log.info("Running test sequence-lock-unconfirmed-inputs")
         self.test_sequence_lock_unconfirmed_inputs()
 
         self.log.info(
             "Running test BIP68 not consensus before versionbits activation")
         self.test_bip68_not_consensus()
 
         self.log.info("Activating BIP68 (and 112/113)")
         self.activateCSV()
 
         print("Verifying nVersion=2 transactions are standard.")
         print("Note that with current versions of bitcoin software, nVersion=2 transactions are always standard (independent of BIP68 activation status).")
         self.test_version2_relay()
 
         self.log.info("Passed")
 
     # Test that BIP68 is not in effect if tx version is 1, or if
     # the first sequence bit is set.
     def test_disable_flag(self):
         # Create some unconfirmed inputs
         new_addr = self.nodes[0].getnewaddress()
         # send 2,000,000 XEC
         self.nodes[0].sendtoaddress(new_addr, 2000000)
 
         utxos = self.nodes[0].listunspent(0, 0)
         assert len(utxos) > 0
 
         utxo = utxos[0]
 
         tx1 = CTransaction()
         value = int(satoshi_round(utxo["amount"] - self.relayfee) * XEC)
 
         # Check that the disable flag disables relative locktime.
         # If sequence locks were used, this would require 1 block for the
         # input to mature.
         sequence_value = SEQUENCE_LOCKTIME_DISABLE_FLAG | 1
         tx1.vin = [
             CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)]
         tx1.vout = [CTxOut(value, CScript([b'a']))]
         pad_tx(tx1)
 
         tx1_signed = self.nodes[0].signrawtransactionwithwallet(ToHex(tx1))[
             "hex"]
         tx1_id = self.nodes[0].sendrawtransaction(tx1_signed)
         tx1_id = int(tx1_id, 16)
 
         # This transaction will enable sequence-locks, so this transaction should
         # fail
         tx2 = CTransaction()
         tx2.nVersion = 2
         sequence_value = sequence_value & 0x7fffffff
         tx2.vin = [CTxIn(COutPoint(tx1_id, 0), nSequence=sequence_value)]
         tx2.vout = [CTxOut(int(value - self.relayfee * XEC), CScript([b'a']))]
         pad_tx(tx2)
         tx2.rehash()
 
         assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                 self.nodes[0].sendrawtransaction, ToHex(tx2))
 
         # Setting the version back down to 1 should disable the sequence lock,
         # so this should be accepted.
         tx2.nVersion = 1
 
         self.nodes[0].sendrawtransaction(ToHex(tx2))
 
     # Calculate the median time past of a prior block ("confirmations" before
     # the current tip).
     def get_median_time_past(self, confirmations):
         block_hash = self.nodes[0].getblockhash(
             self.nodes[0].getblockcount() - confirmations)
         return self.nodes[0].getblockheader(block_hash)["mediantime"]
 
     # Test that sequence locks are respected for transactions spending
     # confirmed inputs.
     def test_sequence_lock_confirmed_inputs(self):
         # Create lots of confirmed utxos, and use them to generate lots of random
         # transactions.
         max_outputs = 50
         addresses = []
         while len(addresses) < max_outputs:
             addresses.append(self.nodes[0].getnewaddress())
         while len(self.nodes[0].listunspent()) < 200:
             import random
             random.shuffle(addresses)
             num_outputs = random.randint(1, max_outputs)
             outputs = {}
             for i in range(num_outputs):
                 outputs[addresses[i]] = random.randint(1, 20) * 10000
             self.nodes[0].sendmany("", outputs)
             self.nodes[0].generate(1)
 
         utxos = self.nodes[0].listunspent()
 
         # Try creating a lot of random transactions.
         # Each time, choose a random number of inputs, and randomly set
         # some of those inputs to be sequence locked (and randomly choose
         # between height/time locking). Small random chance of making the locks
         # all pass.
         for _ in range(400):
             # Randomly choose up to 10 inputs
             num_inputs = random.randint(1, 10)
             random.shuffle(utxos)
 
             # Track whether any sequence locks used should fail
             should_pass = True
 
             # Track whether this transaction was built with sequence locks
             using_sequence_locks = False
 
             tx = CTransaction()
             tx.nVersion = 2
             value = 0
             for j in range(num_inputs):
                 # this disables sequence locks
                 sequence_value = 0xfffffffe
 
                 # 50% chance we enable sequence locks
                 if random.randint(0, 1):
                     using_sequence_locks = True
 
                     # 10% of the time, make the input sequence value pass
                     input_will_pass = (random.randint(1, 10) == 1)
                     sequence_value = utxos[j]["confirmations"]
                     if not input_will_pass:
                         sequence_value += 1
                         should_pass = False
 
                     # Figure out what the median-time-past was for the confirmed input
                     # Note that if an input has N confirmations, we're going back N blocks
                     # from the tip so that we're looking up MTP of the block
                     # PRIOR to the one the input appears in, as per the BIP68
                     # spec.
                     orig_time = self.get_median_time_past(
                         utxos[j]["confirmations"])
                     # MTP of the tip
                     cur_time = self.get_median_time_past(0)
 
                     # can only timelock this input if it's not too old --
                     # otherwise use height
                     can_time_lock = True
                     if ((cur_time - orig_time)
                             >> SEQUENCE_LOCKTIME_GRANULARITY) >= SEQUENCE_LOCKTIME_MASK:
                         can_time_lock = False
 
                     # if time-lockable, then 50% chance we make this a time
                     # lock
                     if random.randint(0, 1) and can_time_lock:
                         # Find first time-lock value that fails, or latest one
                         # that succeeds
                         time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY
                         if input_will_pass and time_delta > cur_time - orig_time:
                             sequence_value = (
                                 (cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)
                         elif (not input_will_pass and time_delta <= cur_time - orig_time):
                             sequence_value = (
                                 (cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) + 1
                         sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
                 tx.vin.append(
                     CTxIn(COutPoint(int(utxos[j]["txid"], 16), utxos[j]["vout"]), nSequence=sequence_value))
                 value += utxos[j]["amount"] * XEC
             # Overestimate the size of the tx - signatures should be less than
             # 120 bytes, and leave 50 for the output
             tx_size = len(ToHex(tx)) // 2 + 120 * num_inputs + 50
             tx.vout.append(
                 CTxOut(int(value - self.relayfee * tx_size * XEC / 1000), CScript([b'a'])))
             rawtx = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))[
                 "hex"]
 
             if (using_sequence_locks and not should_pass):
                 # This transaction should be rejected
                 assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                         self.nodes[0].sendrawtransaction, rawtx)
             else:
                 # This raw transaction should be accepted
                 self.nodes[0].sendrawtransaction(rawtx)
                 utxos = self.nodes[0].listunspent()
 
     # Test that sequence locks on unconfirmed inputs must have nSequence
     # height or time of 0 to be accepted.
     # Then test that BIP68-invalid transactions are removed from the mempool
     # after a reorg.
     def test_sequence_lock_unconfirmed_inputs(self):
         # Store height so we can easily reset the chain at the end of the test
         cur_height = self.nodes[0].getblockcount()
 
         # Create a mempool tx.
         txid = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), 2000000)
         tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
         tx1.rehash()
 
         # As the fees are calculated prior to the transaction being signed,
         # there is some uncertainty that calculate fee provides the correct
         # minimal fee. Since regtest coins are free, let's go ahead and
         # increase the fee by an order of magnitude to ensure this test
         # passes.
         fee_multiplier = 10
 
         # Anyone-can-spend mempool tx.
         # Sequence lock of 0 should pass.
         tx2 = CTransaction()
         tx2.nVersion = 2
         tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
         tx2.vout = [
             CTxOut(int(0), CScript([b'a']))]
         tx2.vout[0].nValue = tx1.vout[0].nValue - \
             fee_multiplier * self.nodes[0].calculate_fee(tx2)
         tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
         tx2 = FromHex(tx2, tx2_raw)
         tx2.rehash()
         self.nodes[0].sendrawtransaction(tx2_raw)
 
         # Create a spend of the 0th output of orig_tx with a sequence lock
         # of 1, and test what happens when submitting.
         # orig_tx.vout[0] must be an anyone-can-spend output
         def test_nonzero_locks(orig_tx, node, use_height_lock):
             sequence_value = 1
             if not use_height_lock:
                 sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
 
             tx = CTransaction()
             tx.nVersion = 2
             tx.vin = [
                 CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)]
             tx.vout = [
                 CTxOut(int(orig_tx.vout[0].nValue - fee_multiplier * node.calculate_fee(tx)), CScript([b'a']))]
             pad_tx(tx)
             tx.rehash()
 
             if (orig_tx.hash in node.getrawmempool()):
                 # sendrawtransaction should fail if the tx is in the mempool
                 assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                         node.sendrawtransaction, ToHex(tx))
             else:
                 # sendrawtransaction should succeed if the tx is not in the
                 # mempool
                 node.sendrawtransaction(ToHex(tx))
 
             return tx
 
         test_nonzero_locks(
             tx2, self.nodes[0], use_height_lock=True)
         test_nonzero_locks(
             tx2, self.nodes[0], use_height_lock=False)
 
         # Now mine some blocks, but make sure tx2 doesn't get mined.
         # Use prioritisetransaction to lower the effective feerate to 0
         self.nodes[0].prioritisetransaction(
             txid=tx2.hash, fee_delta=-fee_multiplier * self.nodes[0].calculate_fee(tx2))
         cur_time = int(time.time())
         for _ in range(10):
             self.nodes[0].setmocktime(cur_time + 600)
             self.nodes[0].generate(1)
             cur_time += 600
 
         assert tx2.hash in self.nodes[0].getrawmempool()
 
         test_nonzero_locks(
             tx2, self.nodes[0], use_height_lock=True)
         test_nonzero_locks(
             tx2, self.nodes[0], use_height_lock=False)
 
         # Mine tx2, and then try again
         self.nodes[0].prioritisetransaction(
             txid=tx2.hash, fee_delta=fee_multiplier * self.nodes[0].calculate_fee(tx2))
 
         # Advance the time on the node so that we can test timelocks
         self.nodes[0].setmocktime(cur_time + 600)
         self.nodes[0].generate(1)
         assert tx2.hash not in self.nodes[0].getrawmempool()
 
         # Now that tx2 is not in the mempool, a sequence locked spend should
         # succeed
         tx3 = test_nonzero_locks(
             tx2, self.nodes[0], use_height_lock=False)
         assert tx3.hash in self.nodes[0].getrawmempool()
 
         self.nodes[0].generate(1)
         assert tx3.hash not in self.nodes[0].getrawmempool()
 
         # One more test, this time using height locks
         tx4 = test_nonzero_locks(
             tx3, self.nodes[0], use_height_lock=True)
         assert tx4.hash in self.nodes[0].getrawmempool()
 
         # Now try combining confirmed and unconfirmed inputs
         tx5 = test_nonzero_locks(
             tx4, self.nodes[0], use_height_lock=True)
         assert tx5.hash not in self.nodes[0].getrawmempool()
 
         utxos = self.nodes[0].listunspent()
         tx5.vin.append(
             CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1))
         tx5.vout[0].nValue += int(utxos[0]["amount"] * XEC)
         raw_tx5 = self.nodes[0].signrawtransactionwithwallet(ToHex(tx5))["hex"]
 
         assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                 self.nodes[0].sendrawtransaction, raw_tx5)
 
         # Test mempool-BIP68 consistency after reorg
         #
         # State of the transactions in the last blocks:
         # ... -> [ tx2 ] ->  [ tx3 ]
         #         tip-1        tip
         # And currently tx4 is in the mempool.
         #
         # If we invalidate the tip, tx3 should get added to the mempool, causing
         # tx4 to be removed (fails sequence-lock).
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
         assert tx4.hash not in self.nodes[0].getrawmempool()
         assert tx3.hash in self.nodes[0].getrawmempool()
 
         # Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in
         # diagram above).
         # This would cause tx2 to be added back to the mempool, which in turn causes
         # tx3 to be removed.
         tip = int(self.nodes[0].getblockhash(
             self.nodes[0].getblockcount() - 1), 16)
         height = self.nodes[0].getblockcount()
         for i in range(2):
             block = create_block(tip, create_coinbase(height), cur_time)
             block.nVersion = 3
             block.rehash()
             block.solve()
             tip = block.sha256
             height += 1
             assert_equal(
                 None if i == 1 else 'inconclusive',
                 self.nodes[0].submitblock(
                     ToHex(block)))
             cur_time += 1
 
         mempool = self.nodes[0].getrawmempool()
         assert tx3.hash not in mempool
         assert tx2.hash in mempool
 
         # Reset the chain and get rid of the mocktimed-blocks
         self.nodes[0].setmocktime(0)
         self.nodes[0].invalidateblock(
             self.nodes[0].getblockhash(cur_height + 1))
         self.nodes[0].generate(10)
 
     def get_csv_status(self):
         height = self.nodes[0].getblockchaininfo()['blocks']
         return height >= 576
 
     # Make sure that BIP68 isn't being used to validate blocks, prior to
     # versionbits activation.  If more blocks are mined prior to this test
     # being run, then it's possible the test has activated the soft fork, and
     # this test should be moved to run earlier, or deleted.
     def test_bip68_not_consensus(self):
         assert_equal(self.get_csv_status(), False)
         txid = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), 2000000)
 
         tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
         tx1.rehash()
 
         # Make an anyone-can-spend transaction
         tx2 = CTransaction()
         tx2.nVersion = 1
         tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
         tx2.vout = [
             CTxOut(int(tx1.vout[0].nValue - self.relayfee * XEC), CScript([b'a']))]
 
         # sign tx2
         tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
         tx2 = FromHex(tx2, tx2_raw)
         pad_tx(tx2)
         tx2.rehash()
 
         self.nodes[0].sendrawtransaction(ToHex(tx2))
 
         # Now make an invalid spend of tx2 according to BIP68
         # 100 block relative locktime
         sequence_value = 100
 
         tx3 = CTransaction()
         tx3.nVersion = 2
         tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), nSequence=sequence_value)]
         tx3.vout = [
             CTxOut(int(tx2.vout[0].nValue - self.relayfee * XEC), CScript([b'a']))]
         pad_tx(tx3)
         tx3.rehash()
 
         assert_raises_rpc_error(-26, NOT_FINAL_ERROR,
                                 self.nodes[0].sendrawtransaction, ToHex(tx3))
 
         # make a block that violates bip68; ensure that the tip updates
         tip = int(self.nodes[0].getbestblockhash(), 16)
         block = create_block(
             tip, create_coinbase(self.nodes[0].getblockcount() + 1))
         block.nVersion = 3
         block.vtx.extend(
             sorted([tx1, tx2, tx3], key=lambda tx: tx.get_id()))
         block.hashMerkleRoot = block.calc_merkle_root()
         block.rehash()
         block.solve()
 
         assert_equal(None, self.nodes[0].submitblock(ToHex(block)))
         assert_equal(self.nodes[0].getbestblockhash(), block.hash)
 
     def activateCSV(self):
         # activation should happen at block height 576
         csv_activation_height = 576
         height = self.nodes[0].getblockcount()
         assert_greater_than(csv_activation_height - height, 1)
         self.nodes[0].generate(csv_activation_height - height - 1)
         assert_equal(self.get_csv_status(), False)
-        disconnect_nodes(self.nodes[0], self.nodes[1])
+        self.disconnect_nodes(0, 1)
         self.nodes[0].generate(1)
         assert_equal(self.get_csv_status(), True)
         # We have a block that has CSV activated, but we want to be at
         # the activation point, so we invalidate the tip.
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_blocks()
 
     # Use self.nodes[1] to test that version 2 transactions are standard.
     def test_version2_relay(self):
         inputs = []
         outputs = {self.nodes[1].getnewaddress(): 1000000.0}
         rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
         rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex']
         tx = FromHex(CTransaction(), rawtxfund)
         tx.nVersion = 2
         tx_signed = self.nodes[1].signrawtransactionwithwallet(ToHex(tx))[
             "hex"]
         self.nodes[1].sendrawtransaction(tx_signed)
 
 
 if __name__ == '__main__':
     BIP68Test().main()
diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py
index c3b465132..14cbf264c 100755
--- a/test/functional/feature_minchainwork.py
+++ b/test/functional/feature_minchainwork.py
@@ -1,101 +1,101 @@
 #!/usr/bin/env python3
 # Copyright (c) 2017 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test logic for setting nMinimumChainWork on command line.
 
 Nodes don't consider themselves out of "initial block download" until
 their active chain has more work than nMinimumChainWork.
 
 Nodes don't download blocks from a peer unless the peer's best known block
 has more work than nMinimumChainWork.
 
 While in initial block download, nodes won't relay blocks to their peers, so
 test that this parameter functions as intended by verifying that block relay
 only succeeds past a given node once its nMinimumChainWork has been exceeded.
 """
 
 import time
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes
+from test_framework.util import assert_equal
 
 # 2 hashes required per regtest block (with no difficulty adjustment)
 REGTEST_WORK_PER_BLOCK = 2
 
 
 class MinimumChainWorkTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 3
         self.extra_args = [[], ["-minimumchainwork=0x65"],
                            ["-minimumchainwork=0x65"]]
         self.node_min_work = [0, 101, 101]
 
     def setup_network(self):
         # This test relies on the chain setup being:
         # node0 <- node1 <- node2
         # Before leaving IBD, nodes prefer to download blocks from outbound
         # peers, so ensure that we're mining on an outbound peer and testing
         # block relay to inbound peers.
         self.setup_nodes()
         for i in range(self.num_nodes - 1):
-            connect_nodes(self.nodes[i + 1], self.nodes[i])
+            self.connect_nodes(i + 1, i)
 
     def run_test(self):
         # Start building a chain on node0.  node2 shouldn't be able to sync until node1's
         # minchainwork is exceeded
         starting_chain_work = REGTEST_WORK_PER_BLOCK  # Genesis block's work
         self.log.info(
             "Testing relay across node {} (minChainWork = {})".format(
                 1, self.node_min_work[1]))
 
         starting_blockcount = self.nodes[2].getblockcount()
 
         num_blocks_to_generate = int(
             (self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
         self.log.info("Generating {} blocks on node0".format(
                       num_blocks_to_generate))
         hashes = self.nodes[0].generatetoaddress(num_blocks_to_generate,
                                                  self.nodes[0].get_deterministic_priv_key().address)
 
         self.log.info("Node0 current chain work: {}".format(
                       self.nodes[0].getblockheader(hashes[-1])['chainwork']))
 
         # Sleep a few seconds and verify that node2 didn't get any new blocks
         # or headers.  We sleep, rather than sync_blocks(node0, node1) because
         # it's reasonable either way for node1 to get the blocks, or not get
         # them (since they're below node1's minchainwork).
         time.sleep(3)
 
         self.log.info("Verifying node 2 has no more blocks than before")
         self.log.info("Blockcounts: {}".format(
                       [n.getblockcount() for n in self.nodes]))
         # Node2 shouldn't have any new headers yet, because node1 should not
         # have relayed anything.
         assert_equal(len(self.nodes[2].getchaintips()), 1)
         assert_equal(self.nodes[2].getchaintips()[0]['height'], 0)
 
         assert self.nodes[1].getbestblockhash(
         ) != self.nodes[0].getbestblockhash()
         assert_equal(self.nodes[2].getblockcount(), starting_blockcount)
 
         self.log.info("Generating one more block")
         self.nodes[0].generatetoaddress(
             1, self.nodes[0].get_deterministic_priv_key().address)
 
         self.log.info("Verifying nodes are all synced")
 
         # Because nodes in regtest are all manual connections (eg using
         # addnode), node1 should not have disconnected node0. If not for that,
         # we'd expect node1 to have disconnected node0 for serving an
         # insufficient work chain, in which case we'd need to reconnect them to
         # continue the test.
 
         self.sync_all()
         self.log.info("Blockcounts: {}".format(
                       [n.getblockcount() for n in self.nodes]))
 
 
 if __name__ == '__main__':
     MinimumChainWorkTest().main()
diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py
index 8b27f1f34..fb6e0627f 100755
--- a/test/functional/feature_notifications.py
+++ b/test/functional/feature_notifications.py
@@ -1,205 +1,200 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Copyright (c) 2018 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the -alertnotify, -blocknotify and -walletnotify options."""
 import os
 
 from test_framework.address import ADDRESS_ECREG_UNSPENDABLE, keyhash_to_p2pkh
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    connect_nodes,
-    disconnect_nodes,
-    hex_str_to_bytes,
-)
+from test_framework.util import assert_equal, hex_str_to_bytes
 
 FORK_WARNING_MESSAGE = "Warning: Large-work fork detected, forking after block {}"
 
 # Linux allow all characters other than \x00
 # Windows disallow control characters (0-31) and /\?%:|"<>
 FILE_CHAR_START = 32 if os.name == 'nt' else 1
 FILE_CHAR_END = 128
 FILE_CHAR_BLACKLIST = '/\\?%*:|"<>' if os.name == 'nt' else '/'
 
 
 def notify_outputname(walletname, txid):
     return txid if os.name == 'nt' else '{}_{}'.format(walletname, txid)
 
 
 class NotificationsTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.setup_clean_chain = True
 
     def setup_network(self):
         self.wallet = ''.join(
             chr(i) for i in range(
                 FILE_CHAR_START,
                 FILE_CHAR_END) if chr(i) not in FILE_CHAR_BLACKLIST)
         self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify")
         self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify")
         self.walletnotify_dir = os.path.join(
             self.options.tmpdir, "walletnotify")
         os.mkdir(self.alertnotify_dir)
         os.mkdir(self.blocknotify_dir)
         os.mkdir(self.walletnotify_dir)
 
         # -alertnotify and -blocknotify on node0, walletnotify on node1
         self.extra_args = [["-alertnotify=echo > {}".format(
             os.path.join(self.alertnotify_dir, '%s')),
             "-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s'))],
             ["-blockversion=211",
              "-rescan",
              "-walletnotify=echo > {}".format(os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s')))]]
         self.wallet_names = [self.default_wallet_name, self.wallet]
         super().setup_network()
 
     def run_test(self):
         self.log.info("test -blocknotify")
         block_count = 10
         blocks = self.nodes[1].generatetoaddress(
             block_count,
             self.nodes[1].getnewaddress() if self.is_wallet_compiled()
             else ADDRESS_ECREG_UNSPENDABLE
         )
 
         # wait at most 10 seconds for expected number of files before reading
         # the content
         self.wait_until(
             lambda: len(os.listdir(self.blocknotify_dir)) == block_count,
             timeout=10)
 
         # directory content should equal the generated blocks hashes
         assert_equal(sorted(blocks), sorted(os.listdir(self.blocknotify_dir)))
 
         if self.is_wallet_compiled():
             self.log.info("test -walletnotify")
             # wait at most 10 seconds for expected number of files before
             # reading the content
             self.wait_until(
                 lambda: len(os.listdir(self.walletnotify_dir)) == block_count,
                 timeout=10)
 
             # directory content should equal the generated transaction hashes
             txids_rpc = list(map(lambda t: notify_outputname(
                 self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
             assert_equal(
                 sorted(txids_rpc), sorted(
                     os.listdir(
                         self.walletnotify_dir)))
             self.stop_node(1)
             for tx_file in os.listdir(self.walletnotify_dir):
                 os.remove(os.path.join(self.walletnotify_dir, tx_file))
 
             self.log.info("test -walletnotify after rescan")
             # restart node to rescan to force wallet notifications
             self.start_node(1)
-            connect_nodes(self.nodes[0], self.nodes[1])
+            self.connect_nodes(0, 1)
 
             self.wait_until(
                 lambda: len(os.listdir(self.walletnotify_dir)) == block_count,
                 timeout=10)
 
             # directory content should equal the generated transaction hashes
             txids_rpc = list(map(lambda t: notify_outputname(
                 self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
             assert_equal(
                 sorted(txids_rpc), sorted(
                     os.listdir(
                         self.walletnotify_dir)))
             for tx_file in os.listdir(self.walletnotify_dir):
                 os.remove(os.path.join(self.walletnotify_dir, tx_file))
 
             # Conflicting transactions tests. Give node 0 same wallet seed as
             # node 1, generate spends from node 0, and check notifications
             # triggered by node 1
             self.log.info("test -walletnotify with conflicting transactions")
             self.nodes[0].sethdseed(
                 seed=self.nodes[1].dumpprivkey(
                     keyhash_to_p2pkh(
                         hex_str_to_bytes(
                             self.nodes[1].getwalletinfo()['hdseedid'])[::-1])))
             self.nodes[0].rescanblockchain()
             self.nodes[0].generatetoaddress(100, ADDRESS_ECREG_UNSPENDABLE)
 
             # Generate transaction on node 0, sync mempools, and check for
             # notification on node 1.
             tx1 = self.nodes[0].sendtoaddress(
                 address=ADDRESS_ECREG_UNSPENDABLE, amount=100)
             assert_equal(tx1 in self.nodes[0].getrawmempool(), True)
             self.sync_mempools()
             self.expect_wallet_notify([tx1])
 
             # Add tx1 transaction to new block, checking for a notification
             # and the correct number of confirmations.
             self.nodes[0].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
             self.sync_blocks()
             self.expect_wallet_notify([tx1])
             assert_equal(self.nodes[1].gettransaction(tx1)["confirmations"], 1)
 
             # Generate conflicting transactions with the nodes disconnected.
             # Sending almost the entire available balance on each node, but
             # with a slightly different amount, ensures that there will be
             # a conflict.
             balance = self.nodes[0].getbalance()
-            disconnect_nodes(self.nodes[0], self.nodes[1])
+            self.disconnect_nodes(0, 1)
             tx2_node0 = self.nodes[0].sendtoaddress(
                 address=ADDRESS_ECREG_UNSPENDABLE, amount=balance - 20)
             tx2_node1 = self.nodes[1].sendtoaddress(
                 address=ADDRESS_ECREG_UNSPENDABLE, amount=balance - 21)
             assert tx2_node0 != tx2_node1
             self.expect_wallet_notify([tx2_node1])
             # So far tx2_node1 has no conflicting tx
             assert not self.nodes[1].gettransaction(
                 tx2_node1)['walletconflicts']
 
             # Mine a block on node0, reconnect the nodes, check that tx2_node1
             # has a conflicting tx after syncing with node0.
             self.nodes[0].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
-            connect_nodes(self.nodes[0], self.nodes[1])
+            self.connect_nodes(0, 1)
             self.sync_blocks()
             assert tx2_node0 in self.nodes[1].gettransaction(tx2_node1)[
                 'walletconflicts']
 
             # node1's wallet will notify of the new confirmed transaction tx2_0
             # and about the conflicted transaction tx2_1.
             self.expect_wallet_notify([tx2_node0, tx2_node1])
 
         # Create an invalid chain and ensure the node warns.
         self.log.info("test -alertnotify for forked chain")
         fork_block = self.nodes[0].getbestblockhash()
         self.nodes[0].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
         invalid_block = self.nodes[0].getbestblockhash()
         self.nodes[0].generatetoaddress(7, ADDRESS_ECREG_UNSPENDABLE)
 
         # Invalidate a large branch, which should trigger an alert.
         self.nodes[0].invalidateblock(invalid_block)
 
         # Give bitcoind 10 seconds to write the alert notification
         self.wait_until(lambda: len(os.listdir(self.alertnotify_dir)),
                         timeout=10)
 
         # The notification command is unable to properly handle the spaces on
         # windows. Skip the content check in this case.
         if os.name != 'nt':
             assert FORK_WARNING_MESSAGE.format(
                 fork_block) in os.listdir(self.alertnotify_dir)
 
         for notify_file in os.listdir(self.alertnotify_dir):
             os.remove(os.path.join(self.alertnotify_dir, notify_file))
 
     def expect_wallet_notify(self, tx_ids):
         self.wait_until(
             lambda: len(os.listdir(self.walletnotify_dir)) >= len(tx_ids),
             timeout=10)
         assert_equal(
             sorted(notify_outputname(self.wallet, tx_id) for tx_id in tx_ids),
             sorted(os.listdir(self.walletnotify_dir)))
         for tx_file in os.listdir(self.walletnotify_dir):
             os.remove(os.path.join(self.walletnotify_dir, tx_file))
 
 
 if __name__ == '__main__':
     NotificationsTest().main()
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py
index 6bda06e4c..9acf9b152 100755
--- a/test/functional/feature_pruning.py
+++ b/test/functional/feature_pruning.py
@@ -1,516 +1,514 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the pruning code.
 
 WARNING:
 This test uses 4GB of disk space.
 This test takes 30 mins or more (up to 2 hours)
 """
 
 import os
 
 from test_framework.blocktools import create_coinbase
 from test_framework.messages import CBlock, ToHex
 from test_framework.script import OP_NOP, OP_RETURN, CScript
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_equal,
     assert_greater_than,
     assert_raises_rpc_error,
-    connect_nodes,
-    disconnect_nodes,
 )
 
 # Rescans start at the earliest block up to 2 hours before a key timestamp, so
 # the manual prune RPC avoids pruning blocks in the same window to be
 # compatible with pruning based on key creation time.
 TIMESTAMP_WINDOW = 2 * 60 * 60
 
 
 def mine_large_blocks(node, n):
     # Make a large scriptPubKey for the coinbase transaction. This is OP_RETURN
     # followed by 950k of OP_NOP. This would be non-standard in a non-coinbase
     # transaction but is consensus valid.
 
     # Set the nTime if this is the first time this function has been called.
     # A static variable ensures that time is monotonicly increasing and is therefore
     # different for each block created => blockhash is unique.
     if "nTimes" not in mine_large_blocks.__dict__:
         mine_large_blocks.nTime = 0
 
     # Get the block parameters for the first block
     big_script = CScript([OP_RETURN] + [OP_NOP] * 950000)
     best_block = node.getblock(node.getbestblockhash())
     height = int(best_block["height"]) + 1
     mine_large_blocks.nTime = max(
         mine_large_blocks.nTime, int(best_block["time"])) + 1
     previousblockhash = int(best_block["hash"], 16)
 
     for _ in range(n):
         # Build the coinbase transaction (with large scriptPubKey)
         coinbase_tx = create_coinbase(height)
         coinbase_tx.vin[0].nSequence = 2 ** 32 - 1
         coinbase_tx.vout[0].scriptPubKey = big_script
         coinbase_tx.rehash()
 
         # Build the block
         block = CBlock()
         block.nVersion = best_block["version"]
         block.hashPrevBlock = previousblockhash
         block.nTime = mine_large_blocks.nTime
         block.nBits = int('207fffff', 16)
         block.nNonce = 0
         block.vtx = [coinbase_tx]
         block.hashMerkleRoot = block.calc_merkle_root()
         block.solve()
 
         # Submit to the node
         node.submitblock(ToHex(block))
 
         previousblockhash = block.sha256
         height += 1
         mine_large_blocks.nTime += 1
 
 
 def calc_usage(blockdir):
     return sum(os.path.getsize(blockdir + f) for f in os.listdir(blockdir)
                if os.path.isfile(os.path.join(blockdir, f))) / (1024. * 1024.)
 
 
 class PruneTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 6
         self.supports_cli = False
 
         # Create nodes 0 and 1 to mine.
         # Create node 2 to test pruning.
         self.full_node_default_args = ["-maxreceivebuffer=20000", "-blockmaxsize=999000",
                                        "-checkblocks=5", "-noparkdeepreorg", "-maxreorgdepth=-1"]
         # Create nodes 3 and 4 to test manual pruning (they will be re-started with manual pruning later)
         # Create nodes 5 to test wallet in prune mode, but do not connect
         self.extra_args = [self.full_node_default_args,
                            self.full_node_default_args,
                            ["-maxreceivebuffer=20000", "-prune=550",
                                "-noparkdeepreorg", "-maxreorgdepth=-1"],
                            ["-maxreceivebuffer=20000", "-blockmaxsize=999000",
                                "-noparkdeepreorg", "-maxreorgdepth=-1"],
                            ["-maxreceivebuffer=20000", "-blockmaxsize=999000",
                                "-noparkdeepreorg", "-maxreorgdepth=-1"],
                            ["-prune=550"]]
         self.rpc_timeout = 120
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def setup_network(self):
         self.setup_nodes()
 
         self.prunedir = os.path.join(
             self.nodes[2].datadir, self.chain, 'blocks', '')
 
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[3])
-        connect_nodes(self.nodes[0], self.nodes[4])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 2)
+        self.connect_nodes(0, 2)
+        self.connect_nodes(0, 3)
+        self.connect_nodes(0, 4)
         self.sync_blocks(self.nodes[0:5])
 
     def setup_nodes(self):
         self.add_nodes(self.num_nodes, self.extra_args)
         self.start_nodes()
         self.import_deterministic_coinbase_privkeys()
 
     def create_big_chain(self):
         # Start by creating some coinbases we can spend later
         self.nodes[1].generate(200)
         self.sync_blocks(self.nodes[0:2])
         self.nodes[0].generate(150)
 
         # Then mine enough full blocks to create more than 550MiB of data
         mine_large_blocks(self.nodes[0], 645)
 
         self.sync_blocks(self.nodes[0:5])
 
     def test_height_min(self):
         assert os.path.isfile(os.path.join(
             self.prunedir, "blk00000.dat")), "blk00000.dat is missing, pruning too early"
         self.log.info("Success")
         self.log.info("Though we're already using more than 550MiB, current usage: {}".format(
                       calc_usage(self.prunedir)))
         self.log.info(
             "Mining 25 more blocks should cause the first block file to be pruned")
         # Pruning doesn't run until we're allocating another chunk, 20 full
         # blocks past the height cutoff will ensure this
         mine_large_blocks(self.nodes[0], 25)
 
         # Wait for blk00000.dat to be pruned
         self.wait_until(
             lambda: not os.path.isfile(
                 os.path.join(self.prunedir, "blk00000.dat")),
             timeout=30)
 
         self.log.info("Success")
         usage = calc_usage(self.prunedir)
         self.log.info("Usage should be below target: {}".format(usage))
         assert_greater_than(550, usage)
 
     def create_chain_with_staleblocks(self):
         # Create stale blocks in manageable sized chunks
         self.log.info(
             "Mine 24 (stale) blocks on Node 1, followed by 25 (main chain) block reorg from Node 0, for 12 rounds")
 
         for _ in range(12):
             # Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain
             # Node 2 stays connected, so it hears about the stale blocks and
             # then reorg's when node0 reconnects
-            disconnect_nodes(self.nodes[0], self.nodes[1])
-            disconnect_nodes(self.nodes[0], self.nodes[2])
+            self.disconnect_nodes(0, 1)
+            self.disconnect_nodes(0, 2)
             # Mine 24 blocks in node 1
             mine_large_blocks(self.nodes[1], 24)
 
             # Reorg back with 25 block chain from node 0
             mine_large_blocks(self.nodes[0], 25)
 
             # Create connections in the order so both nodes can see the reorg
             # at the same time
-            connect_nodes(self.nodes[0], self.nodes[1])
-            connect_nodes(self.nodes[0], self.nodes[2])
+            self.connect_nodes(0, 1)
+            self.connect_nodes(0, 2)
             self.sync_blocks(self.nodes[0:3])
 
         self.log.info("Usage can be over target because of high stale rate: {}".format(
                       calc_usage(self.prunedir)))
 
     def reorg_test(self):
         # Node 1 will mine a 300 block chain starting 287 blocks back from Node
         # 0 and Node 2's tip. This will cause Node 2 to do a reorg requiring
         # 288 blocks of undo data to the reorg_test chain.
 
         height = self.nodes[1].getblockcount()
         self.log.info("Current block height: {}".format(height))
 
         self.forkheight = height - 287
         self.forkhash = self.nodes[1].getblockhash(self.forkheight)
         self.log.info("Invalidating block {} at height {}".format(
             self.forkhash, self.forkheight))
         self.nodes[1].invalidateblock(self.forkhash)
 
         # We've now switched to our previously mined-24 block fork on node 1, but that's not what we want.
         # So invalidate that fork as well, until we're on the same chain as
         # node 0/2 (but at an ancestor 288 blocks ago)
         mainchainhash = self.nodes[0].getblockhash(self.forkheight - 1)
         curhash = self.nodes[1].getblockhash(self.forkheight - 1)
         while curhash != mainchainhash:
             self.nodes[1].invalidateblock(curhash)
             curhash = self.nodes[1].getblockhash(self.forkheight - 1)
 
         assert self.nodes[1].getblockcount() == self.forkheight - 1
         self.log.info("New best height: {}".format(
                       self.nodes[1].getblockcount()))
 
         # Disconnect node1 and generate the new chain
-        disconnect_nodes(self.nodes[0], self.nodes[1])
-        disconnect_nodes(self.nodes[1], self.nodes[2])
+        self.disconnect_nodes(0, 1)
+        self.disconnect_nodes(1, 2)
 
         self.log.info("Generating new longer chain of 300 more blocks")
         self.nodes[1].generate(300)
 
         self.log.info("Reconnect nodes")
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[2])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 2)
         self.sync_blocks(self.nodes[0:3], timeout=120)
 
         self.log.info("Verify height on node 2: {}".format(
                       self.nodes[2].getblockcount()))
         self.log.info("Usage possibly still high because of stale blocks in block files: {}".format(
                       calc_usage(self.prunedir)))
 
         self.log.info(
             "Mine 220 more large blocks so we have requisite history")
 
         mine_large_blocks(self.nodes[0], 220)
         self.sync_blocks(self.nodes[0:3], timeout=120)
 
         usage = calc_usage(self.prunedir)
         self.log.info("Usage should be below target: {}".format(usage))
         assert_greater_than(550, usage)
 
     def reorg_back(self):
         """Reorg node 2 back onto the original main chain and confirm it can
         re-download blocks it had already pruned away.

         Relies on state saved by earlier stages: self.forkhash/self.forkheight
         (the fork point) and self.mainchainheight/self.mainchainhash2 (the
         old main-chain tip recorded in run_test).
         """
         # Verify that a block on the old main chain fork has been pruned away
         assert_raises_rpc_error(
             -1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
         with self.nodes[2].assert_debug_log(expected_msgs=['block verification stopping at height', '(pruning, no data)']):
             self.nodes[2].verifychain(checklevel=4, nblocks=0)
         self.log.info(
             "Will need to redownload block {}".format(self.forkheight))
 
         # Verify that we have enough history to reorg back to the fork point.
         # Although this is more than 288 blocks, because this chain was written
         # more recently and only its other 299 small and 220 large blocks are in
         # the block files after it, it is expected to still be retained.
         self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight))
 
         first_reorg_height = self.nodes[2].getblockcount()
         curchainhash = self.nodes[2].getblockhash(self.mainchainheight)
         self.nodes[2].invalidateblock(curchainhash)
         goalbestheight = self.mainchainheight
         goalbesthash = self.mainchainhash2
 
         # As of 0.10 the current block download logic is not able to reorg to
         # the original chain created in create_chain_with_stale_blocks because
         # it doesn't know of any peer that's on that chain from which to
         # redownload its missing blocks. Invalidate the reorg_test chain in
         # node 0 as well, it can successfully switch to the original chain
         # because it has all the block data. However it must mine enough blocks
         # to have a more work chain than the reorg_test chain in order to
         # trigger node 2's block download logic. At this point node 2 is within
         # 288 blocks of the fork point so it will preserve its ability to
         # reorg.
         if self.nodes[2].getblockcount() < self.mainchainheight:
             blocks_to_mine = first_reorg_height + 1 - self.mainchainheight
             self.log.info(
                 "Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed: {}".format(
                     blocks_to_mine))
             self.nodes[0].invalidateblock(curchainhash)
             assert_equal(self.nodes[0].getblockcount(), self.mainchainheight)
             assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2)
             goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1]
             goalbestheight = first_reorg_height + 1
 
         self.log.info(
             "Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
         # Wait for Node 2 to reorg to proper height
         self.wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight,
                         timeout=900)
         assert_equal(self.nodes[2].getbestblockhash(), goalbesthash)
         # Verify we can now have the data for a block previously pruned
         assert_equal(self.nodes[2].getblock(
             self.forkhash)["height"], self.forkheight)
 
     def manual_test(self, node_number, use_timestamp):
         """Exercise the pruneblockchain RPC on a manually pruned node.

         node_number: index of the node to restart with -prune=1.
         use_timestamp: when True, prune targets are expressed as block
         timestamps (+ TIMESTAMP_WINDOW) instead of raw block heights.
         """
         # at this point, node has 995 blocks and has not yet run in prune mode
         self.start_node(node_number)
         node = self.nodes[node_number]
         assert_equal(node.getblockcount(), 995)
         assert_raises_rpc_error(-1, "not in prune mode",
                                 node.pruneblockchain, 500)
 
         # now re-start in manual pruning mode
         self.restart_node(node_number, extra_args=["-prune=1"])
         node = self.nodes[node_number]
         assert_equal(node.getblockcount(), 995)
 
         def height(index):
             # Translate a block height into the prune target understood by
             # this run (raw height, or block time + TIMESTAMP_WINDOW).
             if use_timestamp:
                 return node.getblockheader(node.getblockhash(index))[
                     "time"] + TIMESTAMP_WINDOW
             else:
                 return index
 
         def prune(index):
             # Prune up to the given height and check the reported pruneheight.
             ret = node.pruneblockchain(height=height(index))
             assert_equal(ret, node.getblockchaininfo()['pruneheight'])
 
         def has_block(index):
             # Whether block file blk{index:05}.dat still exists on disk.
             return os.path.isfile(os.path.join(
                 self.nodes[node_number].datadir, self.chain, "blocks", "blk{:05}.dat".format(index)))
 
         # should not prune because chain tip of node 3 (995) < PruneAfterHeight
         # (1000)
         assert_raises_rpc_error(
             -1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
 
         # Save block transaction count before pruning, assert value
         block1_details = node.getblock(node.getblockhash(1))
         assert_equal(block1_details["nTx"], len(block1_details["tx"]))
 
         # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
         node.generate(6)
         assert_equal(node.getblockchaininfo()["blocks"], 1001)
 
         # Pruned block should still know the number of transactions
         assert_equal(node.getblockheader(node.getblockhash(1))
                      ["nTx"], block1_details["nTx"])
 
         # negative heights should raise an exception
         assert_raises_rpc_error(-8, "Negative", node.pruneblockchain, -10)
 
         # height=100 too low to prune first block file so this is a no-op
         prune(100)
         assert has_block(
             0), "blk00000.dat is missing when should still be there"
 
         # Does nothing
         node.pruneblockchain(height(0))
         assert has_block(
             0), "blk00000.dat is missing when should still be there"
 
         # height=500 should prune first file
         prune(500)
         assert not has_block(
             0), "blk00000.dat is still there, should be pruned by now"
         assert has_block(
             1), "blk00001.dat is missing when should still be there"
 
         # height=650 should prune second file
         prune(650)
         assert not has_block(
             1), "blk00001.dat is still there, should be pruned by now"
 
         # height=1000 should not prune anything more, because tip-288 is in
         # blk00002.dat.
         prune(1000)
         # Fixed failure message: this asserts the file is still PRESENT, so
         # the message (shown only when the assert fails, i.e. the file is
         # gone) must describe a missing file, matching the asserts above.
         assert has_block(
             2), "blk00002.dat is missing when should still be there"
 
         # advance the tip so blk00002.dat and blk00003.dat can be pruned (the
         # last 288 blocks should now be in blk00004.dat)
         node.generate(288)
         prune(1000)
         assert not has_block(
             2), "blk00002.dat is still there, should be pruned by now"
         assert not has_block(
             3), "blk00003.dat is still there, should be pruned by now"
 
         # stop node, start back up with auto-prune at 550 MiB, make sure still
         # runs
         self.restart_node(node_number, extra_args=["-prune=550"])
 
         self.log.info("Success")
 
     def wallet_test(self):
         """Check that wallets on pruning nodes survive restarts and rescans."""
         # check that the pruning node's wallet is still in good shape
         self.log.info("Stop and start pruning node to trigger wallet rescan")
         self.restart_node(
             2, extra_args=["-prune=550", "-noparkdeepreorg", "-maxreorgdepth=-1"])
         self.log.info("Success")
 
         # check that wallet loads successfully when restarting a pruned node after IBD.
         # this was reported to fail in #7494.
         self.log.info("Syncing node 5 to test wallet")
-        connect_nodes(self.nodes[0], self.nodes[5])
+        self.connect_nodes(0, 5)
         nds = [self.nodes[0], self.nodes[5]]
         self.sync_blocks(nds, wait=5, timeout=300)
         self.restart_node(
             5, extra_args=["-prune=550", "-noparkdeepreorg", "-maxreorgdepth=-1"])
         self.log.info("Success")
 
     def run_test(self):
         """Run every pruning scenario in order; the chain layout produced by
         each stage is drawn in the diagrams below."""
         self.log.info("Warning! This test requires 4GB of disk space")
 
         self.log.info("Mining a big blockchain of 995 blocks")
         self.create_big_chain()
         # Chain diagram key:
         # *   blocks on main chain
         # +,&,$,@ blocks on other forks
         # X   invalidated block
         # N1  Node 1
         #
         # Start by mining a simple chain that all nodes have
         # N0=N1=N2 **...*(995)
 
         # stop manual-pruning node with 995 blocks
         self.stop_node(3)
         self.stop_node(4)
 
         self.log.info(
             "Check that we haven't started pruning yet because we're below PruneAfterHeight")
         self.test_height_min()
         # Extend this chain past the PruneAfterHeight
         # N0=N1=N2 **...*(1020)
 
         self.log.info(
             "Check that we'll exceed disk space target if we have a very high stale block rate")
         self.create_chain_with_staleblocks()
         # Disconnect N0
         # And mine a 24 block chain on N1 and a separate 25 block chain on N0
         # N1=N2 **...*+...+(1044)
         # N0    **...**...**(1045)
         #
         # reconnect nodes causing reorg on N1 and N2
         # N1=N2 **...*(1020) *...**(1045)
         #                   \
         #                    +...+(1044)
         #
         # repeat this process until you have 12 stale forks hanging off the
         # main chain on N1 and N2
         # N0    *************************...***************************(1320)
         #
         # N1=N2 **...*(1020) *...**(1045) *..         ..**(1295) *...**(1320)
         #                   \            \                      \
         #                    +...+(1044)  &..                    $...$(1319)
 
         # Save some current chain state for later use
         self.mainchainheight = self.nodes[2].getblockcount()  # 1320
         self.mainchainhash2 = self.nodes[2].getblockhash(self.mainchainheight)
 
         self.log.info("Check that we can survive a 288 block reorg still")
         self.reorg_test()  # (1033, )
         # Now create a 288 block reorg by mining a longer chain on N1
         # First disconnect N1
         # Then invalidate 1033 on main chain and 1032 on fork so height is 1032 on main chain
         # N1   **...*(1020) **...**(1032)X..
         #                  \
         #                   ++...+(1031)X..
         #
         # Now mine 300 more blocks on N1
         # N1    **...*(1020) **...**(1032) @@...@(1332)
         #                 \               \
         #                  \               X...
         #                   \                 \
         #                    ++...+(1031)X..   ..
         #
         # Reconnect nodes and mine 220 more blocks on N1
         # N1    **...*(1020) **...**(1032) @@...@@@(1552)
         #                 \               \
         #                  \               X...
         #                   \                 \
         #                    ++...+(1031)X..   ..
         #
         # N2    **...*(1020) **...**(1032) @@...@@@(1552)
         #                 \               \
         #                  \               *...**(1320)
         #                   \                 \
         #                    ++...++(1044)     ..
         #
         # N0    ********************(1032) @@...@@@(1552)
         #                                 \
         #                                  *...**(1320)
 
         self.log.info(
             "Test that we can rerequest a block we previously pruned if needed for a reorg")
         self.reorg_back()
         # Verify that N2 still has block 1033 on current chain (@), but not on main chain (*)
         # Invalidate 1033 on current chain (@) on N2 and we should be able to reorg to
         # original main chain (*), but will require redownload of some blocks
         # In order to have a peer we think we can download from, must also perform this invalidation
         # on N0 and mine a new longest chain to trigger.
         # Final result:
         # N0    ********************(1032) **...****(1553)
         #                                 \
         #                                  X@...@@@(1552)
         #
         # N2    **...*(1020) **...**(1032) **...****(1553)
         #                 \               \
         #                  \               X@...@@@(1552)
         #                   \
         #                    +..
         #
         # N1 doesn't change because 1033 on main chain (*) is invalid
 
         self.log.info("Test manual pruning with block indices")
         self.manual_test(3, use_timestamp=False)
 
         self.log.info("Test manual pruning with timestamps")
         self.manual_test(4, use_timestamp=True)
 
         self.log.info("Test wallet re-scan")
         self.wallet_test()
 
         self.log.info("Done")
 
 
 # Script entry point: run the pruning test when executed directly.
 if __name__ == '__main__':
     PruneTest().main()
diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py
index add6fddfe..88ca72001 100755
--- a/test/functional/interface_zmq.py
+++ b/test/functional/interface_zmq.py
@@ -1,653 +1,648 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the ZMQ notification interface."""
 import struct
 from io import BytesIO
 from time import sleep
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     ADDRESS_ECREG_UNSPENDABLE,
 )
 from test_framework.blocktools import (
     create_block,
     create_coinbase,
     make_conform_to_ctor,
 )
 from test_framework.messages import CTransaction, FromHex, hash256
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_raises_rpc_error,
-    connect_nodes,
-    disconnect_nodes,
-)
+from test_framework.util import assert_equal, assert_raises_rpc_error
 
 # Test may be skipped and not have zmq installed
 try:
     import zmq
 except ImportError:
     pass
 
 
 def hash256_reversed(byte_str):
     """Return hash256(byte_str) with its byte order reversed."""
     return bytes(reversed(hash256(byte_str)))
 
 
 class ZMQSubscriber:
     """Wrap a SUB socket subscribed to one topic and verify that the
     publisher's per-topic sequence numbers arrive strictly in order."""
 
     def __init__(self, socket, topic):
         # Sequence number we expect on the next message for this topic.
         self.sequence = 0
         self.socket = socket
         self.topic = topic
 
         self.socket.setsockopt(zmq.SUBSCRIBE, self.topic)
 
     def _recv_validated(self):
         # Pull one multipart message, check its topic and sequence number,
         # advance the expected sequence and hand back the payload.
         recv_topic, payload, recv_seq = self.socket.recv_multipart()
         assert_equal(recv_topic, self.topic)
         assert_equal(struct.unpack('<I', recv_seq)[-1], self.sequence)
         self.sequence += 1
         return payload
 
     def receive(self):
         """Return the raw payload of the next notification."""
         return self._recv_validated()
 
     def receive_sequence(self):
         """Parse the next 'sequence' notification into a
         (hash hex, label char, mempool sequence or None) tuple."""
         payload = self._recv_validated()
         hash = payload[:32].hex()
         label = chr(payload[32])
         if len(payload) == 32 + 1 + 8:
             # Mempool add/remove events carry a trailing mempool sequence.
             mempool_sequence = struct.unpack("<Q", payload[32 + 1:])[0]
             assert label == "A" or label == "R"
         else:
             # Block connect/disconnect events do not.
             mempool_sequence = None
             assert label == "D" or label == "C"
         return (hash, label, mempool_sequence)
 
 
 class ZMQTest (BitcoinTestFramework):
     def set_test_params(self):
         """Run two nodes; whitelist localhost on both so relay is immediate."""
         self.num_nodes = 2
         node_args = ["-whitelist=noban@127.0.0.1"]
         self.extra_args = [node_args] * self.num_nodes
 
     def skip_test_if_missing_module(self):
         """Skip unless python3-zmq and a zmq-enabled bitcoind are available."""
         for requirement_check in (self.skip_if_no_py3_zmq,
                                   self.skip_if_no_bitcoind_zmq):
             requirement_check()
 
     def run_test(self):
         """Run all ZMQ sub-tests in one context, always tearing it down."""
         self.ctx = zmq.Context()
         try:
             for subtest in (self.test_basic,
                             self.test_sequence,
                             self.test_mempool_sync,
                             self.test_reorg,
                             self.test_multiple_interfaces):
                 subtest()
         finally:
             # Destroy the ZMQ context.
             self.log.debug("Destroying ZMQ context")
             self.ctx.destroy(linger=None)
 
     def test_basic(self):
         """Check hashblock/hashtx/rawblock/rawtx notifications for mined
         blocks and a wallet transaction, plus the getzmqnotifications RPC."""
 
         # Invalid zmq arguments don't take down the node, see #17185.
         self.restart_node(0, ["-zmqpubrawtx=foo", "-zmqpubhashtx=bar"])
 
         address = 'tcp://127.0.0.1:28332'
         sockets = []
         subs = []
         services = [b"hashblock", b"hashtx", b"rawblock", b"rawtx"]
         for service in services:
             sockets.append(self.ctx.socket(zmq.SUB))
             sockets[-1].set(zmq.RCVTIMEO, 60000)
             subs.append(ZMQSubscriber(sockets[-1], service))
 
         # Subscribe to all available topics.
         hashblock = subs[0]
         hashtx = subs[1]
         rawblock = subs[2]
         rawtx = subs[3]
 
         self.restart_node(
             0,
             self.extra_args[0] + [
                 f"-zmqpub{sub.topic.decode()}={address}" for sub in [
                     hashblock, hashtx, rawblock, rawtx]]
         )
 
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         for socket in sockets:
             socket.connect(address)
         # Relax so that the subscriber is ready before publishing zmq messages
         sleep(0.2)
 
         num_blocks = 5
         self.log.info(
             "Generate {0} blocks (and {0} coinbase txes)".format(num_blocks))
         genhashes = self.nodes[0].generatetoaddress(
             num_blocks, ADDRESS_ECREG_UNSPENDABLE)
 
         self.sync_all()
 
         for x in range(num_blocks):
             # Should receive the coinbase txid.
             txid = hashtx.receive()
 
             # Should receive the coinbase raw transaction.
             hex = rawtx.receive()
             tx = CTransaction()
             tx.deserialize(BytesIO(hex))
             tx.calc_sha256()
             assert_equal(tx.hash, txid.hex())
 
             # Should receive the generated raw block.
             block = rawblock.receive()
             assert_equal(genhashes[x], hash256_reversed(block[:80]).hex())
 
             # Should receive the generated block hash.
             hash = hashblock.receive().hex()
             assert_equal(genhashes[x], hash)
             # The block should only have the coinbase txid.
             assert_equal([txid.hex()], self.nodes[1].getblock(hash)["tx"])
 
         if self.is_wallet_compiled():
             self.log.info("Wait for tx from second node")
             payment_txid = self.nodes[1].sendtoaddress(
                 self.nodes[0].getnewaddress(), 1000000)
             self.sync_all()
 
             # Should receive the broadcasted txid.
             txid = hashtx.receive()
             assert_equal(payment_txid, txid.hex())
 
             # Should receive the broadcasted raw transaction.
             hex = rawtx.receive()
             assert_equal(payment_txid, hash256_reversed(hex).hex())
 
             # Mining the block with this tx should result in second notification
             # after coinbase tx notification
             self.nodes[0].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
             hashtx.receive()
             txid = hashtx.receive()
             assert_equal(payment_txid, txid.hex())
 
         self.log.info("Test the getzmqnotifications RPC")
         assert_equal(self.nodes[0].getzmqnotifications(), [
             {"type": "pubhashblock", "address": address, "hwm": 1000},
             {"type": "pubhashtx", "address": address, "hwm": 1000},
             {"type": "pubrawblock", "address": address, "hwm": 1000},
             {"type": "pubrawtx", "address": address, "hwm": 1000},
         ])
 
         assert_equal(self.nodes[1].getzmqnotifications(), [])
 
     def test_reorg(self):
         """Check hashblock/hashtx notifications across a 2-block reorg and a
         manual invalidateblock."""
         if not self.is_wallet_compiled():
             self.log.info("Skipping reorg test because wallet is disabled")
             return
 
         address = 'tcp://127.0.0.1:28333'
 
         services = [b"hashblock", b"hashtx"]
         sockets = []
         subs = []
         for service in services:
             sockets.append(self.ctx.socket(zmq.SUB))
             # 2 second timeout to check end of notifications
             sockets[-1].set(zmq.RCVTIMEO, 2000)
             subs.append(ZMQSubscriber(sockets[-1], service))
 
         # Subscribe to all available topics.
         hashblock = subs[0]
         hashtx = subs[1]
 
         # Should only notify the tip if a reorg occurs
         self.restart_node(
             0, self.extra_args[0] + [f'-zmqpub{sub.topic.decode()}={address}'
                                      for sub in [hashblock, hashtx]])
         for socket in sockets:
             socket.connect(address)
         # Relax so that the subscriber is ready before publishing zmq messages
         sleep(0.2)
 
         # Generate 1 block in nodes[0] with 1 mempool tx and receive all
         # notifications
         payment_txid = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), 1000000)
         disconnect_block = self.nodes[0].generatetoaddress(
             1, ADDRESS_ECREG_UNSPENDABLE)[0]
         disconnect_cb = self.nodes[0].getblock(disconnect_block)["tx"][0]
         assert_equal(
             self.nodes[0].getbestblockhash(),
             hashblock.receive().hex())
         assert_equal(hashtx.receive().hex(), payment_txid)
         assert_equal(hashtx.receive().hex(), disconnect_cb)
 
         # Generate 2 blocks in nodes[1] to a different address to ensure split
         connect_blocks = self.nodes[1].generatetoaddress(
             2, ADDRESS_ECREG_P2SH_OP_TRUE)
 
         # nodes[0] will reorg chain after connecting back nodes[1]
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         # tx in mempool valid but not advertised
         self.sync_blocks()
 
         # Should receive nodes[1] tip
         assert_equal(
             self.nodes[1].getbestblockhash(),
             hashblock.receive().hex())
 
         # During reorg:
         # Get old payment transaction notification from disconnect and
         # disconnected cb
         assert_equal(hashtx.receive().hex(), payment_txid)
         assert_equal(hashtx.receive().hex(), disconnect_cb)
         # And the payment transaction again due to mempool entry
         assert_equal(hashtx.receive().hex(), payment_txid)
         assert_equal(hashtx.receive().hex(), payment_txid)
         # And the new connected coinbases
         for i in [0, 1]:
             assert_equal(
                 hashtx.receive().hex(),
                 self.nodes[1].getblock(
                     connect_blocks[i])["tx"][0])
 
         # If we do a simple invalidate we announce the disconnected coinbase
         self.nodes[0].invalidateblock(connect_blocks[1])
         assert_equal(
             hashtx.receive().hex(),
             self.nodes[1].getblock(
                 connect_blocks[1])["tx"][0])
         # And the current tip
         assert_equal(
             hashtx.receive().hex(),
             self.nodes[1].getblock(
                 connect_blocks[0])["tx"][0])
 
     def create_conflicting_tx(self):
         """Create a transaction that is initially added to node0's mempool
         and is then rejected by a transaction created and included into a
         block by node1.

         Returns (block_hash, txid_to_be_replaced, replacement_txid).
         """
         utxo = self.nodes[1].listunspent()[0]
 
         def send_conflicting_transaction(send_node):
             """Send a transaction using an identical utxo as input and
             a different address as output each time the function is
             called. Return the TxId."""
             address = self.nodes[1].getnewaddress()
             change_address = self.nodes[1].getrawchangeaddress()
             tx = self.nodes[1].signrawtransactionwithwallet(
                 self.nodes[1].createrawtransaction(
                     inputs=[{"txid": utxo["txid"], "vout": utxo["vout"]}],
                     outputs=[{address: 5_000_000},
                              {change_address: utxo["amount"] - 5_001_000}]
                 )
             )
             return send_node.sendrawtransaction(tx["hex"])
 
         # Split the network so each node accepts one of the two conflicting
         # spends into its own mempool.
-        disconnect_nodes(self.nodes[0], self.nodes[1])
+        self.disconnect_nodes(0, 1)
         txid_to_be_replaced = send_conflicting_transaction(self.nodes[0])
         replacement_txid = send_conflicting_transaction(self.nodes[1])
         block_hash = self.nodes[1].generatetoaddress(
             1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_all()
 
         return block_hash, txid_to_be_replaced, replacement_txid
 
     def test_sequence(self):
         """
         Sequence zmq notifications give every blockhash and txhash in order
         of processing, regardless of IBD, re-orgs, etc.
         Format of messages:
         <32-byte hash>C :                 Blockhash connected
         <32-byte hash>D :                 Blockhash disconnected
         <32-byte hash>R<8-byte LE uint> : Transactionhash removed from mempool
                                           for non-block inclusion reason
         <32-byte hash>A<8-byte LE uint> : Transactionhash added mempool
         """
         self.log.info("Testing 'sequence' publisher")
         address = 'tcp://127.0.0.1:28333'
         socket = self.ctx.socket(zmq.SUB)
         socket.set(zmq.RCVTIMEO, 60000)
         seq = ZMQSubscriber(socket, b'sequence')
 
         self.restart_node(
             0, self.extra_args[0] + [f'-zmqpub{seq.topic.decode()}={address}'])
         socket.connect(address)
         # Relax so that the subscriber is ready before publishing zmq messages
         sleep(0.2)
 
         # Mempool sequence number starts at 1
         seq_num = 1
 
         # Generate 1 block in nodes[0] and receive all notifications
         dc_block = self.nodes[0].generatetoaddress(
             1, ADDRESS_ECREG_UNSPENDABLE)[0]
 
         # Note: We are not notified of any block transactions, coinbase or
         # mined
         assert_equal((self.nodes[0].getbestblockhash(), "C", None),
                      seq.receive_sequence())
 
         # Generate 2 blocks in nodes[1] to a different address to ensure
         # a chain split
         self.nodes[1].generatetoaddress(2, ADDRESS_ECREG_P2SH_OP_TRUE)
 
         # nodes[0] will reorg chain after connecting back nodes[1]
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
 
         # Then we receive all block (dis)connect notifications for the
         # 2 block reorg
         assert_equal((dc_block, "D", None), seq.receive_sequence())
         block_count = self.nodes[1].getblockcount()
         assert_equal((self.nodes[1].getblockhash(block_count - 1), "C", None),
                      seq.receive_sequence())
         assert_equal((self.nodes[1].getblockhash(block_count), "C", None),
                      seq.receive_sequence())
 
         # Rest of test requires wallet functionality
         if self.is_wallet_compiled():
             (block_hash, txid_to_be_replaced, replacement_txid
              ) = self.create_conflicting_tx()
             self.log.info(
                 "Testing sequence notifications with mempool sequence values")
             # Should receive the initially broadcasted txid.
             assert_equal((txid_to_be_replaced, "A", seq_num),
                          seq.receive_sequence())
             seq_num += 1
 
             self.log.info("Testing a tx removal notification")
             # Next we receive a notification for the transaction removal
             assert_equal((txid_to_be_replaced, "R", seq_num),
                          seq.receive_sequence())
             seq_num += 1
             # Then we see the block notification
             assert_equal((block_hash, "C", None), seq.receive_sequence())
             # There is no sequence notification for the transaction that was
             # never in node0's mempool, but it can be found in the block.
             assert replacement_txid in self.nodes[0].getblock(block_hash)["tx"]
 
             self.log.info("Wait for tx from second node")
             payment_txid = self.nodes[1].sendtoaddress(
                 address=self.nodes[0].getnewaddress(), amount=5_000_000)
             self.sync_all()
             assert_equal((payment_txid, "A", seq_num), seq.receive_sequence())
             seq_num += 1
 
             # Doesn't get published when mined, make a block and tx to "flush"
             # the possibility though the mempool sequence number does go up by
             # the number of transactions removed from the mempool by the block
             # mining it.
             mempool_size = len(self.nodes[0].getrawmempool())
             c_block = self.nodes[0].generatetoaddress(
                 1, ADDRESS_ECREG_UNSPENDABLE)[0]
             self.sync_all()
             # Make sure the number of mined transactions matches the number of
             # txs out of mempool
             mempool_size_delta = mempool_size - \
                 len(self.nodes[0].getrawmempool())
             assert_equal(len(self.nodes[0].getblock(c_block)["tx"]) - 1,
                          mempool_size_delta)
             seq_num += mempool_size_delta
             payment_txid_2 = self.nodes[1].sendtoaddress(
                 self.nodes[0].getnewaddress(), 1_000_000)
             self.sync_all()
             assert_equal((c_block, "C", None), seq.receive_sequence())
             assert_equal((payment_txid_2, "A", seq_num),
                          seq.receive_sequence())
             seq_num += 1
 
             # Spot check getrawmempool results that they only show up when
             # asked for
             assert isinstance(self.nodes[0].getrawmempool(), list)
             assert isinstance(
                 self.nodes[0].getrawmempool(mempool_sequence=False),
                 list)
             assert "mempool_sequence" not in self.nodes[0].getrawmempool(
                 verbose=True)
             assert_raises_rpc_error(
                 -8, "Verbose results cannot contain mempool sequence values.",
                 self.nodes[0].getrawmempool, True, True)
             assert_equal(self.nodes[0].getrawmempool(
                 mempool_sequence=True)["mempool_sequence"],
                 seq_num)
 
             self.log.info("Testing reorg notifications")
             # Manually invalidate the last block to test mempool re-entry
             # N.B. This part could be made more lenient in exact ordering
             # since it greatly depends on inner-workings of blocks/mempool
             # during "deep" re-orgs. Probably should "re-construct"
             # blockchain/mempool state from notifications instead.
             block_count = self.nodes[0].getblockcount()
             best_hash = self.nodes[0].getbestblockhash()
             self.nodes[0].invalidateblock(best_hash)
             # Bit of room to make sure transaction things happened
             sleep(2)
 
             # Make sure getrawmempool mempool_sequence results aren't "queued"
             # but immediately reflective of the time they were gathered.
             assert self.nodes[0].getrawmempool(
                 mempool_sequence=True)["mempool_sequence"] > seq_num
 
             assert_equal((best_hash, "D", None), seq.receive_sequence())
             assert_equal((payment_txid, "A", seq_num), seq.receive_sequence())
             seq_num += 1
 
             # Other things may happen but aren't wallet-deterministic so we
             # don't test for them currently
             self.nodes[0].reconsiderblock(best_hash)
             self.nodes[1].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
             self.sync_all()
 
             self.log.info("Evict mempool transaction by block conflict")
             orig_txid = self.nodes[0].sendtoaddress(
                 address=self.nodes[0].getnewaddress(), amount=1_000_000)
 
             # More to be simply mined
             more_tx = []
             for _ in range(5):
                 more_tx.append(self.nodes[0].sendtoaddress(
                     self.nodes[0].getnewaddress(), 100_000))
 
             raw_tx = self.nodes[0].getrawtransaction(orig_txid)
             block = create_block(
                 int(self.nodes[0].getbestblockhash(), 16),
                 create_coinbase(self.nodes[0].getblockcount() + 1))
             tx = FromHex(CTransaction(), raw_tx)
             block.vtx.append(tx)
             for txid in more_tx:
                 tx = FromHex(CTransaction(),
                              self.nodes[0].getrawtransaction(txid))
                 block.vtx.append(tx)
             make_conform_to_ctor(block)
             block.hashMerkleRoot = block.calc_merkle_root()
             block.solve()
             assert_equal(self.nodes[0].submitblock(block.serialize().hex()),
                          None)
             tip = self.nodes[0].getbestblockhash()
             assert_equal(int(tip, 16), block.sha256)
             orig_txid_2 = self.nodes[0].sendtoaddress(
                 address=self.nodes[0].getnewaddress(), amount=1_000_000)
 
             # Flush old notifications until evicted tx original entry
             (hash_str, label, mempool_seq) = seq.receive_sequence()
             while hash_str != orig_txid:
                 (hash_str, label, mempool_seq) = seq.receive_sequence()
             mempool_seq += 1
 
             # Added original tx
             assert_equal(label, "A")
             # More transactions to be simply mined
             for i in range(len(more_tx)):
                 assert_equal((more_tx[i], "A", mempool_seq),
                              seq.receive_sequence())
                 mempool_seq += 1
 
             # Removed RBF tests
 
             mempool_seq += 1
             assert_equal((tip, "C", None), seq.receive_sequence())
             mempool_seq += len(more_tx)
             # Last tx
             assert_equal((orig_txid_2, "A", mempool_seq),
                          seq.receive_sequence())
             mempool_seq += 1
             self.nodes[0].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
             # want to make sure we didn't break "consensus" for other tests
             self.sync_all()
 
     def test_mempool_sync(self):
         """
         Use sequence notification plus getrawmempool sequence results to
         "sync mempool"
         """
         if not self.is_wallet_compiled():
             self.log.info("Skipping mempool sync test")
             return
 
         self.log.info("Testing 'mempool sync' usage of sequence notifier")
         address = 'tcp://127.0.0.1:28333'
         socket = self.ctx.socket(zmq.SUB)
         socket.set(zmq.RCVTIMEO, 60000)
         seq = ZMQSubscriber(socket, b'sequence')
 
         self.restart_node(
             0, self.extra_args[0] + [f'-zmqpub{seq.topic.decode()}={address}'])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         socket.connect(address)
         # Relax so that the subscriber is ready before publishing zmq messages
         sleep(0.2)
 
         # In-memory counter, should always start at 1
         next_mempool_seq = self.nodes[0].getrawmempool(
             mempool_sequence=True)["mempool_sequence"]
         assert_equal(next_mempool_seq, 1)
 
         # Some transactions have been happening but we aren't consuming
         # zmq notifications yet or we lost a ZMQ message somehow and want
         # to start over
         txids = []
         num_txs = 5
         for _ in range(num_txs):
             txids.append(self.nodes[1].sendtoaddress(
                 address=self.nodes[0].getnewaddress(), amount=1_000_000))
         self.sync_all()
 
         # 1) Consume backlog until we get a mempool sequence number
         (hash_str, label, zmq_mem_seq) = seq.receive_sequence()
         while zmq_mem_seq is None:
             (hash_str, label, zmq_mem_seq) = seq.receive_sequence()
 
         assert label == "A"
         assert hash_str is not None
 
         # 2) We need to "seed" our view of the mempool
         mempool_snapshot = self.nodes[0].getrawmempool(mempool_sequence=True)
         mempool_view = set(mempool_snapshot["txids"])
         get_raw_seq = mempool_snapshot["mempool_sequence"]
         assert_equal(get_raw_seq, 6)
         # Snapshot may be too old compared to zmq message we read off latest
         while zmq_mem_seq >= get_raw_seq:
             sleep(2)
             mempool_snapshot = self.nodes[0].getrawmempool(
                 mempool_sequence=True)
             mempool_view = set(mempool_snapshot["txids"])
             get_raw_seq = mempool_snapshot["mempool_sequence"]
 
         # Things continue to happen in the "interim" while waiting for
         # snapshot results
         for _ in range(num_txs):
             txids.append(self.nodes[0].sendtoaddress(
                 address=self.nodes[0].getnewaddress(), amount=1_000_000))
         self.sync_all()
         self.create_conflicting_tx()
         self.nodes[0].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
         final_txid = self.nodes[0].sendtoaddress(
             address=self.nodes[0].getnewaddress(), amount=100_000)
 
         # 3) Consume ZMQ backlog until we get to "now" for the mempool snapshot
         while True:
             if zmq_mem_seq == get_raw_seq - 1:
                 break
             (hash_str, label, mempool_sequence) = seq.receive_sequence()
             if mempool_sequence is not None:
                 zmq_mem_seq = mempool_sequence
                 if zmq_mem_seq > get_raw_seq:
                     raise Exception(
                         f"We somehow jumped mempool sequence numbers! "
                         f"zmq_mem_seq: {zmq_mem_seq} > "
                         f"get_raw_seq: {get_raw_seq}")
 
         # 4) Moving forward, we apply the delta to our local view
         #  remaining txs + conflict (A, R, C) + 1 block connect + 1 final tx
         expected_sequence = get_raw_seq
         for _ in range(num_txs + 3 + 1 + 1):
             (hash_str, label, mempool_sequence) = seq.receive_sequence()
             if label == "A":
                 assert hash_str not in mempool_view
                 mempool_view.add(hash_str)
                 expected_sequence = mempool_sequence + 1
             elif label == "R":
                 assert hash_str in mempool_view
                 mempool_view.remove(hash_str)
                 expected_sequence = mempool_sequence + 1
             elif label == "C":
                 # (Attempt to) remove all txids from known block connects
                 block_txids = self.nodes[0].getblock(hash_str)["tx"][1:]
                 for txid in block_txids:
                     if txid in mempool_view:
                         expected_sequence += 1
                         mempool_view.remove(txid)
             elif label == "D":
                 # Not useful for mempool tracking per se
                 continue
             else:
                 raise Exception("Unexpected ZMQ sequence label!")
 
         assert_equal(self.nodes[0].getrawmempool(), [final_txid])
         assert_equal(
             self.nodes[0].getrawmempool(
                 mempool_sequence=True)["mempool_sequence"],
             expected_sequence)
 
         # 5) If you miss a zmq/mempool sequence number, go back to step (2)
 
         self.nodes[0].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
 
     def test_multiple_interfaces(self):
         # Set up two subscribers with different addresses
         subscribers = []
         for i in range(2):
             address = f"tcp://127.0.0.1:{28334 + i}"
             socket = self.ctx.socket(zmq.SUB)
             socket.set(zmq.RCVTIMEO, 60000)
             hashblock = ZMQSubscriber(socket, b"hashblock")
             socket.connect(address)
             subscribers.append({'address': address, 'hashblock': hashblock})
 
         self.restart_node(
             0,
             [f'-zmqpub{subscriber["hashblock"].topic.decode()}={subscriber["address"]}'
              for subscriber in subscribers])
 
         # Relax so that the subscriber is ready before publishing zmq messages
         sleep(0.2)
 
         # Generate 1 block in nodes[0] and receive all notifications
         self.nodes[0].generatetoaddress(1, ADDRESS_ECREG_UNSPENDABLE)
 
         # Should receive the same block hash on both subscribers
         assert_equal(self.nodes[0].getbestblockhash(),
                      subscribers[0]['hashblock'].receive().hex())
         assert_equal(self.nodes[0].getbestblockhash(),
                      subscribers[1]['hashblock'].receive().hex())
 
 
 if __name__ == '__main__':
     ZMQTest().main()
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index 72cab1c5c..5a8e82fa5 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -1,203 +1,201 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2017 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test mempool persistence.
 
 By default, bitcoind will dump mempool on shutdown and
 then reload it on startup. This can be overridden with
 the -persistmempool=0 command line option.
 
 Test is as follows:
 
   - start node0, node1 and node2. node1 has -persistmempool=0
   - create 5 transactions on node2 to its own address. Note that these
     are not sent to node0 or node1 addresses because we don't want
     them to be saved in the wallet.
   - check that node0 and node1 have 5 transactions in their mempools
   - shutdown all nodes.
   - startup node0. Verify that it still has 5 transactions
     in its mempool. Shutdown node0. This tests that by default the
     mempool is persistent.
   - startup node1. Verify that its mempool is empty. Shutdown node1.
     This tests that with -persistmempool=0, the mempool is not
     dumped to disk when the node is shut down.
   - Restart node0 with -persistmempool=0. Verify that its mempool is
     empty. Shutdown node0. This tests that with -persistmempool=0,
     the mempool is not loaded from disk on start up.
   - Restart node0 with -persistmempool. Verify that it has 5
     transactions in its mempool. This tests that -persistmempool=0
     does not overwrite a previously valid mempool stored on disk.
   - Remove node0 mempool.dat and verify savemempool RPC recreates it
     and verify that node1 can load it and has 5 transactions in its
     mempool.
   - Verify that savemempool throws when the RPC is called if
     node1 can't write to disk.
 
 """
 import os
 import time
 from decimal import Decimal
 
 from test_framework.p2p import P2PTxInvStore
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_equal,
     assert_greater_than_or_equal,
     assert_raises_rpc_error,
-    connect_nodes,
-    disconnect_nodes,
 )
 
 
 class MempoolPersistTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 3
         self.extra_args = [[], ["-persistmempool=0"], []]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         self.log.debug("Send 5 transactions from node2 (to its own address)")
         tx_creation_time_lower = int(time.time())
         for _ in range(5):
             last_txid = self.nodes[2].sendtoaddress(
                 self.nodes[2].getnewaddress(), Decimal("10"))
         node2_balance = self.nodes[2].getbalance()
         self.sync_all()
         tx_creation_time_higher = int(time.time())
 
         self.log.debug(
             "Verify that node0 and node1 have 5 transactions in their mempools")
         assert_equal(len(self.nodes[0].getrawmempool()), 5)
         assert_equal(len(self.nodes[1].getrawmempool()), 5)
 
         self.log.debug("Prioritize a transaction on node0")
         fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
         assert_equal(fees['base'], fees['modified'])
         self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
         fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
         assert_equal(fees['base'] + Decimal('10.0'), fees['modified'])
 
         tx_creation_time = self.nodes[0].getmempoolentry(txid=last_txid)[
             'time']
         assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower)
         assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time)
 
         # disconnect nodes & make a txn that remains in the unbroadcast set.
-        disconnect_nodes(self.nodes[0], self.nodes[1])
+        self.disconnect_nodes(0, 1)
         assert(len(self.nodes[0].getpeerinfo()) == 0)
         assert(len(self.nodes[0].p2ps) == 0)
         self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), Decimal("12000000"))
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.connect_nodes(0, 2)
 
         self.log.debug("Stop-start the nodes. Verify that node0 has the "
                        "transactions in its mempool and node1 does not. "
                        "Verify that node2 calculates its balance correctly "
                        "after loading wallet transactions.")
         self.stop_nodes()
         # Give this one a head-start, so we can be "extra-sure" that it didn't
         # load anything later
         # Also don't store the mempool, to keep the datadir clean
         self.start_node(1, extra_args=["-persistmempool=0"])
         self.start_node(0)
         self.start_node(2)
         # start_node is blocking on the mempool being loaded
         assert self.nodes[0].getmempoolinfo()["loaded"]
         assert self.nodes[2].getmempoolinfo()["loaded"]
         assert_equal(len(self.nodes[0].getrawmempool()), 6)
         assert_equal(len(self.nodes[2].getrawmempool()), 5)
         # The others have loaded their mempool. If node_1 loaded anything, we'd
         # probably notice by now:
         assert_equal(len(self.nodes[1].getrawmempool()), 0)
 
         self.log.debug('Verify prioritization is loaded correctly')
         fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
         assert_equal(fees['base'] + Decimal('10.00'), fees['modified'])
 
         self.log.debug('Verify time is loaded correctly')
         assert_equal(
             tx_creation_time,
             self.nodes[0].getmempoolentry(
                 txid=last_txid)['time'])
 
         # Verify accounting of mempool transactions after restart is correct
         # Flush mempool to wallet
         self.nodes[2].syncwithvalidationinterfacequeue()
         assert_equal(node2_balance, self.nodes[2].getbalance())
 
         # start node0 with wallet disabled so wallet transactions don't get
         # resubmitted
         self.log.debug(
             "Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
         self.stop_nodes()
         self.start_node(0, extra_args=["-persistmempool=0", "-disablewallet"])
         assert self.nodes[0].getmempoolinfo()["loaded"]
         assert_equal(len(self.nodes[0].getrawmempool()), 0)
 
         self.log.debug(
             "Stop-start node0. Verify that it has the transactions in its mempool.")
         self.stop_nodes()
         self.start_node(0)
         assert self.nodes[0].getmempoolinfo()["loaded"]
         assert_equal(len(self.nodes[0].getrawmempool()), 6)
 
         mempooldat0 = os.path.join(
             self.nodes[0].datadir, self.chain, 'mempool.dat')
         mempooldat1 = os.path.join(
             self.nodes[1].datadir, self.chain, 'mempool.dat')
         self.log.debug(
             "Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
         os.remove(mempooldat0)
         self.nodes[0].savemempool()
         assert os.path.isfile(mempooldat0)
 
         self.log.debug(
             "Stop nodes, make node1 use mempool.dat from node0. Verify it has 6 transactions")
         os.rename(mempooldat0, mempooldat1)
         self.stop_nodes()
         self.start_node(1, extra_args=[])
         assert self.nodes[1].getmempoolinfo()["loaded"]
         assert_equal(len(self.nodes[1].getrawmempool()), 6)
 
         self.log.debug(
             "Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
         # to test the exception we are creating a tmp folder called mempool.dat.new
         # which is an implementation detail that could change and break this
         # test
         mempooldotnew1 = mempooldat1 + '.new'
         os.mkdir(mempooldotnew1)
         assert_raises_rpc_error(-1, "Unable to dump mempool to disk",
                                 self.nodes[1].savemempool)
         os.rmdir(mempooldotnew1)
 
         self.test_persist_unbroadcast()
 
     def test_persist_unbroadcast(self):
         node0 = self.nodes[0]
         self.start_node(0)
 
         # clear out mempool
         node0.generate(1)
 
         # ensure node0 doesn't have any connections
         # make a transaction that will remain in the unbroadcast set
         assert(len(node0.getpeerinfo()) == 0)
         assert(len(node0.p2ps) == 0)
         node0.sendtoaddress(self.nodes[1].getnewaddress(), Decimal("12"))
 
         # shutdown, then startup with wallet disabled
         self.stop_nodes()
         self.start_node(0, extra_args=["-disablewallet"])
 
         # check that txn gets broadcast due to unbroadcast logic
         conn = node0.add_p2p_connection(P2PTxInvStore())
         # 15 min + 1 for buffer
         node0.mockscheduler(16 * 60)
         self.wait_until(lambda: len(conn.get_invs()) == 1)
 
 
 if __name__ == '__main__':
     MempoolPersistTest().main()
diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py
index 34484253d..0aa5cf78b 100755
--- a/test/functional/mining_basic.py
+++ b/test/functional/mining_basic.py
@@ -1,274 +1,270 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test mining RPCs
 
 - getmininginfo
 - getblocktemplate proposal mode
 - submitblock"""
 
 import copy
 from decimal import Decimal
 
 from test_framework.blocktools import TIME_GENESIS_BLOCK, create_coinbase
 from test_framework.messages import BLOCK_HEADER_SIZE, CBlock, CBlockHeader
 from test_framework.p2p import P2PDataStore
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_raises_rpc_error,
-    connect_nodes,
-)
+from test_framework.util import assert_equal, assert_raises_rpc_error
 
 
 def assert_template(node, block, expect, rehash=True):
     if rehash:
         block.hashMerkleRoot = block.calc_merkle_root()
     rsp = node.getblocktemplate(
         template_request={
             'data': block.serialize().hex(),
             'mode': 'proposal'})
     assert_equal(rsp, expect)
 
 
 class MiningTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.setup_clean_chain = True
         self.supports_cli = False
 
     def mine_chain(self):
         self.log.info('Create some old blocks')
         node = self.nodes[0]
         address = node.get_deterministic_priv_key().address
         for t in range(TIME_GENESIS_BLOCK,
                        TIME_GENESIS_BLOCK + 200 * 600, 600):
             node.setmocktime(t)
             node.generatetoaddress(1, address)
         mining_info = node.getmininginfo()
         assert_equal(mining_info['blocks'], 200)
         assert_equal(mining_info['currentblocktx'], 0)
         assert_equal(mining_info['currentblocksize'], 1000)
         self.restart_node(0)
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
 
     def run_test(self):
         self.mine_chain()
         node = self.nodes[0]
 
         def assert_submitblock(block, result_str_1, result_str_2=None):
             block.solve()
             result_str_2 = result_str_2 or 'duplicate-invalid'
             assert_equal(result_str_1, node.submitblock(
                 hexdata=block.serialize().hex()))
             assert_equal(result_str_2, node.submitblock(
                 hexdata=block.serialize().hex()))
 
         self.log.info('getmininginfo')
         mining_info = node.getmininginfo()
         assert_equal(mining_info['blocks'], 200)
         assert_equal(mining_info['chain'], self.chain)
         assert 'currentblocktx' not in mining_info
         assert 'currentblocksize' not in mining_info
         assert_equal(mining_info['difficulty'],
                      Decimal('4.656542373906925E-10'))
         assert_equal(mining_info['networkhashps'],
                      Decimal('0.003333333333333334'))
         assert_equal(mining_info['pooledtx'], 0)
 
         # Mine a block to leave initial block download
         node.generatetoaddress(1, node.get_deterministic_priv_key().address)
         tmpl = node.getblocktemplate()
         self.log.info("getblocktemplate: Test capability advertised")
         assert 'proposal' in tmpl['capabilities']
 
         next_height = int(tmpl["height"])
         coinbase_tx = create_coinbase(height=next_height)
         # sequence numbers must not be max for nLockTime to have effect
         coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
         coinbase_tx.rehash()
 
         block = CBlock()
         block.nVersion = tmpl["version"]
         block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
         block.nTime = tmpl["curtime"]
         block.nBits = int(tmpl["bits"], 16)
         block.nNonce = 0
         block.vtx = [coinbase_tx]
 
         self.log.info("getblocktemplate: Test valid block")
         assert_template(node, block, None)
 
         self.log.info("submitblock: Test block decode failure")
         assert_raises_rpc_error(-22, "Block decode failed",
                                 node.submitblock, block.serialize()[:-15].hex())
 
         self.log.info(
             "getblocktemplate: Test bad input hash for coinbase transaction")
         bad_block = copy.deepcopy(block)
         bad_block.vtx[0].vin[0].prevout.hash += 1
         bad_block.vtx[0].rehash()
         assert_template(node, bad_block, 'bad-cb-missing')
 
         self.log.info("submitblock: Test invalid coinbase transaction")
         assert_raises_rpc_error(-22, "Block does not start with a coinbase",
                                 node.submitblock, bad_block.serialize().hex())
 
         self.log.info("getblocktemplate: Test truncated final transaction")
         assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {
                                 'data': block.serialize()[:-1].hex(), 'mode': 'proposal'})
 
         self.log.info("getblocktemplate: Test duplicate transaction")
         bad_block = copy.deepcopy(block)
         bad_block.vtx.append(bad_block.vtx[0])
         assert_template(node, bad_block, 'bad-txns-duplicate')
         assert_submitblock(bad_block, 'bad-txns-duplicate',
                            'bad-txns-duplicate')
 
         self.log.info("getblocktemplate: Test invalid transaction")
         bad_block = copy.deepcopy(block)
         bad_tx = copy.deepcopy(bad_block.vtx[0])
         bad_tx.vin[0].prevout.hash = 255
         bad_tx.rehash()
         bad_block.vtx.append(bad_tx)
         assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')
         assert_submitblock(bad_block, 'bad-txns-inputs-missingorspent')
 
         self.log.info("getblocktemplate: Test nonfinal transaction")
         bad_block = copy.deepcopy(block)
         bad_block.vtx[0].nLockTime = 2 ** 32 - 1
         bad_block.vtx[0].rehash()
         assert_template(node, bad_block, 'bad-txns-nonfinal')
         assert_submitblock(bad_block, 'bad-txns-nonfinal')
 
         self.log.info("getblocktemplate: Test bad tx count")
         # The tx count is immediately after the block header
         bad_block_sn = bytearray(block.serialize())
         assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 1)
         bad_block_sn[BLOCK_HEADER_SIZE] += 1
         assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {
                                 'data': bad_block_sn.hex(), 'mode': 'proposal'})
 
         self.log.info("getblocktemplate: Test bad bits")
         bad_block = copy.deepcopy(block)
         bad_block.nBits = 469762303  # impossible in the real world
         assert_template(node, bad_block, 'bad-diffbits')
 
         self.log.info("getblocktemplate: Test bad merkle root")
         bad_block = copy.deepcopy(block)
         bad_block.hashMerkleRoot += 1
         assert_template(node, bad_block, 'bad-txnmrklroot', False)
         assert_submitblock(bad_block, 'bad-txnmrklroot', 'bad-txnmrklroot')
 
         self.log.info("getblocktemplate: Test bad timestamps")
         bad_block = copy.deepcopy(block)
         bad_block.nTime = 2 ** 31 - 1
         assert_template(node, bad_block, 'time-too-new')
         assert_submitblock(bad_block, 'time-too-new', 'time-too-new')
         bad_block.nTime = 0
         assert_template(node, bad_block, 'time-too-old')
         assert_submitblock(bad_block, 'time-too-old', 'time-too-old')
 
         self.log.info("getblocktemplate: Test not best block")
         bad_block = copy.deepcopy(block)
         bad_block.hashPrevBlock = 123
         assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
         assert_submitblock(bad_block, 'prev-blk-not-found',
                            'prev-blk-not-found')
 
         self.log.info('submitheader tests')
         assert_raises_rpc_error(-22, 'Block header decode failed',
                                 lambda: node.submitheader(hexdata='xx' * BLOCK_HEADER_SIZE))
         assert_raises_rpc_error(-22, 'Block header decode failed',
                                 lambda: node.submitheader(hexdata='ff' * (BLOCK_HEADER_SIZE - 2)))
         assert_raises_rpc_error(-25, 'Must submit previous header',
                                 lambda: node.submitheader(hexdata=super(CBlock, bad_block).serialize().hex()))
 
         block.nTime += 1
         block.solve()
 
         def chain_tip(b_hash, *, status='headers-only', branchlen=1):
             return {'hash': b_hash, 'height': 202,
                     'branchlen': branchlen, 'status': status}
 
         assert chain_tip(block.hash) not in node.getchaintips()
         node.submitheader(hexdata=block.serialize().hex())
         assert chain_tip(block.hash) in node.getchaintips()
         # Noop
         node.submitheader(hexdata=CBlockHeader(block).serialize().hex())
         assert chain_tip(block.hash) in node.getchaintips()
 
         bad_block_root = copy.deepcopy(block)
         bad_block_root.hashMerkleRoot += 2
         bad_block_root.solve()
         assert chain_tip(bad_block_root.hash) not in node.getchaintips()
         node.submitheader(hexdata=CBlockHeader(
             bad_block_root).serialize().hex())
         assert chain_tip(bad_block_root.hash) in node.getchaintips()
         # Should still reject invalid blocks, even if we have the header:
         assert_equal(node.submitblock(
             hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot')
         assert_equal(node.submitblock(
             hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot')
         assert chain_tip(bad_block_root.hash) in node.getchaintips()
         # We know the header for this invalid block, so should just return
         # early without error:
         node.submitheader(hexdata=CBlockHeader(
             bad_block_root).serialize().hex())
         assert chain_tip(bad_block_root.hash) in node.getchaintips()
 
         bad_block_lock = copy.deepcopy(block)
         bad_block_lock.vtx[0].nLockTime = 2**32 - 1
         bad_block_lock.vtx[0].rehash()
         bad_block_lock.hashMerkleRoot = bad_block_lock.calc_merkle_root()
         bad_block_lock.solve()
         assert_equal(node.submitblock(
             hexdata=bad_block_lock.serialize().hex()), 'bad-txns-nonfinal')
         assert_equal(node.submitblock(
             hexdata=bad_block_lock.serialize().hex()), 'duplicate-invalid')
         # Build a "good" block on top of the submitted bad block
         bad_block2 = copy.deepcopy(block)
         bad_block2.hashPrevBlock = bad_block_lock.sha256
         bad_block2.solve()
         assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(
             hexdata=CBlockHeader(bad_block2).serialize().hex()))
 
         # Should reject invalid header right away
         bad_block_time = copy.deepcopy(block)
         bad_block_time.nTime = 1
         bad_block_time.solve()
         assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(
             hexdata=CBlockHeader(bad_block_time).serialize().hex()))
 
         # Should ask for the block from a p2p node, if they announce the header
         # as well:
         peer = node.add_p2p_connection(P2PDataStore())
         # Drop the first getheaders
         peer.wait_for_getheaders(timeout=5)
         peer.send_blocks_and_test(blocks=[block], node=node)
         # Must be active now:
         assert chain_tip(block.hash, status='active',
                          branchlen=0) in node.getchaintips()
 
         # Building a few blocks should give the same results
         node.generatetoaddress(10, node.get_deterministic_priv_key().address)
         assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(
             hexdata=CBlockHeader(bad_block_time).serialize().hex()))
         assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(
             hexdata=CBlockHeader(bad_block2).serialize().hex()))
         node.submitheader(hexdata=CBlockHeader(block).serialize().hex())
         node.submitheader(hexdata=CBlockHeader(
             bad_block_root).serialize().hex())
         # valid
         assert_equal(node.submitblock(
             hexdata=block.serialize().hex()), 'duplicate')
 
         # Sanity check that maxtries supports large integers
         node.generatetoaddress(
             1, node.get_deterministic_priv_key().address, pow(
                 2, 32))
 
 
 if __name__ == '__main__':
     MiningTest().main()
diff --git a/test/functional/p2p_blockfilters.py b/test/functional/p2p_blockfilters.py
index 27bca74a1..b238a4b6f 100755
--- a/test/functional/p2p_blockfilters.py
+++ b/test/functional/p2p_blockfilters.py
@@ -1,267 +1,267 @@
 #!/usr/bin/env python3
 # Copyright (c) 2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Tests NODE_COMPACT_FILTERS (BIP 157/158).
 
 Tests that a node configured with -blockfilterindex and -peerblockfilters signals
 NODE_COMPACT_FILTERS and can serve cfilters, cfheaders and cfcheckpts.
 """
 
 from test_framework.messages import (
     FILTER_TYPE_BASIC,
     NODE_COMPACT_FILTERS,
     hash256,
     msg_getcfcheckpt,
     msg_getcfheaders,
     msg_getcfilters,
     ser_uint256,
     uint256_from_str,
 )
 from test_framework.p2p import P2PInterface
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes, disconnect_nodes
+from test_framework.util import assert_equal
 
 
 class CFiltersClient(P2PInterface):
     def __init__(self):
         super().__init__()
         # Store the cfilters received.
         self.cfilters = []
 
     def pop_cfilters(self):
         cfilters = self.cfilters
         self.cfilters = []
         return cfilters
 
     def on_cfilter(self, message):
         """Store cfilters received in a list."""
         self.cfilters.append(message)
 
 
 class CompactFiltersTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.rpc_timeout = 480
         self.num_nodes = 2
         self.extra_args = [
             ["-blockfilterindex", "-peerblockfilters"],
             ["-blockfilterindex"],
         ]
 
     def run_test(self):
         # Node 0 supports COMPACT_FILTERS, node 1 does not.
         node0 = self.nodes[0].add_p2p_connection(CFiltersClient())
         node1 = self.nodes[1].add_p2p_connection(CFiltersClient())
 
         # Nodes 0 & 1 share the same first 999 blocks in the chain.
         self.nodes[0].generate(999)
         self.sync_blocks(timeout=600)
 
         # Stale blocks by disconnecting nodes 0 & 1, mining, then reconnecting
-        disconnect_nodes(self.nodes[0], self.nodes[1])
+        self.disconnect_nodes(0, 1)
 
         self.nodes[0].generate(1)
         self.wait_until(lambda: self.nodes[0].getblockcount() == 1000)
         stale_block_hash = self.nodes[0].getblockhash(1000)
 
         self.nodes[1].generate(1001)
         self.wait_until(lambda: self.nodes[1].getblockcount() == 2000)
 
         # Check that nodes have signalled NODE_COMPACT_FILTERS correctly.
         assert node0.nServices & NODE_COMPACT_FILTERS != 0
         assert node1.nServices & NODE_COMPACT_FILTERS == 0
 
         # Check that the localservices is as expected.
         assert int(
             self.nodes[0].getnetworkinfo()['localservices'],
             16) & NODE_COMPACT_FILTERS != 0
         assert int(
             self.nodes[1].getnetworkinfo()['localservices'],
             16) & NODE_COMPACT_FILTERS == 0
 
         self.log.info("get cfcheckpt on chain to be re-orged out.")
         request = msg_getcfcheckpt(
             filter_type=FILTER_TYPE_BASIC,
             stop_hash=int(stale_block_hash, 16)
         )
         node0.send_and_ping(message=request)
         response = node0.last_message['cfcheckpt']
         assert_equal(response.filter_type, request.filter_type)
         assert_equal(response.stop_hash, request.stop_hash)
         assert_equal(len(response.headers), 1)
 
         self.log.info("Reorg node 0 to a new chain.")
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_blocks(timeout=600)
 
         main_block_hash = self.nodes[0].getblockhash(1000)
         assert main_block_hash != stale_block_hash, "node 0 chain did not reorganize"
 
         self.log.info("Check that peers can fetch cfcheckpt on active chain.")
         tip_hash = self.nodes[0].getbestblockhash()
         request = msg_getcfcheckpt(
             filter_type=FILTER_TYPE_BASIC,
             stop_hash=int(tip_hash, 16)
         )
         node0.send_and_ping(request)
         response = node0.last_message['cfcheckpt']
         assert_equal(response.filter_type, request.filter_type)
         assert_equal(response.stop_hash, request.stop_hash)
 
         main_cfcheckpt = self.nodes[0].getblockfilter(
             main_block_hash, 'basic')['header']
         tip_cfcheckpt = self.nodes[0].getblockfilter(tip_hash, 'basic')[
             'header']
         assert_equal(
             response.headers,
             [int(header, 16) for header in (main_cfcheckpt, tip_cfcheckpt)]
         )
 
         self.log.info("Check that peers can fetch cfcheckpt on stale chain.")
         request = msg_getcfcheckpt(
             filter_type=FILTER_TYPE_BASIC,
             stop_hash=int(stale_block_hash, 16)
         )
         node0.send_and_ping(request)
         response = node0.last_message['cfcheckpt']
 
         stale_cfcheckpt = self.nodes[0].getblockfilter(
             stale_block_hash, 'basic')['header']
         assert_equal(
             response.headers,
             [int(header, 16) for header in (stale_cfcheckpt,)]
         )
 
         self.log.info("Check that peers can fetch cfheaders on active chain.")
         request = msg_getcfheaders(
             filter_type=FILTER_TYPE_BASIC,
             start_height=1,
             stop_hash=int(main_block_hash, 16)
         )
         node0.send_and_ping(request)
         response = node0.last_message['cfheaders']
         main_cfhashes = response.hashes
         assert_equal(len(main_cfhashes), 1000)
         assert_equal(
             compute_last_header(response.prev_header, response.hashes),
             int(main_cfcheckpt, 16)
         )
 
         self.log.info("Check that peers can fetch cfheaders on stale chain.")
         request = msg_getcfheaders(
             filter_type=FILTER_TYPE_BASIC,
             start_height=1,
             stop_hash=int(stale_block_hash, 16)
         )
         node0.send_and_ping(request)
         response = node0.last_message['cfheaders']
         stale_cfhashes = response.hashes
         assert_equal(len(stale_cfhashes), 1000)
         assert_equal(
             compute_last_header(response.prev_header, response.hashes),
             int(stale_cfcheckpt, 16)
         )
 
         self.log.info("Check that peers can fetch cfilters.")
         stop_hash = self.nodes[0].getblockhash(10)
         request = msg_getcfilters(
             filter_type=FILTER_TYPE_BASIC,
             start_height=1,
             stop_hash=int(stop_hash, 16)
         )
         node0.send_message(request)
         node0.sync_with_ping()
         response = node0.pop_cfilters()
         assert_equal(len(response), 10)
 
         self.log.info("Check that cfilter responses are correct.")
         for cfilter, cfhash, height in zip(
                 response, main_cfhashes, range(1, 11)):
             block_hash = self.nodes[0].getblockhash(height)
             assert_equal(cfilter.filter_type, FILTER_TYPE_BASIC)
             assert_equal(cfilter.block_hash, int(block_hash, 16))
             computed_cfhash = uint256_from_str(hash256(cfilter.filter_data))
             assert_equal(computed_cfhash, cfhash)
 
         self.log.info("Check that peers can fetch cfilters for stale blocks.")
         request = msg_getcfilters(
             filter_type=FILTER_TYPE_BASIC,
             start_height=1000,
             stop_hash=int(stale_block_hash, 16)
         )
         node0.send_message(request)
         node0.sync_with_ping()
         response = node0.pop_cfilters()
         assert_equal(len(response), 1)
 
         cfilter = response[0]
         assert_equal(cfilter.filter_type, FILTER_TYPE_BASIC)
         assert_equal(cfilter.block_hash, int(stale_block_hash, 16))
         computed_cfhash = uint256_from_str(hash256(cfilter.filter_data))
         assert_equal(computed_cfhash, stale_cfhashes[999])
 
         self.log.info(
             "Requests to node 1 without NODE_COMPACT_FILTERS results in disconnection.")
         requests = [
             msg_getcfcheckpt(
                 filter_type=FILTER_TYPE_BASIC,
                 stop_hash=int(main_block_hash, 16)
             ),
             msg_getcfheaders(
                 filter_type=FILTER_TYPE_BASIC,
                 start_height=1000,
                 stop_hash=int(main_block_hash, 16)
             ),
             msg_getcfilters(
                 filter_type=FILTER_TYPE_BASIC,
                 start_height=1000,
                 stop_hash=int(main_block_hash, 16)
             ),
         ]
         for request in requests:
             node1 = self.nodes[1].add_p2p_connection(P2PInterface())
             node1.send_message(request)
             node1.wait_for_disconnect()
 
         self.log.info("Check that invalid requests result in disconnection.")
         requests = [
             # Requesting too many filters results in disconnection.
             msg_getcfilters(
                 filter_type=FILTER_TYPE_BASIC,
                 start_height=0,
                 stop_hash=int(main_block_hash, 16)
             ),
             # Requesting too many filter headers results in disconnection.
             msg_getcfheaders(
                 filter_type=FILTER_TYPE_BASIC,
                 start_height=0,
                 stop_hash=int(tip_hash, 16)
             ),
             # Requesting unknown filter type results in disconnection.
             msg_getcfcheckpt(
                 filter_type=255,
                 stop_hash=int(main_block_hash, 16)
             ),
             # Requesting unknown hash results in disconnection.
             msg_getcfcheckpt(
                 filter_type=FILTER_TYPE_BASIC,
                 stop_hash=123456789,
             ),
         ]
         for request in requests:
             node0 = self.nodes[0].add_p2p_connection(P2PInterface())
             node0.send_message(request)
             node0.wait_for_disconnect()
 
 
 def compute_last_header(prev_header, hashes):
     """Compute the last filter header from a starting header and a sequence of filter hashes."""
     header = ser_uint256(prev_header)
     for filter_hash in hashes:
         header = hash256(ser_uint256(filter_hash) + header)
     return uint256_from_str(header)
 
 
 if __name__ == '__main__':
     CompactFiltersTest().main()
diff --git a/test/functional/p2p_disconnect_ban.py b/test/functional/p2p_disconnect_ban.py
index 721a2ce55..0580c3ac8 100755
--- a/test/functional/p2p_disconnect_ban.py
+++ b/test/functional/p2p_disconnect_ban.py
@@ -1,138 +1,134 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test node disconnect and ban behavior"""
 import time
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_raises_rpc_error,
-    connect_nodes,
-)
+from test_framework.util import assert_equal, assert_raises_rpc_error
 
 
 class DisconnectBanTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.supports_cli = False
 
     def run_test(self):
         self.log.info("Connect nodes both way")
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[0])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 0)
 
         self.log.info("Test setban and listbanned RPCs")
 
         self.log.info("setban: successfully ban single IP address")
         # node1 should have 2 connections to node0 at this point
         assert_equal(len(self.nodes[1].getpeerinfo()), 2)
         self.nodes[1].setban(subnet="127.0.0.1", command="add")
         self.wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0,
                         timeout=10)
         # all nodes must be disconnected at this point
         assert_equal(len(self.nodes[1].getpeerinfo()), 0)
         assert_equal(len(self.nodes[1].listbanned()), 1)
 
         self.log.info("clearbanned: successfully clear ban list")
         self.nodes[1].clearbanned()
         assert_equal(len(self.nodes[1].listbanned()), 0)
         self.nodes[1].setban("127.0.0.0/24", "add")
 
         self.log.info("setban: fail to ban an already banned subnet")
         assert_equal(len(self.nodes[1].listbanned()), 1)
         assert_raises_rpc_error(
             -23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
 
         self.log.info("setban: fail to ban an invalid subnet")
         assert_raises_rpc_error(
             -30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
         # still only one banned ip because 127.0.0.1 is within the range of
         # 127.0.0.0/24
         assert_equal(len(self.nodes[1].listbanned()), 1)
 
         self.log.info("setban remove: fail to unban a non-banned subnet")
         assert_raises_rpc_error(
             -30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
         assert_equal(len(self.nodes[1].listbanned()), 1)
 
         self.log.info("setban remove: successfully unban subnet")
         self.nodes[1].setban("127.0.0.0/24", "remove")
         assert_equal(len(self.nodes[1].listbanned()), 0)
         self.nodes[1].clearbanned()
         assert_equal(len(self.nodes[1].listbanned()), 0)
 
         self.log.info("setban: test persistence across node restart")
         self.nodes[1].setban("127.0.0.0/32", "add")
         self.nodes[1].setban("127.0.0.0/24", "add")
         # Set the mocktime so we can control when bans expire
         old_time = int(time.time())
         self.nodes[1].setmocktime(old_time)
         # ban for 1 seconds
         self.nodes[1].setban("192.168.0.1", "add", 1)
         # ban for 1000 seconds
         self.nodes[1].setban(
             "2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000)
         listBeforeShutdown = self.nodes[1].listbanned()
         assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
         # Move time forward by 3 seconds so the third ban has expired
         self.nodes[1].setmocktime(old_time + 3)
         assert_equal(len(self.nodes[1].listbanned()), 3)
 
         self.restart_node(1)
 
         listAfterShutdown = self.nodes[1].listbanned()
         assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
         assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
         assert_equal("/19" in listAfterShutdown[2]['address'], True)
 
         # Clear ban lists
         self.nodes[1].clearbanned()
         self.log.info("Connect nodes both way")
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[0])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 0)
 
         self.log.info("Test disconnectnode RPCs")
 
         self.log.info(
             "disconnectnode: fail to disconnect when calling with address and nodeid")
         address1 = self.nodes[0].getpeerinfo()[0]['addr']
         node1 = self.nodes[0].getpeerinfo()[0]['addr']
         assert_raises_rpc_error(
             -32602, "Only one of address and nodeid should be provided.",
             self.nodes[0].disconnectnode, address=address1, nodeid=node1)
 
         self.log.info(
             "disconnectnode: fail to disconnect when calling with junk address")
         assert_raises_rpc_error(-29, "Node not found in connected nodes",
                                 self.nodes[0].disconnectnode, address="221B Baker Street")
 
         self.log.info(
             "disconnectnode: successfully disconnect node by address")
         address1 = self.nodes[0].getpeerinfo()[0]['addr']
         self.nodes[0].disconnectnode(address=address1)
         self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1,
                         timeout=10)
         assert not [node for node in self.nodes[0].getpeerinfo()
                     if node['addr'] == address1]
 
         self.log.info("disconnectnode: successfully reconnect node")
         # reconnect the node
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         assert_equal(len(self.nodes[0].getpeerinfo()), 2)
         assert [node for node in self.nodes[0]
                 .getpeerinfo() if node['addr'] == address1]
 
         self.log.info(
             "disconnectnode: successfully disconnect node by node id")
         id1 = self.nodes[0].getpeerinfo()[0]['id']
         self.nodes[0].disconnectnode(nodeid=id1)
         self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1,
                         timeout=10)
         assert not [node for node in self.nodes[0].getpeerinfo()
                     if node['id'] == id1]
 
 
 if __name__ == '__main__':
     DisconnectBanTest().main()
diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py
index 0baafa994..fad4d1b86 100755
--- a/test/functional/p2p_node_network_limited.py
+++ b/test/functional/p2p_node_network_limited.py
@@ -1,135 +1,135 @@
 #!/usr/bin/env python3
 # Copyright (c) 2017 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Tests NODE_NETWORK_LIMITED.
 
 Tests that a node configured with -prune=550 signals NODE_NETWORK_LIMITED correctly
 and that it responds to getdata requests for blocks correctly:
     - send a block within 288 + 2 of the tip
     - disconnect peers who request blocks older than that."""
 from test_framework.messages import (
     MSG_BLOCK,
     NODE_BLOOM,
     NODE_NETWORK_LIMITED,
     CInv,
     msg_getdata,
     msg_verack,
 )
 from test_framework.p2p import P2PInterface
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes, disconnect_nodes
+from test_framework.util import assert_equal
 
 
 class P2PIgnoreInv(P2PInterface):
     firstAddrnServices = 0
 
     def on_inv(self, message):
         # The node will send us invs for other blocks. Ignore them.
         pass
 
     def on_addr(self, message):
         self.firstAddrnServices = message.addrs[0].nServices
 
     def wait_for_addr(self, timeout=5):
         def test_function(): return self.last_message.get("addr")
         self.wait_until(test_function, timeout=timeout)
 
     def send_getdata_for_block(self, blockhash):
         getdata_request = msg_getdata()
         getdata_request.inv.append(CInv(MSG_BLOCK, int(blockhash, 16)))
         self.send_message(getdata_request)
 
 
 class NodeNetworkLimitedTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 3
         self.extra_args = [['-prune=550', '-addrmantest'], [], []]
 
     def disconnect_all(self):
-        disconnect_nodes(self.nodes[0], self.nodes[1])
-        disconnect_nodes(self.nodes[0], self.nodes[2])
-        disconnect_nodes(self.nodes[1], self.nodes[2])
+        self.disconnect_nodes(0, 1)
+        self.disconnect_nodes(0, 2)
+        self.disconnect_nodes(1, 2)
 
     def setup_network(self):
         self.add_nodes(self.num_nodes, self.extra_args)
         self.start_nodes()
 
     def run_test(self):
         node = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
 
         expected_services = NODE_BLOOM | NODE_NETWORK_LIMITED
 
         self.log.info("Check that node has signalled expected services.")
         assert_equal(node.nServices, expected_services)
 
         self.log.info("Check that the localservices is as expected.")
         assert_equal(int(self.nodes[0].getnetworkinfo()[
                      'localservices'], 16), expected_services)
 
         self.log.info(
             "Mine enough blocks to reach the NODE_NETWORK_LIMITED range.")
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         blocks = self.nodes[1].generatetoaddress(
             292, self.nodes[1].get_deterministic_priv_key().address)
         self.sync_blocks([self.nodes[0], self.nodes[1]])
 
         self.log.info("Make sure we can max retrieve block at tip-288.")
         # last block in valid range
         node.send_getdata_for_block(blocks[1])
         node.wait_for_block(int(blocks[1], 16), timeout=3)
 
         self.log.info(
             "Requesting block at height 2 (tip-289) must fail (ignored).")
         # first block outside of the 288+2 limit
         node.send_getdata_for_block(blocks[0])
         node.wait_for_disconnect(5)
 
         self.log.info("Check local address relay, do a fresh connection.")
         self.nodes[0].disconnect_p2ps()
         node1 = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
         node1.send_message(msg_verack())
 
         node1.wait_for_addr()
         # must relay address with NODE_NETWORK_LIMITED
         assert_equal(node1.firstAddrnServices, expected_services)
 
         self.nodes[0].disconnect_p2ps()
 
         # connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer
         # because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer,
         # sync must not be possible
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.connect_nodes(0, 2)
         try:
             self.sync_blocks([self.nodes[0], self.nodes[2]], timeout=5)
         except Exception:
             pass
         # node2 must remain at heigh 0
         assert_equal(self.nodes[2].getblockheader(
             self.nodes[2].getbestblockhash())['height'], 0)
 
         # now connect also to node 1 (non pruned)
-        connect_nodes(self.nodes[1], self.nodes[2])
+        self.connect_nodes(1, 2)
 
         # sync must be possible
         self.sync_blocks()
 
         # disconnect all peers
         self.disconnect_all()
 
         # mine 10 blocks on node 0 (pruned node)
         self.nodes[0].generatetoaddress(
             10, self.nodes[0].get_deterministic_priv_key().address)
 
         # connect node1 (non pruned) with node0 (pruned) and check if the can
         # sync
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
 
         # sync must be possible, node 1 is no longer in IBD and should
         # therefore connect to node 0 (NODE_NETWORK_LIMITED)
         self.sync_blocks([self.nodes[0], self.nodes[1]])
 
 
 if __name__ == '__main__':
     NodeNetworkLimitedTest().main()
diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py
index b4705fa1f..563971248 100755
--- a/test/functional/p2p_permissions.py
+++ b/test/functional/p2p_permissions.py
@@ -1,210 +1,210 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2018 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test p2p permission message.
 
 Test that permissions are correctly calculated and applied
 """
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     SCRIPTSIG_OP_TRUE,
 )
 from test_framework.messages import CTransaction, FromHex
 from test_framework.p2p import P2PDataStore
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.test_node import ErrorMatch
 from test_framework.txtools import pad_tx
-from test_framework.util import assert_equal, connect_nodes, p2p_port
+from test_framework.util import assert_equal, p2p_port
 
 
 class P2PPermissionsTests(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.setup_clean_chain = True
 
     def run_test(self):
         self.check_tx_relay()
 
         self.checkpermission(
             # default permissions (no specific permissions)
             ["-whitelist=127.0.0.1"],
             # Make sure the default values in the command line documentation
             # match the ones here
             ["relay", "noban", "mempool", "download"],
             True)
 
         self.checkpermission(
             # check without deprecatedrpc=whitelisted
             ["-whitelist=127.0.0.1"],
             # Make sure the default values in the command line documentation
             # match the ones here
             ["relay", "noban", "mempool", "download"],
             None)
 
         self.checkpermission(
             # no permission (even with forcerelay)
             ["-whitelist=@127.0.0.1", "-whitelistforcerelay=1"],
             [],
             False)
 
         self.checkpermission(
             # relay permission removed (no specific permissions)
             ["-whitelist=127.0.0.1", "-whitelistrelay=0"],
             ["noban", "mempool", "download"],
             True)
 
         self.checkpermission(
             # forcerelay and relay permission added
             # Legacy parameter interaction which set whitelistrelay to true
             # if whitelistforcerelay is true
             ["-whitelist=127.0.0.1", "-whitelistforcerelay"],
             ["forcerelay", "relay", "noban", "mempool", "download"],
             True)
 
         # Let's make sure permissions are merged correctly
         # For this, we need to use whitebind instead of bind
         # by modifying the configuration file.
         ip_port = "127.0.0.1:{}".format(p2p_port(1))
         self.replaceinconfig(
             1,
             "bind=127.0.0.1",
             "whitebind=bloomfilter,forcerelay@" +
             ip_port)
         self.checkpermission(
             ["-whitelist=noban@127.0.0.1"],
             # Check parameter interaction forcerelay should activate relay
             ["noban", "bloomfilter", "forcerelay", "relay", "download"],
             False)
         self.replaceinconfig(
             1,
             "whitebind=bloomfilter,forcerelay@" +
             ip_port,
             "bind=127.0.0.1")
 
         self.checkpermission(
             # legacy whitelistrelay should be ignored
             ["-whitelist=noban,mempool@127.0.0.1", "-whitelistrelay"],
             ["noban", "mempool", "download"],
             False)
 
         self.checkpermission(
             # check without deprecatedrpc=whitelisted
             ["-whitelist=noban,mempool@127.0.0.1", "-whitelistrelay"],
             ["noban", "mempool", "download"],
             None)
 
         self.checkpermission(
             # legacy whitelistforcerelay should be ignored
             ["-whitelist=noban,mempool@127.0.0.1", "-whitelistforcerelay"],
             ["noban", "mempool", "download"],
             False)
 
         self.checkpermission(
             # missing mempool permission to be considered legacy whitelisted
             ["-whitelist=noban@127.0.0.1"],
             ["noban", "download"],
             False)
 
         self.checkpermission(
             # all permission added
             ["-whitelist=all@127.0.0.1"],
             ["forcerelay", "noban", "mempool", "bloomfilter",
                 "relay", "download", "bypass_proof_request_limits", "addr"],
             False)
 
         self.checkpermission(
             # bypass_proof_request_limits permission
             ["-whitelist=bypass_proof_request_limits@127.0.0.1"],
             ["bypass_proof_request_limits"],
             False)
 
         self.stop_node(1)
         self.nodes[1].assert_start_raises_init_error(
             ["-whitelist=oopsie@127.0.0.1"],
             "Invalid P2P permission",
             match=ErrorMatch.PARTIAL_REGEX)
         self.nodes[1].assert_start_raises_init_error(
             ["-whitelist=noban@127.0.0.1:230"],
             "Invalid netmask specified in",
             match=ErrorMatch.PARTIAL_REGEX)
         self.nodes[1].assert_start_raises_init_error(
             ["-whitebind=noban@127.0.0.1/10"],
             "Cannot resolve -whitebind address",
             match=ErrorMatch.PARTIAL_REGEX)
 
     def check_tx_relay(self):
         block_op_true = self.nodes[0].getblock(
             self.nodes[0].generatetoaddress(100, ADDRESS_ECREG_P2SH_OP_TRUE)[0])
         self.sync_all()
 
         self.log.debug(
             "Create a connection from a forcerelay peer that rebroadcasts raw txs")
         # A python mininode is needed to send the raw transaction directly.
         # If a full node was used, it could only rebroadcast via the inv-getdata
         # mechanism. However, even for forcerelay connections, a full node would
         # currently not request a txid that is already in the mempool.
         self.restart_node(1, extra_args=["-whitelist=forcerelay@127.0.0.1"])
         p2p_rebroadcast_wallet = self.nodes[1].add_p2p_connection(
             P2PDataStore())
 
         self.log.debug("Send a tx from the wallet initially")
         tx = FromHex(CTransaction(),
                      self.nodes[0].createrawtransaction(
                          inputs=[{'txid': block_op_true['tx'][0], 'vout': 0}],
                          outputs=[{ADDRESS_ECREG_P2SH_OP_TRUE: 50}]))
         # push the one byte script to the stack
         tx.vin[0].scriptSig = SCRIPTSIG_OP_TRUE
         pad_tx(tx)
         txid = tx.rehash()
 
         self.log.debug("Wait until tx is in node[1]'s mempool")
         p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])
 
         self.log.debug(
             "Check that node[1] will send the tx to node[0] even though it"
             " is already in the mempool")
-        connect_nodes(self.nodes[1], self.nodes[0])
+        self.connect_nodes(1, 0)
         with self.nodes[1].assert_debug_log(
                 ["Force relaying tx {} from peer=0".format(txid)]):
             p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])
             self.wait_until(lambda: txid in self.nodes[0].getrawmempool())
 
         self.log.debug(
             "Check that node[1] will not send an invalid tx to node[0]")
         tx.vout[0].nValue += 1
         txid = tx.rehash()
         p2p_rebroadcast_wallet.send_txs_and_test(
             [tx],
             self.nodes[1],
             success=False,
             reject_reason='Not relaying non-mempool transaction '
                           '{} from forcerelay peer=0'.format(txid),
         )
 
     def checkpermission(self, args, expectedPermissions, whitelisted):
         if whitelisted is not None:
             args = [*args, '-deprecatedrpc=whitelisted']
         self.restart_node(1, args)
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         peerinfo = self.nodes[1].getpeerinfo()[0]
         if whitelisted is None:
             assert 'whitelisted' not in peerinfo
         else:
             assert_equal(peerinfo['whitelisted'], whitelisted)
         assert_equal(len(expectedPermissions), len(peerinfo['permissions']))
         for p in expectedPermissions:
             if p not in peerinfo['permissions']:
                 raise AssertionError(
                     "Expected permissions {!r} is not granted.".format(p))
 
     def replaceinconfig(self, nodeid, old, new):
         with open(self.nodes[nodeid].bitcoinconf, encoding="utf8") as f:
             newText = f.read().replace(old, new)
         with open(self.nodes[nodeid].bitcoinconf, 'w', encoding="utf8") as f:
             f.write(newText)
 
 
 if __name__ == '__main__':
     P2PPermissionsTests().main()
diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py
index 821901255..130e02b98 100755
--- a/test/functional/p2p_unrequested_blocks.py
+++ b/test/functional/p2p_unrequested_blocks.py
@@ -1,333 +1,329 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test processing of unrequested blocks.
 
 Setup: two nodes, node0 + node1, not connected to each other. Node1 will have
 nMinimumChainWork set to 0x10, so it won't process low-work unrequested blocks.
 
 We have one P2PInterface connection to node0 called test_node, and one to node1
 called min_work_node.
 
 The test:
 1. Generate one block on each node, to leave IBD.
 
 2. Mine a new block on each tip, and deliver to each node from node's peer.
    The tip should advance for node0, but node1 should skip processing due to
    nMinimumChainWork.
 
 Node1 is unused in tests 3-7:
 
 3. Mine a block that forks from the genesis block, and deliver to test_node.
    Node0 should not process this block (just accept the header), because it
    is unrequested and doesn't have more or equal work to the tip.
 
 4a,b. Send another two blocks that build on the forking block.
    Node0 should process the second block but be stuck on the shorter chain,
    because it's missing an intermediate block.
 
 4c.Send 288 more blocks on the longer chain (the number of blocks ahead
    we currently store).
    Node0 should process all but the last block (too far ahead in height).
 
 5. Send a duplicate of the block in #3 to Node0.
    Node0 should not process the block because it is unrequested, and stay on
    the shorter chain.
 
 6. Send Node0 an inv for the height 3 block produced in #4 above.
    Node0 should figure out that Node0 has the missing height 2 block and send a
    getdata.
 
 7. Send Node0 the missing block again.
    Node0 should process and the tip should advance.
 
 8. Create a fork which is invalid at a height longer than the current chain
    (ie to which the node will try to reorg) but which has headers built on top
    of the invalid block. Check that we get disconnected if we send more headers
    on the chain the node now knows to be invalid.
 
 9. Test Node1 is able to sync when connected to node0 (which should have sufficient
    work on its chain).
 """
 
 import time
 
 from test_framework.blocktools import (
     create_block,
     create_coinbase,
     create_tx_with_script,
 )
 from test_framework.messages import (
     MSG_BLOCK,
     CBlockHeader,
     CInv,
     msg_block,
     msg_headers,
     msg_inv,
 )
 from test_framework.p2p import P2PInterface, p2p_lock
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_raises_rpc_error,
-    connect_nodes,
-)
+from test_framework.util import assert_equal, assert_raises_rpc_error
 
 
 class AcceptBlockTest(BitcoinTestFramework):
 
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 2
         self.extra_args = [["-noparkdeepreorg"],
                            ["-minimumchainwork=0x10"]]
 
     def setup_network(self):
         self.setup_nodes()
 
     def run_test(self):
         test_node = self.nodes[0].add_p2p_connection(P2PInterface())
         min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())
 
         # 1. Have nodes mine a block (leave IBD)
         [n.generatetoaddress(1, n.get_deterministic_priv_key().address)
          for n in self.nodes]
         tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]
 
         # 2. Send one block that builds on each tip.
         # This should be accepted by node0
         blocks_h2 = []  # the height 2 blocks on each node's chain
         block_time = int(time.time()) + 1
         for i in range(2):
             blocks_h2.append(create_block(
                 tips[i], create_coinbase(2), block_time))
             blocks_h2[i].solve()
             block_time += 1
         test_node.send_and_ping(msg_block(blocks_h2[0]))
         min_work_node.send_and_ping(msg_block(blocks_h2[1]))
 
         assert_equal(self.nodes[0].getblockcount(), 2)
         assert_equal(self.nodes[1].getblockcount(), 1)
         self.log.info(
             "First height 2 block accepted by node0; correctly rejected by node1")
 
         # 3. Send another block that builds on genesis.
         block_h1f = create_block(
             int("0x" + self.nodes[0].getblockhash(0), 0), create_coinbase(1), block_time)
         block_time += 1
         block_h1f.solve()
         test_node.send_and_ping(msg_block(block_h1f))
 
         tip_entry_found = False
         for x in self.nodes[0].getchaintips():
             if x['hash'] == block_h1f.hash:
                 assert_equal(x['status'], "headers-only")
                 tip_entry_found = True
         assert tip_entry_found
         assert_raises_rpc_error(-1, "Block not found on disk",
                                 self.nodes[0].getblock, block_h1f.hash)
 
         # 4. Send another two block that build on the fork.
         block_h2f = create_block(
             block_h1f.sha256, create_coinbase(2), block_time)
         block_time += 1
         block_h2f.solve()
         test_node.send_and_ping(msg_block(block_h2f))
 
         # Since the earlier block was not processed by node, the new block
         # can't be fully validated.
         tip_entry_found = False
         for x in self.nodes[0].getchaintips():
             if x['hash'] == block_h2f.hash:
                 assert_equal(x['status'], "headers-only")
                 tip_entry_found = True
         assert tip_entry_found
 
         # But this block should be accepted by node since it has equal work.
         self.nodes[0].getblock(block_h2f.hash)
         self.log.info("Second height 2 block accepted, but not reorg'ed to")
 
         # 4b. Now send another block that builds on the forking chain.
         block_h3 = create_block(
             block_h2f.sha256, create_coinbase(3), block_h2f.nTime + 1)
         block_h3.solve()
         test_node.send_and_ping(msg_block(block_h3))
 
         # Since the earlier block was not processed by node, the new block
         # can't be fully validated.
         tip_entry_found = False
         for x in self.nodes[0].getchaintips():
             if x['hash'] == block_h3.hash:
                 assert_equal(x['status'], "headers-only")
                 tip_entry_found = True
         assert tip_entry_found
         self.nodes[0].getblock(block_h3.hash)
 
         # But this block should be accepted by node since it has more work.
         self.nodes[0].getblock(block_h3.hash)
         self.log.info("Unrequested more-work block accepted")
 
         # 4c. Now mine 288 more blocks and deliver; all should be processed but
         # the last (height-too-high) on node (as long as it is not missing any
         # headers)
         tip = block_h3
         all_blocks = []
         for i in range(288):
             next_block = create_block(
                 tip.sha256, create_coinbase(i + 4), tip.nTime + 1)
             next_block.solve()
             all_blocks.append(next_block)
             tip = next_block
 
         # Now send the block at height 5 and check that it wasn't accepted
         # (missing header)
         test_node.send_and_ping(msg_block(all_blocks[1]))
         assert_raises_rpc_error(-5, "Block not found",
                                 self.nodes[0].getblock, all_blocks[1].hash)
         assert_raises_rpc_error(-5, "Block not found",
                                 self.nodes[0].getblockheader, all_blocks[1].hash)
 
         # The block at height 5 should be accepted if we provide the missing
         # header, though
         headers_message = msg_headers()
         headers_message.headers.append(CBlockHeader(all_blocks[0]))
         test_node.send_message(headers_message)
         test_node.send_and_ping(msg_block(all_blocks[1]))
         self.nodes[0].getblock(all_blocks[1].hash)
 
         # Now send the blocks in all_blocks
         for i in range(288):
             test_node.send_message(msg_block(all_blocks[i]))
         test_node.sync_with_ping()
 
         # Blocks 1-287 should be accepted, block 288 should be ignored because
         # it's too far ahead
         for x in all_blocks[:-1]:
             self.nodes[0].getblock(x.hash)
         assert_raises_rpc_error(
             -1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)
 
         # 5. Test handling of unrequested block on the node that didn't process
         # Should still not be processed (even though it has a child that has more
         # work).
 
         # The node should have requested the blocks at some point, so
         # disconnect/reconnect first
         self.nodes[0].disconnect_p2ps()
         self.nodes[1].disconnect_p2ps()
 
         test_node = self.nodes[0].add_p2p_connection(P2PInterface())
 
         test_node.send_and_ping(msg_block(block_h1f))
         assert_equal(self.nodes[0].getblockcount(), 2)
         self.log.info(
             "Unrequested block that would complete more-work chain was ignored")
 
         # 6. Try to get node to request the missing block.
         # Poke the node with an inv for block at height 3 and see if that
         # triggers a getdata on block 2 (it should if block 2 is missing).
         with p2p_lock:
             # Clear state so we can check the getdata request
             test_node.last_message.pop("getdata", None)
             test_node.send_message(msg_inv([CInv(MSG_BLOCK, block_h3.sha256)]))
 
         test_node.sync_with_ping()
         with p2p_lock:
             getdata = test_node.last_message["getdata"]
 
         # Check that the getdata includes the right block
         assert_equal(getdata.inv[0].hash, block_h1f.sha256)
         self.log.info("Inv at tip triggered getdata for unprocessed block")
 
         # 7. Send the missing block for the third time (now it is requested)
         test_node.send_and_ping(msg_block(block_h1f))
         assert_equal(self.nodes[0].getblockcount(), 290)
         self.nodes[0].getblock(all_blocks[286].hash)
         assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
         assert_raises_rpc_error(-1, "Block not found on disk",
                                 self.nodes[0].getblock, all_blocks[287].hash)
         self.log.info("Successfully reorged to longer chain")
 
         # 8. Create a chain which is invalid at a height longer than the
         # current chain, but which has more blocks on top of that
         block_289f = create_block(
             all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime + 1)
         block_289f.solve()
         block_290f = create_block(
             block_289f.sha256, create_coinbase(290), block_289f.nTime + 1)
         block_290f.solve()
         block_291 = create_block(
             block_290f.sha256, create_coinbase(291), block_290f.nTime + 1)
         # block_291 spends a coinbase below maturity!
         block_291.vtx.append(create_tx_with_script(
             block_290f.vtx[0], 0, script_sig=b"42", amount=1))
         block_291.hashMerkleRoot = block_291.calc_merkle_root()
         block_291.solve()
         block_292 = create_block(
             block_291.sha256, create_coinbase(292), block_291.nTime + 1)
         block_292.solve()
 
         # Now send all the headers on the chain and enough blocks to trigger
         # reorg
         headers_message = msg_headers()
         headers_message.headers.append(CBlockHeader(block_289f))
         headers_message.headers.append(CBlockHeader(block_290f))
         headers_message.headers.append(CBlockHeader(block_291))
         headers_message.headers.append(CBlockHeader(block_292))
         test_node.send_and_ping(headers_message)
 
         tip_entry_found = False
         for x in self.nodes[0].getchaintips():
             if x['hash'] == block_292.hash:
                 assert_equal(x['status'], "headers-only")
                 tip_entry_found = True
         assert tip_entry_found
         assert_raises_rpc_error(-1, "Block not found on disk",
                                 self.nodes[0].getblock, block_292.hash)
 
         test_node.send_message(msg_block(block_289f))
         test_node.send_and_ping(msg_block(block_290f))
 
         self.nodes[0].getblock(block_289f.hash)
         self.nodes[0].getblock(block_290f.hash)
 
         test_node.send_message(msg_block(block_291))
 
         # At this point we've sent an obviously-bogus block, wait for full processing
         # without assuming whether we will be disconnected or not
         try:
             # Only wait a short while so the test doesn't take forever if we do get
             # disconnected
             test_node.sync_with_ping(timeout=1)
         except AssertionError:
             test_node.wait_for_disconnect()
 
             self.nodes[0].disconnect_p2ps()
             test_node = self.nodes[0].add_p2p_connection(P2PInterface())
 
         # We should have failed reorg and switched back to 290 (but have block
         # 291)
         assert_equal(self.nodes[0].getblockcount(), 290)
         assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
         assert_equal(self.nodes[0].getblock(
             block_291.hash)["confirmations"], -1)
 
         # Now send a new header on the invalid chain, indicating we're forked
         # off, and expect to get disconnected
         block_293 = create_block(
             block_292.sha256, create_coinbase(293), block_292.nTime + 1)
         block_293.solve()
         headers_message = msg_headers()
         headers_message.headers.append(CBlockHeader(block_293))
         test_node.send_message(headers_message)
         test_node.wait_for_disconnect()
 
         # 9. Connect node1 to node0 and ensure it is able to sync
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_blocks([self.nodes[0], self.nodes[1]])
         self.log.info("Successfully synced nodes 1 and 0")
 
 
 if __name__ == '__main__':
     AcceptBlockTest().main()
diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py
index 4658c6521..6c378d8e2 100755
--- a/test/functional/rpc_fundrawtransaction.py
+++ b/test/functional/rpc_fundrawtransaction.py
@@ -1,864 +1,863 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from decimal import Decimal
 
 from test_framework.messages import CTransaction, FromHex
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_equal,
     assert_fee_amount,
     assert_greater_than,
     assert_greater_than_or_equal,
     assert_raises_rpc_error,
-    connect_nodes,
     find_vout_for_address,
 )
 
 
 def get_unspent(listunspent, amount):
     for utx in listunspent:
         if utx['amount'] == amount:
             return utx
     raise AssertionError(
         'Could not find unspent with amount={}'.format(amount))
 
 
 class RawTransactionsTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 4
         self.setup_clean_chain = True
         # This test isn't testing tx relay. Set whitelist on the peers for
         # instant tx relay.
         self.extra_args = [['-whitelist=noban@127.0.0.1']] * self.num_nodes
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def setup_network(self):
         self.setup_nodes()
 
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[3])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 2)
+        self.connect_nodes(0, 2)
+        self.connect_nodes(0, 3)
 
     def run_test(self):
         self.log.info("Connect nodes, set fees, generate blocks, and sync")
         self.min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
         # This test is not meant to test fee estimation and we'd like
         # to be sure all txs are sent at a consistent desired feerate
         for node in self.nodes:
             node.settxfee(self.min_relay_tx_fee)
 
         # if the fee's positive delta is higher than this value tests will fail,
         # neg. delta always fail the tests.
         # The size of the signature of every input may be at most 2 bytes larger
         # than a minimum sized signature.
 
         #            = 2 bytes * minRelayTxFeePerByte
         self.fee_tolerance = 2 * self.min_relay_tx_fee / 1000
 
         self.nodes[2].generate(1)
         self.sync_all()
         self.nodes[0].generate(121)
         self.sync_all()
 
         self.test_change_position()
         self.test_simple()
         self.test_simple_two_coins()
         self.test_simple_two_outputs()
         self.test_change()
         self.test_no_change()
         self.test_invalid_option()
         self.test_invalid_change_address()
         self.test_valid_change_address()
         self.test_coin_selection()
         self.test_two_vin()
         self.test_two_vin_two_vout()
         self.test_invalid_input()
         self.test_fee_p2pkh()
         self.test_fee_p2pkh_multi_out()
         self.test_fee_p2sh()
         self.test_fee_4of5()
         self.test_spend_2of2()
         self.test_locked_wallet()
         self.test_many_inputs_fee()
         self.test_many_inputs_send()
         self.test_op_return()
         self.test_watchonly()
         self.test_all_watched_funds()
         self.test_option_feerate()
         self.test_address_reuse()
         self.test_option_subtract_fee_from_outputs()
         self.test_subtract_fee_with_presets()
 
     def test_change_position(self):
         """Ensure setting changePosition in fundraw with an exact match is
         handled properly."""
         self.log.info("Test fundrawtxn changePosition option")
         rawmatch = self.nodes[2].createrawtransaction(
             [], {self.nodes[2].getnewaddress(): 50000000})
         rawmatch = self.nodes[2].fundrawtransaction(
             rawmatch, {"changePosition": 1, "subtractFeeFromOutputs": [0]})
         assert_equal(rawmatch["changepos"], -1)
 
         watchonly_address = self.nodes[0].getnewaddress()
         watchonly_pubkey = self.nodes[0].getaddressinfo(watchonly_address)[
             "pubkey"]
         self.watchonly_amount = Decimal(200000000)
         self.nodes[3].importpubkey(watchonly_pubkey, "", True)
         self.watchonly_txid = self.nodes[0].sendtoaddress(
             watchonly_address, self.watchonly_amount)
 
         # Lock UTXO so nodes[0] doesn't accidentally spend it
         self.watchonly_vout = find_vout_for_address(
             self.nodes[0], self.watchonly_txid, watchonly_address)
         self.nodes[0].lockunspent(
             False, [{"txid": self.watchonly_txid, "vout": self.watchonly_vout}])
 
         self.nodes[0].sendtoaddress(
             self.nodes[3].getnewaddress(),
             self.watchonly_amount / 10)
 
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1500000)
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1000000)
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5000000)
 
         self.nodes[0].generate(1)
         self.sync_all()
 
     def test_simple(self):
         self.log.info("Test fundrawtxn")
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 1000000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         rawtxfund = self.nodes[2].fundrawtransaction(rawTx)
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         # test that we have enough inputs
         assert len(dec_tx['vin']) > 0
 
     def test_simple_two_coins(self):
         self.log.info("Test fundrawtxn with 2 coins")
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 2200000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawTx)
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         # test if we have enough inputs
         assert len(dec_tx['vin']) > 0
         assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
 
     def test_simple_two_outputs(self):
         self.log.info("Test fundrawtxn with 2 outputs")
         inputs = []
         outputs = {
             self.nodes[0].getnewaddress(): 2600000, self.nodes[1].getnewaddress(): 2500000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawTx)
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
 
         assert len(dec_tx['vin']) > 0
         assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
 
     def test_change(self):
         self.log.info("Test fundrawtxn with a vin > required amount")
         utx = get_unspent(self.nodes[2].listunspent(), 5000000)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): 1000000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawTx)
         fee = rawtxfund['fee']
         # Use the same fee for the next tx
         self.test_no_change_fee = fee
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
 
         # compare vin total and totalout+fee
         assert_equal(fee + totalOut, utx['amount'])
 
     def test_no_change(self):
         self.log.info("Test fundrawtxn not having a change output")
         utx = get_unspent(self.nodes[2].listunspent(), 5000000)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {
             self.nodes[0].getnewaddress(): Decimal(5000000) -
             self.test_no_change_fee -
             self.fee_tolerance}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawTx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
 
         assert_equal(rawtxfund['changepos'], -1)
         # compare vin total and totalout+fee
         assert_equal(fee + totalOut, utx['amount'])
 
     def test_invalid_option(self):
         self.log.info("Test fundrawtxn with an invalid option")
         utx = get_unspent(self.nodes[2].listunspent(), 5000000)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): Decimal(4000000)}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         assert_raises_rpc_error(-3, "Unexpected key foo", self.nodes[
             2].fundrawtransaction, rawTx, {'foo': 'bar'})
         # reserveChangeKey was deprecated and is now removed
         assert_raises_rpc_error(-3,
                                 "Unexpected key reserveChangeKey",
                                 lambda: self.nodes[2].fundrawtransaction(hexstring=rawTx,
                                                                          options={'reserveChangeKey': True}))
 
     def test_invalid_change_address(self):
         self.log.info("Test fundrawtxn with an invalid change address")
         utx = get_unspent(self.nodes[2].listunspent(), 5000000)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): Decimal(4000000)}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         assert_raises_rpc_error(
             -5, "Change address must be a valid bitcoin address",
             self.nodes[2].fundrawtransaction, rawTx, {'changeAddress': 'foobar'})
 
     def test_valid_change_address(self):
         self.log.info("Test fundrawtxn with a provided change address")
         utx = get_unspent(self.nodes[2].listunspent(), 5000000)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): Decimal(4000000)}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         change = self.nodes[2].getnewaddress()
         assert_raises_rpc_error(-8, "changePosition out of bounds", self.nodes[
             2].fundrawtransaction, rawTx, {'changeAddress': change, 'changePosition': 2})
         rawtxfund = self.nodes[2].fundrawtransaction(
             rawTx, {'changeAddress': change, 'changePosition': 0})
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         out = dec_tx['vout'][0]
         assert_equal(change, out['scriptPubKey']['addresses'][0])
 
     def test_coin_selection(self):
         self.log.info("Test fundrawtxn with a vin < required amount")
         utx = get_unspent(self.nodes[2].listunspent(), 1000000)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): 1000000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
 
         # 4-byte version + 1-byte vin count + 36-byte prevout then script_len
         rawTx = rawTx[:82] + "0100" + rawTx[84:]
 
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
         assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
 
         # Should fail without add_inputs:
         assert_raises_rpc_error(-4,
                                 "Insufficient funds",
                                 self.nodes[2].fundrawtransaction,
                                 rawTx,
                                 {"add_inputs": False})
         # add_inputs is enabled by default
         rawtxfund = self.nodes[2].fundrawtransaction(rawTx)
 
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         matchingOuts = 0
         for i, out in enumerate(dec_tx['vout']):
             totalOut += out['value']
             if out['scriptPubKey']['addresses'][0] in outputs:
                 matchingOuts += 1
             else:
                 assert_equal(i, rawtxfund['changepos'])
 
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
         assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
 
         assert_equal(matchingOuts, 1)
         assert_equal(len(dec_tx['vout']), 2)
 
     def test_two_vin(self):
         self.log.info("Test fundrawtxn with 2 vins")
         utx = get_unspent(self.nodes[2].listunspent(), 1000000)
         utx2 = get_unspent(self.nodes[2].listunspent(), 5000000)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']},
                   {'txid': utx2['txid'], 'vout': utx2['vout']}]
         outputs = {self.nodes[0].getnewaddress(): 6000000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         # Should fail without add_inputs:
         assert_raises_rpc_error(-4,
                                 "Insufficient funds",
                                 self.nodes[2].fundrawtransaction,
                                 rawTx,
                                 {"add_inputs": False})
         rawtxfund = self.nodes[2].fundrawtransaction(
             rawTx, {"add_inputs": True})
 
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         matchingOuts = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
             if out['scriptPubKey']['addresses'][0] in outputs:
                 matchingOuts += 1
 
         assert_equal(matchingOuts, 1)
         assert_equal(len(dec_tx['vout']), 2)
 
         matchingIns = 0
         for vinOut in dec_tx['vin']:
             for vinIn in inputs:
                 if vinIn['txid'] == vinOut['txid']:
                     matchingIns += 1
 
         # we now must see two vins identical to vins given as params
         assert_equal(matchingIns, 2)
 
     def test_two_vin_two_vout(self):
         self.log.info("Test fundrawtxn with 2 vins and 2 vouts")
         utx = get_unspent(self.nodes[2].listunspent(), 1000000)
         utx2 = get_unspent(self.nodes[2].listunspent(), 5000000)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']},
                   {'txid': utx2['txid'], 'vout': utx2['vout']}]
         outputs = {
             self.nodes[0].getnewaddress(): 6000000, self.nodes[0].getnewaddress(): 1000000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         # Should fail without add_inputs:
         assert_raises_rpc_error(-4,
                                 "Insufficient funds",
                                 self.nodes[2].fundrawtransaction,
                                 rawTx,
                                 {"add_inputs": False})
         rawtxfund = self.nodes[2].fundrawtransaction(
             rawTx, {"add_inputs": True})
 
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         matchingOuts = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
             if out['scriptPubKey']['addresses'][0] in outputs:
                 matchingOuts += 1
 
         assert_equal(matchingOuts, 2)
         assert_equal(len(dec_tx['vout']), 3)
 
     def test_invalid_input(self):
         self.log.info("Test fundrawtxn with an invalid vin")
         inputs = [
             {'txid': "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout': 0}]
         outputs = {self.nodes[0].getnewaddress(): 1000000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
 
         assert_raises_rpc_error(
             -4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawTx)
 
     def test_fee_p2pkh(self):
         """Compare fee of a standard pubkeyhash transaction."""
         self.log.info("Test fundrawtxn p2pkh fee")
         inputs = []
         outputs = {self.nodes[1].getnewaddress(): 1100000}
         rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[0].fundrawtransaction(rawTx)
 
         # Create same transaction over sendtoaddress.
         txId = self.nodes[0].sendtoaddress(
             self.nodes[1].getnewaddress(), 1100000)
         signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
 
         # Compare fee.
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert feeDelta >= 0 and feeDelta <= self.fee_tolerance
 
     def test_fee_p2pkh_multi_out(self):
         """Compare fee of a standard pubkeyhash transaction with multiple
         outputs."""
         self.log.info("Test fundrawtxn p2pkh fee with multiple outputs")
         inputs = []
         outputs = {
             self.nodes[1].getnewaddress(): 1100000,
             self.nodes[1].getnewaddress(): 1200000,
             self.nodes[1].getnewaddress(): 100000,
             self.nodes[1].getnewaddress(): 1300000,
             self.nodes[1].getnewaddress(): 200000,
             self.nodes[1].getnewaddress(): 300000,
         }
         rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[0].fundrawtransaction(rawtx)
         # Create same transaction over sendtoaddress.
         txId = self.nodes[0].sendmany("", outputs)
         signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
 
         # Compare fee.
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert feeDelta >= 0 and feeDelta <= self.fee_tolerance
 
     def test_fee_p2sh(self):
         """Compare fee of a 2-of-2 multisig p2sh transaction."""
         # Create 2-of-2 addr.
         addr1 = self.nodes[1].getnewaddress()
         addr2 = self.nodes[1].getnewaddress()
 
         addr1Obj = self.nodes[1].getaddressinfo(addr1)
         addr2Obj = self.nodes[1].getaddressinfo(addr2)
 
         mSigObj = self.nodes[1].addmultisigaddress(
             2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
 
         inputs = []
         outputs = {mSigObj: 1100000}
         rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[0].fundrawtransaction(rawTx)
 
         # Create same transaction over sendtoaddress.
         txId = self.nodes[0].sendtoaddress(mSigObj, 1100000)
         signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
 
         # Compare fee.
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert feeDelta >= 0 and feeDelta <= self.fee_tolerance
 
     def test_fee_4of5(self):
         """Compare fee of a standard pubkeyhash transaction."""
         self.log.info("Test fundrawtxn fee with 4-of-5 addresses")
 
         # Create 4-of-5 addr.
         addr1 = self.nodes[1].getnewaddress()
         addr2 = self.nodes[1].getnewaddress()
         addr3 = self.nodes[1].getnewaddress()
         addr4 = self.nodes[1].getnewaddress()
         addr5 = self.nodes[1].getnewaddress()
 
         addr1Obj = self.nodes[1].getaddressinfo(addr1)
         addr2Obj = self.nodes[1].getaddressinfo(addr2)
         addr3Obj = self.nodes[1].getaddressinfo(addr3)
         addr4Obj = self.nodes[1].getaddressinfo(addr4)
         addr5Obj = self.nodes[1].getaddressinfo(addr5)
 
         mSigObj = self.nodes[1].addmultisigaddress(
             4,
             [
                 addr1Obj['pubkey'],
                 addr2Obj['pubkey'],
                 addr3Obj['pubkey'],
                 addr4Obj['pubkey'],
                 addr5Obj['pubkey'],
             ]
         )['address']
 
         inputs = []
         outputs = {mSigObj: 1100000}
         rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[0].fundrawtransaction(rawTx)
 
         # Create same transaction over sendtoaddress.
         txId = self.nodes[0].sendtoaddress(mSigObj, 1100000)
         signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
 
         # Compare fee.
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert feeDelta >= 0 and feeDelta <= self.fee_tolerance
 
     def test_spend_2of2(self):
         """Spend a 2-of-2 multisig transaction over fundraw."""
         self.log.info("Test fundrawtxn spending 2-of-2 multisig")
 
         # Create 2-of-2 addr.
         addr1 = self.nodes[2].getnewaddress()
         addr2 = self.nodes[2].getnewaddress()
 
         addr1Obj = self.nodes[2].getaddressinfo(addr1)
         addr2Obj = self.nodes[2].getaddressinfo(addr2)
 
         mSigObj = self.nodes[2].addmultisigaddress(
             2,
             [
                 addr1Obj['pubkey'],
                 addr2Obj['pubkey'],
             ]
         )['address']
 
         # Send 1,200,000 XEC to msig addr.
         self.nodes[0].sendtoaddress(mSigObj, 1200000)
         self.nodes[0].generate(1)
         self.sync_all()
 
         oldBalance = self.nodes[1].getbalance()
         inputs = []
         outputs = {self.nodes[1].getnewaddress(): 1100000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[2].fundrawtransaction(rawTx)
 
         signedTx = self.nodes[2].signrawtransactionwithwallet(fundedTx['hex'])
         self.nodes[2].sendrawtransaction(signedTx['hex'])
         self.nodes[2].generate(1)
         self.sync_all()
 
         # Make sure funds are received at node1.
         assert_equal(
             oldBalance + Decimal('1100000.00'), self.nodes[1].getbalance())
 
     def test_locked_wallet(self):
         self.log.info("Test fundrawtxn with locked wallet")
 
         self.nodes[1].encryptwallet("test")
 
         # Drain the keypool.
         self.nodes[1].getnewaddress()
         self.nodes[1].getrawchangeaddress()
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 1099997.00}
         rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
         # fund a transaction that does not require a new key for the change
         # output
         self.nodes[1].fundrawtransaction(rawtx)
 
         # fund a transaction that requires a new key for the change output
         # creating the key must be impossible because the wallet is locked
         outputs = {self.nodes[0].getnewaddress(): 1100000}
         rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
         assert_raises_rpc_error(
             -4,
             "Transaction needs a change address, but we can't generate it. Please call keypoolrefill first.",
             self.nodes[1].fundrawtransaction,
             rawtx)
 
         # Refill the keypool.
         self.nodes[1].walletpassphrase("test", 100)
         # need to refill the keypool to get an internal change address
         self.nodes[1].keypoolrefill(8)
         self.nodes[1].walletlock()
 
         assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[
             1].sendtoaddress, self.nodes[0].getnewaddress(), 1200000)
 
         oldBalance = self.nodes[0].getbalance()
 
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 1100000}
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[1].fundrawtransaction(rawTx)
 
         # Now we need to unlock.
         self.nodes[1].walletpassphrase("test", 600)
         signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex'])
         self.nodes[1].sendrawtransaction(signedTx['hex'])
         self.nodes[1].generate(1)
         self.sync_all()
 
         # Make sure funds are received at node1.
         assert_equal(
             oldBalance + Decimal('51100000.00'), self.nodes[0].getbalance())
 
     def test_many_inputs_fee(self):
         """Multiple (~19) inputs tx test | Compare fee."""
         self.log.info("Test fundrawtxn fee with many inputs")
 
         # Empty node1, send some small coins from node0 to node1.
         self.nodes[1].sendtoaddress(
             self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
         self.nodes[1].generate(1)
         self.sync_all()
 
         for _ in range(20):
             self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10000)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # Fund a tx with ~20 small inputs.
         inputs = []
         outputs = {
             self.nodes[0].getnewaddress(): 150000, self.nodes[0].getnewaddress(): 40000}
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[1].fundrawtransaction(rawTx)
 
         # Create same transaction over sendtoaddress.
         txId = self.nodes[1].sendmany("", outputs)
         signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
 
         # Compare fee.
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         # ~19 inputs
         assert feeDelta >= 0 and feeDelta <= self.fee_tolerance * 19
 
     def test_many_inputs_send(self):
         """Multiple (~19) inputs tx test | sign/send."""
         self.log.info("Test fundrawtxn sign+send with many inputs")
 
         # Again, empty node1, send some small coins from node0 to node1.
         self.nodes[1].sendtoaddress(
             self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
         self.nodes[1].generate(1)
         self.sync_all()
 
         for _ in range(20):
             self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10000)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # Fund a tx with ~20 small inputs.
         oldBalance = self.nodes[0].getbalance()
 
         inputs = []
         outputs = {
             self.nodes[0].getnewaddress(): 150000, self.nodes[0].getnewaddress(): 40000}
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[1].fundrawtransaction(rawTx)
         fundedAndSignedTx = self.nodes[1].signrawtransactionwithwallet(
             fundedTx['hex'])
         self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
         self.nodes[1].generate(1)
         self.sync_all()
         assert_equal(oldBalance + Decimal('50190000.00'),
                      self.nodes[0].getbalance())  # 0.19+block reward
 
     def test_op_return(self):
         self.log.info("Test fundrawtxn with OP_RETURN and no vin")
 
         rawTx = "0100000000010000000000000000066a047465737400000000"
         dec_tx = self.nodes[2].decoderawtransaction(rawTx)
 
         assert_equal(len(dec_tx['vin']), 0)
         assert_equal(len(dec_tx['vout']), 1)
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawTx)
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
 
         # at least one vin
         assert_greater_than(len(dec_tx['vin']), 0)
         # one change output added
         assert_equal(len(dec_tx['vout']), 2)
 
     def test_watchonly(self):
         self.log.info("Test fundrawtxn using only watchonly")
 
         inputs = []
         outputs = {self.nodes[2].getnewaddress(): self.watchonly_amount / 2}
         rawTx = self.nodes[3].createrawtransaction(inputs, outputs)
 
         result = self.nodes[3].fundrawtransaction(
             rawTx, {'includeWatching': True})
         res_dec = self.nodes[0].decoderawtransaction(result["hex"])
         assert_equal(len(res_dec["vin"]), 1)
         assert_equal(res_dec["vin"][0]["txid"], self.watchonly_txid)
 
         assert "fee" in result.keys()
         assert_greater_than(result["changepos"], -1)
 
     def test_all_watched_funds(self):
         self.log.info("Test fundrawtxn using entirety of watched funds")
 
         inputs = []
         outputs = {self.nodes[2].getnewaddress(): self.watchonly_amount}
         rawTx = self.nodes[3].createrawtransaction(inputs, outputs)
 
         # Backward compatibility test (2nd param is includeWatching).
         result = self.nodes[3].fundrawtransaction(rawTx, True)
         res_dec = self.nodes[0].decoderawtransaction(result["hex"])
         assert_equal(len(res_dec["vin"]), 2)
         assert res_dec["vin"][0]["txid"] == self.watchonly_txid or res_dec[
             "vin"][1]["txid"] == self.watchonly_txid
 
         assert_greater_than(result["fee"], 0)
         assert_greater_than(result["changepos"], -1)
         assert_equal(result["fee"] + res_dec["vout"][
                      result["changepos"]]["value"], self.watchonly_amount / 10)
 
         signedtx = self.nodes[3].signrawtransactionwithwallet(result["hex"])
         assert not signedtx["complete"]
         signedtx = self.nodes[0].signrawtransactionwithwallet(signedtx["hex"])
         assert signedtx["complete"]
         self.nodes[0].sendrawtransaction(signedtx["hex"])
         self.nodes[0].generate(1)
         self.sync_all()
 
     def test_option_feerate(self):
         self.log.info("Test fundrawtxn feeRate option")
 
         # Make sure there is exactly one input so coin selection can't skew the
         # result.
         assert_equal(len(self.nodes[3].listunspent(1)), 1)
 
         inputs = []
         outputs = {self.nodes[3].getnewaddress(): 1000000}
         rawTx = self.nodes[3].createrawtransaction(inputs, outputs)
         # uses self.min_relay_tx_fee (set by settxfee)
         result = self.nodes[3].fundrawtransaction(rawTx)
         result2 = self.nodes[3].fundrawtransaction(
             rawTx, {"feeRate": 2 * self.min_relay_tx_fee})
         result_fee_rate = result['fee'] * 1000 / \
             FromHex(CTransaction(), result['hex']).billable_size()
         assert_fee_amount(
             result2['fee'], FromHex(CTransaction(), result2['hex']).billable_size(), 2 * result_fee_rate)
 
         result3 = self.nodes[3].fundrawtransaction(
             rawTx, {"feeRate": 10 * self.min_relay_tx_fee})
         assert_raises_rpc_error(-4,
                                 "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)",
                                 self.nodes[3].fundrawtransaction,
                                 rawTx,
                                 {"feeRate": 1000000})
         # allow this transaction to be underfunded by 10 bytes. This is due
         # to the first transaction possibly being overfunded by up to .9
         # satoshi due to  fee ceilings being used.
         assert_fee_amount(
             result3['fee'], FromHex(CTransaction(), result3['hex']).billable_size(), 10 * result_fee_rate, 10)
 
     def test_address_reuse(self):
         """Test no address reuse occurs."""
         self.log.info("Test fundrawtxn does not reuse addresses")
 
         rawTx = self.nodes[3].createrawtransaction(
             inputs=[], outputs={self.nodes[3].getnewaddress(): 1000000})
         result3 = self.nodes[3].fundrawtransaction(rawTx)
         res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
         changeaddress = ""
         for out in res_dec['vout']:
             if out['value'] > 1000000.0:
                 changeaddress += out['scriptPubKey']['addresses'][0]
         assert changeaddress != ""
         nextaddr = self.nodes[3].getnewaddress()
         # Now the change address key should be removed from the keypool.
         assert changeaddress != nextaddr
 
     def test_option_subtract_fee_from_outputs(self):
         self.log.info("Test fundrawtxn subtractFeeFromOutputs option")
 
         # Make sure there is exactly one input so coin selection can't skew the
         # result.
         assert_equal(len(self.nodes[3].listunspent(1)), 1)
 
         inputs = []
         outputs = {self.nodes[2].getnewaddress(): 1000000}
         rawTx = self.nodes[3].createrawtransaction(inputs, outputs)
 
         # uses self.min_relay_tx_fee (set by settxfee)
         result = [self.nodes[3].fundrawtransaction(rawTx),
                   # empty subtraction list
                   self.nodes[3].fundrawtransaction(
                       rawTx, {"subtractFeeFromOutputs": []}),
                   # uses self.min_relay_tx_fee (set by settxfee)
                   self.nodes[3].fundrawtransaction(
                       rawTx, {"subtractFeeFromOutputs": [0]}),
                   self.nodes[3].fundrawtransaction(
                       rawTx, {"feeRate": 2 * self.min_relay_tx_fee}),
                   self.nodes[3].fundrawtransaction(
                       rawTx, {"feeRate": 2 * self.min_relay_tx_fee, "subtractFeeFromOutputs": [0]}),
                   ]
 
         dec_tx = [self.nodes[3].decoderawtransaction(tx_['hex'])
                   for tx_ in result]
         output = [d['vout'][1 - r['changepos']]['value']
                   for d, r in zip(dec_tx, result)]
         change = [d['vout'][r['changepos']]['value']
                   for d, r in zip(dec_tx, result)]
 
         assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee'])
         assert_equal(result[3]['fee'], result[4]['fee'])
         assert_equal(change[0], change[1])
         assert_equal(output[0], output[1])
         assert_equal(output[0], output[2] + result[2]['fee'])
         assert_equal(change[0] + result[0]['fee'], change[2])
         assert_equal(output[3], output[4] + result[4]['fee'])
         assert_equal(change[3] + result[3]['fee'], change[4])
 
         inputs = []
         outputs = {
             self.nodes[2].getnewaddress(): value for value in (1000000.0, 1100000.0, 1200000.0, 1300000.0)}
         rawTx = self.nodes[3].createrawtransaction(inputs, outputs)
 
         # Split the fee between outputs 0, 2, and 3, but not output 1
         result = [self.nodes[3].fundrawtransaction(rawTx),
                   self.nodes[3].fundrawtransaction(rawTx, {"subtractFeeFromOutputs": [0, 2, 3]})]
 
         dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']),
                   self.nodes[3].decoderawtransaction(result[1]['hex'])]
 
         # Nested list of non-change output amounts for each transaction.
         output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']]
                   for d, r in zip(dec_tx, result)]
 
         # List of differences in output amounts between normal and subtractFee
         # transactions.
         share = [o0 - o1 for o0, o1 in zip(output[0], output[1])]
 
         # Output 1 is the same in both transactions.
         assert_equal(share[1], 0)
 
         # The other 3 outputs are smaller as a result of
         # subtractFeeFromOutputs.
         assert_greater_than(share[0], 0)
         assert_greater_than(share[2], 0)
         assert_greater_than(share[3], 0)
 
         # Outputs 2 and 3 take the same share of the fee.
         assert_equal(share[2], share[3])
 
         # Output 0 takes at least as much share of the fee, and no more than 2
         # satoshis more, than outputs 2 and 3.
         assert_greater_than_or_equal(share[0], share[2])
         assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0])
 
         # The fee is the same in both transactions.
         assert_equal(result[0]['fee'], result[1]['fee'])
 
         # The total subtracted from the outputs is equal to the fee.
         assert_equal(share[0] + share[2] + share[3], result[0]['fee'])
 
     def test_subtract_fee_with_presets(self):
         self.log.info(
             "Test fundrawtxn subtract fee from outputs with preset inputs that are sufficient")
 
         addr = self.nodes[0].getnewaddress()
         txid = self.nodes[0].sendtoaddress(addr, 10000000)
         vout = find_vout_for_address(self.nodes[0], txid, addr)
 
         rawtx = self.nodes[0].createrawtransaction([{'txid': txid, 'vout': vout}], [
                                                    {self.nodes[0].getnewaddress(): 5000000}])
         fundedtx = self.nodes[0].fundrawtransaction(
             rawtx, {'subtractFeeFromOutputs': [0]})
         signedtx = self.nodes[0].signrawtransactionwithwallet(fundedtx['hex'])
         self.nodes[0].sendrawtransaction(signedtx['hex'])
 
 
 if __name__ == '__main__':
     RawTransactionsTest().main()
diff --git a/test/functional/rpc_getblockfilter.py b/test/functional/rpc_getblockfilter.py
index 3a8edee97..6250ca0cf 100755
--- a/test/functional/rpc_getblockfilter.py
+++ b/test/functional/rpc_getblockfilter.py
@@ -1,78 +1,76 @@
 #!/usr/bin/env python3
 # Copyright (c) 2018 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the getblockfilter RPC."""
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_equal,
     assert_is_hex_string,
     assert_raises_rpc_error,
-    connect_nodes,
-    disconnect_nodes,
 )
 
 FILTER_TYPES = ["basic"]
 
 
 class GetBlockFilterTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 2
         self.extra_args = [["-blockfilterindex",
                             "-noparkdeepreorg"], ["-noparkdeepreorg"]]
 
     def run_test(self):
         # Create two chains by disconnecting nodes 0 & 1, mining, then
         # reconnecting
-        disconnect_nodes(self.nodes[0], self.nodes[1])
+        self.disconnect_nodes(0, 1)
 
         self.nodes[0].generate(3)
         self.nodes[1].generate(4)
 
         assert_equal(self.nodes[0].getblockcount(), 3)
         chain0_hashes = [self.nodes[0].getblockhash(
             block_height) for block_height in range(4)]
 
         # Reorg node 0 to a new chain
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_blocks()
 
         assert_equal(self.nodes[0].getblockcount(), 4)
         chain1_hashes = [self.nodes[0].getblockhash(
             block_height) for block_height in range(4)]
 
         # Test getblockfilter returns a filter for all blocks and filter types
         # on active chain
         for block_hash in chain1_hashes:
             for filter_type in FILTER_TYPES:
                 result = self.nodes[0].getblockfilter(block_hash, filter_type)
                 assert_is_hex_string(result['filter'])
 
         # Test getblockfilter returns a filter for all blocks and filter types
         # on stale chain
         for block_hash in chain0_hashes:
             for filter_type in FILTER_TYPES:
                 result = self.nodes[0].getblockfilter(block_hash, filter_type)
                 assert_is_hex_string(result['filter'])
 
         # Test getblockfilter with unknown block
         bad_block_hash = "0123456789abcdef" * 4
         assert_raises_rpc_error(-5,
                                 "Block not found",
                                 self.nodes[0].getblockfilter,
                                 bad_block_hash,
                                 "basic")
 
         # Test getblockfilter with undefined filter type
         genesis_hash = self.nodes[0].getblockhash(0)
         assert_raises_rpc_error(-5,
                                 "Unknown filtertype",
                                 self.nodes[0].getblockfilter,
                                 genesis_hash,
                                 "unknown")
 
 
 if __name__ == '__main__':
     GetBlockFilterTest().main()
diff --git a/test/functional/rpc_getpeerinfo_deprecation.py b/test/functional/rpc_getpeerinfo_deprecation.py
index f8c05d43c..c27035150 100755
--- a/test/functional/rpc_getpeerinfo_deprecation.py
+++ b/test/functional/rpc_getpeerinfo_deprecation.py
@@ -1,43 +1,42 @@
 #!/usr/bin/env python3
 # Copyright (c) 2020 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test deprecation of getpeerinfo RPC fields."""
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import connect_nodes
 
 
 class GetpeerinfoDeprecationTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.extra_args = [[], ["-deprecatedrpc=banscore"]]
 
     def run_test(self):
         self.test_banscore_deprecation()
         self.test_addnode_deprecation()
 
     def test_banscore_deprecation(self):
         self.log.info(
             "Test getpeerinfo by default no longer returns a banscore field")
         assert "banscore" not in self.nodes[0].getpeerinfo()[0].keys()
 
         self.log.info(
             "Test getpeerinfo returns banscore with -deprecatedrpc=banscore")
         assert "banscore" in self.nodes[1].getpeerinfo()[0].keys()
 
     def test_addnode_deprecation(self):
         self.restart_node(1, ["-deprecatedrpc=getpeerinfo_addnode"])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
 
         self.log.info(
             "Test getpeerinfo by default no longer returns an addnode field")
         assert "addnode" not in self.nodes[0].getpeerinfo()[0].keys()
 
         self.log.info(
             "Test getpeerinfo returns addnode with -deprecatedrpc=addnode")
         assert "addnode" in self.nodes[1].getpeerinfo()[0].keys()
 
 
 if __name__ == "__main__":
     GetpeerinfoDeprecationTest().main()
diff --git a/test/functional/rpc_invalidateblock.py b/test/functional/rpc_invalidateblock.py
index da6995039..1232697f5 100755
--- a/test/functional/rpc_invalidateblock.py
+++ b/test/functional/rpc_invalidateblock.py
@@ -1,94 +1,94 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the invalidateblock RPC."""
 
 from test_framework.address import ADDRESS_ECREG_UNSPENDABLE_DESCRIPTOR
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes
+from test_framework.util import assert_equal
 
 
 class InvalidateTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 3
         self.extra_args = [["-noparkdeepreorg"], [], []]
 
     def setup_network(self):
         self.setup_nodes()
 
     def run_test(self):
         self.log.info(
             "Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:")
         self.log.info("Mine 4 blocks on Node 0")
         self.nodes[0].generatetoaddress(
             4, self.nodes[0].get_deterministic_priv_key().address)
         assert_equal(self.nodes[0].getblockcount(), 4)
         besthash_n0 = self.nodes[0].getbestblockhash()
 
         self.log.info("Mine competing 6 blocks on Node 1")
         self.nodes[1].generatetoaddress(
             6, self.nodes[1].get_deterministic_priv_key().address)
         assert_equal(self.nodes[1].getblockcount(), 6)
 
         self.log.info("Connect nodes to force a reorg")
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_blocks(self.nodes[0:2])
         assert_equal(self.nodes[0].getblockcount(), 6)
         badhash = self.nodes[1].getblockhash(2)
 
         self.log.info(
             "Invalidate block 2 on node 0 and verify we reorg to node 0's original chain")
         self.nodes[0].invalidateblock(badhash)
         assert_equal(self.nodes[0].getblockcount(), 4)
         assert_equal(self.nodes[0].getbestblockhash(), besthash_n0)
 
         self.log.info("\nMake sure we won't reorg to a lower work chain:")
-        connect_nodes(self.nodes[1], self.nodes[2])
+        self.connect_nodes(1, 2)
         self.log.info("Sync node 2 to node 1 so both have 6 blocks")
         self.sync_blocks(self.nodes[1:3])
         assert_equal(self.nodes[2].getblockcount(), 6)
         self.log.info("Invalidate block 5 on node 1 so its tip is now at 4")
         self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5))
         assert_equal(self.nodes[1].getblockcount(), 4)
         self.log.info("Invalidate block 3 on node 2, so its tip is now 2")
         self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3))
         assert_equal(self.nodes[2].getblockcount(), 2)
         self.log.info("..and then mine a block")
         self.nodes[2].generatetoaddress(
             1, self.nodes[2].get_deterministic_priv_key().address)
         self.log.info("Verify all nodes are at the right height")
         self.wait_until(lambda: self.nodes[2].getblockcount() == 3, timeout=5)
         self.wait_until(lambda: self.nodes[0].getblockcount() == 4, timeout=5)
         self.wait_until(lambda: self.nodes[1].getblockcount() == 4, timeout=5)
 
         self.log.info("Verify that we reconsider all ancestors as well")
         blocks = self.nodes[1].generatetodescriptor(
             10, ADDRESS_ECREG_UNSPENDABLE_DESCRIPTOR)
         assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
         # Invalidate the two blocks at the tip
         self.nodes[1].invalidateblock(blocks[-1])
         self.nodes[1].invalidateblock(blocks[-2])
         assert_equal(self.nodes[1].getbestblockhash(), blocks[-3])
         # Reconsider only the previous tip
         self.nodes[1].reconsiderblock(blocks[-1])
         # Should be back at the tip by now
         assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
 
         self.log.info("Verify that we reconsider all descendants")
         blocks = self.nodes[1].generatetodescriptor(
             10, ADDRESS_ECREG_UNSPENDABLE_DESCRIPTOR)
         assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
         # Invalidate the two blocks at the tip
         self.nodes[1].invalidateblock(blocks[-2])
         self.nodes[1].invalidateblock(blocks[-4])
         assert_equal(self.nodes[1].getbestblockhash(), blocks[-5])
         # Reconsider only the previous tip
         self.nodes[1].reconsiderblock(blocks[-4])
         # Should be back at the tip by now
         assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
 
 
 if __name__ == '__main__':
     InvalidateTest().main()
diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py
index 66c4b4c46..4c5e3caf0 100755
--- a/test/functional/rpc_net.py
+++ b/test/functional/rpc_net.py
@@ -1,278 +1,277 @@
 #!/usr/bin/env python3
 # Copyright (c) 2017 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test RPC calls related to net.
 
 Tests correspond to code in rpc/net.cpp.
 """
 
 import time
 from decimal import Decimal
 from itertools import product
 
 import test_framework.messages
 from test_framework.avatools import create_coinbase_stakes
 from test_framework.key import ECKey
 from test_framework.messages import NODE_NETWORK
 from test_framework.p2p import P2PInterface
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_approx,
     assert_equal,
     assert_greater_than,
     assert_greater_than_or_equal,
     assert_raises_rpc_error,
-    connect_nodes,
     p2p_port,
 )
 from test_framework.wallet_util import bytes_to_wif
 
 
 def assert_net_servicesnames(servicesflag, servicenames):
     """Utility that checks if all flags are correctly decoded in
     `getpeerinfo` and `getnetworkinfo`.
 
     :param servicesflag: The services as an integer.
     :param servicenames: The list of decoded services names, as strings.
     """
     servicesflag_generated = 0
     for servicename in servicenames:
         servicesflag_generated |= getattr(
             test_framework.messages, 'NODE_' + servicename)
     assert servicesflag_generated == servicesflag
 
 
 class NetTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 2
         self.extra_args = [["-enableavalanche=1", "-minrelaytxfee=10"],
                            ["-enableavalanche=1", "-minrelaytxfee=5"]]
         self.supports_cli = False
 
     def run_test(self):
         # Get out of IBD for the minfeefilter and getpeerinfo tests.
         self.nodes[0].generate(101)
         # Connect nodes both ways.
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[0])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 0)
         self.sync_all()
 
         self.test_connection_count()
         self.test_getpeerinfo()
         self.test_getnettotals()
         self.test_getnetworkinfo()
         self.test_getaddednodeinfo()
         self.test_service_flags()
         self.test_getnodeaddresses()
 
     def test_connection_count(self):
         self.log.info("Test getconnectioncount")
         # After using `connect_nodes` to connect nodes 0 and 1 to each other.
         assert_equal(self.nodes[0].getconnectioncount(), 2)
 
     def test_getnettotals(self):
         self.log.info("Test getnettotals")
         # getnettotals totalbytesrecv and totalbytessent should be
         # consistent with getpeerinfo. Since the RPC calls are not atomic,
         # and messages might have been recvd or sent between RPC calls, call
         # getnettotals before and after and verify that the returned values
         # from getpeerinfo are bounded by those values.
         net_totals_before = self.nodes[0].getnettotals()
         peer_info = self.nodes[0].getpeerinfo()
         net_totals_after = self.nodes[0].getnettotals()
         assert_equal(len(peer_info), 2)
         peers_recv = sum([peer['bytesrecv'] for peer in peer_info])
         peers_sent = sum([peer['bytessent'] for peer in peer_info])
 
         assert_greater_than_or_equal(
             peers_recv, net_totals_before['totalbytesrecv'])
         assert_greater_than_or_equal(
             net_totals_after['totalbytesrecv'], peers_recv)
         assert_greater_than_or_equal(
             peers_sent, net_totals_before['totalbytessent'])
         assert_greater_than_or_equal(
             net_totals_after['totalbytessent'], peers_sent)
 
         # test getnettotals and getpeerinfo by doing a ping
         # the bytes sent/received should change
         # note ping and pong are 32 bytes each
         self.nodes[0].ping()
         self.wait_until(lambda: (self.nodes[0].getnettotals()[
             'totalbytessent'] >= net_totals_after['totalbytessent'] + 32 * 2), timeout=1)
         self.wait_until(lambda: (self.nodes[0].getnettotals()[
             'totalbytesrecv'] >= net_totals_after['totalbytesrecv'] + 32 * 2), timeout=1)
 
         peer_info_after_ping = self.nodes[0].getpeerinfo()
         for before, after in zip(peer_info, peer_info_after_ping):
             assert_greater_than_or_equal(
                 after['bytesrecv_per_msg'].get(
                     'pong', 0), before['bytesrecv_per_msg'].get(
                     'pong', 0) + 32)
             assert_greater_than_or_equal(
                 after['bytessent_per_msg'].get(
                     'ping', 0), before['bytessent_per_msg'].get(
                     'ping', 0) + 32)
 
     def test_getnetworkinfo(self):
         self.log.info("Test getnetworkinfo")
         info = self.nodes[0].getnetworkinfo()
         assert_equal(info['networkactive'], True)
         assert_equal(info['connections'], 2)
         assert_equal(info['connections_in'], 1)
         assert_equal(info['connections_out'], 1)
 
         with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
             self.nodes[0].setnetworkactive(state=False)
         assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
         # Wait a bit for all sockets to close
         self.wait_until(lambda: self.nodes[0].getnetworkinfo()[
             'connections'] == 0, timeout=3)
 
         with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
             self.nodes[0].setnetworkactive(state=True)
         # Connect nodes both ways.
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[0])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 0)
 
         info = self.nodes[0].getnetworkinfo()
         assert_equal(info['networkactive'], True)
         assert_equal(info['connections'], 2)
         assert_equal(info['connections_in'], 1)
         assert_equal(info['connections_out'], 1)
 
         # check the `servicesnames` field
         network_info = [node.getnetworkinfo() for node in self.nodes]
         for info in network_info:
             assert_net_servicesnames(int(info["localservices"], 0x10),
                                      info["localservicesnames"])
 
     def test_getaddednodeinfo(self):
         self.log.info("Test getaddednodeinfo")
         assert_equal(self.nodes[0].getaddednodeinfo(), [])
         # add a node (node2) to node0
         ip_port = "127.0.0.1:{}".format(p2p_port(2))
         self.nodes[0].addnode(node=ip_port, command='add')
         # check that the node has indeed been added
         added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
         assert_equal(len(added_nodes), 1)
         assert_equal(added_nodes[0]['addednode'], ip_port)
         # check that node cannot be added again
         assert_raises_rpc_error(-23,
                                 "Node already added",
                                 self.nodes[0].addnode,
                                 node=ip_port,
                                 command='add')
         # check that node can be removed
         self.nodes[0].addnode(node=ip_port, command='remove')
         assert_equal(self.nodes[0].getaddednodeinfo(), [])
         # check that trying to remove the node again returns an error
         assert_raises_rpc_error(-24,
                                 "Node could not be removed",
                                 self.nodes[0].addnode,
                                 node=ip_port,
                                 command='remove')
         # check that a non-existent node returns an error
         assert_raises_rpc_error(-24, "Node has not been added",
                                 self.nodes[0].getaddednodeinfo, '1.1.1.1')
 
     def test_getpeerinfo(self):
         self.log.info("Test getpeerinfo")
         # Create a few getpeerinfo last_block/last_transaction/last_proof
         # values.
         if self.is_wallet_compiled():
             self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1000000)
         tip = self.nodes[1].generate(1)[0]
         self.sync_all()
 
         stake = create_coinbase_stakes(
             self.nodes[1], [tip], self.nodes[1].get_deterministic_priv_key().key)
         privkey = ECKey()
         privkey.generate()
         proof = self.nodes[1].buildavalancheproof(
             42, 2000000000, bytes_to_wif(privkey.get_bytes()), stake)
         self.nodes[1].sendavalancheproof(proof)
         self.sync_proofs()
 
         time_now = int(time.time())
         peer_info = [x.getpeerinfo() for x in self.nodes]
         # Verify last_block, last_transaction and last_proof keys/values.
         for node, peer, field in product(range(self.num_nodes), range(2), [
                                          'last_block', 'last_transaction', 'last_proof']):
             assert field in peer_info[node][peer].keys()
             if peer_info[node][peer][field] != 0:
                 assert_approx(peer_info[node][peer][field], time_now, vspan=60)
         # check both sides of bidirectional connection between nodes
         # the address bound to on one side will be the source address for the
         # other node
         assert_equal(peer_info[0][0]['addrbind'], peer_info[1][0]['addr'])
         assert_equal(peer_info[1][0]['addrbind'], peer_info[0][0]['addr'])
         assert_equal(peer_info[0][0]['minfeefilter'], Decimal("5.00"))
         assert_equal(peer_info[1][0]['minfeefilter'], Decimal("10.00"))
         # check the `servicesnames` field
         for info in peer_info:
             assert_net_servicesnames(int(info[0]["services"], 0x10),
                                      info[0]["servicesnames"])
 
         assert_equal(peer_info[0][0]['connection_type'], 'inbound')
         assert_equal(peer_info[0][1]['connection_type'], 'manual')
 
         assert_equal(peer_info[1][0]['connection_type'], 'manual')
         assert_equal(peer_info[1][1]['connection_type'], 'inbound')
 
     def test_service_flags(self):
         self.log.info("Test service flags")
         self.nodes[0].add_p2p_connection(
             P2PInterface(), services=(
                 1 << 5) | (
                 1 << 63))
         assert_equal(['UNKNOWN[2^5]', 'UNKNOWN[2^63]'],
                      self.nodes[0].getpeerinfo()[-1]['servicesnames'])
         self.nodes[0].disconnect_p2ps()
 
     def test_getnodeaddresses(self):
         self.log.info("Test getnodeaddresses")
         self.nodes[0].add_p2p_connection(P2PInterface())
 
         # Add some addresses to the Address Manager over RPC. Due to the way
         # bucket and bucket position are calculated, some of these addresses
         # will collide.
         imported_addrs = []
         for i in range(10000):
             first_octet = i >> 8
             second_octet = i % 256
             a = "{}.{}.1.1".format(first_octet, second_octet)
             imported_addrs.append(a)
             self.nodes[0].addpeeraddress(a, 8333)
 
         # Obtain addresses via rpc call and check they were ones sent in before.
         #
         # Maximum possible addresses in addrman is 10000, although actual
         # number will usually be less due to bucket and bucket position
         # collisions.
         node_addresses = self.nodes[0].getnodeaddresses(0)
         assert_greater_than(len(node_addresses), 5000)
         assert_greater_than(10000, len(node_addresses))
         for a in node_addresses:
             assert_greater_than(a["time"], 1527811200)  # 1st June 2018
             assert_equal(a["services"], NODE_NETWORK)
             assert a["address"] in imported_addrs
             assert_equal(a["port"], 8333)
 
         node_addresses = self.nodes[0].getnodeaddresses(1)
         assert_equal(len(node_addresses), 1)
 
         assert_raises_rpc_error(-8, "Address count out of range",
                                 self.nodes[0].getnodeaddresses, -1)
 
         # addrman's size cannot be known reliably after insertion, as hash collisions may occur
         # so only test that requesting a large number of addresses returns less
         # than that
         LARGE_REQUEST_COUNT = 10000
         node_addresses = self.nodes[0].getnodeaddresses(LARGE_REQUEST_COUNT)
         assert_greater_than(LARGE_REQUEST_COUNT, len(node_addresses))
 
 
 if __name__ == '__main__':
     NetTest().main()
diff --git a/test/functional/rpc_preciousblock.py b/test/functional/rpc_preciousblock.py
index 82771ad20..0c4979a55 100755
--- a/test/functional/rpc_preciousblock.py
+++ b/test/functional/rpc_preciousblock.py
@@ -1,125 +1,125 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the preciousblock RPC."""
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes
+from test_framework.util import assert_equal
 
 
 def unidirectional_node_sync_via_rpc(node_src, node_dest):
     blocks_to_copy = []
     blockhash = node_src.getbestblockhash()
     while True:
         try:
             assert len(node_dest.getblock(blockhash, False)) > 0
             break
         except Exception:
             blocks_to_copy.append(blockhash)
             blockhash = node_src.getblockheader(
                 blockhash, True)['previousblockhash']
     blocks_to_copy.reverse()
     for blockhash in blocks_to_copy:
         blockdata = node_src.getblock(blockhash, False)
         assert node_dest.submitblock(blockdata) in (None, 'inconclusive')
 
 
 def node_sync_via_rpc(nodes):
     for node_src in nodes:
         for node_dest in nodes:
             if node_src is node_dest:
                 continue
             unidirectional_node_sync_via_rpc(node_src, node_dest)
 
 
 class PreciousTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 3
         self.extra_args = [["-noparkdeepreorg"],
                            ["-noparkdeepreorg"], ["-noparkdeepreorg"]]
         self.supports_cli = False
 
     def setup_network(self):
         self.setup_nodes()
 
     def run_test(self):
         self.log.info(
             "Ensure submitblock can in principle reorg to a competing chain")
         # A non-wallet address to mine to
 
         def gen_address(
             i): return self.nodes[i].get_deterministic_priv_key().address
         self.nodes[0].generatetoaddress(1, gen_address(0))
         assert_equal(self.nodes[0].getblockcount(), 1)
         hashZ = self.nodes[1].generatetoaddress(2, gen_address(1))[-1]
         assert_equal(self.nodes[1].getblockcount(), 2)
         node_sync_via_rpc(self.nodes[0:3])
         assert_equal(self.nodes[0].getbestblockhash(), hashZ)
 
         self.log.info("Mine blocks A-B-C on Node 0")
         hashC = self.nodes[0].generatetoaddress(3, gen_address(0))[-1]
         assert_equal(self.nodes[0].getblockcount(), 5)
         self.log.info("Mine competing blocks E-F-G on Node 1")
         hashG = self.nodes[1].generatetoaddress(3, gen_address(1))[-1]
         assert_equal(self.nodes[1].getblockcount(), 5)
         assert hashC != hashG
         self.log.info("Connect nodes and check no reorg occurs")
         # Submit competing blocks via RPC so any reorg should occur before we
         # proceed (no way to wait on inaction for p2p sync)
         node_sync_via_rpc(self.nodes[0:2])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         assert_equal(self.nodes[0].getbestblockhash(), hashC)
         assert_equal(self.nodes[1].getbestblockhash(), hashG)
         self.log.info("Make Node0 prefer block G")
         self.nodes[0].preciousblock(hashG)
         assert_equal(self.nodes[0].getbestblockhash(), hashG)
         self.log.info("Make Node0 prefer block C again")
         self.nodes[0].preciousblock(hashC)
         assert_equal(self.nodes[0].getbestblockhash(), hashC)
         self.log.info("Make Node1 prefer block C")
         self.nodes[1].preciousblock(hashC)
         # wait because node 1 may not have downloaded hashC
         self.sync_blocks(self.nodes[0:2])
         assert_equal(self.nodes[1].getbestblockhash(), hashC)
         self.log.info("Make Node1 prefer block G again")
         self.nodes[1].preciousblock(hashG)
         assert_equal(self.nodes[1].getbestblockhash(), hashG)
         self.log.info("Make Node0 prefer block G again")
         self.nodes[0].preciousblock(hashG)
         assert_equal(self.nodes[0].getbestblockhash(), hashG)
         self.log.info("Make Node1 prefer block C again")
         self.nodes[1].preciousblock(hashC)
         assert_equal(self.nodes[1].getbestblockhash(), hashC)
         self.log.info(
             "Mine another block (E-F-G-)H on Node 0 and reorg Node 1")
         self.nodes[0].generatetoaddress(1, gen_address(0))
         assert_equal(self.nodes[0].getblockcount(), 6)
         self.sync_blocks(self.nodes[0:2])
         hashH = self.nodes[0].getbestblockhash()
         assert_equal(self.nodes[1].getbestblockhash(), hashH)
         self.log.info("Node1 should not be able to prefer block C anymore")
         self.nodes[1].preciousblock(hashC)
         assert_equal(self.nodes[1].getbestblockhash(), hashH)
         self.log.info("Mine competing blocks I-J-K-L on Node 2")
         self.nodes[2].generatetoaddress(4, gen_address(2))
         assert_equal(self.nodes[2].getblockcount(), 6)
         hashL = self.nodes[2].getbestblockhash()
         self.log.info("Connect nodes and check no reorg occurs")
         node_sync_via_rpc(self.nodes[1:3])
-        connect_nodes(self.nodes[1], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.connect_nodes(1, 2)
+        self.connect_nodes(0, 2)
         assert_equal(self.nodes[0].getbestblockhash(), hashH)
         assert_equal(self.nodes[1].getbestblockhash(), hashH)
         assert_equal(self.nodes[2].getbestblockhash(), hashL)
         self.log.info("Make Node1 prefer block L")
         self.nodes[1].preciousblock(hashL)
         assert_equal(self.nodes[1].getbestblockhash(), hashL)
         self.log.info("Make Node2 prefer block H")
         self.nodes[2].preciousblock(hashH)
         assert_equal(self.nodes[2].getbestblockhash(), hashH)
 
 
 if __name__ == '__main__':
     PreciousTest().main()
diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py
index 7cd2f6b53..db733dd06 100755
--- a/test/functional/rpc_rawtransaction.py
+++ b/test/functional/rpc_rawtransaction.py
@@ -1,695 +1,694 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test the rawtranscation RPCs.
+"""Test the rawtransaction RPCs.
 
 Test the following RPCs:
    - createrawtransaction
    - signrawtransactionwithwallet
    - sendrawtransaction
    - decoderawtransaction
    - getrawtransaction
 """
 
 from collections import OrderedDict
 from decimal import Decimal
 from io import BytesIO
 
 from test_framework.messages import (
     COutPoint,
     CTransaction,
     CTxIn,
     CTxOut,
     ToHex,
 )
 from test_framework.script import CScript
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.txtools import pad_raw_tx
 from test_framework.util import (
     assert_equal,
     assert_greater_than,
     assert_raises_rpc_error,
-    connect_nodes,
     hex_str_to_bytes,
 )
 
 
 class multidict(dict):
     """Dictionary that allows duplicate keys.
     Constructed with a list of (key, value) tuples. When dumped by the json module,
     will output invalid json with repeated keys, eg:
-    >>> json.dumps(multidict([(1,2),(1,2)])
+    >>> json.dumps(multidict([(1,2),(1,2)]))
     '{"1": 2, "1": 2}'
     Used to test calls to rpc methods with repeated keys in the json object."""
 
     def __init__(self, x):
         dict.__init__(self, x)
         self.x = x
 
     def items(self):
         return self.x
 
 
 # Create one-input, one-output, no-fee transaction:
 class RawTransactionsTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 3
         self.extra_args = [["-txindex"], ["-txindex"], ["-txindex"]]
         # whitelist all peers to speed up tx relay / mempool sync
         for args in self.extra_args:
             args.append("-whitelist=noban@127.0.0.1")
 
         self.supports_cli = False
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def setup_network(self):
         super().setup_network()
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.connect_nodes(0, 2)
 
     def run_test(self):
         self.log.info(
             'prepare some coins for multiple *rawtransaction commands')
         self.nodes[2].generate(1)
         self.sync_all()
         self.nodes[0].generate(101)
         self.sync_all()
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1500000)
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1000000)
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5000000)
         self.sync_all()
         self.nodes[0].generate(5)
         self.sync_all()
 
         self.log.info(
             'Test getrawtransaction on genesis block coinbase returns an error')
         block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
         assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction",
                                 self.nodes[0].getrawtransaction, block['merkleroot'])
 
         self.log.info(
             'Check parameter types and required parameters of createrawtransaction')
         # Test `createrawtransaction` required parameters
         assert_raises_rpc_error(-1, "createrawtransaction",
                                 self.nodes[0].createrawtransaction)
         assert_raises_rpc_error(-1, "createrawtransaction",
                                 self.nodes[0].createrawtransaction, [])
 
         # Test `createrawtransaction` invalid extra parameters
         assert_raises_rpc_error(-1, "createrawtransaction",
                                 self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
 
         # Test `createrawtransaction` invalid `inputs`
         txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000'
         assert_raises_rpc_error(-3, "Expected type array",
                                 self.nodes[0].createrawtransaction, 'foo', {})
         assert_raises_rpc_error(-1, "JSON value is not an object as expected",
                                 self.nodes[0].createrawtransaction, ['foo'], {})
         assert_raises_rpc_error(-1,
                                 "JSON value is not a string as expected",
                                 self.nodes[0].createrawtransaction,
                                 [{}],
                                 {})
         assert_raises_rpc_error(-8,
                                 "txid must be of length 64 (not 3, for 'foo')",
                                 self.nodes[0].createrawtransaction,
                                 [{'txid': 'foo'}],
                                 {})
         assert_raises_rpc_error(-8,
                                 "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')",
                                 self.nodes[0].createrawtransaction,
                                 [{'txid': 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}],
                                 {})
         assert_raises_rpc_error(-8, "Invalid parameter, missing vout key",
                                 self.nodes[0].createrawtransaction, [{'txid': txid}], {})
         assert_raises_rpc_error(-8, "Invalid parameter, vout must be a number",
                                 self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
         assert_raises_rpc_error(-8, "Invalid parameter, vout cannot be negative",
                                 self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
         assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range",
                                 self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})
 
         # Test `createrawtransaction` invalid `outputs`
         address = self.nodes[0].getnewaddress()
         address2 = self.nodes[0].getnewaddress()
         assert_raises_rpc_error(-1, "JSON value is not an array as expected",
                                 self.nodes[0].createrawtransaction, [], 'foo')
         # Should not throw for backwards compatibility
         self.nodes[0].createrawtransaction(inputs=[], outputs={})
         self.nodes[0].createrawtransaction(inputs=[], outputs=[])
         assert_raises_rpc_error(-8, "Data must be hexadecimal string",
                                 self.nodes[0].createrawtransaction, [], {'data': 'foo'})
         assert_raises_rpc_error(-5, "Invalid Bitcoin address",
                                 self.nodes[0].createrawtransaction, [], {'foo': 0})
         assert_raises_rpc_error(-3, "Invalid amount",
                                 self.nodes[0].createrawtransaction, [], {address: 'foo'})
         assert_raises_rpc_error(-3, "Amount out of range",
                                 self.nodes[0].createrawtransaction, [], {address: -1})
         assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: {}".format(
             address), self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
         assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: {}".format(
             address), self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
         assert_raises_rpc_error(-8,
                                 "Invalid parameter, duplicate key: data",
                                 self.nodes[0].createrawtransaction,
                                 [],
                                 [{"data": 'aa'},
                                     {"data": "bb"}])
         assert_raises_rpc_error(-8,
                                 "Invalid parameter, duplicate key: data",
                                 self.nodes[0].createrawtransaction,
                                 [],
                                 multidict([("data",
                                             'aa'),
                                            ("data",
                                             "bb")]))
         assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key",
                                 self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
         assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected",
                                 self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
 
         # Test `createrawtransaction` invalid `locktime`
         assert_raises_rpc_error(-3, "Expected type number",
                                 self.nodes[0].createrawtransaction, [], {}, 'foo')
         assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range",
                                 self.nodes[0].createrawtransaction, [], {}, -1)
         assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range",
                                 self.nodes[0].createrawtransaction, [], {}, 4294967296)
 
         self.log.info(
             'Check that createrawtransaction accepts an array and object as outputs')
         tx = CTransaction()
         # One output
         tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(
             inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
         assert_equal(len(tx.vout), 1)
         assert_equal(
             tx.serialize().hex(),
             self.nodes[2].createrawtransaction(
                 inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
         )
         # Two outputs
         tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[
                        {'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
         assert_equal(len(tx.vout), 2)
         assert_equal(
             tx.serialize().hex(),
             self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[
                                                {address: 99}, {address2: 99}]),
         )
         # Multiple mixed outputs
         tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[
                        {'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
         assert_equal(len(tx.vout), 3)
         assert_equal(
             tx.serialize().hex(),
             self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[
                                                {address: 99}, {address2: 99}, {'data': '99'}]),
         )
 
         for type in ["legacy"]:
             addr = self.nodes[0].getnewaddress("", type)
             addrinfo = self.nodes[0].getaddressinfo(addr)
             pubkey = addrinfo["scriptPubKey"]
 
             self.log.info(
                 'sendrawtransaction with missing prevtx info ({})'.format(type))
 
             # Test `signrawtransactionwithwallet` invalid `prevtxs`
             inputs = [{'txid': txid, 'vout': 3, 'sequence': 1000}]
             outputs = {self.nodes[0].getnewaddress(): 1}
             rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
 
             prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
             succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
             assert succ["complete"]
 
             assert_raises_rpc_error(-8, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                 {
                     "txid": txid,
                     "scriptPubKey": pubkey,
                     "vout": 3,
                 }
             ])
 
             assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                 {
                     "txid": txid,
                     "scriptPubKey": pubkey,
                     "amount": 1,
                 }
             ])
             assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                 {
                     "scriptPubKey": pubkey,
                     "vout": 3,
                     "amount": 1,
                 }
             ])
             assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                 {
                     "txid": txid,
                     "vout": 3,
                     "amount": 1
                 }
             ])
 
         #########################################
         # sendrawtransaction with missing input #
         #########################################
 
         self.log.info('sendrawtransaction with missing input')
-        # won't exists
+        # won't exist
         inputs = [
             {'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout': 1}]
         outputs = {self.nodes[0].getnewaddress(): 4998000}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         rawtx = pad_raw_tx(rawtx)
         rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx)
 
         # This will raise an exception since there are missing inputs
         assert_raises_rpc_error(-25,
                                 "bad-txns-inputs-missingorspent",
                                 self.nodes[2].sendrawtransaction,
                                 rawtx['hex'])
 
         #####################################
         # getrawtransaction with block hash #
         #####################################
 
         # make a tx by sending then generate 2 blocks; block1 has the tx in it
         tx = self.nodes[2].sendtoaddress(
             self.nodes[1].getnewaddress(), 1000000)
         block1, block2 = self.nodes[2].generate(2)
         self.sync_all()
         # We should be able to get the raw transaction by providing the correct
         # block
         gottx = self.nodes[0].getrawtransaction(tx, True, block1)
         assert_equal(gottx['txid'], tx)
         assert_equal(gottx['in_active_chain'], True)
         # We should not have the 'in_active_chain' flag when we don't provide a
         # block
         gottx = self.nodes[0].getrawtransaction(tx, True)
         assert_equal(gottx['txid'], tx)
         assert 'in_active_chain' not in gottx
         # We should not get the tx if we provide an unrelated block
         assert_raises_rpc_error(-5, "No such transaction found",
                                 self.nodes[0].getrawtransaction, tx, True, block2)
         # An invalid block hash should raise the correct errors
         assert_raises_rpc_error(-1,
                                 "JSON value is not a string as expected",
                                 self.nodes[0].getrawtransaction,
                                 tx,
                                 True,
                                 True)
         assert_raises_rpc_error(-8,
                                 "parameter 3 must be of length 64 (not 6, for 'foobar')",
                                 self.nodes[0].getrawtransaction,
                                 tx,
                                 True,
                                 "foobar")
         assert_raises_rpc_error(-8,
                                 "parameter 3 must be of length 64 (not 8, for 'abcd1234')",
                                 self.nodes[0].getrawtransaction,
                                 tx,
                                 True,
                                 "abcd1234")
         assert_raises_rpc_error(
             -8,
             "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')",
             self.nodes[0].getrawtransaction,
             tx,
             True,
             "ZZZ0000000000000000000000000000000000000000000000000000000000000")
         assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction,
                                 tx, True, "0000000000000000000000000000000000000000000000000000000000000000")
         # Undo the blocks and check in_active_chain
         self.nodes[0].invalidateblock(block1)
         gottx = self.nodes[0].getrawtransaction(
             txid=tx, verbose=True, blockhash=block1)
         assert_equal(gottx['in_active_chain'], False)
         self.nodes[0].reconsiderblock(block1)
         assert_equal(self.nodes[0].getbestblockhash(), block2)
 
         #
         # RAW TX MULTISIG TESTS #
         #
         # 2of2 test
         addr1 = self.nodes[2].getnewaddress()
         addr2 = self.nodes[2].getnewaddress()
 
         addr1Obj = self.nodes[2].getaddressinfo(addr1)
         addr2Obj = self.nodes[2].getaddressinfo(addr2)
 
         # Tests for createmultisig and addmultisigaddress
         assert_raises_rpc_error(-5, "Invalid public key",
                                 self.nodes[0].createmultisig, 1, ["01020304"])
         # createmultisig can only take public keys
         self.nodes[0].createmultisig(
             2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
         # addmultisigaddress can take both pubkeys and addresses so long as
         # they are in the wallet, which is tested here.
         assert_raises_rpc_error(-5, "Invalid public key",
                                 self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1])
 
         mSigObj = self.nodes[2].addmultisigaddress(
             2, [addr1Obj['pubkey'], addr1])['address']
 
         # use balance deltas instead of absolute values
         bal = self.nodes[2].getbalance()
 
         # send 1,200,000 XEC to msig adr
         txId = self.nodes[0].sendtoaddress(mSigObj, 1200000)
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
         # node2 has both keys of the 2of2 ms addr., tx should affect the
         # balance
         assert_equal(self.nodes[2].getbalance(), bal + Decimal('1200000.00'))
 
         # 2of3 test from different nodes
         bal = self.nodes[2].getbalance()
         addr1 = self.nodes[1].getnewaddress()
         addr2 = self.nodes[2].getnewaddress()
         addr3 = self.nodes[2].getnewaddress()
 
         addr1Obj = self.nodes[1].getaddressinfo(addr1)
         addr2Obj = self.nodes[2].getaddressinfo(addr2)
         addr3Obj = self.nodes[2].getaddressinfo(addr3)
 
         mSigObj = self.nodes[2].addmultisigaddress(
             2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
 
         txId = self.nodes[0].sendtoaddress(mSigObj, 2200000)
         decTx = self.nodes[0].gettransaction(txId)
         rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
 
         # THIS IS AN INCOMPLETE FEATURE
        # NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND
         # COUNT AT BALANCE CALCULATION
         # for now, assume the funds of a 2of3 multisig tx are not marked as
         # spendable
         assert_equal(self.nodes[2].getbalance(), bal)
 
         txDetails = self.nodes[0].gettransaction(txId, True)
         rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
         vout = next(o for o in rawTx['vout']
                     if o['value'] == Decimal('2200000.00'))
 
         bal = self.nodes[0].getbalance()
         inputs = [{
             "txid": txId,
             "vout": vout['n'],
             "scriptPubKey": vout['scriptPubKey']['hex'],
             "amount": vout['value'],
         }]
         outputs = {self.nodes[0].getnewaddress(): 2190000}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(
             rawTx, inputs)
         # node1 only has one key, can't comp. sign the tx
         assert_equal(rawTxPartialSigned['complete'], False)
 
         rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
         # node2 can sign the tx compl., own two of three keys
         assert_equal(rawTxSigned['complete'], True)
         self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
         rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
         assert_equal(self.nodes[0].getbalance(), bal + Decimal(
             '50000000.00') + Decimal('2190000.00'))  # block reward + tx
 
         rawTxBlock = self.nodes[0].getblock(self.nodes[0].getbestblockhash())
 
         # 2of2 test for combining transactions
         bal = self.nodes[2].getbalance()
         addr1 = self.nodes[1].getnewaddress()
         addr2 = self.nodes[2].getnewaddress()
 
         addr1Obj = self.nodes[1].getaddressinfo(addr1)
         addr2Obj = self.nodes[2].getaddressinfo(addr2)
 
         self.nodes[1].addmultisigaddress(
             2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
         mSigObj = self.nodes[2].addmultisigaddress(
             2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
         mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
 
         txId = self.nodes[0].sendtoaddress(mSigObj, 2200000)
         decTx = self.nodes[0].gettransaction(txId)
         rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
 
         # the funds of a 2of2 multisig tx should not be marked as spendable
         assert_equal(self.nodes[2].getbalance(), bal)
 
         txDetails = self.nodes[0].gettransaction(txId, True)
         rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
         vout = next(o for o in rawTx2['vout']
                     if o['value'] == Decimal('2200000.00'))
 
         bal = self.nodes[0].getbalance()
         inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']
                    ['hex'], "redeemScript": mSigObjValid['hex'], "amount": vout['value']}]
         outputs = {self.nodes[0].getnewaddress(): 2190000}
         rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
         rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(
             rawTx2, inputs)
         self.log.debug(rawTxPartialSigned1)
         # node1 only has one key, can't comp. sign the tx
         assert_equal(rawTxPartialSigned1['complete'], False)
 
         rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(
             rawTx2, inputs)
         self.log.debug(rawTxPartialSigned2)
         # node2 only has one key, can't comp. sign the tx
         assert_equal(rawTxPartialSigned2['complete'], False)
         rawTxComb = self.nodes[2].combinerawtransaction(
             [rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
         self.log.debug(rawTxComb)
         self.nodes[2].sendrawtransaction(rawTxComb)
         rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
         assert_equal(self.nodes[0].getbalance(
         ), bal + Decimal('50000000.00') + Decimal('2190000.00'))  # block reward + tx
 
         # getrawtransaction tests
         # 1. valid parameters - only supply txid
         txId = rawTx["txid"]
         assert_equal(
             self.nodes[0].getrawtransaction(txId), rawTxSigned['hex'])
 
         # 2. valid parameters - supply txid and 0 for non-verbose
         assert_equal(
             self.nodes[0].getrawtransaction(txId, 0), rawTxSigned['hex'])
 
         # 3. valid parameters - supply txid and False for non-verbose
         assert_equal(self.nodes[0].getrawtransaction(txId, False),
                      rawTxSigned['hex'])
 
         # 4. valid parameters - supply txid and 1 for verbose.
         # We only check the "hex" field of the output so we don't need to
         # update this test every time the output format changes.
         assert_equal(self.nodes[0].getrawtransaction(txId, 1)["hex"],
                      rawTxSigned['hex'])
 
        # 5. valid parameters - supply txid and True for verbose
         assert_equal(self.nodes[0].getrawtransaction(txId, True)["hex"],
                      rawTxSigned['hex'])
 
         # 6. invalid parameters - supply txid and string "Flase"
         assert_raises_rpc_error(-1, "not a boolean",
                                 self.nodes[0].getrawtransaction,
                                 txId, "Flase")
 
         # 7. invalid parameters - supply txid and empty array
         assert_raises_rpc_error(-1, "not a boolean",
                                 self.nodes[0].getrawtransaction, txId, [])
 
         # 8. invalid parameters - supply txid and empty dict
         assert_raises_rpc_error(
             -1, "not a boolean", self.nodes[0].getrawtransaction, txId, {})
 
         # Sanity checks on verbose getrawtransaction output
         rawTxOutput = self.nodes[0].getrawtransaction(txId, True)
         assert_equal(rawTxOutput["hex"], rawTxSigned["hex"])
         assert_equal(rawTxOutput["txid"], txId)
         assert_equal(rawTxOutput["hash"], txId)
         assert_greater_than(rawTxOutput["size"], 300)
         assert_equal(rawTxOutput["version"], 0x02)
         assert_equal(rawTxOutput["locktime"], 0)
         assert_equal(len(rawTxOutput["vin"]), 1)
         assert_equal(len(rawTxOutput["vout"]), 1)
         assert_equal(rawTxOutput["blockhash"], rawTxBlock["hash"])
         assert_equal(rawTxOutput["confirmations"], 3)
         assert_equal(rawTxOutput["time"], rawTxBlock["time"])
         assert_equal(rawTxOutput["blocktime"], rawTxBlock["time"])
 
         inputs = [
             {'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'sequence': 1000}]
         outputs = {self.nodes[0].getnewaddress(): 1}
         assert_raises_rpc_error(
             -8, 'Invalid parameter, missing vout key',
             self.nodes[0].createrawtransaction, inputs, outputs)
 
         inputs[0]['vout'] = "1"
         assert_raises_rpc_error(
             -8, 'Invalid parameter, vout must be a number',
             self.nodes[0].createrawtransaction, inputs, outputs)
 
         inputs[0]['vout'] = -1
         assert_raises_rpc_error(
             -8, 'Invalid parameter, vout cannot be negative',
             self.nodes[0].createrawtransaction, inputs, outputs)
 
         inputs[0]['vout'] = 1
         rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
         decrawtx = self.nodes[0].decoderawtransaction(rawtx)
         assert_equal(decrawtx['vin'][0]['sequence'], 1000)
 
         # 9. invalid parameters - sequence number out of range
         inputs[0]['sequence'] = -1
         assert_raises_rpc_error(
             -8, 'Invalid parameter, sequence number is out of range',
             self.nodes[0].createrawtransaction, inputs, outputs)
 
         # 10. invalid parameters - sequence number out of range
         inputs[0]['sequence'] = 4294967296
         assert_raises_rpc_error(
             -8, 'Invalid parameter, sequence number is out of range',
             self.nodes[0].createrawtransaction, inputs, outputs)
 
         inputs[0]['sequence'] = 4294967294
         rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
         decrawtx = self.nodes[0].decoderawtransaction(rawtx)
         assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
 
         ####################################
         # TRANSACTION VERSION NUMBER TESTS #
         ####################################
 
         # Test the minimum transaction version number that fits in a signed
         # 32-bit integer.
         # As transaction version is unsigned, this should convert to its
         # unsigned equivalent.
         tx = CTransaction()
         tx.nVersion = -0x80000000
         rawtx = ToHex(tx)
         decrawtx = self.nodes[0].decoderawtransaction(rawtx)
         assert_equal(decrawtx['version'], 0x80000000)
 
         # Test the maximum transaction version number that fits in a signed
         # 32-bit integer.
         tx = CTransaction()
         tx.nVersion = 0x7fffffff
         rawtx = ToHex(tx)
         decrawtx = self.nodes[0].decoderawtransaction(rawtx)
         assert_equal(decrawtx['version'], 0x7fffffff)
 
         self.log.info('sendrawtransaction/testmempoolaccept with maxfeerate')
 
         # Test a transaction with a small fee.
         txId = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), 1000000)
         rawTx = self.nodes[0].getrawtransaction(txId, True)
         vout = next(o for o in rawTx['vout']
                     if o['value'] == Decimal('1000000.00'))
 
         self.sync_all()
         inputs = [{"txid": txId, "vout": vout['n']}]
         # Fee 10,000 satoshis, (1,000,000 - (10000 sat * 0.01 XEC/sat)) =
         # 999900
         outputs = {self.nodes[0].getnewaddress(): Decimal("999900.00")}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)
         assert_equal(rawTxSigned['complete'], True)
         # Fee 10,000 satoshis, ~200 b transaction, fee rate should land around 50 sat/byte = 500 XEC/kB
         # Thus, testmempoolaccept should reject
         testres = self.nodes[2].testmempoolaccept(
             [rawTxSigned['hex']], 500.00)[0]
         assert_equal(testres['allowed'], False)
         assert_equal(testres['reject-reason'], 'max-fee-exceeded')
         # and sendrawtransaction should throw
         assert_raises_rpc_error(-25,
                                 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)',
                                 self.nodes[2].sendrawtransaction,
                                 rawTxSigned['hex'],
                                 10.00)
         # and the following calls should both succeed
         testres = self.nodes[2].testmempoolaccept(
             rawtxs=[rawTxSigned['hex']])[0]
         assert_equal(testres['allowed'], True)
         self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'])
 
         # Test a transaction with a large fee.
         txId = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), 1000000)
         rawTx = self.nodes[0].getrawtransaction(txId, True)
         vout = next(o for o in rawTx['vout']
                     if o['value'] == Decimal('1000000.00'))
 
         self.sync_all()
         inputs = [{"txid": txId, "vout": vout['n']}]
         # Fee 2,000,000 satoshis, (1,000,000 - (2,000,000 sat * 0.01 XEC/sat)) =
         # 980000
         outputs = {self.nodes[0].getnewaddress(): Decimal("980000.00")}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)
         assert_equal(rawTxSigned['complete'], True)
         # Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 200,000 XEC/kB
         # Thus, testmempoolaccept should reject
         testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0]
         assert_equal(testres['allowed'], False)
         assert_equal(testres['reject-reason'], 'max-fee-exceeded')
         # and sendrawtransaction should throw
         assert_raises_rpc_error(-25,
                                 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)',
                                 self.nodes[2].sendrawtransaction,
                                 rawTxSigned['hex'])
         # and the following calls should both succeed
         testres = self.nodes[2].testmempoolaccept(
             rawtxs=[rawTxSigned['hex']], maxfeerate='200000.00')[0]
         assert_equal(testres['allowed'], True)
         self.nodes[2].sendrawtransaction(
             hexstring=rawTxSigned['hex'],
             maxfeerate='200000.00')
 
         ##########################################
         # Decoding weird scripts in transactions #
         ##########################################
 
         self.log.info('Decode correctly-formatted but weird transactions')
         tx = CTransaction()
         # empty
         self.nodes[0].decoderawtransaction(ToHex(tx))
         # truncated push
         tx.vin.append(CTxIn(COutPoint(42, 0), b'\x4e\x00\x00'))
         tx.vin.append(CTxIn(COutPoint(42, 0), b'\x4c\x10TRUNC'))
         tx.vout.append(CTxOut(0, b'\x4e\x00\x00'))
         tx.vout.append(CTxOut(0, b'\x4c\x10TRUNC'))
         self.nodes[0].decoderawtransaction(ToHex(tx))
         # giant pushes and long scripts
         tx.vin.append(
             CTxIn(COutPoint(42, 0), CScript([b'giant push' * 10000])))
         tx.vout.append(CTxOut(0, CScript([b'giant push' * 10000])))
         self.nodes[0].decoderawtransaction(ToHex(tx))
 
         self.log.info('Refuse garbage after transaction')
         assert_raises_rpc_error(-22, 'TX decode failed',
                                 self.nodes[0].decoderawtransaction, ToHex(tx) + '00')
 
 
 if __name__ == '__main__':
     RawTransactionsTest().main()
diff --git a/test/functional/rpc_setban.py b/test/functional/rpc_setban.py
index e74217fea..fafa5c779 100755
--- a/test/functional/rpc_setban.py
+++ b/test/functional/rpc_setban.py
@@ -1,49 +1,49 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the setban rpc call."""
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import connect_nodes, p2p_port
+from test_framework.util import p2p_port
 
 
 class SetBanTests(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.setup_clean_chain = True
         self.extra_args = [[], []]
 
     def run_test(self):
         # Node 0 connects to Node 1, check that the noban permission is not
         # granted
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         peerinfo = self.nodes[1].getpeerinfo()[0]
         assert('noban' not in peerinfo['permissions'])
 
         # Node 0 get banned by Node 1
         self.nodes[1].setban("127.0.0.1", "add")
 
         # Node 0 should not be able to reconnect
         with self.nodes[1].assert_debug_log(expected_msgs=['dropped (banned)\n'], timeout=5):
             self.restart_node(1, [])
             self.nodes[0].addnode("127.0.0.1:" + str(p2p_port(1)), "onetry")
 
         # However, node 0 should be able to reconnect if it has noban
         # permission
         self.restart_node(1, ['-whitelist=127.0.0.1'])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         peerinfo = self.nodes[1].getpeerinfo()[0]
         assert('noban' in peerinfo['permissions'])
 
         # If we remove the ban, Node 0 should be able to reconnect even without
         # noban permission
         self.nodes[1].setban("127.0.0.1", "remove")
         self.restart_node(1, [])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         peerinfo = self.nodes[1].getpeerinfo()[0]
         assert('noban' not in peerinfo['permissions'])
 
 
 if __name__ == '__main__':
     SetBanTests().main()
diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py
index 7b6c674fc..695db9725 100755
--- a/test/functional/wallet_abandonconflict.py
+++ b/test/functional/wallet_abandonconflict.py
@@ -1,234 +1,232 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the abandontransaction RPC.
 
  The abandontransaction RPC marks a transaction and all its in-wallet
  descendants as abandoned which allows their inputs to be respent. It can be
  used to replace "stuck" or evicted transactions. It only works on transactions
  which are not included in a block and are not currently in the mempool. It has
  no effect on transactions which are already abandoned.
 """
 from decimal import Decimal
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_equal,
     assert_raises_rpc_error,
-    connect_nodes,
-    disconnect_nodes,
     satoshi_round,
 )
 
 
 class AbandonConflictTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.extra_args = [["-minrelaytxfee=10"], []]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         def total_fees(*txids):
             total = 0
             for txid in txids:
                 # '-=' is because gettransaction(txid)['fee'] returns a negative
                 total -= self.nodes[0].gettransaction(txid)['fee']
             return satoshi_round(total)
 
         self.nodes[1].generate(100)
         self.sync_blocks()
         balance = self.nodes[0].getbalance()
         txA = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), Decimal("10000000"))
         txB = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), Decimal("10000000"))
         txC = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), Decimal("10000000"))
 
         self.sync_mempools()
         self.nodes[1].generate(1)
 
         # Can not abandon non-wallet transaction
         assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id',
                                 lambda: self.nodes[0].abandontransaction(txid='ff' * 32))
         # Can not abandon confirmed transaction
         assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment',
                                 lambda: self.nodes[0].abandontransaction(txid=txA))
 
         self.sync_blocks()
         newbalance = self.nodes[0].getbalance()
 
         # no more than fees lost
         assert balance - newbalance <= total_fees(txA, txB, txC)
         balance = newbalance
 
         # Disconnect nodes so node0's transactions don't get into node1's
         # mempool
-        disconnect_nodes(self.nodes[0], self.nodes[1])
+        self.disconnect_nodes(0, 1)
 
         # Identify the 10btc outputs
         nA = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction(
             txA)["details"] if tx_out["amount"] == Decimal("10000000"))
         nB = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction(
             txB)["details"] if tx_out["amount"] == Decimal("10000000"))
         nC = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction(
             txC)["details"] if tx_out["amount"] == Decimal("10000000"))
 
         inputs = []
         # spend 10btc outputs from txA and txB
         inputs.append({"txid": txA, "vout": nA})
         inputs.append({"txid": txB, "vout": nB})
         outputs = {}
 
         outputs[self.nodes[0].getnewaddress()] = Decimal("14999980")
         outputs[self.nodes[1].getnewaddress()] = Decimal("5000000")
         signed = self.nodes[0].signrawtransactionwithwallet(
             self.nodes[0].createrawtransaction(inputs, outputs))
         txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])
 
         # Identify the 14,999,980 XEC output
         nAB = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction(
             txAB1)["details"] if tx_out["amount"] == Decimal("14999980"))
 
         # Create a child tx spending AB1 and C
         inputs = []
         # Amount 14,999,980 XEC
         inputs.append({"txid": txAB1, "vout": nAB})
         # Amount 10,000,000 XEC
         inputs.append({"txid": txC, "vout": nC})
         outputs = {}
         outputs[self.nodes[0].getnewaddress()] = Decimal("24999600")
         signed2 = self.nodes[0].signrawtransactionwithwallet(
             self.nodes[0].createrawtransaction(inputs, outputs))
         txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])
 
         # Create a child tx spending ABC2
         signed3_change = Decimal("24999000")
         inputs = [{"txid": txABC2, "vout": 0}]
         outputs = {self.nodes[0].getnewaddress(): signed3_change}
         signed3 = self.nodes[0].signrawtransactionwithwallet(
             self.nodes[0].createrawtransaction(inputs, outputs))
         # note tx is never directly referenced, only abandoned as a child of
         # the above
         self.nodes[0].sendrawtransaction(signed3["hex"])
 
         # In mempool txs from self should increase balance from change
         newbalance = self.nodes[0].getbalance()
         assert_equal(
             newbalance,
             balance -
             Decimal("30000000") +
             signed3_change)
         balance = newbalance
 
         # Restart the node with a higher min relay fee so the parent tx is no longer in mempool
         # TODO: redo with eviction
         self.restart_node(0, extra_args=["-minrelaytxfee=100"])
         assert self.nodes[0].getmempoolinfo()['loaded']
 
         # Verify txs no longer in either node's mempool
         assert_equal(len(self.nodes[0].getrawmempool()), 0)
         assert_equal(len(self.nodes[1].getrawmempool()), 0)
 
         # Transactions which are not in the mempool should only reduce wallet balance.
         # Transaction inputs should still be spent, but the change not yet
         # received.
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance - signed3_change)
         # Unconfirmed received funds that are not in mempool also shouldn't show
         # up in unconfirmed balance.  Note that the transactions stored in the wallet
         # are not necessarily in the node's mempool.
         balances = self.nodes[0].getbalances()['mine']
         assert_equal(
             balances['untrusted_pending'] +
             balances['trusted'],
             newbalance)
         # Unconfirmed transactions which are not in the mempool should also
         # not be in listunspent
         assert txABC2 not in [utxo["txid"]
                               for utxo in self.nodes[0].listunspent(0)]
         balance = newbalance
 
         # Abandon original transaction and verify inputs are available again
         # including that the child tx was also abandoned
         self.nodes[0].abandontransaction(txAB1)
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance + Decimal("30000000"))
         balance = newbalance
 
         # Verify that even with a low min relay fee, the tx is not re-accepted
         # from wallet on startup once abandoned.
         self.restart_node(0, extra_args=["-minrelaytxfee=10"])
         assert self.nodes[0].getmempoolinfo()['loaded']
 
         assert_equal(len(self.nodes[0].getrawmempool()), 0)
         assert_equal(self.nodes[0].getbalance(), balance)
 
         # If the transaction is re-sent the wallet also unabandons it. The
         # change should be available, and it's child transaction should remain
         # abandoned.
         # NOTE: Abandoned transactions are internal to the wallet, and tracked
         # separately from other indices.
         self.nodes[0].sendrawtransaction(signed["hex"])
         newbalance = self.nodes[0].getbalance()
         assert_equal(
             newbalance,
             balance -
             Decimal("20000000") +
             Decimal("14999980"))
         balance = newbalance
 
         # Send child tx again so it is no longer abandoned.
         self.nodes[0].sendrawtransaction(signed2["hex"])
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance - Decimal("10000000")
                      - Decimal("14999980") + Decimal("24999600"))
         balance = newbalance
 
         # Reset to a higher relay fee so that we abandon a transaction
         self.restart_node(0, extra_args=["-minrelaytxfee=100"])
         assert self.nodes[0].getmempoolinfo()['loaded']
         assert_equal(len(self.nodes[0].getrawmempool()), 0)
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance - Decimal("24999600"))
         balance = newbalance
 
         # Create a double spend of AB1. Spend it again from only A's 10 output.
         # Mine double spend from node 1.
         inputs = []
         inputs.append({"txid": txA, "vout": nA})
         outputs = {}
         outputs[self.nodes[1].getnewaddress()] = Decimal("9999900")
         tx = self.nodes[0].createrawtransaction(inputs, outputs)
         signed = self.nodes[0].signrawtransactionwithwallet(tx)
         self.nodes[1].sendrawtransaction(signed["hex"])
         self.nodes[1].generate(1)
 
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_blocks()
 
         # Verify that B and C's 10,000,000 XEC outputs are available for
         # spending again because AB1 is now conflicted
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance + Decimal("20000000"))
         balance = newbalance
 
         # There is currently a minor bug around this and so this test doesn't
         # work.  See Issue #7315
         # Invalidate the block with the double spend and B's 10,000,000 XEC
         # output should no longer be available. Don't think C's should either
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
         newbalance = self.nodes[0].getbalance()
         # assert_equal(newbalance, balance - Decimal("10000000"))
         self.log.info(
             "If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer")
         self.log.info(
             "conflicted has not resumed causing its inputs to be seen as spent.  See Issue #7315")
         self.log.info(str(balance) + " -> " + str(newbalance) + " ?")
 
 
 if __name__ == '__main__':
     AbandonConflictTest().main()
diff --git a/test/functional/wallet_address_types.py b/test/functional/wallet_address_types.py
index feb11af5a..fe8f89b46 100755
--- a/test/functional/wallet_address_types.py
+++ b/test/functional/wallet_address_types.py
@@ -1,300 +1,296 @@
 #!/usr/bin/env python3
 # Copyright (c) 2017 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test that the wallet can send and receive using all combinations of address types.
 
 There are 4 nodes-under-test:
     - node0 uses legacy addresses
     - node1 uses legacy addresses
     - node2 uses legacy addresses
     - node3 uses legacy addresses
 
 node4 exists to generate new blocks.
 
 ## Multisig address test
 
 Test that adding a multisig address with:
     - an uncompressed pubkey always gives a legacy address
     - only compressed pubkeys gives the an `-addresstype` address
 
 ## Sending to address types test
 
 A series of tests, iterating over node0-node3. In each iteration of the test, one node sends:
     - 10/101th of its balance to itself (using getrawchangeaddress for single key addresses)
     - 20/101th to the next node
     - 30/101th to the node after that
     - 40/101th to the remaining node
     - 1/101th remains as fee+change
 
 Iterate over each node for single key addresses, and then over each node for
 multisig addresses. Repeat test. As every node sends coins after receiving,
 this also verifies that spending coins sent to all these address types works.
 """
 
 import itertools
 from decimal import Decimal
 
 from test_framework.descriptors import descsum_check, descsum_create
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_greater_than,
-    connect_nodes,
-)
+from test_framework.util import assert_equal, assert_greater_than
 
 
 class AddressTypeTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 5
         # whitelist all peers to speed up tx relay / mempool sync
         self.extra_args = [["-whitelist=noban@127.0.0.1"]] * self.num_nodes
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def setup_network(self):
         self.setup_nodes()
 
         # Fully mesh-connect nodes for faster mempool sync
         for i, j in itertools.product(range(self.num_nodes), repeat=2):
             if i > j:
-                connect_nodes(self.nodes[i], self.nodes[j])
+                self.connect_nodes(i, j)
         self.sync_all()
 
     def get_balances(self, key='trusted'):
         """Return a list of balances."""
         return [self.nodes[i].getbalances()['mine'][key] for i in range(4)]
 
     def test_address(self, node, address, multisig, typ):
         """Run sanity checks on an address."""
         self.log.info(address)
         info = self.nodes[node].getaddressinfo(address)
         assert(self.nodes[node].validateaddress(address)['isvalid'])
         assert_equal(info.get('solvable'), True)
 
         if not multisig and typ == 'legacy':
             # P2PKH
             assert(not info['isscript'])
             assert('pubkey' in info)
         elif typ == 'legacy':
             # P2SH-multisig
             assert(info['isscript'])
             assert_equal(info['script'], 'multisig')
             assert('pubkeys' in info)
         else:
             # Unknown type
             assert(False)
 
     def test_desc(self, node, address, multisig, typ, utxo):
         """Run sanity checks on a descriptor reported by getaddressinfo."""
         info = self.nodes[node].getaddressinfo(address)
         assert('desc' in info)
 
         assert_equal(info['desc'], utxo['desc'])
         assert(self.nodes[node].validateaddress(address)['isvalid'])
 
         # Use a ridiculously roundabout way to find the key origin info through
         # the PSBT logic. However, this does test consistency between the PSBT reported
         # fingerprints/paths and the descriptor logic.
         psbt = self.nodes[node].createpsbt(
             [{'txid': utxo['txid'], 'vout':utxo['vout']}], [{address: 100.00}])
         psbt = self.nodes[node].walletprocesspsbt(
             psbt, False, "ALL|FORKID", True)
         decode = self.nodes[node].decodepsbt(psbt['psbt'])
         key_descs = {}
         for deriv in decode['inputs'][0]['bip32_derivs']:
             assert_equal(len(deriv['master_fingerprint']), 8)
             assert_equal(deriv['path'][0], 'm')
             key_descs[deriv['pubkey']] = '[' + deriv['master_fingerprint'] + \
                 deriv['path'][1:] + ']' + deriv['pubkey']
 
         # Verify the descriptor checksum against the Python implementation
         assert(descsum_check(info['desc']))
         # Verify that stripping the checksum and recreating it using Python
         # roundtrips
         assert(info['desc'] == descsum_create(info['desc'][:-9]))
         # Verify that stripping the checksum and feeding it to
         # getdescriptorinfo roundtrips
         assert(info['desc'] == self.nodes[0].getdescriptorinfo(
             info['desc'][:-9])['descriptor'])
         assert_equal(
             info['desc'][-8:], self.nodes[0].getdescriptorinfo(info['desc'][:-9])['checksum'])
         # Verify that keeping the checksum and feeding it to getdescriptorinfo
         # roundtrips
         assert info['desc'] == self.nodes[0].getdescriptorinfo(info['desc'])[
             'descriptor']
         assert_equal(info['desc'][-8:],
                      self.nodes[0].getdescriptorinfo(info['desc'])['checksum'])
 
         if not multisig and typ == 'legacy':
             # P2PKH
             assert_equal(info['desc'],
                          descsum_create("pkh({})".format(key_descs[info['pubkey']])))
         elif typ == 'legacy':
             # P2SH-multisig
             assert_equal(info['desc'], descsum_create("sh(multi(2,{},{}))".format(
                 key_descs[info['pubkeys'][0]], key_descs[info['pubkeys'][1]])))
         else:
             # Unknown type
             assert(False)
 
     def test_change_output_type(
             self, node_sender, destinations, expected_type):
         txid = self.nodes[node_sender].sendmany(
             dummy="", amounts=dict.fromkeys(
                 destinations, 1000.00))
         raw_tx = self.nodes[node_sender].getrawtransaction(txid)
         tx = self.nodes[node_sender].decoderawtransaction(raw_tx)
 
         # Make sure the transaction has change:
         assert_equal(len(tx["vout"]), len(destinations) + 1)
 
         # Make sure the destinations are included, and remove them:
         output_addresses = [vout['scriptPubKey']['addresses'][0]
                             for vout in tx["vout"]]
         change_addresses = [
             d for d in output_addresses if d not in destinations]
         assert_equal(len(change_addresses), 1)
 
         self.log.debug(
             "Check if change address " +
             change_addresses[0] +
             " is " +
             expected_type)
         self.test_address(
             node_sender,
             change_addresses[0],
             multisig=False,
             typ=expected_type)
 
     def run_test(self):
         # Mine 101 blocks on node4 to bring nodes out of IBD and make sure that
         # no coinbases are maturing for the nodes-under-test during the test
         self.nodes[4].generate(101)
         self.sync_blocks()
 
         uncompressed_1 = "0496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858ee"
         uncompressed_2 = "047211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073dee6c89064984f03385237d92167c13e236446b417ab79a0fcae412ae3316b77"
         compressed_1 = "0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52"
         compressed_2 = "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"
 
         # addmultisigaddress with at least 1 uncompressed key should return a
         # legacy address.
         for node in range(4):
             self.test_address(node, self.nodes[node].addmultisigaddress(
                 2, [uncompressed_1, uncompressed_2])['address'], True, 'legacy')
             self.test_address(node, self.nodes[node].addmultisigaddress(
                 2, [compressed_1, uncompressed_2])['address'], True, 'legacy')
             self.test_address(node, self.nodes[node].addmultisigaddress(
                 2, [uncompressed_1, compressed_2])['address'], True, 'legacy')
         # addmultisigaddress with all compressed keys should return the
         # appropriate address type (even when the keys are not ours).
         self.test_address(0, self.nodes[0].addmultisigaddress(
             2, [compressed_1, compressed_2])['address'], True, 'legacy')
 
         for multisig, from_node in itertools.product([False, True], range(4)):
             self.log.info(
                 "Sending from node {} with{} multisig".format(from_node, "" if multisig else "out"))
             old_balances = self.get_balances()
             self.log.debug("Old balances are {}".format(old_balances))
             to_send = (
                 old_balances[from_node] /
                 101).quantize(
                 Decimal("0.01"))
             sends = {}
             addresses = {}
 
             self.log.debug("Prepare sends")
             for n, to_node in enumerate(range(from_node, from_node + 4)):
                 to_node %= 4
                 if not multisig:
                     if from_node == to_node:
                         # When sending non-multisig to self, use
                         # getrawchangeaddress
                         address = self.nodes[to_node].getrawchangeaddress()
                     else:
                         address = self.nodes[to_node].getnewaddress()
                 else:
                     addr1 = self.nodes[to_node].getnewaddress()
                     addr2 = self.nodes[to_node].getnewaddress()
                     address = self.nodes[to_node].addmultisigaddress(2, [addr1, addr2])[
                         'address']
 
                 # Do some sanity checking on the created address
                 typ = 'legacy'
                 self.test_address(to_node, address, multisig, typ)
 
                 # Output entry
                 sends[address] = to_send * 10 * (1 + n)
                 addresses[to_node] = (address, typ)
 
             self.log.debug("Sending: {}".format(sends))
             self.nodes[from_node].sendmany("", sends)
             self.sync_mempools()
 
             unconf_balances = self.get_balances('untrusted_pending')
             self.log.debug(
                 "Check unconfirmed balances: {}".format(unconf_balances))
             assert_equal(unconf_balances[from_node], 0)
             for n, to_node in enumerate(range(from_node + 1, from_node + 4)):
                 to_node %= 4
                 assert_equal(unconf_balances[to_node], to_send * 10 * (2 + n))
 
             # node4 collects fee and block subsidy to keep accounting simple
             self.nodes[4].generate(1)
             self.sync_blocks()
 
             # Verify that the receiving wallet contains a UTXO with the
             # expected address, and expected descriptor
             for n, to_node in enumerate(range(from_node, from_node + 4)):
                 to_node %= 4
                 found = False
                 for utxo in self.nodes[to_node].listunspent():
                     if utxo['address'] == addresses[to_node][0]:
                         found = True
                         self.test_desc(
                             to_node,
                             addresses[to_node][0],
                             multisig,
                             addresses[to_node][1],
                             utxo)
                         break
                 assert found
 
             new_balances = self.get_balances()
             self.log.debug("Check new balances: {}".format(new_balances))
             # We don't know what fee was set, so we can only check bounds on
             # the balance of the sending node
             assert_greater_than(new_balances[from_node], to_send * 10)
             assert_greater_than(to_send * 11, new_balances[from_node])
             for n, to_node in enumerate(range(from_node + 1, from_node + 4)):
                 to_node %= 4
                 assert_equal(
                     new_balances[to_node], old_balances[to_node] + to_send * 10 * (2 + n))
 
         # Get addresses from node2 and  node3:
         to_address_2 = self.nodes[2].getnewaddress()
         to_address_3_1 = self.nodes[3].getnewaddress()
         to_address_3_2 = self.nodes[3].getnewaddress()
 
         self.log.info("Various change output tests")
         self.test_change_output_type(0, [to_address_3_1], 'legacy')
         self.test_change_output_type(1, [to_address_2], 'legacy')
         self.test_change_output_type(1, [to_address_3_1], 'legacy')
         self.test_change_output_type(
             1, [to_address_2, to_address_3_1], 'legacy')
         self.test_change_output_type(
             1, [to_address_3_1, to_address_3_2], 'legacy')
         self.test_change_output_type(2, [to_address_3_1], 'legacy')
 
         self.log.info('Test getrawchangeaddress')
         self.test_address(
             3,
             self.nodes[3].getrawchangeaddress(),
             multisig=False,
             typ='legacy')
 
 
 if __name__ == '__main__':
     AddressTypeTest().main()
diff --git a/test/functional/wallet_avoidreuse.py b/test/functional/wallet_avoidreuse.py
index 92b981952..00e0f8bc5 100755
--- a/test/functional/wallet_avoidreuse.py
+++ b/test/functional/wallet_avoidreuse.py
@@ -1,484 +1,483 @@
 #!/usr/bin/env python3
 # Copyright (c) 2018 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the avoid_reuse and setwalletflag features."""
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_approx,
     assert_equal,
     assert_raises_rpc_error,
-    connect_nodes,
 )
 
 
 def reset_balance(node, discardaddr):
     '''Throw away all owned coins by the node so it gets a balance of 0.'''
     balance = node.getbalance(avoid_reuse=False)
     if balance > 500000:
         node.sendtoaddress(
             address=discardaddr,
             amount=balance,
             subtractfeefromamount=True,
             avoid_reuse=False)
 
 
 def count_unspent(node):
     '''Count the unspent outputs for the given node and return various statistics'''
     r = {
         "total": {
             "count": 0,
             "sum": 0,
         },
         "reused": {
             "count": 0,
             "sum": 0,
         },
     }
     supports_reused = True
     for utxo in node.listunspent(minconf=0):
         r["total"]["count"] += 1
         r["total"]["sum"] += utxo["amount"]
         if supports_reused and "reused" in utxo:
             if utxo["reused"]:
                 r["reused"]["count"] += 1
                 r["reused"]["sum"] += utxo["amount"]
         else:
             supports_reused = False
     r["reused"]["supported"] = supports_reused
     return r
 
 
 def assert_unspent(node, total_count=None, total_sum=None,
                    reused_supported=None, reused_count=None, reused_sum=None):
     '''Make assertions about a node's unspent output statistics'''
     stats = count_unspent(node)
     if total_count is not None:
         assert_equal(stats["total"]["count"], total_count)
     if total_sum is not None:
         assert_approx(stats["total"]["sum"], total_sum, 1000)
     if reused_supported is not None:
         assert_equal(stats["reused"]["supported"], reused_supported)
     if reused_count is not None:
         assert_equal(stats["reused"]["count"], reused_count)
     if reused_sum is not None:
         assert_approx(stats["reused"]["sum"], reused_sum, 0.001)
 
 
 def assert_balances(node, mine):
     '''Make assertions about a node's getbalances output'''
     got = node.getbalances()["mine"]
     for k, v in mine.items():
         assert_approx(got[k], v, 1000)
 
 
 class AvoidReuseTest(BitcoinTestFramework):
 
     def set_test_params(self):
         self.setup_clean_chain = False
         self.num_nodes = 2
         # This test isn't testing txn relay/timing, so set whitelist on the
         # peers for instant txn relay. This speeds up the test run time 2-3x.
         self.extra_args = [["-whitelist=noban@127.0.0.1"]] * self.num_nodes
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         '''Set up initial chain and run tests defined below'''
 
         self.test_persistence()
         self.test_immutable()
 
         self.nodes[0].generate(110)
         self.sync_all()
         self.test_change_remains_change(self.nodes[1])
         reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
         self.test_sending_from_reused_address_without_avoid_reuse()
         reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
         self.test_sending_from_reused_address_fails()
         reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
         self.test_getbalances_used()
         reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
         self.test_full_destination_group_is_preferred()
         reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
         self.test_all_destination_groups_are_used()
 
     def test_persistence(self):
         '''Test that wallet files persist the avoid_reuse flag.'''
         self.log.info("Test wallet files persist avoid_reuse flag")
 
         # Configure node 1 to use avoid_reuse
         self.nodes[1].setwalletflag('avoid_reuse')
 
         # Flags should be node1.avoid_reuse=false, node2.avoid_reuse=true
         assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False)
         assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True)
 
         # Stop and restart node 1
         self.restart_node(1)
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
 
         # Flags should still be node1.avoid_reuse=false, node2.avoid_reuse=true
         assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False)
         assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True)
 
         # Attempting to set flag to its current state should throw
         assert_raises_rpc_error(-8,
                                 "Wallet flag is already set to false",
                                 self.nodes[0].setwalletflag,
                                 'avoid_reuse',
                                 False)
         assert_raises_rpc_error(-8,
                                 "Wallet flag is already set to true",
                                 self.nodes[1].setwalletflag,
                                 'avoid_reuse',
                                 True)
 
     def test_immutable(self):
         '''Test immutable wallet flags'''
         self.log.info("Test immutable wallet flags")
 
         # Attempt to set the disable_private_keys flag; this should not work
         assert_raises_rpc_error(-8,
                                 "Wallet flag is immutable",
                                 self.nodes[1].setwalletflag,
                                 'disable_private_keys')
 
         tempwallet = ".wallet_avoidreuse.py_test_immutable_wallet.dat"
 
         # Create a wallet with disable_private_keys set; this should work
         self.nodes[1].createwallet(wallet_name=tempwallet,
                                    disable_private_keys=True)
         w = self.nodes[1].get_wallet_rpc(tempwallet)
 
         # Attempt to unset the disable_private_keys flag; this should not work
         assert_raises_rpc_error(-8,
                                 "Wallet flag is immutable",
                                 w.setwalletflag,
                                 'disable_private_keys',
                                 False)
 
         # Unload temp wallet
         self.nodes[1].unloadwallet(tempwallet)
 
     def test_change_remains_change(self, node):
         self.log.info(
             "Test that change doesn't turn into non-change when spent")
 
         reset_balance(node, node.getnewaddress())
         addr = node.getnewaddress()
         txid = node.sendtoaddress(addr, 1000000)
         out = node.listunspent(
             minconf=0, query_options={
                 'minimumAmount': 2000000})
         assert_equal(len(out), 1)
         assert_equal(out[0]['txid'], txid)
         changeaddr = out[0]['address']
 
         # Make sure it's starting out as change as expected
         assert node.getaddressinfo(changeaddr)['ischange']
         for logical_tx in node.listtransactions():
             assert logical_tx.get('address') != changeaddr
 
         # Spend it
         reset_balance(node, node.getnewaddress())
 
         # It should still be change
         assert node.getaddressinfo(changeaddr)['ischange']
         for logical_tx in node.listtransactions():
             assert logical_tx.get('address') != changeaddr
 
     def test_sending_from_reused_address_without_avoid_reuse(self):
         '''
         Test the same as test_sending_from_reused_address_fails, except send
         the 10MM XEC with the avoid_reuse flag set to false. This means the
         10MM XEC send should succeed, where it fails in
         test_sending_from_reused_address_fails.
         '''
         self.log.info(
             "Test sending from reused address with avoid_reuse=false")
 
         fundaddr = self.nodes[1].getnewaddress()
         retaddr = self.nodes[0].getnewaddress()
 
         self.nodes[0].sendtoaddress(fundaddr, 10000000)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # listunspent should show 1 single, unused 10MM XEC output
         assert_unspent(
             self.nodes[1],
             total_count=1,
             total_sum=10000000,
             reused_supported=True,
             reused_count=0)
         # getbalances should show no used, 10MM XEC trusted
         assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10000000})
         # node 0 should not show a used entry, as it does not enable
         # avoid_reuse
         assert("used" not in self.nodes[0].getbalances()["mine"])
 
         self.nodes[1].sendtoaddress(retaddr, 5000000)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # listunspent should show 1 single, unused 5MM XEC output
         assert_unspent(
             self.nodes[1],
             total_count=1,
             total_sum=5000000,
             reused_supported=True,
             reused_count=0)
         # getbalances should show no used, 5MM XEC trusted
         assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5000000})
 
         self.nodes[0].sendtoaddress(fundaddr, 10000000)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # listunspent should show 2 total outputs (5MM, 10MM XEC), one unused
         # (5MM), one reused (10MM)
         assert_unspent(
             self.nodes[1],
             total_count=2,
             total_sum=15000000,
             reused_count=1,
             reused_sum=10000000)
         # getbalances should show 10MM used, 5MM XEC trusted
         assert_balances(
             self.nodes[1],
             mine={
                 "used": 10000000,
                 "trusted": 5000000})
 
         self.nodes[1].sendtoaddress(
             address=retaddr, amount=10000000, avoid_reuse=False)
 
         # listunspent should show 1 total outputs (5MM XEC), unused
         assert_unspent(
             self.nodes[1],
             total_count=1,
             total_sum=5000000,
             reused_count=0)
         # getbalances should show no used, 5MM XEC trusted
         assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5000000})
 
         # node 1 should now have about 5MM XEC left (for both cases)
         assert_approx(self.nodes[1].getbalance(), 5000000, 1000)
         assert_approx(
             self.nodes[1].getbalance(
                 avoid_reuse=False),
             5000000,
             1000)
 
     def test_sending_from_reused_address_fails(self):
         '''
         Test the simple case where [1] generates a new address A, then
         [0] sends 10MM XEC to A.
         [1] spends 5MM XEC from A. (leaving roughly 5MM XEC useable)
         [0] sends 10MM XEC to A again.
         [1] tries to spend 10MM XEC (fails; dirty).
         [1] tries to spend 4MM XEC (succeeds; change address sufficient)
         '''
         self.log.info("Test sending from reused address fails")
 
         fundaddr = self.nodes[1].getnewaddress(label="", address_type="legacy")
         retaddr = self.nodes[0].getnewaddress()
 
         self.nodes[0].sendtoaddress(fundaddr, 10000000)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # listunspent should show 1 single, unused 10MM XEC output
         assert_unspent(
             self.nodes[1],
             total_count=1,
             total_sum=10000000,
             reused_supported=True,
             reused_count=0)
         # getbalances should show no used, 10MM XEC trusted
         assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10000000})
 
         self.nodes[1].sendtoaddress(retaddr, 5000000)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # listunspent should show 1 single, unused 5MM XEC output
         assert_unspent(
             self.nodes[1],
             total_count=1,
             total_sum=5000000,
             reused_supported=True,
             reused_count=0)
         # getbalances should show no used, 5MM XEC trusted
         assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5000000})
 
         if not self.options.descriptors:
             # For the second send, we transmute it to a related single-key address
             # to make sure it's also detected as re-use
             # NB: this is not very useful for ABC, but we keep the new variable
             # name for consistency.
             new_fundaddr = fundaddr
 
             self.nodes[0].sendtoaddress(new_fundaddr, 10000000)
             self.nodes[0].generate(1)
             self.sync_all()
 
             # listunspent should show 2 total outputs (5MM, 10MM XEC), one unused
             # (5MM), one reused (10MM)
             assert_unspent(
                 self.nodes[1],
                 total_count=2,
                 total_sum=15000000,
                 reused_count=1,
                 reused_sum=10000000)
             # getbalances should show 10MM used, 5MM XEC trusted
             assert_balances(
                 self.nodes[1],
                 mine={
                     "used": 10000000,
                     "trusted": 5000000})
 
             # node 1 should now have a balance of 5MM (no dirty) or 15MM
             # (including dirty)
             assert_approx(self.nodes[1].getbalance(), 5000000, 1000)
             assert_approx(
                 self.nodes[1].getbalance(
                     avoid_reuse=False),
                 15000000,
                 1000)
 
             assert_raises_rpc_error(-6, "Insufficient funds",
                                     self.nodes[1].sendtoaddress, retaddr, 10000000)
 
             self.nodes[1].sendtoaddress(retaddr, 4000000)
 
             # listunspent should show 2 total outputs (1MM, 10MM XEC), one unused
             # (1MM), one reused (10MM)
             assert_unspent(
                 self.nodes[1],
                 total_count=2,
                 total_sum=11000000,
                 reused_count=1,
                 reused_sum=10000000)
             # getbalances should show 10MM used, 1MM XEC trusted
             assert_balances(
                 self.nodes[1],
                 mine={
                     "used": 10000000,
                     "trusted": 1000000})
 
             # node 1 should now have about 1MM XEC left (no dirty) and 11MM
             # (including dirty)
             assert_approx(self.nodes[1].getbalance(), 1000000, 1000)
             assert_approx(
                 self.nodes[1].getbalance(
                     avoid_reuse=False),
                 11000000,
                 1000)
 
     def test_getbalances_used(self):
         '''
         getbalances and listunspent should pick up on reused addresses
         immediately, even for address reusing outputs created before the first
         transaction was spending from that address
         '''
         self.log.info("Test getbalances used category")
 
         # node under test should be completely empty
         assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0)
 
         new_addr = self.nodes[1].getnewaddress()
         ret_addr = self.nodes[0].getnewaddress()
 
         # send multiple transactions, reusing one address
         for _ in range(11):
             self.nodes[0].sendtoaddress(new_addr, 1000000)
 
         self.nodes[0].generate(1)
         self.sync_all()
 
         # send transaction that should not use all the available outputs
         # per the current coin selection algorithm
         self.nodes[1].sendtoaddress(ret_addr, 5000000)
 
         # getbalances and listunspent should show the remaining outputs
         # in the reused address as used/reused
         assert_unspent(
             self.nodes[1],
             total_count=2,
             total_sum=6000000,
             reused_count=1,
             reused_sum=1000000)
         assert_balances(
             self.nodes[1],
             mine={
                 "used": 1000000,
                 "trusted": 5000000})
 
     def test_full_destination_group_is_preferred(self):
         '''
         Test the case where [1] only has 11 outputs of 1MM XEC in the same
         reused address and tries to send a small payment of 500,000 XEC.
         The wallet should use 10 outputs from the reused address as inputs and
         not a single 1MM XEC input, in order to join several outputs from
         the reused address.
         '''
         self.log.info(
             "Test that full destination groups are preferred in coin selection")
 
         # Node under test should be empty
         assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0)
 
         new_addr = self.nodes[1].getnewaddress()
         ret_addr = self.nodes[0].getnewaddress()
 
         # Send 11 outputs of 1MM XEC to the same, reused address in the wallet
         for _ in range(11):
             self.nodes[0].sendtoaddress(new_addr, 1000000)
 
         self.nodes[0].generate(1)
         self.sync_all()
 
         # Sending a transaction that is smaller than each one of the
         # available outputs
         txid = self.nodes[1].sendtoaddress(address=ret_addr, amount=500000)
         inputs = self.nodes[1].getrawtransaction(txid, 1)["vin"]
 
         # The transaction should use 10 inputs exactly
         assert_equal(len(inputs), 10)
 
     def test_all_destination_groups_are_used(self):
         '''
         Test the case where [1] only has 22 outputs of 1MM XEC in the same
         reused address and tries to send a payment of 20,5MM XEC.
         The wallet should use all 22 outputs from the reused address as inputs.
         '''
         self.log.info("Test that all destination groups are used")
 
         # Node under test should be empty
         assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0)
 
         new_addr = self.nodes[1].getnewaddress()
         ret_addr = self.nodes[0].getnewaddress()
 
         # Send 22 outputs of 1MM XEC to the same, reused address in the wallet
         for _ in range(22):
             self.nodes[0].sendtoaddress(new_addr, 1000000)
 
         self.nodes[0].generate(1)
         self.sync_all()
 
         # Sending a transaction that needs to use the full groups
         # of 10 inputs but also the incomplete group of 2 inputs.
         txid = self.nodes[1].sendtoaddress(address=ret_addr, amount=20500000)
         inputs = self.nodes[1].getrawtransaction(txid, 1)["vin"]
 
         # The transaction should use 22 inputs exactly
         assert_equal(len(inputs), 22)
 
 
 if __name__ == '__main__':
     AvoidReuseTest().main()
diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py
index 1b492936d..14ff2aa32 100755
--- a/test/functional/wallet_backup.py
+++ b/test/functional/wallet_backup.py
@@ -1,259 +1,255 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the wallet backup features.
 
 Test case is:
 4 nodes. 1 2 and 3 send transactions between each other,
 fourth node is a miner.
 1 2 3 each mine a block to start, then
 Miner creates 100 blocks so 1 2 3 each have 50 mature
 coins to spend.
 Then 5 iterations of 1/2/3 sending coins amongst
 themselves to get transactions in the wallets,
 and the miner mining one block.
 
 Wallets are backed up using dumpwallet/backupwallet.
 Then 5 more iterations of transactions and mining a block.
 
 Miner then generates 101 more blocks, so any
 transaction fees paid mature.
 
 Sanity check:
   Sum(1,2,3,4 balances) == 114*50
 
 1/2/3 are shutdown, and their wallets erased.
 Then restore using wallet.dat backup. And
 confirm 1/2/3/4 balances are same as before.
 
 Shutdown again, restore using importwallet,
 and confirm again balances are correct.
 """
 import os
 import shutil
 from decimal import Decimal
 from random import randint
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_raises_rpc_error,
-    connect_nodes,
-)
+from test_framework.util import assert_equal, assert_raises_rpc_error
 
 
 class WalletBackupTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 4
         self.setup_clean_chain = True
        # nodes 1, 2, 3 are spenders, let's give them a keypool=100
         # whitelist all peers to speed up tx relay / mempool sync
         self.extra_args = [
             ["-whitelist=noban@127.0.0.1", "-keypool=100"],
             ["-whitelist=noban@127.0.0.1", "-keypool=100"],
             ["-whitelist=noban@127.0.0.1", "-keypool=100"],
             ["-whitelist=noban@127.0.0.1"],
         ]
         self.rpc_timeout = 120
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def setup_network(self):
         self.setup_nodes()
-        connect_nodes(self.nodes[0], self.nodes[3])
-        connect_nodes(self.nodes[1], self.nodes[3])
-        connect_nodes(self.nodes[2], self.nodes[3])
-        connect_nodes(self.nodes[2], self.nodes[0])
+        self.connect_nodes(0, 3)
+        self.connect_nodes(1, 3)
+        self.connect_nodes(2, 3)
+        self.connect_nodes(2, 0)
         self.sync_all()
 
     def one_send(self, from_node, to_address):
         if (randint(1, 2) == 1):
             amount = Decimal(randint(1, 10)) * Decimal(100000)
             self.nodes[from_node].sendtoaddress(to_address, amount)
 
     def do_one_round(self):
         a0 = self.nodes[0].getnewaddress()
         a1 = self.nodes[1].getnewaddress()
         a2 = self.nodes[2].getnewaddress()
 
         self.one_send(0, a1)
         self.one_send(0, a2)
         self.one_send(1, a0)
         self.one_send(1, a2)
         self.one_send(2, a0)
         self.one_send(2, a1)
 
         # Have the miner (node3) mine a block.
         # Must sync mempools before mining.
         self.sync_mempools()
         self.nodes[3].generate(1)
         self.sync_blocks()
 
     # As above, this mirrors the original bash test.
     def start_three(self):
         self.start_node(0)
         self.start_node(1)
         self.start_node(2)
-        connect_nodes(self.nodes[0], self.nodes[3])
-        connect_nodes(self.nodes[1], self.nodes[3])
-        connect_nodes(self.nodes[2], self.nodes[3])
-        connect_nodes(self.nodes[2], self.nodes[0])
+        self.connect_nodes(0, 3)
+        self.connect_nodes(1, 3)
+        self.connect_nodes(2, 3)
+        self.connect_nodes(2, 0)
 
     def stop_three(self):
         self.stop_node(0)
         self.stop_node(1)
         self.stop_node(2)
 
     def erase_three(self):
         os.remove(
             os.path.join(self.nodes[0].datadir, self.chain, 'wallets',
                          self.default_wallet_name, self.wallet_data_filename))
         os.remove(
             os.path.join(self.nodes[1].datadir, self.chain, 'wallets',
                          self.default_wallet_name, self.wallet_data_filename))
         os.remove(
             os.path.join(self.nodes[2].datadir, self.chain, 'wallets',
                          self.default_wallet_name, self.wallet_data_filename))
 
     def run_test(self):
         self.log.info("Generating initial blockchain")
         self.nodes[0].generate(1)
         self.sync_blocks()
         self.nodes[1].generate(1)
         self.sync_blocks()
         self.nodes[2].generate(1)
         self.sync_blocks()
         self.nodes[3].generate(100)
         self.sync_blocks()
 
         assert_equal(self.nodes[0].getbalance(), 50000000)
         assert_equal(self.nodes[1].getbalance(), 50000000)
         assert_equal(self.nodes[2].getbalance(), 50000000)
         assert_equal(self.nodes[3].getbalance(), 0)
 
         self.log.info("Creating transactions")
         # Five rounds of sending each other transactions.
         for _ in range(5):
             self.do_one_round()
 
         self.log.info("Backing up")
         self.nodes[0].backupwallet(os.path.join(
             self.nodes[0].datadir, 'wallet.bak'))
         self.nodes[0].dumpwallet(os.path.join(
             self.nodes[0].datadir, 'wallet.dump'))
         self.nodes[1].backupwallet(os.path.join(
             self.nodes[1].datadir, 'wallet.bak'))
         self.nodes[1].dumpwallet(os.path.join(
             self.nodes[1].datadir, 'wallet.dump'))
         self.nodes[2].backupwallet(os.path.join(
             self.nodes[2].datadir, 'wallet.bak'))
         self.nodes[2].dumpwallet(os.path.join(
             self.nodes[2].datadir, 'wallet.dump'))
 
         self.log.info("More transactions")
         for _ in range(5):
             self.do_one_round()
 
         # Generate 101 more blocks, so any fees paid mature
         self.nodes[3].generate(101)
         self.sync_all()
 
         balance0 = self.nodes[0].getbalance()
         balance1 = self.nodes[1].getbalance()
         balance2 = self.nodes[2].getbalance()
         balance3 = self.nodes[3].getbalance()
         total = balance0 + balance1 + balance2 + balance3
 
         # At this point, there are 214 blocks (103 for setup, then 10 rounds, then 101.)
         # 114 are mature, so the sum of all wallets should be 114 * 50 = 5700.
         assert_equal(total, 5700000000)
 
         ##
         # Test restoring spender wallets from backups
         ##
         self.log.info("Restoring using wallet.dat")
         self.stop_three()
         self.erase_three()
 
         # Start node2 with no chain
         shutil.rmtree(
             os.path.join(
                 self.nodes[2].datadir,
                 self.chain,
                 'blocks'))
         shutil.rmtree(os.path.join(
             self.nodes[2].datadir, self.chain, 'chainstate'))
 
         # Restore wallets from backup
         shutil.copyfile(
             os.path.join(self.nodes[0].datadir, 'wallet.bak'),
             os.path.join(self.nodes[0].datadir, self.chain, 'wallets',
                          self.default_wallet_name, self.wallet_data_filename))
         shutil.copyfile(
             os.path.join(self.nodes[1].datadir, 'wallet.bak'),
             os.path.join(self.nodes[1].datadir, self.chain, 'wallets',
                          self.default_wallet_name, self.wallet_data_filename))
         shutil.copyfile(
             os.path.join(self.nodes[2].datadir, 'wallet.bak'),
             os.path.join(self.nodes[2].datadir, self.chain, 'wallets',
                          self.default_wallet_name, self.wallet_data_filename))
 
         self.log.info("Re-starting nodes")
         self.start_three()
         self.sync_blocks()
 
         assert_equal(self.nodes[0].getbalance(), balance0)
         assert_equal(self.nodes[1].getbalance(), balance1)
         assert_equal(self.nodes[2].getbalance(), balance2)
 
         self.log.info("Restoring using dumped wallet")
         self.stop_three()
         self.erase_three()
 
         # start node2 with no chain
         shutil.rmtree(
             os.path.join(
                 self.nodes[2].datadir,
                 self.chain,
                 'blocks'))
         shutil.rmtree(os.path.join(
             self.nodes[2].datadir, self.chain, 'chainstate'))
 
         self.start_three()
 
         assert_equal(self.nodes[0].getbalance(), 0)
         assert_equal(self.nodes[1].getbalance(), 0)
         assert_equal(self.nodes[2].getbalance(), 0)
 
         self.nodes[0].importwallet(os.path.join(
             self.nodes[0].datadir, 'wallet.dump'))
         self.nodes[1].importwallet(os.path.join(
             self.nodes[1].datadir, 'wallet.dump'))
         self.nodes[2].importwallet(os.path.join(
             self.nodes[2].datadir, 'wallet.dump'))
 
         self.sync_blocks()
 
         assert_equal(self.nodes[0].getbalance(), balance0)
         assert_equal(self.nodes[1].getbalance(), balance1)
         assert_equal(self.nodes[2].getbalance(), balance2)
 
         # Backup to source wallet file must fail
         sourcePaths = [
             os.path.join(self.nodes[0].datadir, self.chain, 'wallets',
                          self.default_wallet_name, self.wallet_data_filename),
             os.path.join(self.nodes[0].datadir, self.chain, '.', 'wallets',
                          self.default_wallet_name, self.wallet_data_filename),
             os.path.join(self.nodes[0].datadir, self.chain, 'wallets',
                          self.default_wallet_name),
             os.path.join(self.nodes[0].datadir, self.chain, 'wallets')]
 
         for sourcePath in sourcePaths:
             assert_raises_rpc_error(-4, "backup failed",
                                     self.nodes[0].backupwallet, sourcePath)
 
 
 if __name__ == '__main__':
     WalletBackupTest().main()
diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py
index 64282fa57..75d53c5b4 100755
--- a/test/functional/wallet_balance.py
+++ b/test/functional/wallet_balance.py
@@ -1,327 +1,323 @@
 #!/usr/bin/env python3
 # Copyright (c) 2018-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the wallet balance RPC methods."""
 import struct
 from decimal import Decimal
 
 from test_framework.address import (
     ADDRESS_ECREG_UNSPENDABLE as ADDRESS_WATCHONLY,
 )
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_raises_rpc_error,
-    connect_nodes,
-)
+from test_framework.util import assert_equal, assert_raises_rpc_error
 
 
 def create_transactions(node, address, amt, fees):
     # Create and sign raw transactions from node to address for amt.
     # Creates a transaction for each fee and returns an array
     # of the raw transactions.
     utxos = [u for u in node.listunspent(0) if u['spendable']]
 
     # Create transactions
     inputs = []
     ins_total = 0
     for utxo in utxos:
         inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
         ins_total += utxo['amount']
         if ins_total >= amt + max(fees):
             break
     # make sure there was enough utxos
     assert ins_total >= amt + max(fees)
 
     txs = []
     for fee in fees:
         outputs = {address: amt}
         # prevent 0 change output
         if ins_total > amt + fee:
             outputs[node.getrawchangeaddress()] = ins_total - amt - fee
         raw_tx = node.createrawtransaction(inputs, outputs, 0)
         raw_tx = node.signrawtransactionwithwallet(raw_tx)
         assert_equal(raw_tx['complete'], True)
         txs.append(raw_tx)
 
     return txs
 
 
 class WalletTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2
         self.setup_clean_chain = True
         self.extra_args = [
             # Limit mempool descendants as a hack to have wallet txs rejected
             # from the mempool
             ['-limitdescendantcount=3'],
             [],
         ]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         self.nodes[0].importaddress(ADDRESS_WATCHONLY)
         # Check that nodes don't own any UTXOs
         assert_equal(len(self.nodes[0].listunspent()), 0)
         assert_equal(len(self.nodes[1].listunspent()), 0)
 
         self.log.info("Check that only node 0 is watching an address")
         assert 'watchonly' in self.nodes[0].getbalances()
         assert 'watchonly' not in self.nodes[1].getbalances()
 
         self.log.info("Mining blocks ...")
         self.nodes[0].generate(1)
         self.sync_all()
         self.nodes[1].generate(1)
         self.nodes[1].generatetoaddress(101, ADDRESS_WATCHONLY)
         self.sync_all()
 
         assert_equal(self.nodes[0].getbalances()['mine']['trusted'], 50000000)
         assert_equal(self.nodes[0].getwalletinfo()['balance'], 50000000)
         assert_equal(self.nodes[1].getbalances()['mine']['trusted'], 50000000)
 
         assert_equal(self.nodes[0].getbalances()[
                      'watchonly']['immature'], 5000000000)
         assert 'watchonly' not in self.nodes[1].getbalances()
 
         assert_equal(self.nodes[0].getbalance(), 50000000)
         assert_equal(self.nodes[1].getbalance(), 50000000)
 
         self.log.info("Test getbalance with different arguments")
         assert_equal(self.nodes[0].getbalance("*"), 50000000)
         assert_equal(self.nodes[0].getbalance("*", 1), 50000000)
         assert_equal(self.nodes[0].getbalance("*", 1, True), 100000000)
         assert_equal(self.nodes[0].getbalance(minconf=1), 50000000)
         assert_equal(
             self.nodes[0].getbalance(
                 minconf=0,
                 include_watchonly=True),
             100000000)
         assert_equal(
             self.nodes[1].getbalance(
                 minconf=0,
                 include_watchonly=True),
             50000000)
 
         # Send 40 BTC from 0 to 1 and 60 BTC from 1 to 0.
         txs = create_transactions(
             self.nodes[0], self.nodes[1].getnewaddress(), 40000000, [Decimal('10000')])
         self.nodes[0].sendrawtransaction(txs[0]['hex'])
         # sending on both nodes is faster than waiting for propagation
         self.nodes[1].sendrawtransaction(txs[0]['hex'])
 
         self.sync_all()
         txs = create_transactions(self.nodes[1], self.nodes[0].getnewaddress(), 60000000, [
                                   Decimal('10000'), Decimal('20000')])
         self.nodes[1].sendrawtransaction(txs[0]['hex'])
         # sending on both nodes is faster than waiting for propagation
         self.nodes[0].sendrawtransaction(txs[0]['hex'])
         self.sync_all()
 
         # First argument of getbalance must be set to "*"
         assert_raises_rpc_error(-32, "dummy first argument must be excluded or set to \"*\"",
                                 self.nodes[1].getbalance, "")
 
         self.log.info("Test balances with unconfirmed inputs")
 
         # Before `test_balance()`, we have had two nodes with a balance of 50
         # each and then we:
         #
         # 1) Sent 40 from node A to node B with fee 0.01
         # 2) Sent 60 from node B to node A with fee 0.01
         #
         # Then we check the balances:
         #
         # 1) As is
         # 2) With transaction 2 from above with 2x the fee
         #
         # Prior to #16766, in this situation, the node would immediately report
         # a balance of 30 on node B as unconfirmed and trusted.
         #
         # After #16766, we show that balance as unconfirmed.
         #
         # The balance is indeed "trusted" and "confirmed" insofar as removing
         # the mempool transactions would return at least that much money. But
         # the algorithm after #16766 marks it as unconfirmed because the 'taint'
         # tracking of transaction trust for summing balances doesn't consider
         # which inputs belong to a user. In this case, the change output in
        # question could be "destroyed" by replacing the 1st transaction above.
         #
         # The post #16766 behavior is correct; we shouldn't be treating those
         # funds as confirmed. If you want to rely on that specific UTXO existing
         # which has given you that balance, you cannot, as a third party
        # spending the other input would destroy that unconfirmed balance.
         #
         # For example, if the test transactions were:
         #
         # 1) Sent 40 from node A to node B with fee 0.01
         # 2) Sent 10 from node B to node A with fee 0.01
         #
         # Then our node would report a confirmed balance of 40 + 50 - 10 = 80
         # BTC, which is more than would be available if transaction 1 were
         # replaced.
 
         def test_balances(*, fee_node_1=0):
             # getbalances
             expected_balances_0 = {'mine': {'immature': Decimal('0E-2'),
                                             # change from node 0's send
                                             'trusted': Decimal('9990000'),
                                             'untrusted_pending': Decimal('60000000.0')},
                                    'watchonly': {'immature': Decimal('5000000000'),
                                                  'trusted': Decimal('50000000.0'),
                                                  'untrusted_pending': Decimal('0E-2')}}
             expected_balances_1 = {'mine': {'immature': Decimal('0E-2'),
                                             # node 1's send had an unsafe input
                                             'trusted': Decimal('0E-2'),
                                             # Doesn't include output of node
                                             # 0's send since it was spent
                                             'untrusted_pending': Decimal('30000000.0') - fee_node_1}}
             assert_equal(self.nodes[0].getbalances(), expected_balances_0)
             assert_equal(self.nodes[1].getbalances(), expected_balances_1)
             # getbalance without any arguments includes unconfirmed transactions, but not untrusted transactions
             # change from node 0's send
             assert_equal(self.nodes[0].getbalance(), Decimal('9990000'))
             # node 1's send had an unsafe input
             assert_equal(self.nodes[1].getbalance(), Decimal('0'))
             # Same with minconf=0
             assert_equal(
                 self.nodes[0].getbalance(
                     minconf=0),
                 Decimal('9990000'))
             assert_equal(self.nodes[1].getbalance(minconf=0), Decimal('0'))
             # getbalance with a minconf incorrectly excludes coins that have been spent more recently than the minconf blocks ago
             # TODO: fix getbalance tracking of coin spentness depth
             assert_equal(self.nodes[0].getbalance(minconf=1), Decimal('0'))
             assert_equal(self.nodes[1].getbalance(minconf=1), Decimal('0'))
             # getunconfirmedbalance
             # output of node 1's spend
             assert_equal(
                 self.nodes[0].getunconfirmedbalance(),
                 Decimal('60000000'))
 
             # Doesn't include output of node 0's send since it was spent
             assert_equal(
                 self.nodes[1].getunconfirmedbalance(),
                 Decimal('30000000') - fee_node_1)
             # getwalletinfo.unconfirmed_balance
             assert_equal(self.nodes[0].getwalletinfo()[
                          "unconfirmed_balance"], Decimal('60000000'))
             assert_equal(
                 self.nodes[1].getwalletinfo()["unconfirmed_balance"],
                 Decimal('30000000') - fee_node_1)
 
         test_balances(fee_node_1=Decimal('10000'))
 
         # In the original Core version of this test, Node 1 would've bumped
         # the fee by 0.01 here to resend, but this is XEC, so it has 10000 XEC
         # left to spend on goods and services
         self.sync_all()
 
         self.log.info(
             "Test getbalance and getbalances.mine.untrusted_pending with conflicted unconfirmed inputs")
         test_balances(fee_node_1=Decimal('10000'))
 
         self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY)
         self.sync_all()
 
         # balances are correct after the transactions are confirmed
         # node 1's send plus change from node 0's send
         balance_node0 = Decimal('69990000')
         # change from node 0's send
         balance_node1 = Decimal('29990000')
         assert_equal(self.nodes[0].getbalances()[
                      'mine']['trusted'], balance_node0)
         assert_equal(self.nodes[1].getbalances()[
                      'mine']['trusted'], balance_node1)
         assert_equal(self.nodes[0].getbalance(), balance_node0)
         assert_equal(self.nodes[1].getbalance(), balance_node1)
 
         # Send total balance away from node 1
         txs = create_transactions(self.nodes[1], self.nodes[0].getnewaddress(
         ), Decimal('29970000'), [Decimal('10000')])
         self.nodes[1].sendrawtransaction(txs[0]['hex'])
         self.nodes[1].generatetoaddress(2, ADDRESS_WATCHONLY)
         self.sync_all()
 
         # getbalance with a minconf incorrectly excludes coins that have been spent more recently than the minconf blocks ago
         # TODO: fix getbalance tracking of coin spentness depth
         # getbalance with minconf=3 should still show the old balance
         assert_equal(self.nodes[1].getbalance(minconf=3), Decimal('0'))
 
         # getbalance with minconf=2 will show the new balance.
         assert_equal(self.nodes[1].getbalance(minconf=2), Decimal('10000'))
 
         # check mempool transactions count for wallet unconfirmed balance after
         # dynamically loading the wallet.
         before = self.nodes[1].getbalances()['mine']['untrusted_pending']
         dst = self.nodes[1].getnewaddress()
         self.nodes[1].unloadwallet(self.default_wallet_name)
         self.nodes[0].sendtoaddress(dst, 100000)
         self.sync_all()
         self.nodes[1].loadwallet(self.default_wallet_name)
         after = self.nodes[1].getbalances()['mine']['untrusted_pending']
         assert_equal(before + Decimal('100000'), after)
 
         # Create 3 more wallet txs, where the last is not accepted to the
         # mempool because it is the third descendant of the tx above
         for _ in range(3):
             # Set amount high enough such that all coins are spent by each tx
             txid = self.nodes[0].sendtoaddress(
                 self.nodes[0].getnewaddress(), 99000000)
 
         self.log.info('Check that wallet txs not in the mempool are untrusted')
         assert txid not in self.nodes[0].getrawmempool()
         assert_equal(self.nodes[0].gettransaction(txid)['trusted'], False)
         assert_equal(self.nodes[0].getbalance(minconf=0), 0)
 
         self.log.info("Test replacement and reorg of non-mempool tx")
         tx_orig = self.nodes[0].gettransaction(txid)['hex']
         # Increase fee by 1 coin
         tx_replace = tx_orig.replace(
             struct.pack("<q", 99 * 10**8).hex(),
             struct.pack("<q", 98 * 10**8).hex(),
         )
         tx_replace = self.nodes[0].signrawtransactionwithwallet(tx_replace)[
             'hex']
         # Total balance is given by the sum of outputs of the tx
         total_amount = sum(
             [o['value'] for o in self.nodes[0].decoderawtransaction(tx_replace)['vout']])
         self.sync_all()
         self.nodes[1].sendrawtransaction(hexstring=tx_replace, maxfeerate=0)
 
         # Now confirm tx_replace
         block_reorg = self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY)[0]
         self.sync_all()
         assert_equal(self.nodes[0].getbalance(minconf=0), total_amount)
 
         self.log.info('Put txs back into mempool of node 1 (not node 0)')
         self.nodes[0].invalidateblock(block_reorg)
         self.nodes[1].invalidateblock(block_reorg)
         self.sync_blocks()
         self.nodes[0].syncwithvalidationinterfacequeue()
         # wallet txs not in the mempool are untrusted
         assert_equal(self.nodes[0].getbalance(minconf=0), 0)
         self.nodes[0].generatetoaddress(1, ADDRESS_WATCHONLY)
         # wallet txs not in the mempool are untrusted
         assert_equal(self.nodes[0].getbalance(minconf=0), 0)
 
         # Now confirm tx_orig
         self.restart_node(1, ['-persistmempool=0'])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_blocks()
         self.nodes[1].sendrawtransaction(tx_orig)
         self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY)
         self.sync_all()
         # The reorg recovered our fee of 1 coin
         assert_equal(
             self.nodes[0].getbalance(
                 minconf=0),
             total_amount +
             1000000)
 
 
 if __name__ == '__main__':
     WalletTest().main()
diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py
index 9662986db..44252cdd9 100755
--- a/test/functional/wallet_basic.py
+++ b/test/functional/wallet_basic.py
@@ -1,671 +1,670 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the wallet."""
 from decimal import Decimal
 
 from test_framework.messages import CTransaction, FromHex
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_array_result,
     assert_equal,
     assert_fee_amount,
     assert_raises_rpc_error,
-    connect_nodes,
     count_bytes,
 )
 from test_framework.wallet_util import test_address
 
 
 class WalletTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 4
         self.setup_clean_chain = True
         self.extra_args = [
             ["-acceptnonstdtxn=1"],
         ] * self.num_nodes
         self.supports_cli = False
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def setup_network(self):
         self.setup_nodes()
         # Only need nodes 0-2 running at start of test
         self.stop_node(3)
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 2)
+        self.connect_nodes(0, 2)
         self.sync_all(self.nodes[0:3])
 
     def check_fee_amount(self, curr_balance,
                          balance_with_fee, fee_per_byte, tx_size):
         """Return curr_balance after asserting the fee was in range"""
         fee = balance_with_fee - curr_balance
         assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
         return curr_balance
 
     def run_test(self):
         # Check that there's no UTXO on none of the nodes
         assert_equal(len(self.nodes[0].listunspent()), 0)
         assert_equal(len(self.nodes[1].listunspent()), 0)
         assert_equal(len(self.nodes[2].listunspent()), 0)
 
         self.log.info("Mining blocks...")
 
         self.nodes[0].generate(1)
 
         walletinfo = self.nodes[0].getwalletinfo()
         assert_equal(walletinfo['immature_balance'], 50000000)
         assert_equal(walletinfo['balance'], 0)
 
         self.sync_all(self.nodes[0:3])
         self.nodes[1].generate(101)
         self.sync_all(self.nodes[0:3])
 
         assert_equal(self.nodes[0].getbalance(), 50000000)
         assert_equal(self.nodes[1].getbalance(), 50000000)
         assert_equal(self.nodes[2].getbalance(), 0)
 
         # Check that only first and second nodes have UTXOs
         utxos = self.nodes[0].listunspent()
         assert_equal(len(utxos), 1)
         assert_equal(len(self.nodes[1].listunspent()), 1)
         assert_equal(len(self.nodes[2].listunspent()), 0)
 
         self.log.info("test gettxout")
         confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
         # First, outputs that are unspent both in the chain and in the
         # mempool should appear with or without include_mempool
         txout = self.nodes[0].gettxout(
             txid=confirmed_txid, n=confirmed_index, include_mempool=False)
         assert_equal(txout['value'], 50000000)
         txout = self.nodes[0].gettxout(
             txid=confirmed_txid, n=confirmed_index, include_mempool=True)
         assert_equal(txout['value'], 50000000)
 
         # Send 21,000,000 XEC from 0 to 2 using sendtoaddress call.
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11000000)
         mempool_txid = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), 10000000)
 
         self.log.info("test gettxout (second part)")
         # utxo spent in mempool should be visible if you exclude mempool
         # but invisible if you include mempool
         txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False)
         assert_equal(txout['value'], 50000000)
         txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True)
         assert txout is None
         # new utxo from mempool should be invisible if you exclude mempool
         # but visible if you include mempool
         txout = self.nodes[0].gettxout(mempool_txid, 0, False)
         assert txout is None
         txout1 = self.nodes[0].gettxout(mempool_txid, 0, True)
         txout2 = self.nodes[0].gettxout(mempool_txid, 1, True)
         # note the mempool tx will have randomly assigned indices
         # but 10 will go to node2 and the rest will go to node0
         balance = self.nodes[0].getbalance()
         assert_equal(set([txout1['value'], txout2['value']]),
                      set([10000000, balance]))
         walletinfo = self.nodes[0].getwalletinfo()
         assert_equal(walletinfo['immature_balance'], 0)
 
         # Have node0 mine a block, thus it will collect its own fee.
         self.nodes[0].generate(1)
         self.sync_all(self.nodes[0:3])
 
         # Exercise locking of unspent outputs
         unspent_0 = self.nodes[2].listunspent()[0]
         unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
         assert_raises_rpc_error(-8, "Invalid parameter, expected locked output",
                                 self.nodes[2].lockunspent, True, [unspent_0])
         self.nodes[2].lockunspent(False, [unspent_0])
         assert_raises_rpc_error(-8, "Invalid parameter, output already locked",
                                 self.nodes[2].lockunspent, False, [unspent_0])
         assert_raises_rpc_error(-6, "Insufficient funds",
                                 self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20000000)
         assert_equal([unspent_0], self.nodes[2].listlockunspent())
         self.nodes[2].lockunspent(True, [unspent_0])
         assert_equal(len(self.nodes[2].listlockunspent()), 0)
         assert_raises_rpc_error(-8, "txid must be of length 64 (not 34, for '0000000000000000000000000000000000')",
                                 self.nodes[2].lockunspent, False,
                                 [{"txid": "0000000000000000000000000000000000", "vout": 0}])
         assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')",
                                 self.nodes[2].lockunspent, False,
                                 [{"txid": "ZZZ0000000000000000000000000000000000000000000000000000000000000", "vout": 0}])
         assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction",
                                 self.nodes[2].lockunspent, False,
                                 [{"txid": "0000000000000000000000000000000000000000000000000000000000000000", "vout": 0}])
         assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds",
                                 self.nodes[2].lockunspent, False, [{"txid": unspent_0["txid"], "vout": 999}])
 
         # The lock on a manually selected output is ignored
         unspent_0 = self.nodes[1].listunspent()[0]
         self.nodes[1].lockunspent(False, [unspent_0])
         tx = self.nodes[1].createrawtransaction(
             [unspent_0], {self.nodes[1].getnewaddress(): 1000000})
         tx = self.nodes[1].fundrawtransaction(tx)['hex']
         self.nodes[1].fundrawtransaction(tx, {"lockUnspents": True})
 
         # fundrawtransaction can lock an input
         self.nodes[1].lockunspent(True, [unspent_0])
         assert_equal(len(self.nodes[1].listlockunspent()), 0)
         tx = self.nodes[1].fundrawtransaction(
             tx, {"lockUnspents": True})['hex']
         assert_equal(len(self.nodes[1].listlockunspent()), 1)
 
         # Send transaction
         tx = self.nodes[1].signrawtransactionwithwallet(tx)["hex"]
         self.nodes[1].sendrawtransaction(tx)
         assert_equal(len(self.nodes[1].listlockunspent()), 0)
 
         # Have node1 generate 100 blocks (so node0 can recover the fee)
         self.nodes[1].generate(100)
         self.sync_all(self.nodes[0:3])
 
         # node0 should end up with 100 btc in block rewards plus fees, but
         # minus the 21 plus fees sent to node2
         assert_equal(self.nodes[0].getbalance(), 100000000 - 21000000)
         assert_equal(self.nodes[2].getbalance(), 21000000)
 
         # Node0 should have two unspent outputs.
         # Create a couple of transactions to send them to node2, submit them through
         # node1, and make sure both node0 and node2 pick them up properly:
         node0utxos = self.nodes[0].listunspent(1)
         assert_equal(len(node0utxos), 2)
 
         # create both transactions
         txns_to_send = []
         for utxo in node0utxos:
             inputs = []
             outputs = {}
             inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
             outputs[self.nodes[2].getnewaddress()] = utxo["amount"] - 3000000
             raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
             txns_to_send.append(
                 self.nodes[0].signrawtransactionwithwallet(raw_tx))
 
         # Have node 1 (miner) send the transactions
         self.nodes[1].sendrawtransaction(
             hexstring=txns_to_send[0]["hex"], maxfeerate=0)
         self.nodes[1].sendrawtransaction(
             hexstring=txns_to_send[1]["hex"], maxfeerate=0)
 
         # Have node1 mine a block to confirm transactions:
         self.nodes[1].generate(1)
         self.sync_all(self.nodes[0:3])
 
         assert_equal(self.nodes[0].getbalance(), 0)
         assert_equal(self.nodes[2].getbalance(), 94000000)
 
         # Verify that a spent output cannot be locked anymore
         spent_0 = {"txid": node0utxos[0]["txid"],
                    "vout": node0utxos[0]["vout"]}
         assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output",
                                 self.nodes[0].lockunspent, False, [spent_0])
 
         # Send 10,000,000 XEC normal
         old_balance = self.nodes[2].getbalance()
         address = self.nodes[0].getnewaddress("test")
         fee_per_byte = Decimal('1000') / 1000
         self.nodes[2].settxfee(fee_per_byte * 1000)
         txid = self.nodes[2].sendtoaddress(address, 10000000, "", "", False)
         self.nodes[2].generate(1)
         self.sync_all(self.nodes[0:3])
         ctx = FromHex(CTransaction(),
                       self.nodes[2].gettransaction(txid)['hex'])
 
         node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), old_balance - Decimal('10000000'),
                                            fee_per_byte, ctx.billable_size())
         assert_equal(self.nodes[0].getbalance(), Decimal('10000000'))
 
         # Send 10,000,000 XEC with subtract fee from amount
         txid = self.nodes[2].sendtoaddress(address, 10000000, "", "", True)
         self.nodes[2].generate(1)
         self.sync_all(self.nodes[0:3])
         node_2_bal -= Decimal('10000000')
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
         node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal(
             '20000000'), fee_per_byte, count_bytes(self.nodes[2].gettransaction(txid)['hex']))
 
         # Sendmany 10,000,000 XEC
         txid = self.nodes[2].sendmany('', {address: 10000000}, 0, "", [])
         self.nodes[2].generate(1)
         self.sync_all(self.nodes[0:3])
         node_0_bal += Decimal('10000000')
         ctx = FromHex(CTransaction(),
                       self.nodes[2].gettransaction(txid)['hex'])
         node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(
         ), node_2_bal - Decimal('10000000'), fee_per_byte, ctx.billable_size())
         assert_equal(self.nodes[0].getbalance(), node_0_bal)
 
         # Sendmany 10,000,000 XEC with subtract fee from amount
         txid = self.nodes[2].sendmany(
             '', {address: 10000000}, 0, "", [address])
         self.nodes[2].generate(1)
         self.sync_all(self.nodes[0:3])
         node_2_bal -= Decimal('10000000')
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
         ctx = FromHex(CTransaction(),
                       self.nodes[2].gettransaction(txid)['hex'])
         node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(
         ), node_0_bal + Decimal('10000000'), fee_per_byte, ctx.billable_size())
 
         self.start_node(3, self.extra_args[3])
-        connect_nodes(self.nodes[0], self.nodes[3])
+        self.connect_nodes(0, 3)
         self.sync_all()
 
         # check if we can list zero value tx as available coins
         # 1. create raw_tx
         # 2. hex-changed one output to 0.0
         # 3. sign and send
         # 4. check if recipient (node0) can list the zero value tx
         usp = self.nodes[1].listunspent(
             query_options={'minimumAmount': '49998000'})[0]
         inputs = [{"txid": usp['txid'], "vout": usp['vout']}]
         outputs = {self.nodes[1].getnewaddress(): 49998000,
                    self.nodes[0].getnewaddress(): 11110000}
 
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace(
             "c0833842", "00000000")  # replace 11.11 with 0.0 (int32)
         signed_raw_tx = self.nodes[1].signrawtransactionwithwallet(rawTx)
         decoded_raw_tx = self.nodes[1].decoderawtransaction(
             signed_raw_tx['hex'])
         zero_value_txid = decoded_raw_tx['txid']
         self.nodes[1].sendrawtransaction(signed_raw_tx['hex'])
 
         self.sync_all()
         self.nodes[1].generate(1)  # mine a block
         self.sync_all()
 
         # zero value tx must be in listunspents output
         unspent_txs = self.nodes[0].listunspent()
         found = False
         for uTx in unspent_txs:
             if uTx['txid'] == zero_value_txid:
                 found = True
                 assert_equal(uTx['amount'], Decimal('0'))
         assert found
 
         # do some -walletbroadcast tests
         self.stop_nodes()
         self.start_node(0, self.extra_args[0] + ["-walletbroadcast=0"])
         self.start_node(1, self.extra_args[1] + ["-walletbroadcast=0"])
         self.start_node(2, self.extra_args[2] + ["-walletbroadcast=0"])
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 2)
+        self.connect_nodes(0, 2)
         self.sync_all(self.nodes[0:3])
 
         txid_not_broadcast = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), 2000000)
         tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
         self.nodes[1].generate(1)  # mine a block, tx should not be in there
         self.sync_all(self.nodes[0:3])
         # should not be changed because tx was not broadcasted
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
 
         # now broadcast from another node, mine a block, sync, and check the
         # balance
         self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex'])
         self.nodes[1].generate(1)
         self.sync_all(self.nodes[0:3])
         node_2_bal += 2000000
         tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
 
         # create another tx
         txid_not_broadcast = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), 2000000)
 
         # restart the nodes with -walletbroadcast=1
         self.stop_nodes()
         self.start_node(0, self.extra_args[0])
         self.start_node(1, self.extra_args[1])
         self.start_node(2, self.extra_args[2])
-        connect_nodes(self.nodes[0], self.nodes[1])
-        connect_nodes(self.nodes[1], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.connect_nodes(0, 1)
+        self.connect_nodes(1, 2)
+        self.connect_nodes(0, 2)
         self.sync_blocks(self.nodes[0:3])
 
         self.nodes[0].generate(1)
         self.sync_blocks(self.nodes[0:3])
         node_2_bal += 2000000
 
         # tx should be added to balance because after restarting the nodes tx
         # should be broadcasted
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
 
         # send a tx with value in a string (PR#6380 +)
         txid = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), "2000000")
         tx_obj = self.nodes[0].gettransaction(txid)
         assert_equal(tx_obj['amount'], Decimal('-2000000'))
 
         txid = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), "10000")
         tx_obj = self.nodes[0].gettransaction(txid)
         assert_equal(tx_obj['amount'], Decimal('-10000'))
 
         # check if JSON parser can handle scientific notation in strings
         txid = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), "1e3")
         tx_obj = self.nodes[0].gettransaction(txid)
         assert_equal(tx_obj['amount'], Decimal('-1000'))
 
         # General checks for errors from incorrect inputs
         # This will raise an exception because the amount is negative
         assert_raises_rpc_error(-3,
                                 "Amount out of range",
                                 self.nodes[0].sendtoaddress,
                                 self.nodes[2].getnewaddress(),
                                 "-1")
 
         # This will raise an exception because the amount type is wrong
         assert_raises_rpc_error(-3, "Invalid amount",
                                 self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")
 
         # This will raise an exception since generate does not accept a string
         assert_raises_rpc_error(-1, "not an integer",
                                 self.nodes[0].generate, "2")
 
         # This will raise an exception for the invalid private key format
         assert_raises_rpc_error(-5,
                                 "Invalid private key encoding",
                                 self.nodes[0].importprivkey,
                                 "invalid")
 
         # This will raise an exception for importing an address with the PS2H
         # flag
         temp_address = self.nodes[1].getnewaddress()
         assert_raises_rpc_error(-5,
                                 "Cannot use the p2sh flag with an address - use a script instead",
                                 self.nodes[0].importaddress,
                                 temp_address,
                                 "label",
                                 False,
                                 True)
 
         # This will raise an exception for attempting to dump the private key
         # of an address you do not own
         assert_raises_rpc_error(-4,
                                 "Private key for address",
                                 self.nodes[0].dumpprivkey,
                                 temp_address)
 
         # This will raise an exception for attempting to get the private key of
         # an invalid Bitcoin address
         assert_raises_rpc_error(-5,
                                 "Invalid Bitcoin address",
                                 self.nodes[0].dumpprivkey,
                                 "invalid")
 
         # This will raise an exception for attempting to set a label for an
         # invalid Bitcoin address
         assert_raises_rpc_error(-5,
                                 "Invalid Bitcoin address",
                                 self.nodes[0].setlabel,
                                 "invalid address",
                                 "label")
 
         # This will raise an exception for importing an invalid address
         assert_raises_rpc_error(-5,
                                 "Invalid Bitcoin address or script",
                                 self.nodes[0].importaddress,
                                 "invalid")
 
         # This will raise an exception for attempting to import a pubkey that
         # isn't in hex
         assert_raises_rpc_error(-5,
                                 "Pubkey must be a hex string",
                                 self.nodes[0].importpubkey,
                                 "not hex")
 
         # This will raise an exception for importing an invalid pubkey
         assert_raises_rpc_error(-5,
                                 "Pubkey is not a valid public key",
                                 self.nodes[0].importpubkey,
                                 "5361746f736869204e616b616d6f746f")
 
         # Import address and private key to check correct behavior of spendable unspents
         # 1. Send some coins to generate new UTXO
         address_to_import = self.nodes[2].getnewaddress()
         txid = self.nodes[0].sendtoaddress(address_to_import, 1000000)
         self.nodes[0].generate(1)
         self.sync_all(self.nodes[0:3])
 
         # 2. Import address from node2 to node1
         self.nodes[1].importaddress(address_to_import)
 
         # 3. Validate that the imported address is watch-only on node1
         assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"]
 
         # 4. Check that the unspents after import are not spendable
         assert_array_result(self.nodes[1].listunspent(),
                             {"address": address_to_import},
                             {"spendable": False})
 
         # 5. Import private key of the previously imported address on node1
         priv_key = self.nodes[2].dumpprivkey(address_to_import)
         self.nodes[1].importprivkey(priv_key)
 
         # 6. Check that the unspents are now spendable on node1
         assert_array_result(self.nodes[1].listunspent(),
                             {"address": address_to_import},
                             {"spendable": True})
 
         # Mine a block from node0 to an address from node1
         coinbase_addr = self.nodes[1].getnewaddress()
         block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0]
         coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0]
         self.sync_all(self.nodes[0:3])
 
         # Check that the txid and balance is found by node1
         self.nodes[1].gettransaction(coinbase_txid)
 
         # check if wallet or blockchain maintenance changes the balance
         self.sync_all(self.nodes[0:3])
         blocks = self.nodes[0].generate(2)
         self.sync_all(self.nodes[0:3])
         balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
         block_count = self.nodes[0].getblockcount()
 
         # Check modes:
         #   - True: unicode escaped as \u....
         #   - False: unicode directly as UTF-8
         for mode in [True, False]:
             self.nodes[0].rpc.ensure_ascii = mode
             # unicode check: Basic Multilingual Plane, Supplementary Plane
             # respectively
             for label in [u'рыба', u'𝅘𝅥𝅯']:
                 addr = self.nodes[0].getnewaddress()
                 self.nodes[0].setlabel(addr, label)
                 test_address(self.nodes[0], addr, labels=[label])
                 assert label in self.nodes[0].listlabels()
         # restore to default
         self.nodes[0].rpc.ensure_ascii = True
 
         # maintenance tests
         maintenance = [
             '-rescan',
             '-reindex',
         ]
         chainlimit = 6
         for m in maintenance:
             self.log.info("check " + m)
             self.stop_nodes()
             # set lower ancestor limit for later
             self.start_node(
                 0, self.extra_args[0] + [m, "-limitancestorcount=" + str(chainlimit)])
             self.start_node(
                 1, self.extra_args[1] + [m, "-limitancestorcount=" + str(chainlimit)])
             self.start_node(
                 2, self.extra_args[2] + [m, "-limitancestorcount=" + str(chainlimit)])
             if m == '-reindex':
                 # reindex will leave rpc warm up "early"; Wait for it to finish
                 self.wait_until(
                     lambda: [block_count] * 3 ==
                             [self.nodes[i].getblockcount() for i in range(3)])
             assert_equal(balance_nodes,
                          [self.nodes[i].getbalance() for i in range(3)])
 
         # Exercise listsinceblock with the last two blocks
         coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
         assert_equal(coinbase_tx_1["lastblock"], blocks[1])
         assert_equal(len(coinbase_tx_1["transactions"]), 1)
         assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
         assert_equal(len(self.nodes[0].listsinceblock(
             blocks[1])["transactions"]), 0)
 
         # ==Check that wallet prefers to use coins that don't exceed mempool li
 
         # Get all non-zero utxos together
         chain_addrs = [self.nodes[0].getnewaddress(
         ), self.nodes[0].getnewaddress()]
         singletxid = self.nodes[0].sendtoaddress(
             chain_addrs[0], self.nodes[0].getbalance(), "", "", True)
         self.nodes[0].generate(1)
         node0_balance = self.nodes[0].getbalance()
         # Split into two chains
         rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {
                                                    chain_addrs[0]: node0_balance / 2 - Decimal('10000'), chain_addrs[1]: node0_balance / 2 - Decimal('10000')})
         signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
         singletxid = self.nodes[0].sendrawtransaction(
             hexstring=signedtx["hex"], maxfeerate=0)
         self.nodes[0].generate(1)
 
         # Make a long chain of unconfirmed payments without hitting mempool limit
         # Each tx we make leaves only one output of change on a chain 1 longer
         # Since the amount to send is always much less than the outputs, we only ever need one output
         # So we should be able to generate exactly chainlimit txs for each
         # original output
         sending_addr = self.nodes[1].getnewaddress()
         txid_list = []
         for _ in range(chainlimit * 2):
             txid_list.append(self.nodes[0].sendtoaddress(
                 sending_addr, Decimal('10000')))
         assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2)
         assert_equal(len(txid_list), chainlimit * 2)
 
         # Without walletrejectlongchains, we will still generate a txid
         # The tx will be stored in the wallet but not accepted to the mempool
         extra_txid = self.nodes[0].sendtoaddress(
             sending_addr, Decimal('10000'))
         assert extra_txid not in self.nodes[0].getrawmempool()
         assert extra_txid in [tx["txid"]
                               for tx in self.nodes[0].listtransactions()]
         self.nodes[0].abandontransaction(extra_txid)
         total_txs = len(self.nodes[0].listtransactions("*", 99999))
 
         # Try with walletrejectlongchains
         # Double chain limit but require combining inputs, so we pass
         # SelectCoinsMinConf
         self.stop_node(0)
         self.start_node(0,
                         self.extra_args[0] + ["-walletrejectlongchains",
                                               "-limitancestorcount=" + str(2 * chainlimit)])
 
         # wait until the wallet has submitted all transactions to the mempool
         self.wait_until(
             lambda: len(self.nodes[0].getrawmempool()) == chainlimit * 2)
 
         # Prevent potential race condition when calling wallet RPCs right after
         # restart
         self.nodes[0].syncwithvalidationinterfacequeue()
 
         node0_balance = self.nodes[0].getbalance()
         # With walletrejectlongchains we will not create the tx and store it in
         # our wallet.
         assert_raises_rpc_error(-6, "Transaction has too long of a mempool chain",
                                 self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('10000'))
 
         # Verify nothing new in wallet
         assert_equal(total_txs, len(
             self.nodes[0].listtransactions("*", 99999)))
 
         # Test getaddressinfo on external address. Note that these addresses
         # are taken from disablewallet.py
         assert_raises_rpc_error(-5, "Invalid address",
                                 self.nodes[0].getaddressinfo, "3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy")
         address_info = self.nodes[0].getaddressinfo(
             "mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
         assert_equal(address_info['address'],
                      "ecregtest:qp8rs4qyd3aazk22eyzwg7fmdfzmxm02pyprkfhvm4")
         assert_equal(address_info["scriptPubKey"],
                      "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac")
         assert not address_info["ismine"]
         assert not address_info["iswatchonly"]
         assert not address_info["isscript"]
         assert not address_info["ischange"]
 
         # Test getaddressinfo 'ischange' field on change address.
         self.nodes[0].generate(1)
         destination = self.nodes[1].getnewaddress()
         txid = self.nodes[0].sendtoaddress(destination, 123000)
         tx = self.nodes[0].decoderawtransaction(
             self.nodes[0].gettransaction(txid)['hex'])
         output_addresses = [vout['scriptPubKey']['addresses'][0]
                             for vout in tx["vout"]]
         assert len(output_addresses) > 1
         for address in output_addresses:
             ischange = self.nodes[0].getaddressinfo(address)['ischange']
             assert_equal(ischange, address != destination)
             if ischange:
                 change = address
         self.nodes[0].setlabel(change, 'foobar')
         assert_equal(self.nodes[0].getaddressinfo(change)['ischange'], False)
 
         # Test gettransaction response with different arguments.
         self.log.info(
             "Testing gettransaction response with different arguments...")
         self.nodes[0].setlabel(change, 'baz')
         baz = self.nodes[0].listtransactions(label="baz", count=1)[0]
         expected_receive_vout = {"label": "baz",
                                  "address": baz["address"],
                                  "amount": baz["amount"],
                                  "category": baz["category"],
                                  "vout": baz["vout"]}
         expected_fields = frozenset({'amount',
                                      'confirmations',
                                      'details',
                                      'fee',
                                      'hex',
                                      'time',
                                      'timereceived',
                                      'trusted',
                                      'txid',
                                      'walletconflicts'})
         verbose_field = "decoded"
         expected_verbose_fields = expected_fields | {verbose_field}
 
         self.log.debug("Testing gettransaction response without verbose")
         tx = self.nodes[0].gettransaction(txid=txid)
         assert_equal(set([*tx]), expected_fields)
         assert_array_result(
             tx["details"], {
                 "category": "receive"}, expected_receive_vout)
 
         self.log.debug(
             "Testing gettransaction response with verbose set to False")
         tx = self.nodes[0].gettransaction(txid=txid, verbose=False)
         assert_equal(set([*tx]), expected_fields)
         assert_array_result(
             tx["details"], {
                 "category": "receive"}, expected_receive_vout)
 
         self.log.debug(
             "Testing gettransaction response with verbose set to True")
         tx = self.nodes[0].gettransaction(txid=txid, verbose=True)
         assert_equal(set([*tx]), expected_verbose_fields)
         assert_array_result(
             tx["details"], {
                 "category": "receive"}, expected_receive_vout)
         assert_equal(
             tx[verbose_field],
             self.nodes[0].decoderawtransaction(
                 tx["hex"]))
 
 
 if __name__ == '__main__':
     WalletTest().main()
diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py
index 67e522bc0..36fdea62a 100755
--- a/test/functional/wallet_hd.py
+++ b/test/functional/wallet_hd.py
@@ -1,355 +1,351 @@
 #!/usr/bin/env python3
 # Copyright (c) 2016-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test Hierarchical Deterministic wallet function."""
 
 import os
 import shutil
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    assert_raises_rpc_error,
-    connect_nodes,
-)
+from test_framework.util import assert_equal, assert_raises_rpc_error
 
 
 class WalletHDTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 2
         self.extra_args = [[], ['-keypool=0']]
         self.supports_cli = False
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         # Make sure we use hd, keep masterkeyid
         hd_fingerprint = self.nodes[1].getaddressinfo(
             self.nodes[1].getnewaddress())['hdmasterfingerprint']
         assert_equal(len(hd_fingerprint), 8)
 
         # create an internal key
         change_addr = self.nodes[1].getrawchangeaddress()
         change_addrV = self.nodes[1].getaddressinfo(change_addr)
         if self.options.descriptors:
             assert_equal(change_addrV["hdkeypath"], "m/44'/1'/0'/1/0")
         else:
             # first internal child key
             assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'")
 
         # Import a non-HD private key in the HD wallet
         non_hd_add = 'ecregtest:qr09jgufyeae4s97nqp6mv0tv6eymfunygeyv0llfe'
         non_hd_key = 'cS9umN9w6cDMuRVYdbkfE4c7YUFLJRoXMfhQ569uY4odiQbVN8Rt'
         self.nodes[1].importprivkey(non_hd_key)
 
         # This should be enough to keep the master key and the non-HD key
         self.nodes[1].backupwallet(
             os.path.join(self.nodes[1].datadir, "hd.bak"))
         # self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump"))
 
         # Derive some HD addresses and remember the last
         # Also send funds to each add
         self.nodes[0].generate(101)
         hd_add = None
         NUM_HD_ADDS = 10
         for i in range(1, NUM_HD_ADDS + 1):
             hd_add = self.nodes[1].getnewaddress()
             hd_info = self.nodes[1].getaddressinfo(hd_add)
             if self.options.descriptors:
                 assert_equal(hd_info["hdkeypath"], "m/44'/1'/0'/0/" + str(i))
             else:
                 assert_equal(hd_info["hdkeypath"], "m/0'/0'/" + str(i) + "'")
                 assert_equal(hd_info["hdmasterfingerprint"], hd_fingerprint)
             self.nodes[0].sendtoaddress(hd_add, 1000000)
             self.nodes[0].generate(1)
         self.nodes[0].sendtoaddress(non_hd_add, 1000000)
         self.nodes[0].generate(1)
 
         # create an internal key (again)
         change_addr = self.nodes[1].getrawchangeaddress()
         change_addrV = self.nodes[1].getaddressinfo(change_addr)
         if self.options.descriptors:
             assert_equal(change_addrV["hdkeypath"], "m/44'/1'/0'/1/1")
         else:
             # second internal child key
             assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'")
 
         self.sync_all()
         assert_equal(
             self.nodes[1].getbalance(),
             (NUM_HD_ADDS * 1000000) + 1000000)
 
         self.log.info("Restore backup ...")
         self.stop_node(1)
         # we need to delete the complete regtest directory
         # otherwise node1 would auto-recover all funds in flag the keypool keys
         # as used
         shutil.rmtree(
             os.path.join(
                 self.nodes[1].datadir,
                 self.chain,
                 "blocks"))
         shutil.rmtree(os.path.join(
             self.nodes[1].datadir, self.chain, "chainstate"))
         shutil.copyfile(
             os.path.join(self.nodes[1].datadir, "hd.bak"),
             os.path.join(self.nodes[1].datadir, self.chain, 'wallets',
                          self.default_wallet_name, self.wallet_data_filename))
         self.start_node(1)
 
         # Assert that derivation is deterministic
         hd_add_2 = None
         for i in range(1, NUM_HD_ADDS + 1):
             hd_add_2 = self.nodes[1].getnewaddress()
             hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
             if self.options.descriptors:
                 assert_equal(hd_info_2["hdkeypath"], "m/44'/1'/0'/0/" + str(i))
             else:
                 assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(i) + "'")
             assert_equal(hd_info_2["hdmasterfingerprint"], hd_fingerprint)
         assert_equal(hd_add, hd_add_2)
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_all()
 
         # Needs rescan
         self.restart_node(1, extra_args=self.extra_args[1] + ['-rescan'])
         assert_equal(
             self.nodes[1].getbalance(),
             (NUM_HD_ADDS * 1000000) + 1000000)
 
         # Try a RPC based rescan
         self.stop_node(1)
         shutil.rmtree(
             os.path.join(
                 self.nodes[1].datadir,
                 self.chain,
                 "blocks"))
         shutil.rmtree(os.path.join(
             self.nodes[1].datadir, self.chain, "chainstate"))
         shutil.copyfile(
             os.path.join(self.nodes[1].datadir, "hd.bak"),
             os.path.join(self.nodes[1].datadir, self.chain, "wallets",
                          self.default_wallet_name, self.wallet_data_filename))
         self.start_node(1, extra_args=self.extra_args[1])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_all()
         # Wallet automatically scans blocks older than key on startup
         assert_equal(
             self.nodes[1].getbalance(),
             (NUM_HD_ADDS * 1000000) + 1000000)
         out = self.nodes[1].rescanblockchain(0, 1)
         assert_equal(out['start_height'], 0)
         assert_equal(out['stop_height'], 1)
         out = self.nodes[1].rescanblockchain(2, 4)
         assert_equal(out['start_height'], 2)
         assert_equal(out['stop_height'], 4)
         out = self.nodes[1].rescanblockchain(3)
         assert_equal(out['start_height'], 3)
         assert_equal(out['stop_height'], self.nodes[1].getblockcount())
         out = self.nodes[1].rescanblockchain()
         assert_equal(out['start_height'], 0)
         assert_equal(out['stop_height'], self.nodes[1].getblockcount())
         assert_equal(
             self.nodes[1].getbalance(),
             (NUM_HD_ADDS * 1000000) + 1000000)
 
         # send a tx and make sure its using the internal chain for the
         # changeoutput
         txid = self.nodes[1].sendtoaddress(
             self.nodes[0].getnewaddress(), 1000000)
         outs = self.nodes[1].decoderawtransaction(
             self.nodes[1].gettransaction(txid)['hex'])['vout']
         keypath = ""
         for out in outs:
             if out['value'] != 1000000:
                 keypath = self.nodes[1].getaddressinfo(
                     out['scriptPubKey']['addresses'][0])['hdkeypath']
 
         if self.options.descriptors:
             assert_equal(keypath[0:14], "m/44'/1'/0'/1/")
         else:
             assert_equal(keypath[0:7], "m/0'/1'")
 
         if not self.options.descriptors:
             # Generate a new HD seed on node 1 and make sure it is set
             orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
             self.nodes[1].sethdseed()
             new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
             assert orig_masterkeyid != new_masterkeyid
             addr = self.nodes[1].getnewaddress()
             # Make sure the new address is the first from the keypool
             assert_equal(self.nodes[1].getaddressinfo(
                 addr)['hdkeypath'], 'm/0\'/0\'/0\'')
             # Fill keypool with 1 key
             self.nodes[1].keypoolrefill(1)
 
             # Set a new HD seed on node 1 without flushing the keypool
             new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
             orig_masterkeyid = new_masterkeyid
             self.nodes[1].sethdseed(False, new_seed)
             new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
             assert orig_masterkeyid != new_masterkeyid
             addr = self.nodes[1].getnewaddress()
             assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(
                 addr)['hdseedid'])
             # Make sure the new address continues previous keypool
             assert_equal(self.nodes[1].getaddressinfo(
                 addr)['hdkeypath'], 'm/0\'/0\'/1\'')
 
             # Check that the next address is from the new seed
             self.nodes[1].keypoolrefill(1)
             next_addr = self.nodes[1].getnewaddress()
             assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(
                 next_addr)['hdseedid'])
             # Make sure the new address is not from previous keypool
             assert_equal(self.nodes[1].getaddressinfo(
                 next_addr)['hdkeypath'], 'm/0\'/0\'/0\'')
             assert next_addr != addr
 
             # Sethdseed parameter validity
             assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed,
                                     False, new_seed, 0)
             assert_raises_rpc_error(-5, "Invalid private key",
                                     self.nodes[1].sethdseed, False, "not_wif")
             assert_raises_rpc_error(
                 -1, "JSON value is not a boolean as expected",
                 self.nodes[1].sethdseed, "Not_bool")
             assert_raises_rpc_error(
                 -1, "JSON value is not a string as expected",
                 self.nodes[1].sethdseed, False, True)
             assert_raises_rpc_error(
                 -5, "Already have this key", self.nodes[1].sethdseed, False,
                 new_seed)
             assert_raises_rpc_error(
                 -5, "Already have this key", self.nodes[1].sethdseed, False,
                 self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
 
             self.log.info(
                 'Test sethdseed restoring with keys outside of the initial keypool')
             self.nodes[0].generate(10)
             # Restart node 1 with keypool of 3 and a different wallet
             self.nodes[1].createwallet(wallet_name='origin', blank=True)
             self.restart_node(1, extra_args=['-keypool=3', '-wallet=origin'])
-            connect_nodes(self.nodes[0], self.nodes[1])
+            self.connect_nodes(0, 1)
 
             # sethdseed restoring and seeing txs to addresses out of the
             # keypool
             origin_rpc = self.nodes[1].get_wallet_rpc('origin')
             seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
             origin_rpc.sethdseed(True, seed)
 
             self.nodes[1].createwallet(wallet_name='restore', blank=True)
             restore_rpc = self.nodes[1].get_wallet_rpc('restore')
             # Set to be the same seed as origin_rpc
             restore_rpc.sethdseed(True, seed)
             # Rotate to a new seed, making original `seed` inactive
             restore_rpc.sethdseed(True)
 
             self.nodes[1].createwallet(wallet_name='restore2', blank=True)
             restore2_rpc = self.nodes[1].get_wallet_rpc('restore2')
             # Set to be the same seed as origin_rpc
             restore2_rpc.sethdseed(True, seed)
             # Rotate to a new seed, making original `seed` inactive
             restore2_rpc.sethdseed(True)
 
             # Check persistence of inactive seed by reloading restore. restore2
             # is still loaded to test the case where the wallet is not reloaded
             restore_rpc.unloadwallet()
             self.nodes[1].loadwallet('restore')
             restore_rpc = self.nodes[1].get_wallet_rpc('restore')
 
             # Empty origin keypool and get an address that is beyond the
             # initial keypool
             origin_rpc.getnewaddress()
             origin_rpc.getnewaddress()
             # Last address of initial keypool
             last_addr = origin_rpc.getnewaddress()
             # First address beyond initial keypool
             addr = origin_rpc.getnewaddress()
 
             # Check that the restored seed has last_addr but does not have addr
             info = restore_rpc.getaddressinfo(last_addr)
             assert_equal(info['ismine'], True)
             info = restore_rpc.getaddressinfo(addr)
             assert_equal(info['ismine'], False)
             info = restore2_rpc.getaddressinfo(last_addr)
             assert_equal(info['ismine'], True)
             info = restore2_rpc.getaddressinfo(addr)
             assert_equal(info['ismine'], False)
             # Check that the origin seed has addr
             info = origin_rpc.getaddressinfo(addr)
             assert_equal(info['ismine'], True)
 
             # Send a transaction to addr, which is out of the initial keypool.
             # The wallet that has set a new seed (restore_rpc) should not
             # detect this transaction.
             txid = self.nodes[0].sendtoaddress(addr, 1000000)
             origin_rpc.sendrawtransaction(
                 self.nodes[0].gettransaction(txid)['hex'])
             self.nodes[0].generate(1)
             self.sync_blocks()
             origin_rpc.gettransaction(txid)
             assert_raises_rpc_error(-5,
                                     'Invalid or non-wallet transaction id',
                                     restore_rpc.gettransaction,
                                     txid)
             out_of_kp_txid = txid
 
             # Send a transaction to last_addr, which is in the initial keypool.
             # The wallet that has set a new seed (restore_rpc) should detect
             # this transaction and generate 3 new keys from the initial seed.
             # The previous transaction (out_of_kp_txid) should still not be
             # detected as a rescan is required.
             txid = self.nodes[0].sendtoaddress(last_addr, 1000000)
             origin_rpc.sendrawtransaction(
                 self.nodes[0].gettransaction(txid)['hex'])
             self.nodes[0].generate(1)
             self.sync_blocks()
             origin_rpc.gettransaction(txid)
             restore_rpc.gettransaction(txid)
             assert_raises_rpc_error(-5,
                                     'Invalid or non-wallet transaction id',
                                     restore_rpc.gettransaction,
                                     out_of_kp_txid)
             restore2_rpc.gettransaction(txid)
             assert_raises_rpc_error(-5,
                                     'Invalid or non-wallet transaction id',
                                     restore2_rpc.gettransaction,
                                     out_of_kp_txid)
 
             # After rescanning, restore_rpc should now see out_of_kp_txid and
             # generate an additional key.
             # addr should now be part of restore_rpc and be ismine
             restore_rpc.rescanblockchain()
             restore_rpc.gettransaction(out_of_kp_txid)
             info = restore_rpc.getaddressinfo(addr)
             assert_equal(info['ismine'], True)
             restore2_rpc.rescanblockchain()
             restore2_rpc.gettransaction(out_of_kp_txid)
             info = restore2_rpc.getaddressinfo(addr)
             assert_equal(info['ismine'], True)
 
             # Check again that 3 keys were derived.
             # Empty keypool and get an address that is beyond the initial
             # keypool
             origin_rpc.getnewaddress()
             origin_rpc.getnewaddress()
             last_addr = origin_rpc.getnewaddress()
             addr = origin_rpc.getnewaddress()
 
             # Check that the restored seed has last_addr but does not have addr
             info = restore_rpc.getaddressinfo(last_addr)
             assert_equal(info['ismine'], True)
             info = restore_rpc.getaddressinfo(addr)
             assert_equal(info['ismine'], False)
             info = restore2_rpc.getaddressinfo(last_addr)
             assert_equal(info['ismine'], True)
             info = restore2_rpc.getaddressinfo(addr)
             assert_equal(info['ismine'], False)
 
 
 if __name__ == '__main__':
     WalletHDTest().main()
diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py
index ab7eecdec..4f3222a58 100755
--- a/test/functional/wallet_import_rescan.py
+++ b/test/functional/wallet_import_rescan.py
@@ -1,237 +1,237 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test wallet import RPCs.
 
 Test rescan behavior of importaddress, importpubkey, importprivkey, and
 importmulti RPCs with different types of keys and rescan options.
 
 In the first part of the test, node 0 creates an address for each type of
 import RPC call and node 0 sends XEC to it. Then other nodes import the
 addresses, and the test makes listtransactions and getbalance calls to confirm
 that the importing node either did or did not execute rescans picking up the
 send transactions.
 
 In the second part of the test, node 0 sends more XEC to each address, and the
 test makes more listtransactions and getbalance calls to confirm that the
 importing nodes pick up the new transactions regardless of whether rescans
 happened previously.
 """
 
 import collections
 import enum
 import itertools
 import random
 from decimal import Decimal
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes, set_node_times
+from test_framework.util import assert_equal, set_node_times
 
 Call = enum.Enum("Call", "single multiaddress multiscript")
 Data = enum.Enum("Data", "address pub priv")
 Rescan = enum.Enum("Rescan", "no yes late_timestamp")
 
 
 class Variant(collections.namedtuple("Variant", "call data rescan prune")):
     """Helper for importing one key and verifying scanned transactions."""
 
     def do_import(self, timestamp):
         """Call one key import RPC."""
         rescan = self.rescan == Rescan.yes
 
         assert_equal(self.address["solvable"], True)
 
         if self.call == Call.single:
             if self.data == Data.address:
                 response = self.node.importaddress(
                     address=self.address["address"], label=self.label, rescan=rescan)
             elif self.data == Data.pub:
                 response = self.node.importpubkey(
                     pubkey=self.address["pubkey"], label=self.label, rescan=rescan)
             elif self.data == Data.priv:
                 response = self.node.importprivkey(
                     privkey=self.key, label=self.label, rescan=rescan)
             assert_equal(response, None)
 
         elif self.call in (Call.multiaddress, Call.multiscript):
             request = {
                 "scriptPubKey": {
                     "address": self.address["address"]
                 } if self.call == Call.multiaddress else self.address["scriptPubKey"],
                 "timestamp": timestamp + TIMESTAMP_WINDOW + (
                     1 if self.rescan == Rescan.late_timestamp else 0),
                 "pubkeys": [self.address["pubkey"]] if self.data == Data.pub else [],
                 "keys": [self.key] if self.data == Data.priv else [],
                 "label": self.label,
                 "watchonly": self.data != Data.priv
             }
             response = self.node.importmulti(
                 requests=[request],
                 options={
                     "rescan": self.rescan in (
                         Rescan.yes,
                         Rescan.late_timestamp)},
             )
             assert_equal(response, [{"success": True}])
 
     def check(self, txid=None, amount=None, confirmation_height=None):
         """Verify that listtransactions/listreceivedbyaddress return expected values."""
 
         txs = self.node.listtransactions(
             label=self.label, count=10000, include_watchonly=True)
         current_height = self.node.getblockcount()
         assert_equal(len(txs), self.expected_txs)
 
         addresses = self.node.listreceivedbyaddress(
             minconf=0, include_watchonly=True, address_filter=self.address['address'])
         if self.expected_txs:
             assert_equal(len(addresses[0]["txids"]), self.expected_txs)
 
         if txid is not None:
             tx, = [tx for tx in txs if tx["txid"] == txid]
             assert_equal(tx["label"], self.label)
             assert_equal(tx["address"], self.address["address"])
             assert_equal(tx["amount"], amount)
             assert_equal(tx["category"], "receive")
             assert_equal(tx["label"], self.label)
             assert_equal(tx["txid"], txid)
             assert_equal(tx["confirmations"],
                          1 + current_height - confirmation_height)
             assert_equal("trusted" not in tx, True)
 
             address, = [ad for ad in addresses if txid in ad["txids"]]
             assert_equal(address["address"], self.address["address"])
             assert_equal(address["amount"], self.expected_balance)
             assert_equal(address["confirmations"],
                          1 + current_height - confirmation_height)
             # Verify the transaction is correctly marked watchonly depending on
             # whether the transaction pays to an imported public key or
             # imported private key. The test setup ensures that transaction
             # inputs will not be from watchonly keys (important because
             # involvesWatchonly will be true if either the transaction output
             # or inputs are watchonly).
             if self.data != Data.priv:
                 assert_equal(address["involvesWatchonly"], True)
             else:
                 assert_equal("involvesWatchonly" not in address, True)
 
 
 # List of Variants for each way a key or address could be imported.
 IMPORT_VARIANTS = [Variant(*variants)
                    for variants in itertools.product(Call, Data, Rescan, (False, True))]
 
 # List of nodes to import keys to. Half the nodes will have pruning disabled,
 # half will have it enabled. Different nodes will be used for imports that are
 # expected to cause rescans, and imports that are not expected to cause
 # rescans, in order to prevent rescans during later imports picking up
 # transactions associated with earlier imports. This makes it easier to keep
 # track of expected balances and transactions.
 ImportNode = collections.namedtuple("ImportNode", "prune rescan")
 IMPORT_NODES = [ImportNode(*fields)
                 for fields in itertools.product((False, True), repeat=2)]
 
 # Rescans start at the earliest block up to 2 hours before the key timestamp.
 TIMESTAMP_WINDOW = 2 * 60 * 60
 
 AMOUNT_DUST = 5.46
 
 
 def get_rand_amount():
     r = random.uniform(AMOUNT_DUST, 1000000)
     return Decimal(str(round(r, 2)))
 
 
 class ImportRescanTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 2 + len(IMPORT_NODES)
         self.supports_cli = False
         self.rpc_timeout = 120
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def setup_network(self):
         self.extra_args = [[] for _ in range(self.num_nodes)]
         for i, import_node in enumerate(IMPORT_NODES, 2):
             if import_node.prune:
                 self.extra_args[i] += ["-prune=1"]
 
         self.add_nodes(self.num_nodes, extra_args=self.extra_args)
 
         # Import keys with pruning disabled
         self.start_nodes(extra_args=[[]] * self.num_nodes)
         self.import_deterministic_coinbase_privkeys()
         self.stop_nodes()
 
         self.start_nodes()
         for i in range(1, self.num_nodes):
-            connect_nodes(self.nodes[i], self.nodes[0])
+            self.connect_nodes(i, 0)
 
     def run_test(self):
         # Create one transaction on node 0 with a unique amount for
         # each possible type of wallet import RPC.
         for i, variant in enumerate(IMPORT_VARIANTS):
             variant.label = "label {} {}".format(i, variant)
             variant.address = self.nodes[1].getaddressinfo(
                 self.nodes[1].getnewaddress(label=variant.label))
             variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
             variant.initial_amount = get_rand_amount()
             variant.initial_txid = self.nodes[0].sendtoaddress(
                 variant.address["address"], variant.initial_amount)
             # Generate one block for each send
             self.nodes[0].generate(1)
             variant.confirmation_height = self.nodes[0].getblockcount()
             variant.timestamp = self.nodes[0].getblockheader(
                 self.nodes[0].getbestblockhash())["time"]
         # Conclude sync before calling setmocktime to avoid timeouts
         self.sync_all()
 
         # Generate a block further in the future (past the rescan window).
         assert_equal(self.nodes[0].getrawmempool(), [])
         set_node_times(self.nodes, self.nodes[0].getblockheader(
             self.nodes[0].getbestblockhash())["time"] + TIMESTAMP_WINDOW + 1)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # For each variation of wallet key import, invoke the import RPC and
         # check the results from getbalance and listtransactions.
         for variant in IMPORT_VARIANTS:
             self.log.info('Run import for variant {}'.format(variant))
             expect_rescan = variant.rescan == Rescan.yes
             variant.node = self.nodes[2 + IMPORT_NODES.index(
                 ImportNode(variant.prune, expect_rescan))]
             variant.do_import(variant.timestamp)
             if expect_rescan:
                 variant.expected_balance = variant.initial_amount
                 variant.expected_txs = 1
                 variant.check(variant.initial_txid, variant.initial_amount,
                               variant.confirmation_height)
             else:
                 variant.expected_balance = 0
                 variant.expected_txs = 0
                 variant.check()
 
         # Create new transactions sending to each address.
         for i, variant in enumerate(IMPORT_VARIANTS):
             variant.sent_amount = get_rand_amount()
             variant.sent_txid = self.nodes[0].sendtoaddress(
                 variant.address["address"], variant.sent_amount)
             # Generate one block for each send
             self.nodes[0].generate(1)
             variant.confirmation_height = self.nodes[0].getblockcount()
 
         assert_equal(self.nodes[0].getrawmempool(), [])
         self.sync_all()
 
         # Check the latest results from getbalance and listtransactions.
         for variant in IMPORT_VARIANTS:
             self.log.info('Run check for variant {}'.format(variant))
             variant.expected_balance += variant.sent_amount
             variant.expected_txs += 1
             variant.check(variant.sent_txid, variant.sent_amount,
                           variant.confirmation_height)
 
 
 if __name__ == "__main__":
     ImportRescanTest().main()
diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py
index 75abb90a2..114c5dc38 100755
--- a/test/functional/wallet_keypool_topup.py
+++ b/test/functional/wallet_keypool_topup.py
@@ -1,80 +1,80 @@
 #!/usr/bin/env python3
 # Copyright (c) 2017-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test HD Wallet keypool restore function.
 
 Two nodes. Node1 is under test. Node0 is providing transactions and generating blocks.
 
 - Start node1, shutdown and backup wallet.
 - Generate 110 keys (enough to drain the keypool). Store key 90 (in the initial keypool) and key 110 (beyond the initial keypool). Send funds to key 90 and key 110.
 - Stop node1, clear the datadir, move wallet file back into the datadir and restart node1.
 - connect node1 to node0. Verify that they sync and node1 receives its funds."""
 import os
 import shutil
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes
+from test_framework.util import assert_equal
 
 
 class KeypoolRestoreTest(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 2
         self.extra_args = [[], ['-keypool=100']]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         wallet_path = os.path.join(
             self.nodes[1].datadir, self.chain, "wallets",
             self.default_wallet_name, self.wallet_data_filename)
         wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak")
         self.nodes[0].generate(101)
 
         self.log.info("Make backup of wallet")
         self.stop_node(1)
         shutil.copyfile(wallet_path, wallet_backup_path)
         self.start_node(1, self.extra_args[1])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
 
         self.log.info("Generate keys for wallet")
         for _ in range(90):
             addr_oldpool = self.nodes[1].getnewaddress()
         for _ in range(20):
             addr_extpool = self.nodes[1].getnewaddress()
 
         self.log.info("Send funds to wallet")
         self.nodes[0].sendtoaddress(addr_oldpool, 10000000)
         self.nodes[0].generate(1)
         self.nodes[0].sendtoaddress(addr_extpool, 5000000)
         self.nodes[0].generate(1)
         self.sync_blocks()
 
         self.log.info("Restart node with wallet backup")
         self.stop_node(1)
         shutil.copyfile(wallet_backup_path, wallet_path)
         self.start_node(1, self.extra_args[1])
-        connect_nodes(self.nodes[0], self.nodes[1])
+        self.connect_nodes(0, 1)
         self.sync_all()
 
         self.log.info("Verify keypool is restored and balance is correct")
         assert_equal(self.nodes[1].getbalance(), 15000000)
         assert_equal(self.nodes[1].listtransactions()
                      [0]['category'], "receive")
         # Check that we have marked all keys up to the used keypool key as used
         if self.options.descriptors:
             assert_equal(
                 self.nodes[1].getaddressinfo(
                     self.nodes[1].getnewaddress())['hdkeypath'],
                 "m/44'/1'/0'/0/110")
         else:
             assert_equal(
                 self.nodes[1].getaddressinfo(
                     self.nodes[1].getnewaddress())['hdkeypath'],
                 "m/0'/0'/110'")
 
 
 if __name__ == '__main__':
     KeypoolRestoreTest().main()
diff --git a/test/functional/wallet_listsinceblock.py b/test/functional/wallet_listsinceblock.py
index e89342f42..5546be527 100755
--- a/test/functional/wallet_listsinceblock.py
+++ b/test/functional/wallet_listsinceblock.py
@@ -1,339 +1,338 @@
 #!/usr/bin/env python3
 # Copyright (c) 2017-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the listsinceblock RPC."""
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_array_result,
     assert_equal,
     assert_raises_rpc_error,
-    connect_nodes,
 )
 
 
 class ListSinceBlockTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 4
         self.setup_clean_chain = True
         self.extra_args = [["-noparkdeepreorg"], ["-noparkdeepreorg"], [], []]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         # All nodes are in IBD from genesis, so they'll need the miner (node2) to be an outbound connection, or have
         # only one connection. (See fPreferredDownload in net_processing)
-        connect_nodes(self.nodes[1], self.nodes[2])
+        self.connect_nodes(1, 2)
         self.nodes[2].generate(101)
         self.sync_all()
 
         self.test_no_blockhash()
         self.test_invalid_blockhash()
         self.test_reorg()
         self.test_double_spend()
         self.test_double_send()
         self.test_targetconfirmations()
 
     def test_no_blockhash(self):
         self.log.info("Test no blockhash")
         txid = self.nodes[2].sendtoaddress(
             self.nodes[0].getnewaddress(), 1000000)
         blockhash, = self.nodes[2].generate(1)
         blockheight = self.nodes[2].getblockheader(blockhash)['height']
         self.sync_all()
 
         txs = self.nodes[0].listtransactions()
         assert_array_result(txs, {"txid": txid}, {
             "category": "receive",
             "amount": 1000000,
             "blockhash": blockhash,
             "blockheight": blockheight,
             "confirmations": 1,
         })
         assert_equal(
             self.nodes[0].listsinceblock(),
             {"lastblock": blockhash,
              "removed": [],
              "transactions": txs})
         assert_equal(
             self.nodes[0].listsinceblock(""),
             {"lastblock": blockhash,
              "removed": [],
              "transactions": txs})
 
     def test_invalid_blockhash(self):
         self.log.info("Test invalid blockhash")
         assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock,
                                 "42759cde25462784395a337460bde75f58e73d3f08bd31fdc3507cbac856a2c4")
         assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock,
                                 "0000000000000000000000000000000000000000000000000000000000000000")
         assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 11, for 'invalid-hex')", self.nodes[0].listsinceblock,
                                 "invalid-hex")
         assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'Z000000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].listsinceblock,
                                 "Z000000000000000000000000000000000000000000000000000000000000000")
 
     def test_targetconfirmations(self):
         '''
         This tests when the value of target_confirmations exceeds the number of
         blocks in the main chain. In this case, the genesis block hash should be
         given for the `lastblock` property. If target_confirmations is < 1, then
         a -8 invalid parameter error is thrown.
         '''
         self.log.info("Test target_confirmations")
         blockhash, = self.nodes[2].generate(1)
         blockheight = self.nodes[2].getblockheader(blockhash)['height']
         self.sync_all()
 
         assert_equal(
             self.nodes[0].getblockhash(0),
             self.nodes[0].listsinceblock(blockhash, blockheight + 1)['lastblock'])
         assert_equal(
             self.nodes[0].getblockhash(0),
             self.nodes[0].listsinceblock(blockhash, blockheight + 1000)['lastblock'])
         assert_raises_rpc_error(-8, "Invalid parameter",
                                 self.nodes[0].listsinceblock, blockhash, 0)
 
     def test_reorg(self):
         '''
         `listsinceblock` did not behave correctly when handed a block that was
         no longer in the main chain:
 
              ab0
           /       \
         aa1 [tx0]   bb1
          |           |
         aa2         bb2
          |           |
         aa3         bb3
                      |
                     bb4
 
         Consider a client that has only seen block `aa3` above. It asks the node
         to `listsinceblock aa3`. But at some point prior the main chain switched
         to the bb chain.
 
         Previously: listsinceblock would find height=4 for block aa3 and compare
         this to height=5 for the tip of the chain (bb4). It would then return
         results restricted to bb3-bb4.
 
         Now: listsinceblock finds the fork at ab0 and returns results in the
         range bb1-bb4.
 
         This test only checks that [tx0] is present.
         '''
         self.log.info("Test reorg")
 
         # Split network into two
         self.split_network()
 
         # send to nodes[0] from nodes[2]
         senttx = self.nodes[2].sendtoaddress(
             self.nodes[0].getnewaddress(), 1000000)
 
         # generate on both sides
         nodes1_last_blockhash = self.nodes[1].generate(6)[-1]
         nodes2_first_blockhash = self.nodes[2].generate(7)[0]
         self.log.debug(
             "nodes[1] last blockhash = {}".format(nodes1_last_blockhash))
         self.log.debug(
             "nodes[2] first blockhash = {}".format(nodes2_first_blockhash))
 
         self.sync_all(self.nodes[:2])
         self.sync_all(self.nodes[2:])
 
         self.join_network()
 
         # listsinceblock(nodes1_last_blockhash) should now include tx as seen from nodes[0]
         # and return the block height which listsinceblock now exposes since
         # rABC6098a1cb2b25.
         transactions = self.nodes[0].listsinceblock(
             nodes1_last_blockhash)['transactions']
         found = next(tx for tx in transactions if tx['txid'] == senttx)
         assert_equal(
             found['blockheight'],
             self.nodes[0].getblockheader(nodes2_first_blockhash)['height'])
 
     def test_double_spend(self):
         '''
         This tests the case where the same UTXO is spent twice on two separate
         blocks as part of a reorg.
 
              ab0
           /       \
         aa1 [tx1]   bb1 [tx2]
          |           |
         aa2         bb2
          |           |
         aa3         bb3
                      |
                     bb4
 
         Problematic case:
 
         1. User 1 receives XEC in tx1 from utxo1 in block aa1.
         2. User 2 receives XEC in tx2 from utxo1 (same) in block bb1
         3. User 1 sees 2 confirmations at block aa3.
         4. Reorg into bb chain.
         5. User 1 asks `listsinceblock aa3` and does not see that tx1 is now
            invalidated.
 
         Currently the solution to this is to detect that a reorg'd block is
         asked for in listsinceblock, and to iterate back over existing blocks up
         until the fork point, and to include all transactions that relate to the
         node wallet.
         '''
         self.log.info("Test double spend")
 
         self.sync_all()
 
         # Split network into two
         self.split_network()
 
         # share utxo between nodes[1] and nodes[2]
         utxos = self.nodes[2].listunspent()
         utxo = utxos[0]
         privkey = self.nodes[2].dumpprivkey(utxo['address'])
         self.nodes[1].importprivkey(privkey)
 
         # send from nodes[1] using utxo to nodes[0]
         change = '{:.2f}'.format(float(utxo['amount']) - 1000300.00)
         recipient_dict = {
             self.nodes[0].getnewaddress(): 1000000,
             self.nodes[1].getnewaddress(): change,
         }
         utxo_dicts = [{
             'txid': utxo['txid'],
             'vout': utxo['vout'],
         }]
         txid1 = self.nodes[1].sendrawtransaction(
             self.nodes[1].signrawtransactionwithwallet(
                 self.nodes[1].createrawtransaction(utxo_dicts, recipient_dict))['hex'])
 
         # send from nodes[2] using utxo to nodes[3]
         recipient_dict2 = {
             self.nodes[3].getnewaddress(): 1000000,
             self.nodes[2].getnewaddress(): change,
         }
         self.nodes[2].sendrawtransaction(
             self.nodes[2].signrawtransactionwithwallet(
                 self.nodes[2].createrawtransaction(utxo_dicts, recipient_dict2))['hex'])
 
         # generate on both sides
         lastblockhash = self.nodes[1].generate(3)[2]
         self.nodes[2].generate(4)
 
         self.join_network()
 
         self.sync_all()
 
         # gettransaction should work for txid1
         assert self.nodes[0].gettransaction(
             txid1)['txid'] == txid1, "gettransaction failed to find txid1"
 
         # listsinceblock(lastblockhash) should now include txid1, as seen from
         # nodes[0]
         lsbres = self.nodes[0].listsinceblock(lastblockhash)
         assert any(tx['txid'] == txid1 for tx in lsbres['removed'])
 
         # but it should not include 'removed' if include_removed=false
         lsbres2 = self.nodes[0].listsinceblock(
             blockhash=lastblockhash, include_removed=False)
         assert 'removed' not in lsbres2
 
     def test_double_send(self):
         '''
         This tests the case where the same transaction is submitted twice on two
         separate blocks as part of a reorg. The former will vanish and the
         latter will appear as the true transaction (with confirmations dropping
         as a result).
 
              ab0
           /       \
         aa1 [tx1]   bb1
          |           |
         aa2         bb2
          |           |
         aa3         bb3 [tx1]
                      |
                     bb4
 
         Asserted:
 
         1. tx1 is listed in listsinceblock.
         2. It is included in 'removed' as it was removed, even though it is now
            present in a different block.
         3. It is listed with a confirmation count of 2 (bb3, bb4), not
            3 (aa1, aa2, aa3).
         '''
         self.log.info("Test double send")
 
         self.sync_all()
 
         # Split network into two
         self.split_network()
 
         # create and sign a transaction
         utxos = self.nodes[2].listunspent()
         utxo = utxos[0]
         change = '{:.2f}'.format(float(utxo['amount']) - 1000300.00)
         recipient_dict = {
             self.nodes[0].getnewaddress(): 1000000,
             self.nodes[2].getnewaddress(): change,
         }
         utxo_dicts = [{
             'txid': utxo['txid'],
             'vout': utxo['vout'],
         }]
         signedtxres = self.nodes[2].signrawtransactionwithwallet(
             self.nodes[2].createrawtransaction(utxo_dicts, recipient_dict))
         assert signedtxres['complete']
 
         signedtx = signedtxres['hex']
 
         # send from nodes[1]; this will end up in aa1
         txid1 = self.nodes[1].sendrawtransaction(signedtx)
 
         # generate bb1-bb2 on right side
         self.nodes[2].generate(2)
 
         # send from nodes[2]; this will end up in bb3
         txid2 = self.nodes[2].sendrawtransaction(signedtx)
 
         assert_equal(txid1, txid2)
 
         # generate on both sides
         lastblockhash = self.nodes[1].generate(3)[2]
         self.nodes[2].generate(2)
 
         self.join_network()
 
         self.sync_all()
 
         # gettransaction should work for txid1
         tx1 = self.nodes[0].gettransaction(txid1)
         assert_equal(
             tx1['blockheight'],
             self.nodes[0].getblockheader(
                 tx1['blockhash'])['height'])
 
         # listsinceblock(lastblockhash) should now include txid1 in transactions
         # as well as in removed
         lsbres = self.nodes[0].listsinceblock(lastblockhash)
         assert any(tx['txid'] == txid1 for tx in lsbres['transactions'])
         assert any(tx['txid'] == txid1 for tx in lsbres['removed'])
 
         # find transaction and ensure confirmations is valid
         for tx in lsbres['transactions']:
             if tx['txid'] == txid1:
                 assert_equal(tx['confirmations'], 2)
 
         # the same check for the removed array; confirmations should STILL be 2
         for tx in lsbres['removed']:
             if tx['txid'] == txid1:
                 assert_equal(tx['confirmations'], 2)
 
 
 if __name__ == '__main__':
     ListSinceBlockTest().main()
diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py
index b419272a9..c8ecb63f6 100755
--- a/test/functional/wallet_reorgsrestore.py
+++ b/test/functional/wallet_reorgsrestore.py
@@ -1,124 +1,124 @@
 #!/usr/bin/env python3
 # Copyright (c) 2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 """Test tx status in case of reorgs while wallet being shutdown.
 
 Wallet txn status rely on block connection/disconnection for its
 accuracy. In case of reorgs happening while wallet being shutdown
 block updates are not going to be received. At wallet loading, we
 check against chain if confirmed txn are still in chain and change
 their status if block in which they have been included has been
 disconnected.
 """
 
 import os
 import shutil
 from decimal import Decimal
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes, disconnect_nodes
+from test_framework.util import assert_equal
 
 
 class ReorgsRestoreTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 3
         self.extra_args = [["-noparkdeepreorg"]] * self.num_nodes
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def run_test(self):
         # Send a tx from which to conflict outputs later
         txid_conflict_from = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), Decimal("10000000"))
         self.nodes[0].generate(1)
         self.sync_blocks()
 
         # Disconnect node1 from others to reorg its chain later
-        disconnect_nodes(self.nodes[0], self.nodes[1])
-        disconnect_nodes(self.nodes[1], self.nodes[2])
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.disconnect_nodes(0, 1)
+        self.disconnect_nodes(1, 2)
+        self.connect_nodes(0, 2)
 
         # Send a tx to be unconfirmed later
         txid = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), Decimal("10000000"))
         tx = self.nodes[0].gettransaction(txid)
         self.nodes[0].generate(4)
         tx_before_reorg = self.nodes[0].gettransaction(txid)
         assert_equal(tx_before_reorg["confirmations"], 4)
 
         # Disconnect node0 from node2 to broadcast a conflict on their
         # respective chains
-        disconnect_nodes(self.nodes[0], self.nodes[2])
+        self.disconnect_nodes(0, 2)
         nA = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction(
             txid_conflict_from)["details"] if tx_out["amount"] == Decimal("10000000"))
         inputs = []
         inputs.append({"txid": txid_conflict_from, "vout": nA})
         outputs_1 = {}
         outputs_2 = {}
 
         # Create a conflicted tx broadcast on node0 chain and conflicting tx
         # broadcast on node1 chain. Both spend from txid_conflict_from
         outputs_1[self.nodes[0].getnewaddress()] = Decimal("9999980")
         outputs_2[self.nodes[0].getnewaddress()] = Decimal("9999980")
         conflicted = self.nodes[0].signrawtransactionwithwallet(
             self.nodes[0].createrawtransaction(inputs, outputs_1))
         conflicting = self.nodes[0].signrawtransactionwithwallet(
             self.nodes[0].createrawtransaction(inputs, outputs_2))
 
         conflicted_txid = self.nodes[0].sendrawtransaction(conflicted["hex"])
         self.nodes[0].generate(1)
         conflicting_txid = self.nodes[2].sendrawtransaction(conflicting["hex"])
         self.nodes[2].generate(9)
 
         # Reconnect node0 and node2 and check that conflicted_txid is
         # effectively conflicted
-        connect_nodes(self.nodes[0], self.nodes[2])
+        self.connect_nodes(0, 2)
         self.sync_blocks([self.nodes[0], self.nodes[2]])
         conflicted = self.nodes[0].gettransaction(conflicted_txid)
         conflicting = self.nodes[0].gettransaction(conflicting_txid)
         assert_equal(conflicted["confirmations"], -9)
         assert_equal(conflicted["walletconflicts"][0], conflicting["txid"])
 
         # Node0 wallet is shutdown
         self.restart_node(0)
 
         # The block chain re-orgs and the tx is included in a different block
         self.nodes[1].generate(9)
         self.nodes[1].sendrawtransaction(tx["hex"])
         self.nodes[1].generate(1)
         self.nodes[1].sendrawtransaction(conflicted["hex"])
         self.nodes[1].generate(1)
 
         # Node0 wallet file is loaded on longest sync'ed node1
         self.stop_node(1)
         self.nodes[0].backupwallet(
             os.path.join(
                 self.nodes[0].datadir,
                 'wallet.bak'))
         shutil.copyfile(
             os.path.join(
                 self.nodes[0].datadir,
                 'wallet.bak'),
             os.path.join(
                 self.nodes[1].datadir,
                 self.chain,
                 self.default_wallet_name,
                 self.wallet_data_filename))
         self.start_node(1)
         tx_after_reorg = self.nodes[1].gettransaction(txid)
         # Check that normal confirmed tx is confirmed again but with different
         # blockhash
         assert_equal(tx_after_reorg["confirmations"], 2)
         assert(tx_before_reorg["blockhash"] != tx_after_reorg["blockhash"])
         conflicted_after_reorg = self.nodes[1].gettransaction(conflicted_txid)
         # Check that conflicted tx is confirmed again with blockhash different
         # than previously conflicting tx
         assert_equal(conflicted_after_reorg["confirmations"], 1)
         assert(conflicting["blockhash"] != conflicted_after_reorg["blockhash"])
 
 
 if __name__ == '__main__':
     ReorgsRestoreTest().main()
diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py
index 323466ce7..7cd5cc60e 100755
--- a/test/functional/wallet_txn_clone.py
+++ b/test/functional/wallet_txn_clone.py
@@ -1,147 +1,147 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
 
 import io
 
 from test_framework.messages import XEC, CTransaction
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes, disconnect_nodes
+from test_framework.util import assert_equal
 
 
 class TxnMallTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 4
         self.extra_args = [["-noparkdeepreorg"], ["-noparkdeepreorg"], [], []]
         self.supports_cli = False
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def add_options(self, parser):
         parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
                             help="Test double-spend of 1-confirmed transaction")
 
     def setup_network(self):
         # Start with split network:
         super().setup_network()
-        disconnect_nodes(self.nodes[1], self.nodes[2])
+        self.disconnect_nodes(1, 2)
 
     def run_test(self):
         output_type = "legacy"
 
         # All nodes should start with 1,250,000,000 XEC:
         starting_balance = 1250000000
         for i in range(4):
             assert_equal(self.nodes[i].getbalance(), starting_balance)
             # bug workaround, coins generated assigned to first getnewaddress!
             self.nodes[i].getnewaddress()
 
         self.nodes[0].settxfee(1000)
 
         node0_address1 = self.nodes[0].getnewaddress(address_type=output_type)
         node0_txid1 = self.nodes[0].sendtoaddress(node0_address1, 1219000000)
         node0_tx1 = self.nodes[0].gettransaction(node0_txid1)
 
         node0_address2 = self.nodes[0].getnewaddress(address_type=output_type)
         node0_txid2 = self.nodes[0].sendtoaddress(node0_address2, 29000000)
         node0_tx2 = self.nodes[0].gettransaction(node0_txid2)
 
         assert_equal(self.nodes[0].getbalance(),
                      starting_balance + node0_tx1["fee"] + node0_tx2["fee"])
 
         # Coins are sent to node1_address
         node1_address = self.nodes[1].getnewaddress()
 
         # Send tx1, and another transaction tx2 that won't be cloned
         txid1 = self.nodes[0].sendtoaddress(node1_address, 40000000)
         txid2 = self.nodes[0].sendtoaddress(node1_address, 20000000)
 
         # Construct a clone of tx1, to be malleated
         rawtx1 = self.nodes[0].getrawtransaction(txid1, 1)
         clone_inputs = [{"txid": rawtx1["vin"][0]["txid"],
                          "vout": rawtx1["vin"][0]["vout"],
                          "sequence": rawtx1["vin"][0]["sequence"]}]
         clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][0]["value"],
                          rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][1]["value"]}
         clone_locktime = rawtx1["locktime"]
         clone_raw = self.nodes[0].createrawtransaction(
             clone_inputs, clone_outputs, clone_locktime)
 
         # createrawtransaction randomizes the order of its outputs, so swap
         # them if necessary.
         clone_tx = CTransaction()
         clone_tx.deserialize(io.BytesIO(bytes.fromhex(clone_raw)))
         if (rawtx1["vout"][0]["value"] == 40000000 and
                 clone_tx.vout[0].nValue != 40000000 * XEC or
                 rawtx1["vout"][0]["value"] != 40000000 and
                 clone_tx.vout[0].nValue == 40000000 * XEC):
             (clone_tx.vout[0], clone_tx.vout[1]) = (clone_tx.vout[1],
                                                     clone_tx.vout[0])
 
         # Use a different signature hash type to sign.  This creates an equivalent but malleated clone.
         # Don't send the clone anywhere yet
         tx1_clone = self.nodes[0].signrawtransactionwithwallet(
             clone_tx.serialize().hex(), None, "ALL|FORKID|ANYONECANPAY")
         assert_equal(tx1_clone["complete"], True)
 
         # Have node0 mine a block, if requested:
         if (self.options.mine_block):
             self.nodes[0].generate(1)
             self.sync_blocks(self.nodes[0:2])
 
         tx1 = self.nodes[0].gettransaction(txid1)
         tx2 = self.nodes[0].gettransaction(txid2)
 
         # Node0's balance should be starting balance, plus 50BTC for another
         # matured block, minus tx1 and tx2 amounts, and minus transaction fees:
         expected = starting_balance + node0_tx1["fee"] + node0_tx2["fee"]
         if self.options.mine_block:
             expected += 50000000
         expected += tx1["amount"] + tx1["fee"]
         expected += tx2["amount"] + tx2["fee"]
         assert_equal(self.nodes[0].getbalance(), expected)
 
         if self.options.mine_block:
             assert_equal(tx1["confirmations"], 1)
             assert_equal(tx2["confirmations"], 1)
         else:
             assert_equal(tx1["confirmations"], 0)
             assert_equal(tx2["confirmations"], 0)
 
         # Send clone and its parent to miner
         self.nodes[2].sendrawtransaction(node0_tx1["hex"])
         txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
 
         # ... mine a block...
         self.nodes[2].generate(1)
 
         # Reconnect the split network, and sync chain:
-        connect_nodes(self.nodes[1], self.nodes[2])
+        self.connect_nodes(1, 2)
         self.nodes[2].sendrawtransaction(node0_tx2["hex"])
         self.nodes[2].sendrawtransaction(tx2["hex"])
         self.nodes[2].generate(1)  # Mine another block to make sure we sync
         self.sync_blocks()
 
         # Re-fetch transaction info:
         tx1 = self.nodes[0].gettransaction(txid1)
         tx1_clone = self.nodes[0].gettransaction(txid1_clone)
         tx2 = self.nodes[0].gettransaction(txid2)
 
         # Verify expected confirmations
         assert_equal(tx1["confirmations"], -2)
         assert_equal(tx1_clone["confirmations"], 2)
         assert_equal(tx2["confirmations"], 1)
 
         # Check node0's total balance; should be same as before the clone, +
         # 100,000,000 XEC for 2 matured, less possible orphaned matured subsidy
         expected += 100000000
         if (self.options.mine_block):
             expected -= 50000000
         assert_equal(self.nodes[0].getbalance(), expected)
 
 
 if __name__ == '__main__':
     TxnMallTest().main()
diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py
index e091f3eb5..985414d81 100755
--- a/test/functional/wallet_txn_doublespend.py
+++ b/test/functional/wallet_txn_doublespend.py
@@ -1,155 +1,150 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2019 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test the wallet accounts properly when there is a double-spend conflict."""
 from decimal import Decimal
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    assert_equal,
-    connect_nodes,
-    disconnect_nodes,
-    find_output,
-)
+from test_framework.util import assert_equal, find_output
 
 
 class TxnMallTest(BitcoinTestFramework):
     def set_test_params(self):
         self.num_nodes = 4
         self.extra_args = [["-noparkdeepreorg"], ["-noparkdeepreorg"], [], []]
         self.supports_cli = False
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_wallet()
 
     def add_options(self, parser):
         parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
                             help="Test double-spend of 1-confirmed transaction")
 
     def setup_network(self):
         # Start with split network:
         super().setup_network()
-        disconnect_nodes(self.nodes[1], self.nodes[2])
+        self.disconnect_nodes(1, 2)
 
     def run_test(self):
         # All nodes should start with 1,250,000,000 XEC:
         starting_balance = 1250000000
 
         # All nodes should be out of IBD.
         # If the nodes are not all out of IBD, that can interfere with
         # blockchain sync later in the test when nodes are connected, due to
         # timing issues.
         for n in self.nodes:
             assert n.getblockchaininfo()["initialblockdownload"] is False
 
         for i in range(4):
             assert_equal(self.nodes[i].getbalance(), starting_balance)
             # bug workaround, coins generated assigned to first getnewaddress!
             self.nodes[i].getnewaddress("")
 
         # Assign coins to foo and bar addresses:
         node0_address_foo = self.nodes[0].getnewaddress()
         fund_foo_txid = self.nodes[0].sendtoaddress(
             node0_address_foo, 1219000000)
         fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
 
         node0_address_bar = self.nodes[0].getnewaddress()
         fund_bar_txid = self.nodes[0].sendtoaddress(
             node0_address_bar, 29000000)
         fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
 
         assert_equal(self.nodes[0].getbalance(),
                      starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"])
 
         # Coins are sent to node1_address
         node1_address = self.nodes[1].getnewaddress()
 
         # First: use raw transaction API to send 1,240,000,000 XEC to
         # node1_address, but don't broadcast:
         doublespend_fee = Decimal('-20000')
         rawtx_input_0 = {}
         rawtx_input_0["txid"] = fund_foo_txid
         rawtx_input_0["vout"] = find_output(
             self.nodes[0], fund_foo_txid, 1219000000)
         rawtx_input_1 = {}
         rawtx_input_1["txid"] = fund_bar_txid
         rawtx_input_1["vout"] = find_output(
             self.nodes[0], fund_bar_txid, 29000000)
         inputs = [rawtx_input_0, rawtx_input_1]
         change_address = self.nodes[0].getnewaddress()
         outputs = {}
         outputs[node1_address] = 1240000000
         outputs[change_address] = 1248000000 - 1240000000 + doublespend_fee
         rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
         doublespend = self.nodes[0].signrawtransactionwithwallet(rawtx)
         assert_equal(doublespend["complete"], True)
 
         # Create two spends using 1 50,000,000 XEC coin each
         txid1 = self.nodes[0].sendtoaddress(node1_address, 40000000)
         txid2 = self.nodes[0].sendtoaddress(node1_address, 20000000)
 
         # Have node0 mine a block:
         if (self.options.mine_block):
             self.nodes[0].generate(1)
             self.sync_blocks(self.nodes[0:2])
 
         tx1 = self.nodes[0].gettransaction(txid1)
         tx2 = self.nodes[0].gettransaction(txid2)
 
         # Node0's balance should be starting balance, plus 50,000,000 XEC for
         # another matured block, minus 40,000,000, minus 20,000,000, and minus
         # transaction fees:
         expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
         if self.options.mine_block:
             expected += 50000000
         expected += tx1["amount"] + tx1["fee"]
         expected += tx2["amount"] + tx2["fee"]
         assert_equal(self.nodes[0].getbalance(), expected)
 
         if self.options.mine_block:
             assert_equal(tx1["confirmations"], 1)
             assert_equal(tx2["confirmations"], 1)
             # Node1's balance should be both transaction amounts:
             assert_equal(self.nodes[1].getbalance(
             ), starting_balance - tx1["amount"] - tx2["amount"])
         else:
             assert_equal(tx1["confirmations"], 0)
             assert_equal(tx2["confirmations"], 0)
 
         # Now give doublespend and its parents to miner:
         self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
         self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
         doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
         # ... mine a block...
         self.nodes[2].generate(1)
 
         # Reconnect the split network, and sync chain:
-        connect_nodes(self.nodes[1], self.nodes[2])
+        self.connect_nodes(1, 2)
         self.nodes[2].generate(1)  # Mine another block to make sure we sync
         self.sync_blocks()
         assert_equal(self.nodes[0].gettransaction(
             doublespend_txid)["confirmations"], 2)
 
         # Re-fetch transaction info:
         tx1 = self.nodes[0].gettransaction(txid1)
         tx2 = self.nodes[0].gettransaction(txid2)
 
         # Both transactions should be conflicted
         assert_equal(tx1["confirmations"], -2)
         assert_equal(tx2["confirmations"], -2)
 
         # Node0's total balance should be starting balance, plus 100BTC for
         # two more matured blocks, minus 1240 for the double-spend, plus fees (which are
         # negative):
         expected = starting_balance + 100000000 - 1240000000 + \
             fund_foo_tx["fee"] + fund_bar_tx["fee"] + doublespend_fee
         assert_equal(self.nodes[0].getbalance(), expected)
 
         # Node1's balance should be its initial balance (1250 for 25 block
         # rewards) plus the doublespend:
         assert_equal(self.nodes[1].getbalance(), 1250000000 + 1240000000)
 
 
 if __name__ == '__main__':
     TxnMallTest().main()