diff --git a/test/functional/abandonconflict.py b/test/functional/abandonconflict.py
index f9e96c77f..9b2fdf125 100755
--- a/test/functional/abandonconflict.py
+++ b/test/functional/abandonconflict.py
@@ -1,181 +1,176 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 import urllib.parse
 
 
 class AbandonConflictTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 2
         self.setup_clean_chain = False
         self.extra_args = [["-minrelaytxfee=0.00001"], []]
 
     def run_test(self):
         self.nodes[1].generate(100)
         sync_blocks(self.nodes)
         balance = self.nodes[0].getbalance()
         txA = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), Decimal("10"))
         txB = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), Decimal("10"))
         txC = self.nodes[0].sendtoaddress(
             self.nodes[0].getnewaddress(), Decimal("10"))
         sync_mempools(self.nodes)
         self.nodes[1].generate(1)
 
         sync_blocks(self.nodes)
         newbalance = self.nodes[0].getbalance()
         # no more than fees lost
         assert(balance - newbalance < Decimal("0.001"))
         balance = newbalance
 
         url = urllib.parse.urlparse(self.nodes[1].url)
         self.nodes[0].disconnectnode(url.hostname + ":" + str(p2p_port(1)))
 
         # Identify the 10btc outputs
         nA = next(i for i, vout in enumerate(
             self.nodes[0].getrawtransaction(txA, 1)["vout"]) if vout["value"] == Decimal("10"))
         nB = next(i for i, vout in enumerate(
             self.nodes[0].getrawtransaction(txB, 1)["vout"]) if vout["value"] == Decimal("10"))
         nC = next(i for i, vout in enumerate(
             self.nodes[0].getrawtransaction(txC, 1)["vout"]) if vout["value"] == Decimal("10"))
 
         inputs = []
         # spend 10btc outputs from txA and txB
         inputs.append({"txid": txA, "vout": nA})
         inputs.append({"txid": txB, "vout": nB})
         outputs = {}
 
         outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998")
         outputs[self.nodes[1].getnewaddress()] = Decimal("5")
         signed = self.nodes[0].signrawtransaction(
             self.nodes[0].createrawtransaction(inputs, outputs), None, None, "ALL|FORKID")
         txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])
 
         # Identify the 14.99998btc output
         nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(
             txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998"))
 
         # Create a child tx spending AB1 and C
         inputs = []
         inputs.append({"txid": txAB1, "vout": nAB})
         inputs.append({"txid": txC, "vout": nC})
         outputs = {}
         outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996")
         signed2 = self.nodes[0].signrawtransaction(
             self.nodes[0].createrawtransaction(inputs, outputs), None, None, "ALL|FORKID")
         txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])
 
         # In mempool txs from self should increase balance from change
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance - Decimal("30") + Decimal("24.9996"))
         balance = newbalance
 
         # Restart the node with a higher min relay fee so the parent tx is no longer in mempool
         # TODO: redo with eviction
-        # Note had to make sure tx did not have AllowFree priority
-        stop_node(self.nodes[0], 0)
-        self.nodes[0] = start_node(0, self.options.tmpdir, [
-                                   "-logtimemicros",
-                                   "-minrelaytxfee=0.0001"])
+        self.stop_node(0)
+        self.nodes[0] = self.start_node(0, self.options.tmpdir, [
+                                        "-minrelaytxfee=0.0001"])
 
         # Verify txs no longer in mempool
         assert_equal(len(self.nodes[0].getrawmempool()), 0)
 
         # Not in mempool txs from self should only reduce balance
         # inputs are still spent, but change not received
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance - Decimal("24.9996"))
         # Unconfirmed received funds that are not in mempool, also shouldn't show
         # up in unconfirmed balance
         unconfbalance = self.nodes[
             0].getunconfirmedbalance() + self.nodes[0].getbalance()
         assert_equal(unconfbalance, newbalance)
         # Also shouldn't show up in listunspent
         assert(not txABC2 in [utxo["txid"]
                               for utxo in self.nodes[0].listunspent(0)])
         balance = newbalance
 
         # Abandon original transaction and verify inputs are available again
         # including that the child tx was also abandoned
         self.nodes[0].abandontransaction(txAB1)
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance + Decimal("30"))
         balance = newbalance
 
-        # Verify that even with a low min relay fee, the tx is not reaccepted
-        # from wallet on startup once abandoned
-        stop_node(self.nodes[0], 0)
-        self.nodes[0] = start_node(0, self.options.tmpdir, [
-                                   "-logtimemicros",
-                                   "-minrelaytxfee=0.00001"])
+        # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
+        self.stop_node(0)
+        self.nodes[0] = self.start_node(0, self.options.tmpdir, [
+                                        "-minrelaytxfee=0.00001"])
         assert_equal(len(self.nodes[0].getrawmempool()), 0)
         assert_equal(self.nodes[0].getbalance(), balance)
 
         # But if its received again then it is unabandoned
         # And since now in mempool, the change is available
         # But its child tx remains abandoned
         self.nodes[0].sendrawtransaction(signed["hex"])
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998"))
         balance = newbalance
 
         # Send child tx again so its unabandoned
         self.nodes[0].sendrawtransaction(signed2["hex"])
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance - Decimal("10")
                      - Decimal("14.99998") + Decimal("24.9996"))
         balance = newbalance
 
         # Remove using high relay fee again
-        stop_node(self.nodes[0], 0)
-        self.nodes[0] = start_node(0, self.options.tmpdir, [
-                                   "-logtimemicros",
-                                   "-minrelaytxfee=0.0001"])
+        self.stop_node(0)
+        self.nodes[0] = self.start_node(0, self.options.tmpdir, [
+                                        "-minrelaytxfee=0.0001"])
         assert_equal(len(self.nodes[0].getrawmempool()), 0)
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance - Decimal("24.9996"))
         balance = newbalance
 
         # Create a double spend of AB1 by spending again from only A's 10 output
         # Mine double spend from node 1
         inputs = []
         inputs.append({"txid": txA, "vout": nA})
         outputs = {}
         outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999")
         tx = self.nodes[0].createrawtransaction(inputs, outputs)
         signed = self.nodes[0].signrawtransaction(tx, None, None, "ALL|FORKID")
         self.nodes[1].sendrawtransaction(signed["hex"])
         self.nodes[1].generate(1)
 
         connect_nodes(self.nodes[0], 1)
         sync_blocks(self.nodes)
 
         # Verify that B and C's 10 BTC outputs are available for spending again
         # because AB1 is now conflicted
         newbalance = self.nodes[0].getbalance()
         assert_equal(newbalance, balance + Decimal("20"))
         balance = newbalance
 
         # There is currently a minor bug around this and so this test doesn't work.  See Issue #7315
         # Invalidate the block with the double spend and B's 10 BTC output should no longer be available
         # Don't think C's should either
         self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
         newbalance = self.nodes[0].getbalance()
         # assert_equal(newbalance, balance - Decimal("10"))
         self.log.info(
             "If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer")
         self.log.info(
             "conflicted has not resumed causing its inputs to be seen as spent.  See Issue #7315")
         self.log.info(str(balance) + " -> " + str(newbalance) + " ?")
 
 
 if __name__ == '__main__':
     AbandonConflictTest().main()
diff --git a/test/functional/assumevalid.py b/test/functional/assumevalid.py
index c7cfafac2..ecd5e1cf4 100755
--- a/test/functional/assumevalid.py
+++ b/test/functional/assumevalid.py
@@ -1,215 +1,215 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test logic for skipping signature validation on old blocks.
 
 Test logic for skipping signature validation on blocks which we've assumed
 valid (https://github.com/bitcoin/bitcoin/pull/9484)
 
 We build a chain that includes and invalid signature for one of the
 transactions:
 
     0:        genesis block
     1:        block 1 with coinbase transaction output.
     2-101:    bury that block with 100 blocks so the coinbase transaction
               output can be spent
     102:      a block containing a transaction spending the coinbase
               transaction output. The transaction has an invalid signature.
     103-2202: bury the bad block with just over two weeks' worth of blocks
               (2100 blocks)
 
 Start three nodes:
 
     - node0 has no -assumevalid parameter. Try to sync to block 2202. It will
       reject block 102 and only sync as far as block 101
     - node1 has -assumevalid set to the hash of block 102. Try to sync to
       block 2202. node1 will sync all the way to block 2202.
     - node2 has -assumevalid set to the hash of block 102. Try to sync to
       block 200. node2 will reject block 102 since it's assumed valid, but it
       isn't buried by at least two weeks' work.
 """
 import time
 
 from test_framework.blocktools import (create_block, create_coinbase)
 from test_framework.key import CECKey
 from test_framework.mininode import (CBlockHeader,
                                      COutPoint,
                                      CTransaction,
                                      CTxIn,
                                      CTxOut,
                                      NetworkThread,
                                      NodeConn,
                                      NodeConnCB,
                                      msg_block,
                                      msg_headers)
 from test_framework.script import (CScript, OP_TRUE)
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (start_node, p2p_port, assert_equal)
+from test_framework.util import (p2p_port, assert_equal)
 
 
 class BaseNode(NodeConnCB):
     def send_header_for_blocks(self, new_blocks):
         headers_message = msg_headers()
         headers_message.headers = [CBlockHeader(b) for b in new_blocks]
         self.send_message(headers_message)
 
 
 class AssumeValidTest(BitcoinTestFramework):
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 3
 
     def setup_network(self):
         # Start node0. We don't start the other nodes yet since
         # we need to pre-mine a block with an invalid transaction
         # signature so we can pass in the block hash as assumevalid.
-        self.nodes = [start_node(0, self.options.tmpdir)]
+        self.nodes = [self.start_node(0, self.options.tmpdir)]
 
     def send_blocks_until_disconnected(self, node):
         """Keep sending blocks to the node until we're disconnected."""
         for i in range(len(self.blocks)):
             try:
                 node.send_message(msg_block(self.blocks[i]))
             except IOError as e:
                 assert str(e) == 'Not connected, no pushbuf'
                 break
 
     def assert_blockchain_height(self, node, height):
         """Wait until the blockchain is no longer advancing and verify it's reached the expected height."""
         last_height = node.getblock(node.getbestblockhash())['height']
         timeout = 10
         while True:
             time.sleep(0.25)
             current_height = node.getblock(node.getbestblockhash())['height']
             if current_height != last_height:
                 last_height = current_height
                 if timeout < 0:
                     assert False, "blockchain too short after timeout: %d" % current_height
                 timeout - 0.25
                 continue
             elif current_height > height:
                 assert False, "blockchain too long: %d" % current_height
             elif current_height == height:
                 break
 
     def run_test(self):
 
         # Connect to node0
         node0 = BaseNode()
         connections = []
         connections.append(
             NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0))
         node0.add_connection(connections[0])
 
         NetworkThread().start()  # Start up network handling in another thread
         node0.wait_for_verack()
 
         # Build the blockchain
         self.tip = int(self.nodes[0].getbestblockhash(), 16)
         self.block_time = self.nodes[0].getblock(
             self.nodes[0].getbestblockhash())['time'] + 1
 
         self.blocks = []
 
         # Get a pubkey for the coinbase TXO
         coinbase_key = CECKey()
         coinbase_key.set_secretbytes(b"horsebattery")
         coinbase_pubkey = coinbase_key.get_pubkey()
 
         # Create the first block with a coinbase output to our key
         height = 1
         block = create_block(self.tip, create_coinbase(
             height, coinbase_pubkey), self.block_time)
         self.blocks.append(block)
         self.block_time += 1
         block.solve()
         # Save the coinbase for later
         self.block1 = block
         self.tip = block.sha256
         height += 1
 
         # Bury the block 100 deep so the coinbase output is spendable
         for i in range(100):
             block = create_block(
                 self.tip, create_coinbase(height), self.block_time)
             block.solve()
             self.blocks.append(block)
             self.tip = block.sha256
             self.block_time += 1
             height += 1
 
         # Create a transaction spending the coinbase output with an invalid (null) signature
         tx = CTransaction()
         tx.vin.append(
             CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
         tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE])))
         tx.calc_sha256()
 
         block102 = create_block(
             self.tip, create_coinbase(height), self.block_time)
         self.block_time += 1
         block102.vtx.extend([tx])
         block102.hashMerkleRoot = block102.calc_merkle_root()
         block102.rehash()
         block102.solve()
         self.blocks.append(block102)
         self.tip = block102.sha256
         self.block_time += 1
         height += 1
 
         # Bury the assumed valid block 2100 deep
         for i in range(2100):
             block = create_block(
                 self.tip, create_coinbase(height), self.block_time)
             block.nVersion = 4
             block.solve()
             self.blocks.append(block)
             self.tip = block.sha256
             self.block_time += 1
             height += 1
 
         # Start node1 and node2 with assumevalid so they accept a block with a bad signature.
-        self.nodes.append(start_node(1, self.options.tmpdir,
-                                     ["-assumevalid=" + hex(block102.sha256)]))
+        self.nodes.append(self.start_node(1, self.options.tmpdir,
+                                          ["-assumevalid=" + hex(block102.sha256)]))
         node1 = BaseNode()  # connects to node1
         connections.append(
             NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], node1))
         node1.add_connection(connections[1])
         node1.wait_for_verack()
 
-        self.nodes.append(start_node(2, self.options.tmpdir,
-                                     ["-assumevalid=" + hex(block102.sha256)]))
+        self.nodes.append(self.start_node(2, self.options.tmpdir,
+                                          ["-assumevalid=" + hex(block102.sha256)]))
         node2 = BaseNode()  # connects to node2
         connections.append(
             NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2))
         node2.add_connection(connections[2])
         node2.wait_for_verack()
 
         # send header lists to all three nodes
         node0.send_header_for_blocks(self.blocks[0:2000])
         node0.send_header_for_blocks(self.blocks[2000:])
         node1.send_header_for_blocks(self.blocks[0:2000])
         node1.send_header_for_blocks(self.blocks[2000:])
         node2.send_header_for_blocks(self.blocks[0:200])
 
         # Send blocks to node0. Block 102 will be rejected.
         self.send_blocks_until_disconnected(node0)
         self.assert_blockchain_height(self.nodes[0], 101)
 
         # Send all blocks to node1. All blocks will be accepted.
         for i in range(2202):
             node1.send_message(msg_block(self.blocks[i]))
         # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
         node1.sync_with_ping(120)
         assert_equal(self.nodes[1].getblock(
             self.nodes[1].getbestblockhash())['height'], 2202)
 
         # Send blocks to node2. Block 102 will be rejected.
         self.send_blocks_until_disconnected(node2)
         self.assert_blockchain_height(self.nodes[2], 101)
 
 
 if __name__ == '__main__':
     AssumeValidTest().main()
diff --git a/test/functional/bip9-softforks.py b/test/functional/bip9-softforks.py
index 4d8068aa6..03e7d2a2b 100755
--- a/test/functional/bip9-softforks.py
+++ b/test/functional/bip9-softforks.py
@@ -1,258 +1,258 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.blockstore import BlockStore
 from test_framework.test_framework import ComparisonTestFramework
 from test_framework.util import *
 from test_framework.mininode import CTransaction, NetworkThread
 from test_framework.blocktools import create_coinbase, create_block
 from test_framework.comptool import TestInstance, TestManager
 from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP
 from io import BytesIO
 import time
 import itertools
 
 '''
 This test is meant to exercise BIP forks
 Connect to a single node.
 regtest lock-in with 108/144 block signalling
 activation after a further 144 blocks
 mine 2 block and save coinbases for later use
 mine 141 blocks to transition from DEFINED to STARTED
 mine 100 blocks signalling readiness and 44 not in order to fail to change state this period
 mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
 mine a further 143 blocks (LOCKED_IN)
 test that enforcement has not triggered (which triggers ACTIVE)
 test that enforcement has triggered
 '''
 
 
 class BIP9SoftForksTest(ComparisonTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 1
         self.extra_args = [['-whitelist=127.0.0.1']]
 
     def run_test(self):
         self.test = TestManager(self, self.options.tmpdir)
         self.test.add_all_connections(self.nodes)
         NetworkThread().start()  # Start up network handling in another thread
         self.test.run()
 
     def create_transaction(self, node, coinbase, to_address, amount):
         from_txid = node.getblock(coinbase)['tx'][0]
         inputs = [{"txid": from_txid, "vout": 0}]
         outputs = {to_address: amount}
         rawtx = node.createrawtransaction(inputs, outputs)
         tx = CTransaction()
         f = BytesIO(hex_str_to_bytes(rawtx))
         tx.deserialize(f)
         tx.nVersion = 2
         return tx
 
     def sign_transaction(self, node, tx):
         signresult = node.signrawtransaction(
             bytes_to_hex_str(tx.serialize()), None, None, "ALL|FORKID")
         tx = CTransaction()
         f = BytesIO(hex_str_to_bytes(signresult['hex']))
         tx.deserialize(f)
         return tx
 
     def generate_blocks(self, number, version, test_blocks=[]):
         for i in range(number):
             block = create_block(
                 self.tip, create_coinbase(self.height), self.last_block_time + 1)
             block.nVersion = version
             block.rehash()
             block.solve()
             test_blocks.append([block, True])
             self.last_block_time += 1
             self.tip = block.sha256
             self.height += 1
         return test_blocks
 
     def get_bip9_status(self, key):
         info = self.nodes[0].getblockchaininfo()
         return info['bip9_softforks'][key]
 
     def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature, bitno):
         assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
         assert_equal(self.get_bip9_status(bipName)['since'], 0)
 
         # generate some coins for later
         self.coinbase_blocks = self.nodes[0].generate(2)
         self.height = 3  # height of the next block to build
         self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
         self.nodeaddress = self.nodes[0].getnewaddress()
         self.last_block_time = int(time.time())
 
         assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
         assert_equal(self.get_bip9_status(bipName)['since'], 0)
         tmpl = self.nodes[0].getblocktemplate({})
         assert(bipName not in tmpl['rules'])
         assert(bipName not in tmpl['vbavailable'])
         assert_equal(tmpl['vbrequired'], 0)
         assert_equal(tmpl['version'], 0x20000000)
 
         # Test 1
         # Advance from DEFINED to STARTED
         test_blocks = self.generate_blocks(141, 4)
         yield TestInstance(test_blocks, sync_every_block=False)
 
         assert_equal(self.get_bip9_status(bipName)['status'], 'started')
         assert_equal(self.get_bip9_status(bipName)['since'], 144)
         tmpl = self.nodes[0].getblocktemplate({})
         assert(bipName not in tmpl['rules'])
         assert_equal(tmpl['vbavailable'][bipName], bitno)
         assert_equal(tmpl['vbrequired'], 0)
         assert(tmpl['version'] & activated_version)
 
         # Test 2
         # Fail to achieve LOCKED_IN 100 out of 144 signal bit 1
         # using a variety of bits to simulate multiple parallel softforks
         test_blocks = self.generate_blocks(
             50, activated_version)  # 0x20000001 (signalling ready)
         test_blocks = self.generate_blocks(
             20, 4, test_blocks)  # 0x00000004 (signalling not)
         test_blocks = self.generate_blocks(
             50, activated_version, test_blocks)  # 0x20000101 (signalling ready)
         test_blocks = self.generate_blocks(
             24, 4, test_blocks)  # 0x20010000 (signalling not)
         yield TestInstance(test_blocks, sync_every_block=False)
 
         assert_equal(self.get_bip9_status(bipName)['status'], 'started')
         assert_equal(self.get_bip9_status(bipName)['since'], 144)
         tmpl = self.nodes[0].getblocktemplate({})
         assert(bipName not in tmpl['rules'])
         assert_equal(tmpl['vbavailable'][bipName], bitno)
         assert_equal(tmpl['vbrequired'], 0)
         assert(tmpl['version'] & activated_version)
 
         # Test 3
         # 108 out of 144 signal bit 1 to achieve LOCKED_IN
         # using a variety of bits to simulate multiple parallel softforks
         # 0x20000001 (signalling ready)
         test_blocks = self.generate_blocks(58, activated_version)
         # 0x00000004 (signalling not)
         test_blocks = self.generate_blocks(26, 4, test_blocks)
         # 0x20000101 (signalling ready)
         test_blocks = self.generate_blocks(50, activated_version, test_blocks)
         # 0x20010000 (signalling not)
         test_blocks = self.generate_blocks(10, 4, test_blocks)
         yield TestInstance(test_blocks, sync_every_block=False)
 
         assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
         assert_equal(self.get_bip9_status(bipName)['since'], 432)
         tmpl = self.nodes[0].getblocktemplate({})
         assert(bipName not in tmpl['rules'])
 
         # Test 4
         # 143 more version 536870913 blocks (waiting period-1)
         test_blocks = self.generate_blocks(143, 4)
         yield TestInstance(test_blocks, sync_every_block=False)
 
         assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
         assert_equal(self.get_bip9_status(bipName)['since'], 432)
         tmpl = self.nodes[0].getblocktemplate({})
         assert(bipName not in tmpl['rules'])
 
         # Test 5
         # Check that the new rule is enforced
         spendtx = self.create_transaction(self.nodes[0],
                                           self.coinbase_blocks[0], self.nodeaddress, 1.0)
         invalidate(spendtx)
         spendtx = self.sign_transaction(self.nodes[0], spendtx)
         spendtx.rehash()
         invalidatePostSignature(spendtx)
         spendtx.rehash()
         block = create_block(
             self.tip, create_coinbase(self.height), self.last_block_time + 1)
         block.nVersion = activated_version
         block.vtx.append(spendtx)
         block.hashMerkleRoot = block.calc_merkle_root()
         block.rehash()
         block.solve()
 
         self.last_block_time += 1
         self.tip = block.sha256
         self.height += 1
         yield TestInstance([[block, True]])
 
         assert_equal(self.get_bip9_status(bipName)['status'], 'active')
         assert_equal(self.get_bip9_status(bipName)['since'], 576)
         tmpl = self.nodes[0].getblocktemplate({})
         assert(bipName in tmpl['rules'])
         assert(bipName not in tmpl['vbavailable'])
         assert_equal(tmpl['vbrequired'], 0)
         assert(not (tmpl['version'] & (1 << bitno)))
 
         # Test 6
         # Check that the new sequence lock rules are enforced
         spendtx = self.create_transaction(self.nodes[0],
                                           self.coinbase_blocks[1], self.nodeaddress, 1.0)
         invalidate(spendtx)
         spendtx = self.sign_transaction(self.nodes[0], spendtx)
         spendtx.rehash()
         invalidatePostSignature(spendtx)
         spendtx.rehash()
 
         block = create_block(
             self.tip, create_coinbase(self.height), self.last_block_time + 1)
         block.nVersion = 5
         block.vtx.append(spendtx)
         block.hashMerkleRoot = block.calc_merkle_root()
         block.rehash()
         block.solve()
         self.last_block_time += 1
         yield TestInstance([[block, False]])
 
         # Restart all
         self.test.clear_all_connections()
-        stop_nodes(self.nodes)
+        self.stop_nodes()
         shutil.rmtree(self.options.tmpdir + "/node0")
         self.setup_chain()
         self.setup_network()
         self.test.add_all_connections(self.nodes)
         NetworkThread().start()
         self.test.test_nodes[0].wait_for_verack()
 
     def get_tests(self):
         for test in itertools.chain(
                 self.test_BIP(
                     'csv', 0x20000001, self.sequence_lock_invalidate, self.donothing, 0),
                 self.test_BIP(
                     'csv', 0x20000001, self.mtp_invalidate, self.donothing, 0),
                 self.test_BIP(
                     'csv', 0x20000001, self.donothing, self.csv_invalidate, 0)
         ):
             yield test
 
     def donothing(self, tx):
         return
 
     def csv_invalidate(self, tx):
         '''Modify the signature in vin 0 of the tx to fail CSV
         Prepends -1 CSV DROP in the scriptSig itself.
         '''
         tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP] +
                                       list(CScript(tx.vin[0].scriptSig)))
 
     def sequence_lock_invalidate(self, tx):
         '''Modify the nSequence to make it fails once sequence lock rule is activated (high timespan)
         '''
         tx.vin[0].nSequence = 0x00FFFFFF
         tx.nLockTime = 0
 
     def mtp_invalidate(self, tx):
         '''Modify the nLockTime to make it fails once MTP rule is activated
         '''
         # Disable Sequence lock, Activate nLockTime
         tx.vin[0].nSequence = 0x90FFFFFF
         tx.nLockTime = self.last_block_time
 
 
 if __name__ == '__main__':
     BIP9SoftForksTest().main()
diff --git a/test/functional/blockchain.py b/test/functional/blockchain.py
index 8ac16c5b8..c9db70d89 100755
--- a/test/functional/blockchain.py
+++ b/test/functional/blockchain.py
@@ -1,184 +1,183 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 """Test RPCs related to blockchainstate.
 
 Test the following RPCs:
     - gettxoutsetinfo
     - getdifficulty
     - getbestblockhash
     - getblockhash
     - getblockheader
     - getchaintxstats
     - getnetworkhashps
     - verifychain
 
 Tests correspond to code in rpc/blockchain.cpp.
 """
 
 from decimal import Decimal
 import http.client
 import subprocess
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_equal,
     assert_raises,
     assert_raises_jsonrpc,
     assert_is_hex_string,
     assert_is_hash_string,
-    start_node,
     bitcoind_processes,
     BITCOIND_PROC_WAIT_TIMEOUT,
 )
 
 
 class BlockchainTest(BitcoinTestFramework):
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = False
         self.num_nodes = 1
         self.extra_args = [['-stopatheight=207']]
 
     def run_test(self):
         self._test_getchaintxstats()
         self._test_gettxoutsetinfo()
         self._test_getblockheader()
         self._test_getdifficulty()
         self._test_getnetworkhashps()
         self._test_stopatheight()
         assert self.nodes[0].verifychain(4, 0)
 
     def _test_getchaintxstats(self):
         chaintxstats = self.nodes[0].getchaintxstats(1)
         # 200 txs plus genesis tx
         assert_equal(chaintxstats['txcount'], 201)
         # tx rate should be 1 per 10 minutes, or 1/600
         # we have to round because of binary math
         assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))
 
         b1 = self.nodes[0].getblock(self.nodes[0].getblockhash(1))
         b200 = self.nodes[0].getblock(self.nodes[0].getblockhash(200))
         time_diff = b200['mediantime'] - b1['mediantime']
 
         chaintxstats = self.nodes[0].getchaintxstats()
         assert_equal(chaintxstats['time'], b200['time'])
         assert_equal(chaintxstats['txcount'], 201)
         assert_equal(chaintxstats['window_block_count'], 199)
         assert_equal(chaintxstats['window_tx_count'], 199)
         assert_equal(chaintxstats['window_interval'], time_diff)
         assert_equal(
             round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))
 
         chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1['hash'])
         assert_equal(chaintxstats['time'], b1['time'])
         assert_equal(chaintxstats['txcount'], 2)
         assert_equal(chaintxstats['window_block_count'], 0)
         assert('window_tx_count' not in chaintxstats)
         assert('window_interval' not in chaintxstats)
         assert('txrate' not in chaintxstats)
 
         assert_raises_jsonrpc(-8, "Invalid block count: should be between 0 and the block's height - 1",
                               self.nodes[0].getchaintxstats, 201)
 
     def _test_gettxoutsetinfo(self):
         node = self.nodes[0]
         res = node.gettxoutsetinfo()
 
         assert_equal(res['total_amount'], Decimal('8725.00000000'))
         assert_equal(res['transactions'], 200)
         assert_equal(res['height'], 200)
         assert_equal(res['txouts'], 200)
         assert_equal(res['bogosize'], 17000),
         assert_equal(res['bestblock'], node.getblockhash(200))
         size = res['disk_size']
         assert size > 6400
         assert size < 64000
         assert_equal(len(res['bestblock']), 64)
         assert_equal(len(res['hash_serialized']), 64)
 
         self.log.info(
             "Test that gettxoutsetinfo() works for blockchain with just the genesis block")
         b1hash = node.getblockhash(1)
         node.invalidateblock(b1hash)
 
         res2 = node.gettxoutsetinfo()
         assert_equal(res2['transactions'], 0)
         assert_equal(res2['total_amount'], Decimal('0'))
         assert_equal(res2['height'], 0)
         assert_equal(res2['txouts'], 0)
         assert_equal(res2['bogosize'], 0),
         assert_equal(res2['bestblock'], node.getblockhash(0))
         assert_equal(len(res2['hash_serialized']), 64)
 
         self.log.info(
             "Test that gettxoutsetinfo() returns the same result after invalidate/reconsider block")
         node.reconsiderblock(b1hash)
 
         res3 = node.gettxoutsetinfo()
         assert_equal(res['total_amount'], res3['total_amount'])
         assert_equal(res['transactions'], res3['transactions'])
         assert_equal(res['height'], res3['height'])
         assert_equal(res['txouts'], res3['txouts'])
         assert_equal(res['bogosize'], res3['bogosize'])
         assert_equal(res['bestblock'], res3['bestblock'])
         assert_equal(res['hash_serialized'], res3['hash_serialized'])
 
     def _test_getblockheader(self):
         node = self.nodes[0]
 
         assert_raises_jsonrpc(-5, "Block not found",
                               node.getblockheader, "nonsense")
 
         besthash = node.getbestblockhash()
         secondbesthash = node.getblockhash(199)
         header = node.getblockheader(besthash)
 
         assert_equal(header['hash'], besthash)
         assert_equal(header['height'], 200)
         assert_equal(header['confirmations'], 1)
         assert_equal(header['previousblockhash'], secondbesthash)
         assert_is_hex_string(header['chainwork'])
         assert_is_hash_string(header['hash'])
         assert_is_hash_string(header['previousblockhash'])
         assert_is_hash_string(header['merkleroot'])
         assert_is_hash_string(header['bits'], length=None)
         assert isinstance(header['time'], int)
         assert isinstance(header['mediantime'], int)
         assert isinstance(header['nonce'], int)
         assert isinstance(header['version'], int)
         assert isinstance(int(header['versionHex'], 16), int)
         assert isinstance(header['difficulty'], Decimal)
 
     def _test_getdifficulty(self):
         difficulty = self.nodes[0].getdifficulty()
         # 1 hash in 2 should be valid, so difficulty should be 1/2**31
         # binary => decimal => binary math is why we do this check
         assert abs(difficulty * 2**31 - 1) < 0.0001
 
     def _test_getnetworkhashps(self):
         hashes_per_second = self.nodes[0].getnetworkhashps()
         # This should be 2 hashes every 10 minutes or 1/300
         assert abs(hashes_per_second * 300 - 1) < 0.0001
 
     def _test_stopatheight(self):
         assert_equal(self.nodes[0].getblockcount(), 200)
         self.nodes[0].generate(6)
         assert_equal(self.nodes[0].getblockcount(), 206)
         self.log.debug('Node should not stop at this height')
         assert_raises(subprocess.TimeoutExpired,
                       lambda: bitcoind_processes[0].wait(timeout=3))
         try:
             self.nodes[0].generate(1)
         except (ConnectionError, http.client.BadStatusLine):
             pass  # The node already shut down before response
         self.log.debug('Node should stop at this height...')
         bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
-        self.nodes[0] = start_node(0, self.options.tmpdir)
+        self.nodes[0] = self.start_node(0, self.options.tmpdir)
         assert_equal(self.nodes[0].getblockcount(), 207)
 
 
 if __name__ == '__main__':
     BlockchainTest().main()
diff --git a/test/functional/disconnect_ban.py b/test/functional/disconnect_ban.py
index 4f82b6006..284697c59 100755
--- a/test/functional/disconnect_ban.py
+++ b/test/functional/disconnect_ban.py
@@ -1,128 +1,125 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test node disconnect and ban behavior"""
 
 from test_framework.mininode import wait_until
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (assert_equal,
                                  assert_raises_jsonrpc,
-                                 connect_nodes_bi,
-                                 start_node,
-                                 stop_node,
-                                 )
+                                 connect_nodes_bi)
 
 
 class DisconnectBanTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 2
         self.setup_clean_chain = False
 
     def run_test(self):
         self.log.info("Test setban and listbanned RPCs")
 
         self.log.info("setban: successfully ban single IP address")
         # node1 should have 2 connections to node0 at this point
         assert_equal(len(self.nodes[1].getpeerinfo()), 2)
         self.nodes[1].setban("127.0.0.1", "add")
         wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0)
         # all nodes must be disconnected at this point
         assert_equal(len(self.nodes[1].getpeerinfo()), 0)
         assert_equal(len(self.nodes[1].listbanned()), 1)
 
         self.log.info("clearbanned: successfully clear ban list")
         self.nodes[1].clearbanned()
         assert_equal(len(self.nodes[1].listbanned()), 0)
         self.nodes[1].setban("127.0.0.0/24", "add")
 
         self.log.info("setban: fail to ban an already banned subnet")
         assert_equal(len(self.nodes[1].listbanned()), 1)
         assert_raises_jsonrpc(
             -23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
 
         self.log.info("setban: fail to ban an invalid subnet")
         assert_raises_jsonrpc(
             -30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
         # still only one banned ip because 127.0.0.1 is within the range of
         # 127.0.0.0/24
         assert_equal(len(self.nodes[1].listbanned()), 1)
 
         self.log.info("setban remove: fail to unban a non-banned subnet")
         assert_raises_jsonrpc(
             -30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
         assert_equal(len(self.nodes[1].listbanned()), 1)
 
         self.log.info("setban remove: successfully unban subnet")
         self.nodes[1].setban("127.0.0.0/24", "remove")
         assert_equal(len(self.nodes[1].listbanned()), 0)
         self.nodes[1].clearbanned()
         assert_equal(len(self.nodes[1].listbanned()), 0)
 
         self.log.info("setban: test persistence across node restart")
         self.nodes[1].setban("127.0.0.0/32", "add")
         self.nodes[1].setban("127.0.0.0/24", "add")
         # ban for 1 seconds
         self.nodes[1].setban("192.168.0.1", "add", 1)
         # ban for 1000 seconds
         self.nodes[1].setban(
             "2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000)
         listBeforeShutdown = self.nodes[1].listbanned()
         assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
         wait_until(lambda: len(self.nodes[1].listbanned()) == 3)
 
-        stop_node(self.nodes[1], 1)
+        self.stop_node(1)
 
-        self.nodes[1] = start_node(1, self.options.tmpdir)
+        self.nodes[1] = self.start_node(1, self.options.tmpdir)
         listAfterShutdown = self.nodes[1].listbanned()
         assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
         assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
         assert_equal("/19" in listAfterShutdown[2]['address'], True)
 
         # Clear ban lists
         self.nodes[1].clearbanned()
         connect_nodes_bi(self.nodes, 0, 1)
 
         self.log.info("Test disconnectnode RPCs")
 
         self.log.info(
             "disconnectnode: fail to disconnect when calling with address and nodeid")
         address1 = self.nodes[0].getpeerinfo()[0]['addr']
         node1 = self.nodes[0].getpeerinfo()[0]['addr']
         assert_raises_jsonrpc(
             -32602, "Only one of address and nodeid should be provided.",
             self.nodes[0].disconnectnode, address=address1, nodeid=node1)
 
         self.log.info(
             "disconnectnode: fail to disconnect when calling with junk address")
         assert_raises_jsonrpc(-29, "Node not found in connected nodes",
                               self.nodes[0].disconnectnode, address="221B Baker Street")
 
         self.log.info(
             "disconnectnode: successfully disconnect node by address")
         address1 = self.nodes[0].getpeerinfo()[0]['addr']
         self.nodes[0].disconnectnode(address=address1)
         wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1)
         assert not [node for node in self.nodes[0]
                     .getpeerinfo() if node['addr'] == address1]
 
         self.log.info("disconnectnode: successfully reconnect node")
         # reconnect the node
         connect_nodes_bi(self.nodes, 0, 1)
         assert_equal(len(self.nodes[0].getpeerinfo()), 2)
         assert [node for node in self.nodes[0]
                 .getpeerinfo() if node['addr'] == address1]
 
         self.log.info(
             "disconnectnode: successfully disconnect node by node id")
         id1 = self.nodes[0].getpeerinfo()[0]['id']
         self.nodes[0].disconnectnode(nodeid=id1)
         wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1)
         assert not [node for node in self.nodes[
             0].getpeerinfo() if node['id'] == id1]
 
 
 if __name__ == '__main__':
     DisconnectBanTest().main()
diff --git a/test/functional/forknotify.py b/test/functional/forknotify.py
index 34d156087..bff8ab736 100755
--- a/test/functional/forknotify.py
+++ b/test/functional/forknotify.py
@@ -1,77 +1,77 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 #
 # Test -alertnotify
 #
 """Test the -alertnotify option."""
 
 import os
 import time
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 
 class ForkNotifyTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 2
         self.setup_clean_chain = False
 
     def setup_network(self):
         self.nodes = []
         self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
         with open(self.alert_filename, 'w', encoding='utf8'):
             pass  # Just open then close to create zero-length file
-        self.nodes.append(start_node(0, self.options.tmpdir,
-                                     ["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
+        self.nodes.append(self.start_node(0, self.options.tmpdir,
+                                          ["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
         # Node1 mines block.version=211 blocks
-        self.nodes.append(start_node(1, self.options.tmpdir,
-                                     ["-blockversion=211"]))
+        self.nodes.append(self.start_node(1, self.options.tmpdir,
+                                          ["-blockversion=211"]))
         connect_nodes(self.nodes[1], 0)
 
         self.sync_all()
 
     def run_test(self):
         # Mine 51 up-version blocks
         self.nodes[1].generate(51)
         self.sync_all()
         # -alertnotify should trigger on the 51'st,
         # but mine and sync another to give
         # -alertnotify time to write
         self.nodes[1].generate(1)
         self.sync_all()
 
         # Give bitcoind 10 seconds to write the alert notification
         timeout = 10.0
         while timeout > 0:
             if os.path.exists(self.alert_filename) and os.path.getsize(self.alert_filename):
                 break
             time.sleep(0.1)
             timeout -= 0.1
         else:
             assert False, "-alertnotify did not warn of up-version blocks"
 
         with open(self.alert_filename, 'r', encoding='utf8') as f:
             alert_text = f.read()
 
         # Mine more up-version blocks, should not get more alerts:
         self.nodes[1].generate(1)
         self.sync_all()
         self.nodes[1].generate(1)
         self.sync_all()
 
         with open(self.alert_filename, 'r', encoding='utf8') as f:
             alert_text2 = f.read()
 
         if alert_text != alert_text2:
             raise AssertionError(
                 "-alertnotify excessive warning of up-version blocks")
 
 
 if __name__ == '__main__':
     ForkNotifyTest().main()
diff --git a/test/functional/fundrawtransaction.py b/test/functional/fundrawtransaction.py
index 7cba6eb03..051d5bd90 100755
--- a/test/functional/fundrawtransaction.py
+++ b/test/functional/fundrawtransaction.py
@@ -1,778 +1,778 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 
 def get_unspent(listunspent, amount):
     for utx in listunspent:
         if utx['amount'] == amount:
             return utx
     raise AssertionError(
         'Could not find unspent with amount={}'.format(amount))
 
 
 class RawTransactionsTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 4
 
     def setup_network(self, split=False):
         self.setup_nodes()
 
         connect_nodes_bi(self.nodes, 0, 1)
         connect_nodes_bi(self.nodes, 1, 2)
         connect_nodes_bi(self.nodes, 0, 2)
         connect_nodes_bi(self.nodes, 0, 3)
 
     def run_test(self):
         min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
         # This test is not meant to test fee estimation and we'd like
         # to be sure all txs are sent at a consistent desired feerate
         for node in self.nodes:
             node.settxfee(min_relay_tx_fee)
 
         # if the fee's positive delta is higher than this value tests will fail,
         # neg. delta always fail the tests.
         # The size of the signature of every input may be at most 2 bytes larger
         # than a minimum sized signature.
 
         #            = 2 bytes * minRelayTxFeePerByte
         feeTolerance = 2 * min_relay_tx_fee / 1000
 
         self.nodes[2].generate(1)
         self.sync_all()
         self.nodes[0].generate(121)
         self.sync_all()
 
         watchonly_address = self.nodes[0].getnewaddress()
         watchonly_pubkey = self.nodes[
             0].validateaddress(watchonly_address)["pubkey"]
         watchonly_amount = Decimal(200)
         self.nodes[3].importpubkey(watchonly_pubkey, "", True)
         watchonly_txid = self.nodes[0].sendtoaddress(
             watchonly_address, watchonly_amount)
         self.nodes[0].sendtoaddress(
             self.nodes[3].getnewaddress(), watchonly_amount / 10)
 
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
 
         self.nodes[0].generate(1)
         self.sync_all()
 
         #
         # simple test #
         #
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 1.0}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         assert(len(dec_tx['vin']) > 0)  # test that we have enough inputs
 
         #
         # simple test with two coins #
         #
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 2.2}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         assert(len(dec_tx['vin']) > 0)  # test if we have enough inputs
 
         #
         # simple test with two coins #
         #
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 2.6}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         assert(len(dec_tx['vin']) > 0)
         assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
 
         #
         # simple test with two outputs #
         #
         inputs = []
         outputs = {
             self.nodes[0].getnewaddress(): 2.6, self.nodes[1].getnewaddress(): 2.5}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
 
         assert(len(dec_tx['vin']) > 0)
         assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
 
         #
         # test a fundrawtransaction with a VIN greater than the required amount #
         #
         utx = get_unspent(self.nodes[2].listunspent(), 5)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): 1.0}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
 
         # compare vin total and totalout+fee
         assert_equal(fee + totalOut, utx['amount'])
 
         #
         # test a fundrawtransaction with which will not get a change output #
         #
         utx = get_unspent(self.nodes[2].listunspent(), 5)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {
             self.nodes[0].getnewaddress(): Decimal(5.0) - fee - feeTolerance}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
 
         assert_equal(rawtxfund['changepos'], -1)
         assert_equal(fee + totalOut, utx['amount'])
         # compare vin total and totalout+fee
 
         #
         # test a fundrawtransaction with an invalid option #
         #
         utx = get_unspent(self.nodes[2].listunspent(), 5)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): Decimal(4.0)}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         assert_raises_jsonrpc(-3, "Unexpected key foo", self.nodes[
                               2].fundrawtransaction, rawtx, {'foo': 'bar'})
 
         #
         # test a fundrawtransaction with an invalid change address #
         #
         utx = get_unspent(self.nodes[2].listunspent(), 5)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): Decimal(4.0)}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         assert_raises_jsonrpc(
             -5, "changeAddress must be a valid bitcoin address",
             self.nodes[2].fundrawtransaction, rawtx, {'changeAddress': 'foobar'})
 
         #
         # test a fundrawtransaction with a provided change address #
         #
         utx = get_unspent(self.nodes[2].listunspent(), 5)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): Decimal(4.0)}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         change = self.nodes[2].getnewaddress()
         assert_raises_jsonrpc(-8, "changePosition out of bounds", self.nodes[
                               2].fundrawtransaction, rawtx, {'changeAddress': change, 'changePosition': 2})
         rawtxfund = self.nodes[2].fundrawtransaction(
             rawtx, {'changeAddress': change, 'changePosition': 0})
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         out = dec_tx['vout'][0]
         assert_equal(change, out['scriptPubKey']['addresses'][0])
 
         #
         # test a fundrawtransaction with a VIN smaller than the required amount #
         #
         utx = get_unspent(self.nodes[2].listunspent(), 1)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
         outputs = {self.nodes[0].getnewaddress(): 1.0}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
 
         # 4-byte version + 1-byte vin count + 36-byte prevout then script_len
         rawtx = rawtx[:82] + "0100" + rawtx[84:]
 
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
         assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         matchingOuts = 0
         for i, out in enumerate(dec_tx['vout']):
             totalOut += out['value']
             if out['scriptPubKey']['addresses'][0] in outputs:
                 matchingOuts += 1
             else:
                 assert_equal(i, rawtxfund['changepos'])
 
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
         assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
 
         assert_equal(matchingOuts, 1)
         assert_equal(len(dec_tx['vout']), 2)
 
         #
         # test a fundrawtransaction with two VINs #
         #
         utx = get_unspent(self.nodes[2].listunspent(), 1)
         utx2 = get_unspent(self.nodes[2].listunspent(), 5)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']},
                   {'txid': utx2['txid'], 'vout': utx2['vout']}]
         outputs = {self.nodes[0].getnewaddress(): 6.0}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         matchingOuts = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
             if out['scriptPubKey']['addresses'][0] in outputs:
                 matchingOuts += 1
 
         assert_equal(matchingOuts, 1)
         assert_equal(len(dec_tx['vout']), 2)
 
         matchingIns = 0
         for vinOut in dec_tx['vin']:
             for vinIn in inputs:
                 if vinIn['txid'] == vinOut['txid']:
                     matchingIns += 1
 
         # we now must see two vins identical to vins given as params
         assert_equal(matchingIns, 2)
 
         #
         # test a fundrawtransaction with two VINs and two vOUTs #
         #
         utx = get_unspent(self.nodes[2].listunspent(), 1)
         utx2 = get_unspent(self.nodes[2].listunspent(), 5)
 
         inputs = [{'txid': utx['txid'], 'vout': utx['vout']},
                   {'txid': utx2['txid'], 'vout': utx2['vout']}]
         outputs = {
             self.nodes[0].getnewaddress(): 6.0, self.nodes[0].getnewaddress(): 1.0}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
         assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         fee = rawtxfund['fee']
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
         totalOut = 0
         matchingOuts = 0
         for out in dec_tx['vout']:
             totalOut += out['value']
             if out['scriptPubKey']['addresses'][0] in outputs:
                 matchingOuts += 1
 
         assert_equal(matchingOuts, 2)
         assert_equal(len(dec_tx['vout']), 3)
 
         #
         # test a fundrawtransaction with invalid vin #
         #
         listunspent = self.nodes[2].listunspent()
         inputs = [
             {'txid': "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout': 0}]
         # invalid vin!
         outputs = {self.nodes[0].getnewaddress(): 1.0}
         rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
 
         assert_raises_jsonrpc(
             -4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx)
 
         #
         # compare fee of a standard pubkeyhash transaction
         inputs = []
         outputs = {self.nodes[1].getnewaddress(): 1.1}
         rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[0].fundrawtransaction(rawTx)
 
         # create same transaction over sendtoaddress
         txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
         signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
 
         # compare fee
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert(feeDelta >= 0 and feeDelta <= feeTolerance)
         #
 
         #
         # compare fee of a standard pubkeyhash transaction with multiple
         # outputs
         inputs = []
         outputs = {self.nodes[1].getnewaddress(): 1.1, self.nodes[1].getnewaddress(): 1.2, self.nodes[1].getnewaddress(): 0.1, self.nodes[
             1].getnewaddress(): 1.3, self.nodes[1].getnewaddress(): 0.2, self.nodes[1].getnewaddress(): 0.3}
         rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[0].fundrawtransaction(rawTx)
         # create same transaction over sendtoaddress
         txId = self.nodes[0].sendmany("", outputs)
         signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
 
         # compare fee
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert(feeDelta >= 0 and feeDelta <= feeTolerance)
         #
 
         #
         # compare fee of a 2of2 multisig p2sh transaction
 
         # create 2of2 addr
         addr1 = self.nodes[1].getnewaddress()
         addr2 = self.nodes[1].getnewaddress()
 
         addr1Obj = self.nodes[1].validateaddress(addr1)
         addr2Obj = self.nodes[1].validateaddress(addr2)
 
         mSigObj = self.nodes[1].addmultisigaddress(
             2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
 
         inputs = []
         outputs = {mSigObj: 1.1}
         rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[0].fundrawtransaction(rawTx)
 
         # create same transaction over sendtoaddress
         txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
         signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
 
         # compare fee
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert(feeDelta >= 0 and feeDelta <= feeTolerance)
         #
 
         #
         # compare fee of a standard pubkeyhash transaction
 
         # create 4of5 addr
         addr1 = self.nodes[1].getnewaddress()
         addr2 = self.nodes[1].getnewaddress()
         addr3 = self.nodes[1].getnewaddress()
         addr4 = self.nodes[1].getnewaddress()
         addr5 = self.nodes[1].getnewaddress()
 
         addr1Obj = self.nodes[1].validateaddress(addr1)
         addr2Obj = self.nodes[1].validateaddress(addr2)
         addr3Obj = self.nodes[1].validateaddress(addr3)
         addr4Obj = self.nodes[1].validateaddress(addr4)
         addr5Obj = self.nodes[1].validateaddress(addr5)
 
         mSigObj = self.nodes[1].addmultisigaddress(
             4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
 
         inputs = []
         outputs = {mSigObj: 1.1}
         rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[0].fundrawtransaction(rawTx)
 
         # create same transaction over sendtoaddress
         txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
         signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
 
         # compare fee
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert(feeDelta >= 0 and feeDelta <= feeTolerance)
         #
 
         #
         # spend a 2of2 multisig transaction over fundraw
 
         # create 2of2 addr
         addr1 = self.nodes[2].getnewaddress()
         addr2 = self.nodes[2].getnewaddress()
 
         addr1Obj = self.nodes[2].validateaddress(addr1)
         addr2Obj = self.nodes[2].validateaddress(addr2)
 
         mSigObj = self.nodes[2].addmultisigaddress(
             2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
 
         # send 1.2 BTC to msig addr
         txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
         self.sync_all()
         self.nodes[1].generate(1)
         self.sync_all()
 
         oldBalance = self.nodes[1].getbalance()
         inputs = []
         outputs = {self.nodes[1].getnewaddress(): 1.1}
         rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[2].fundrawtransaction(rawTx)
 
         signedTx = self.nodes[2].signrawtransaction(
             fundedTx['hex'], None, None, "ALL|FORKID")
         txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
         self.sync_all()
         self.nodes[1].generate(1)
         self.sync_all()
 
         # make sure funds are received at node1
         assert_equal(
             oldBalance + Decimal('1.10000000'), self.nodes[1].getbalance())
 
         #
         # locked wallet test
         self.nodes[1].encryptwallet("test")
+        self.stop_node(0)
+        self.stop_node(2)
+        self.stop_node(3)
         self.nodes.pop(1)
-        stop_node(self.nodes[0], 0)
-        stop_node(self.nodes[1], 2)
-        stop_node(self.nodes[2], 3)
 
-        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
+        self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir)
         # This test is not meant to test fee estimation and we'd like
         # to be sure all txs are sent at a consistent desired feerate
         for node in self.nodes:
             node.settxfee(min_relay_tx_fee)
 
         connect_nodes_bi(self.nodes, 0, 1)
         connect_nodes_bi(self.nodes, 1, 2)
         connect_nodes_bi(self.nodes, 0, 2)
         connect_nodes_bi(self.nodes, 0, 3)
         self.sync_all()
 
         # drain the keypool
         self.nodes[1].getnewaddress()
         self.nodes[1].getrawchangeaddress()
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 1.1}
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
         # fund a transaction that requires a new key for the change output
         # creating the key must be impossible because the wallet is locked
         assert_raises_jsonrpc(
             -4, "Insufficient funds", self.nodes[1].fundrawtransaction, rawtx)
 
         # refill the keypool
         self.nodes[1].walletpassphrase("test", 100)
         # need to refill the keypool to get an internal change address
         self.nodes[1].keypoolrefill(8)
         self.nodes[1].walletlock()
 
         assert_raises_jsonrpc(-13, "walletpassphrase", self.nodes[
                               1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2)
 
         oldBalance = self.nodes[0].getbalance()
 
         inputs = []
         outputs = {self.nodes[0].getnewaddress(): 1.1}
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[1].fundrawtransaction(rawTx)
 
         # now we need to unlock
         self.nodes[1].walletpassphrase("test", 600)
         signedTx = self.nodes[1].signrawtransaction(
             fundedTx['hex'], None, None, "ALL|FORKID")
         txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
         self.nodes[1].generate(1)
         self.sync_all()
 
         # make sure funds are received at node1
         assert_equal(
             oldBalance + Decimal('51.10000000'), self.nodes[0].getbalance())
 
         #
         # multiple (~19) inputs tx test | Compare fee #
         #
 
         # empty node1, send some small coins from node0 to node1
         self.nodes[1].sendtoaddress(
             self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
 
         for i in range(0, 20):
             self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # fund a tx with ~20 small inputs
         inputs = []
         outputs = {
             self.nodes[0].getnewaddress(): 0.15, self.nodes[0].getnewaddress(): 0.04}
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[1].fundrawtransaction(rawTx)
 
         # create same transaction over sendtoaddress
         txId = self.nodes[1].sendmany("", outputs)
         signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
 
         # compare fee
         feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
         assert(feeDelta >= 0 and feeDelta <= feeTolerance * 19)  # ~19 inputs
 
         #
         # multiple (~19) inputs tx test | sign/send #
         #
 
         # again, empty node1, send some small coins from node0 to node1
         self.nodes[1].sendtoaddress(
             self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
 
         for i in range(0, 20):
             self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # fund a tx with ~20 small inputs
         oldBalance = self.nodes[0].getbalance()
 
         inputs = []
         outputs = {
             self.nodes[0].getnewaddress(): 0.15, self.nodes[0].getnewaddress(): 0.04}
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
         fundedTx = self.nodes[1].fundrawtransaction(rawTx)
         fundedAndSignedTx = self.nodes[1].signrawtransaction(
             fundedTx['hex'], None, None, "ALL|FORKID")
         txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
         assert_equal(oldBalance + Decimal('50.19000000'),
                      self.nodes[0].getbalance())  # 0.19+block reward
 
         #
         # test fundrawtransaction with OP_RETURN and no vin #
         #
 
         rawtx = "0100000000010000000000000000066a047465737400000000"
         dec_tx = self.nodes[2].decoderawtransaction(rawtx)
 
         assert_equal(len(dec_tx['vin']), 0)
         assert_equal(len(dec_tx['vout']), 1)
 
         rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
         dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
 
         assert_greater_than(len(dec_tx['vin']), 0)  # at least one vin
         assert_equal(len(dec_tx['vout']), 2)  # one change output added
 
         #
         # test a fundrawtransaction using only watchonly #
         #
 
         inputs = []
         outputs = {self.nodes[2].getnewaddress(): watchonly_amount / 2}
         rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
 
         result = self.nodes[3].fundrawtransaction(
             rawtx, {'includeWatching': True})
         res_dec = self.nodes[0].decoderawtransaction(result["hex"])
         assert_equal(len(res_dec["vin"]), 1)
         assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
 
         assert("fee" in result.keys())
         assert_greater_than(result["changepos"], -1)
 
         #
         # test fundrawtransaction using the entirety of watched funds #
         #
 
         inputs = []
         outputs = {self.nodes[2].getnewaddress(): watchonly_amount}
         rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
 
         # Backward compatibility test (2nd param is includeWatching)
         result = self.nodes[3].fundrawtransaction(rawtx, True)
         res_dec = self.nodes[0].decoderawtransaction(result["hex"])
         assert_equal(len(res_dec["vin"]), 2)
         assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec[
                "vin"][1]["txid"] == watchonly_txid)
 
         assert_greater_than(result["fee"], 0)
         assert_greater_than(result["changepos"], -1)
         assert_equal(result["fee"] + res_dec["vout"][
                      result["changepos"]]["value"], watchonly_amount / 10)
 
         signedtx = self.nodes[3].signrawtransaction(
             result["hex"], None, None, "ALL|FORKID")
         assert(not signedtx["complete"])
         signedtx = self.nodes[0].signrawtransaction(
             signedtx["hex"], None, None, "ALL|FORKID")
         assert(signedtx["complete"])
         self.nodes[0].sendrawtransaction(signedtx["hex"])
         self.nodes[0].generate(1)
         self.sync_all()
 
         #
         # Test feeRate option #
         #
 
         # Make sure there is exactly one input so coin selection can't skew the
         # result
         assert_equal(len(self.nodes[3].listunspent(1)), 1)
 
         inputs = []
         outputs = {self.nodes[3].getnewaddress(): 1}
         rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
         result = self.nodes[3].fundrawtransaction(
             rawtx)  # uses min_relay_tx_fee (set by settxfee)
         result2 = self.nodes[3].fundrawtransaction(
             rawtx, {"feeRate": 2 * min_relay_tx_fee})
         result3 = self.nodes[3].fundrawtransaction(
             rawtx, {"feeRate": 10 * min_relay_tx_fee})
         result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
         assert_fee_amount(
             result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
         assert_fee_amount(
             result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
 
         #
         # Test address reuse option #
         #
 
         result3 = self.nodes[3].fundrawtransaction(
             rawtx, {"reserveChangeKey": False})
         res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
         changeaddress = ""
         for out in res_dec['vout']:
             if out['value'] > 1.0:
                 changeaddress += out['scriptPubKey']['addresses'][0]
         assert(changeaddress != "")
         nextaddr = self.nodes[3].getrawchangeaddress()
         # frt should not have removed the key from the keypool
         assert(changeaddress == nextaddr)
 
         result3 = self.nodes[3].fundrawtransaction(rawtx)
         res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
         changeaddress = ""
         for out in res_dec['vout']:
             if out['value'] > 1.0:
                 changeaddress += out['scriptPubKey']['addresses'][0]
         assert(changeaddress != "")
         nextaddr = self.nodes[3].getnewaddress()
         # Now the change address key should be removed from the keypool
         assert(changeaddress != nextaddr)
 
         #
         # Test subtractFeeFromOutputs option #
         #
 
         # Make sure there is exactly one input so coin selection can't skew the
         # result
         assert_equal(len(self.nodes[3].listunspent(1)), 1)
 
         inputs = []
         outputs = {self.nodes[2].getnewaddress(): 1}
         rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
 
         result = [self.nodes[3].fundrawtransaction(rawtx),  # uses min_relay_tx_fee (set by settxfee)
                   self.nodes[3].fundrawtransaction(
                       rawtx, {"subtractFeeFromOutputs": []}),  # empty subtraction list
                   self.nodes[3].fundrawtransaction(
                       rawtx, {"subtractFeeFromOutputs": [0]}),  # uses min_relay_tx_fee (set by settxfee)
                   self.nodes[3].fundrawtransaction(
                       rawtx, {"feeRate": 2 * min_relay_tx_fee}),
                   self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2 * min_relay_tx_fee, "subtractFeeFromOutputs": [0]})]
 
         dec_tx = [self.nodes[3].decoderawtransaction(tx['hex'])
                   for tx in result]
         output = [d['vout'][1 - r['changepos']]['value']
                   for d, r in zip(dec_tx, result)]
         change = [d['vout'][r['changepos']]['value']
                   for d, r in zip(dec_tx, result)]
 
         assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee'])
         assert_equal(result[3]['fee'], result[4]['fee'])
         assert_equal(change[0], change[1])
         assert_equal(output[0], output[1])
         assert_equal(output[0], output[2] + result[2]['fee'])
         assert_equal(change[0] + result[0]['fee'], change[2])
         assert_equal(output[3], output[4] + result[4]['fee'])
         assert_equal(change[3] + result[3]['fee'], change[4])
 
         inputs = []
         outputs = {
             self.nodes[2].getnewaddress(): value for value in (1.0, 1.1, 1.2, 1.3)}
         keys = list(outputs.keys())
         rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
 
         result = [self.nodes[3].fundrawtransaction(rawtx),
                   # split the fee between outputs 0, 2, and 3, but not output 1
                   self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0, 2, 3]})]
 
         dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']),
                   self.nodes[3].decoderawtransaction(result[1]['hex'])]
 
         # Nested list of non-change output amounts for each transaction
         output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']]
                   for d, r in zip(dec_tx, result)]
 
         # List of differences in output amounts between normal and subtractFee
         # transactions
         share = [o0 - o1 for o0, o1 in zip(output[0], output[1])]
 
         # output 1 is the same in both transactions
         assert_equal(share[1], 0)
 
         # the other 3 outputs are smaller as a result of subtractFeeFromOutputs
         assert_greater_than(share[0], 0)
         assert_greater_than(share[2], 0)
         assert_greater_than(share[3], 0)
 
         # outputs 2 and 3 take the same share of the fee
         assert_equal(share[2], share[3])
 
         # output 0 takes at least as much share of the fee, and no more than 2
         # satoshis more, than outputs 2 and 3
         assert_greater_than_or_equal(share[0], share[2])
         assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0])
 
         # the fee is the same in both transactions
         assert_equal(result[0]['fee'], result[1]['fee'])
 
         # the total subtracted from the outputs is equal to the fee
         assert_equal(share[0] + share[2] + share[3], result[0]['fee'])
 
 
 if __name__ == '__main__':
     RawTransactionsTest().main()
diff --git a/test/functional/import-rescan.py b/test/functional/import-rescan.py
index ac57743ac..8316969b6 100755
--- a/test/functional/import-rescan.py
+++ b/test/functional/import-rescan.py
@@ -1,208 +1,207 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 '''
 Test rescan behavior of importaddress, importpubkey, importprivkey, and
 importmulti RPCs with different types of keys and rescan options.
 
 In the first part of the test, node 0 creates an address for each type of
 import RPC call and node 0 sends BTC to it. Then other nodes import the
 addresses, and the test makes listtransactions and getbalance calls to confirm
 that the importing node either did or did not execute rescans picking up the
 send transactions.
 
 In the second part of the test, node 0 sends more BTC to each address, and the
 test makes more listtransactions and getbalance calls to confirm that the
 importing nodes pick up the new transactions regardless of whether rescans
 happened previously.
 '''
 
 from test_framework.authproxy import JSONRPCException
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
-    start_nodes, connect_nodes, sync_blocks, assert_equal, set_node_times)
-from decimal import Decimal
+    connect_nodes, sync_blocks, assert_equal, set_node_times)
 
 import collections
 import enum
 import itertools
 
 Call = enum.Enum("Call", "single multi")
 Data = enum.Enum("Data", "address pub priv")
 Rescan = enum.Enum("Rescan", "no yes late_timestamp")
 
 
 class Variant(collections.namedtuple("Variant", "call data rescan prune")):
 
     """Helper for importing one key and verifying scanned transactions."""
 
     def do_import(self, timestamp):
         """Call one key import RPC."""
 
         if self.call == Call.single:
             if self.data == Data.address:
                 response, error = try_rpc(
                     self.node.importaddress, self.address[
                         "address"], self.label,
                     self.rescan == Rescan.yes)
             elif self.data == Data.pub:
                 response, error = try_rpc(
                     self.node.importpubkey, self.address["pubkey"], self.label,
                     self.rescan == Rescan.yes)
             elif self.data == Data.priv:
                 response, error = try_rpc(
                     self.node.importprivkey, self.key, self.label, self.rescan == Rescan.yes)
             assert_equal(response, None)
             assert_equal(
                 error, {'message': 'Rescan is disabled in pruned mode',
                         'code': -4} if self.expect_disabled else None)
         elif self.call == Call.multi:
             response = self.node.importmulti([{
                 "scriptPubKey": {
                     "address": self.address["address"]
                 },
                 "timestamp": timestamp + TIMESTAMP_WINDOW + (1 if self.rescan == Rescan.late_timestamp else 0),
                 "pubkeys": [self.address["pubkey"]] if self.data == Data.pub else [],
                 "keys": [self.key] if self.data == Data.priv else [],
                 "label": self.label,
                 "watchonly": self.data != Data.priv
             }], {"rescan": self.rescan in (Rescan.yes, Rescan.late_timestamp)})
             assert_equal(response, [{"success": True}])
 
     def check(self, txid=None, amount=None, confirmations=None):
         """Verify that getbalance/listtransactions return expected values."""
 
         balance = self.node.getbalance(self.label, 0, True)
         assert_equal(balance, self.expected_balance)
 
         txs = self.node.listtransactions(self.label, 10000, 0, True)
         assert_equal(len(txs), self.expected_txs)
 
         if txid is not None:
             tx, = [tx for tx in txs if tx["txid"] == txid]
             assert_equal(tx["account"], self.label)
             assert_equal(tx["address"], self.address["address"])
             assert_equal(tx["amount"], amount)
             assert_equal(tx["category"], "receive")
             assert_equal(tx["label"], self.label)
             assert_equal(tx["txid"], txid)
             assert_equal(tx["confirmations"], confirmations)
             assert_equal("trusted" not in tx, True)
             # Verify the transaction is correctly marked watchonly depending on
             # whether the transaction pays to an imported public key or
             # imported private key. The test setup ensures that transaction
             # inputs will not be from watchonly keys (important because
             # involvesWatchonly will be true if either the transaction output
             # or inputs are watchonly).
             if self.data != Data.priv:
                 assert_equal(tx["involvesWatchonly"], True)
             else:
                 assert_equal("involvesWatchonly" not in tx, True)
 
 
 # List of Variants for each way a key or address could be imported.
 IMPORT_VARIANTS = [Variant(*variants)
                    for variants in itertools.product(Call, Data, Rescan, (False, True))]
 
 # List of nodes to import keys to. Half the nodes will have pruning disabled,
 # half will have it enabled. Different nodes will be used for imports that are
 # expected to cause rescans, and imports that are not expected to cause
 # rescans, in order to prevent rescans during later imports picking up
 # transactions associated with earlier imports. This makes it easier to keep
 # track of expected balances and transactions.
 ImportNode = collections.namedtuple("ImportNode", "prune rescan")
 IMPORT_NODES = [ImportNode(*fields)
                 for fields in itertools.product((False, True), repeat=2)]
 
 # Rescans start at the earliest block up to 2 hours before the key timestamp.
 TIMESTAMP_WINDOW = 2 * 60 * 60
 
 
 class ImportRescanTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 2 + len(IMPORT_NODES)
 
     def setup_network(self):
         extra_args = [[] for _ in range(self.num_nodes)]
         for i, import_node in enumerate(IMPORT_NODES, 2):
             if import_node.prune:
                 extra_args[i] += ["-prune=1"]
 
-        self.nodes = start_nodes(
+        self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, extra_args)
         for i in range(1, self.num_nodes):
             connect_nodes(self.nodes[i], 0)
 
     def run_test(self):
         # Create one transaction on node 0 with a unique amount and label for
         # each possible type of wallet import RPC.
         for i, variant in enumerate(IMPORT_VARIANTS):
             variant.label = "label {} {}".format(i, variant)
             variant.address = self.nodes[1].validateaddress(
                 self.nodes[1].getnewaddress(variant.label))
             variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
             variant.initial_amount = 10 - (i + 1) / 4.0
             variant.initial_txid = self.nodes[0].sendtoaddress(
                 variant.address["address"], variant.initial_amount)
 
         # Generate a block containing the initial transactions, then another
         # block further in the future (past the rescan window).
         self.nodes[0].generate(1)
         assert_equal(self.nodes[0].getrawmempool(), [])
         timestamp = self.nodes[0].getblockheader(
             self.nodes[0].getbestblockhash())["time"]
         set_node_times(self.nodes, timestamp + TIMESTAMP_WINDOW + 1)
         self.nodes[0].generate(1)
         sync_blocks(self.nodes)
 
         # For each variation of wallet key import, invoke the import RPC and
         # check the results from getbalance and listtransactions.
         for variant in IMPORT_VARIANTS:
             variant.expect_disabled = variant.rescan == Rescan.yes and variant.prune and variant.call == Call.single
             expect_rescan = variant.rescan == Rescan.yes and not variant.expect_disabled
             variant.node = self.nodes[
                 2 + IMPORT_NODES.index(ImportNode(variant.prune, expect_rescan))]
             variant.do_import(timestamp)
             if expect_rescan:
                 variant.expected_balance = variant.initial_amount
                 variant.expected_txs = 1
                 variant.check(variant.initial_txid, variant.initial_amount, 2)
             else:
                 variant.expected_balance = 0
                 variant.expected_txs = 0
                 variant.check()
 
         # Create new transactions sending to each address.
         fee = self.nodes[0].getnetworkinfo()["relayfee"]
         for i, variant in enumerate(IMPORT_VARIANTS):
             variant.sent_amount = 10 - (2 * i + 1) / 8.0
             variant.sent_txid = self.nodes[0].sendtoaddress(
                 variant.address["address"], variant.sent_amount)
 
         # Generate a block containing the new transactions.
         self.nodes[0].generate(1)
         assert_equal(self.nodes[0].getrawmempool(), [])
         sync_blocks(self.nodes)
 
         # Check the latest results from getbalance and listtransactions.
         for variant in IMPORT_VARIANTS:
             if not variant.expect_disabled:
                 variant.expected_balance += variant.sent_amount
                 variant.expected_txs += 1
                 variant.check(variant.sent_txid, variant.sent_amount, 1)
             else:
                 variant.check()
 
 
 def try_rpc(func, *args, **kwargs):
     try:
         return func(*args, **kwargs), None
     except JSONRPCException as e:
         return None, e.error
 
 
 if __name__ == "__main__":
     ImportRescanTest().main()
diff --git a/test/functional/importmulti.py b/test/functional/importmulti.py
index 8819a6527..76d73aba5 100755
--- a/test/functional/importmulti.py
+++ b/test/functional/importmulti.py
@@ -1,506 +1,505 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 
 class ImportMultiTest (BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 2
         self.setup_clean_chain = True
 
     def setup_network(self, split=False):
         self.setup_nodes()
 
     def run_test(self):
         self.log.info("Mining blocks...")
         self.nodes[0].generate(1)
         self.nodes[1].generate(1)
         timestamp = self.nodes[1].getblock(
             self.nodes[1].getbestblockhash())['mediantime']
 
         # keyword definition
         PRIV_KEY = 'privkey'
         PUB_KEY = 'pubkey'
         ADDRESS_KEY = 'address'
         SCRIPT_KEY = 'script'
 
         node0_address1 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         node0_address2 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         node0_address3 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
 
         # Check only one address
         assert_equal(node0_address1['ismine'], True)
 
         # Node 1 sync test
         assert_equal(self.nodes[1].getblockcount(), 1)
 
         # Address Test - before import
         address_info = self.nodes[1].validateaddress(node0_address1['address'])
         assert_equal(address_info['iswatchonly'], False)
         assert_equal(address_info['ismine'], False)
 
         # RPC importmulti -----------------------------------------------
 
         # Bitcoin Address
         self.log.info("Should import an address")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": address['address']
             },
             "timestamp": "now",
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], True)
         assert_equal(address_assert['ismine'], False)
         assert_equal(address_assert['timestamp'], timestamp)
         watchonly_address = address['address']
         watchonly_timestamp = timestamp
 
         self.log.info("Should not import an invalid address")
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": "not valid address",
             },
             "timestamp": "now",
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -5)
         assert_equal(result[0]['error']['message'], 'Invalid address')
 
         # ScriptPubKey + internal
         self.log.info("Should import a scriptPubKey with internal flag")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": address['scriptPubKey'],
             "timestamp": "now",
             "internal": True
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], True)
         assert_equal(address_assert['ismine'], False)
         assert_equal(address_assert['timestamp'], timestamp)
 
         # ScriptPubKey + !internal
         self.log.info("Should not import a scriptPubKey without internal flag")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": address['scriptPubKey'],
             "timestamp": "now",
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -8)
         assert_equal(result[0]['error']['message'],
                      'Internal must be set for hex scriptPubKey')
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], False)
         assert_equal('timestamp' in address_assert, False)
 
         # Address + Public key + !Internal
         self.log.info("Should import an address with public key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": address['address']
             },
             "timestamp": "now",
             "pubkeys": [address['pubkey']]
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], True)
         assert_equal(address_assert['ismine'], False)
         assert_equal(address_assert['timestamp'], timestamp)
 
         # ScriptPubKey + Public key + internal
         self.log.info(
             "Should import a scriptPubKey with internal and with public key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         request = [{
             "scriptPubKey": address['scriptPubKey'],
             "timestamp": "now",
             "pubkeys": [address['pubkey']],
             "internal": True
         }]
         result = self.nodes[1].importmulti(request)
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], True)
         assert_equal(address_assert['ismine'], False)
         assert_equal(address_assert['timestamp'], timestamp)
 
         # ScriptPubKey + Public key + !internal
         self.log.info(
             "Should not import a scriptPubKey without internal and with public key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         request = [{
             "scriptPubKey": address['scriptPubKey'],
             "timestamp": "now",
             "pubkeys": [address['pubkey']]
         }]
         result = self.nodes[1].importmulti(request)
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -8)
         assert_equal(result[0]['error']['message'],
                      'Internal must be set for hex scriptPubKey')
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], False)
         assert_equal('timestamp' in address_assert, False)
 
         # Address + Private key + !watchonly
         self.log.info("Should import an address with private key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": address['address']
             },
             "timestamp": "now",
             "keys": [self.nodes[0].dumpprivkey(address['address'])]
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], True)
         assert_equal(address_assert['timestamp'], timestamp)
 
         self.log.info(
             "Should not import an address with private key if is already imported")
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": address['address']
             },
             "timestamp": "now",
             "keys": [self.nodes[0].dumpprivkey(address['address'])]
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -4)
         assert_equal(result[0]['error']['message'],
                      'The wallet already contains the private key for this address or script')
 
         # Address + Private key + watchonly
         self.log.info(
             "Should not import an address with private key and with watchonly")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": address['address']
             },
             "timestamp": "now",
             "keys": [self.nodes[0].dumpprivkey(address['address'])],
             "watchonly": True
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -8)
         assert_equal(result[0]['error']['message'],
                      'Incompatibility found between watchonly and keys')
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], False)
         assert_equal('timestamp' in address_assert, False)
 
         # ScriptPubKey + Private key + internal
         self.log.info(
             "Should import a scriptPubKey with internal and with private key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": address['scriptPubKey'],
             "timestamp": "now",
             "keys": [self.nodes[0].dumpprivkey(address['address'])],
             "internal": True
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], True)
         assert_equal(address_assert['timestamp'], timestamp)
 
         # ScriptPubKey + Private key + !internal
         self.log.info(
             "Should not import a scriptPubKey without internal and with private key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": address['scriptPubKey'],
             "timestamp": "now",
             "keys": [self.nodes[0].dumpprivkey(address['address'])]
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -8)
         assert_equal(result[0]['error']['message'],
                      'Internal must be set for hex scriptPubKey')
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], False)
         assert_equal('timestamp' in address_assert, False)
 
         # P2SH address
         sig_address_1 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         sig_address_2 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         sig_address_3 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         multi_sig_script = self.nodes[0].createmultisig(
             2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
         self.nodes[1].generate(100)
         transactionid = self.nodes[1].sendtoaddress(
             multi_sig_script['address'], 10.00)
         self.nodes[1].generate(1)
         timestamp = self.nodes[1].getblock(
             self.nodes[1].getbestblockhash())['mediantime']
         transaction = self.nodes[1].gettransaction(transactionid)
 
         self.log.info("Should import a p2sh")
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": multi_sig_script['address']
             },
             "timestamp": "now",
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(
             multi_sig_script['address'])
         assert_equal(address_assert['isscript'], True)
         assert_equal(address_assert['iswatchonly'], True)
         assert_equal(address_assert['timestamp'], timestamp)
         p2shunspent = self.nodes[1].listunspent(
             0, 999999, [multi_sig_script['address']])[0]
         assert_equal(p2shunspent['spendable'], False)
         assert_equal(p2shunspent['solvable'], False)
 
         # P2SH + Redeem script
         sig_address_1 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         sig_address_2 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         sig_address_3 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         multi_sig_script = self.nodes[0].createmultisig(
             2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
         self.nodes[1].generate(100)
         transactionid = self.nodes[1].sendtoaddress(
             multi_sig_script['address'], 10.00)
         self.nodes[1].generate(1)
         timestamp = self.nodes[1].getblock(
             self.nodes[1].getbestblockhash())['mediantime']
         transaction = self.nodes[1].gettransaction(transactionid)
 
         self.log.info("Should import a p2sh with respective redeem script")
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": multi_sig_script['address']
             },
             "timestamp": "now",
             "redeemscript": multi_sig_script['redeemScript']
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(
             multi_sig_script['address'])
         assert_equal(address_assert['timestamp'], timestamp)
 
         p2shunspent = self.nodes[1].listunspent(
             0, 999999, [multi_sig_script['address']])[0]
         assert_equal(p2shunspent['spendable'], False)
         assert_equal(p2shunspent['solvable'], True)
 
         # P2SH + Redeem script + Private Keys + !Watchonly
         sig_address_1 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         sig_address_2 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         sig_address_3 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         multi_sig_script = self.nodes[0].createmultisig(
             2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
         self.nodes[1].generate(100)
         transactionid = self.nodes[1].sendtoaddress(
             multi_sig_script['address'], 10.00)
         self.nodes[1].generate(1)
         timestamp = self.nodes[1].getblock(
             self.nodes[1].getbestblockhash())['mediantime']
         transaction = self.nodes[1].gettransaction(transactionid)
 
         self.log.info(
             "Should import a p2sh with respective redeem script and private keys")
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": multi_sig_script['address']
             },
             "timestamp": "now",
             "redeemscript": multi_sig_script['redeemScript'],
             "keys": [self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(
             multi_sig_script['address'])
         assert_equal(address_assert['timestamp'], timestamp)
 
         p2shunspent = self.nodes[1].listunspent(
             0, 999999, [multi_sig_script['address']])[0]
         assert_equal(p2shunspent['spendable'], False)
         assert_equal(p2shunspent['solvable'], True)
 
         # P2SH + Redeem script + Private Keys + Watchonly
         sig_address_1 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         sig_address_2 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         sig_address_3 = self.nodes[0].validateaddress(
             self.nodes[0].getnewaddress())
         multi_sig_script = self.nodes[0].createmultisig(
             2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
         self.nodes[1].generate(100)
         transactionid = self.nodes[1].sendtoaddress(
             multi_sig_script['address'], 10.00)
         self.nodes[1].generate(1)
         timestamp = self.nodes[1].getblock(
             self.nodes[1].getbestblockhash())['mediantime']
         transaction = self.nodes[1].gettransaction(transactionid)
 
         self.log.info(
             "Should import a p2sh with respective redeem script and private keys")
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": multi_sig_script['address']
             },
             "timestamp": "now",
             "redeemscript": multi_sig_script['redeemScript'],
             "keys": [self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])],
             "watchonly": True
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -8)
         assert_equal(result[0]['error']['message'],
                      'Incompatibility found between watchonly and keys')
 
         # Address + Public key + !Internal + Wrong pubkey
         self.log.info("Should not import an address with a wrong public key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": address['address']
             },
             "timestamp": "now",
             "pubkeys": [address2['pubkey']]
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -5)
         assert_equal(result[0]['error']['message'], 'Consistency check failed')
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], False)
         assert_equal('timestamp' in address_assert, False)
 
         # ScriptPubKey + Public key + internal + Wrong pubkey
         self.log.info(
             "Should not import a scriptPubKey with internal and with a wrong public key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         request = [{
             "scriptPubKey": address['scriptPubKey'],
             "timestamp": "now",
             "pubkeys": [address2['pubkey']],
             "internal": True
         }]
         result = self.nodes[1].importmulti(request)
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -5)
         assert_equal(result[0]['error']['message'], 'Consistency check failed')
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], False)
         assert_equal('timestamp' in address_assert, False)
 
         # Address + Private key + !watchonly + Wrong private key
         self.log.info("Should not import an address with a wrong private key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": address['address']
             },
             "timestamp": "now",
             "keys": [self.nodes[0].dumpprivkey(address2['address'])]
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -5)
         assert_equal(result[0]['error']['message'], 'Consistency check failed')
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], False)
         assert_equal('timestamp' in address_assert, False)
 
         # ScriptPubKey + Private key + internal + Wrong private key
         self.log.info(
             "Should not import a scriptPubKey with internal and with a wrong private key")
         address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
         result = self.nodes[1].importmulti([{
             "scriptPubKey": address['scriptPubKey'],
             "timestamp": "now",
             "keys": [self.nodes[0].dumpprivkey(address2['address'])],
             "internal": True
         }])
         assert_equal(result[0]['success'], False)
         assert_equal(result[0]['error']['code'], -5)
         assert_equal(result[0]['error']['message'], 'Consistency check failed')
         address_assert = self.nodes[1].validateaddress(address['address'])
         assert_equal(address_assert['iswatchonly'], False)
         assert_equal(address_assert['ismine'], False)
         assert_equal('timestamp' in address_assert, False)
 
         # Importing existing watch only address with new timestamp should replace saved timestamp.
         assert_greater_than(timestamp, watchonly_timestamp)
         self.log.info("Should replace previously saved watch only timestamp.")
         result = self.nodes[1].importmulti([{
             "scriptPubKey": {
                 "address": watchonly_address,
             },
             "timestamp": "now",
         }])
         assert_equal(result[0]['success'], True)
         address_assert = self.nodes[1].validateaddress(watchonly_address)
         assert_equal(address_assert['iswatchonly'], True)
         assert_equal(address_assert['ismine'], False)
         assert_equal(address_assert['timestamp'], timestamp)
         watchonly_timestamp = timestamp
 
-        # restart nodes to check for proper serialization/deserialization of
-        # watch only address
-        stop_nodes(self.nodes)
-        self.nodes = start_nodes(2, self.options.tmpdir)
+        # restart nodes to check for proper serialization/deserialization of watch only address
+        self.stop_nodes()
+        self.nodes = self.start_nodes(2, self.options.tmpdir)
         address_assert = self.nodes[1].validateaddress(watchonly_address)
         assert_equal(address_assert['iswatchonly'], True)
         assert_equal(address_assert['ismine'], False)
         assert_equal(address_assert['timestamp'], watchonly_timestamp)
 
         # Bad or missing timestamps
         self.log.info("Should throw on invalid or missing timestamp values")
         assert_raises_message(
             JSONRPCException, 'Missing required timestamp field for key',
             self.nodes[1].importmulti, [{
                 "scriptPubKey": address['scriptPubKey'],
             }])
         assert_raises_message(
             JSONRPCException, 'Expected number or "now" timestamp value for key. got type string',
             self.nodes[1].importmulti, [{
                 "scriptPubKey": address['scriptPubKey'],
                 "timestamp": "",
             }])
 
 
 if __name__ == '__main__':
     ImportMultiTest().main()
diff --git a/test/functional/keypool.py b/test/functional/keypool.py
index ce61242bc..08d89ad28 100755
--- a/test/functional/keypool.py
+++ b/test/functional/keypool.py
@@ -1,97 +1,97 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 # Exercise the wallet keypool, and interaction with wallet encryption/locking
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 
 class KeyPoolTest(BitcoinTestFramework):
 
     def run_test(self):
         nodes = self.nodes
         addr_before_encrypting = nodes[0].getnewaddress()
         addr_before_encrypting_data = nodes[
             0].validateaddress(addr_before_encrypting)
         wallet_info_old = nodes[0].getwalletinfo()
         assert(addr_before_encrypting_data[
                'hdmasterkeyid'] == wallet_info_old['hdmasterkeyid'])
 
         # Encrypt wallet and wait to terminate
         nodes[0].encryptwallet('test')
         bitcoind_processes[0].wait()
         # Restart node 0
-        nodes[0] = start_node(0, self.options.tmpdir)
+        nodes[0] = self.start_node(0, self.options.tmpdir)
         # Keep creating keys
         addr = nodes[0].getnewaddress()
         addr_data = nodes[0].validateaddress(addr)
         wallet_info = nodes[0].getwalletinfo()
         assert(addr_before_encrypting_data[
                'hdmasterkeyid'] != wallet_info['hdmasterkeyid'])
         assert(addr_data['hdmasterkeyid'] == wallet_info['hdmasterkeyid'])
         assert_raises_jsonrpc(
             -12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
 
         # put six (plus 2) new keys in the keypool (100% external-, +100% internal-keys, 1 in min)
         nodes[0].walletpassphrase('test', 12000)
         nodes[0].keypoolrefill(6)
         nodes[0].walletlock()
         wi = nodes[0].getwalletinfo()
         assert_equal(wi['keypoolsize_hd_internal'], 6)
         assert_equal(wi['keypoolsize'], 6)
 
         # drain the internal keys
         nodes[0].getrawchangeaddress()
         nodes[0].getrawchangeaddress()
         nodes[0].getrawchangeaddress()
         nodes[0].getrawchangeaddress()
         nodes[0].getrawchangeaddress()
         nodes[0].getrawchangeaddress()
         addr = set()
         # the next one should fail
         assert_raises_jsonrpc(-12, "Keypool ran out",
                               nodes[0].getrawchangeaddress)
 
         # drain the external keys
         addr.add(nodes[0].getnewaddress())
         addr.add(nodes[0].getnewaddress())
         addr.add(nodes[0].getnewaddress())
         addr.add(nodes[0].getnewaddress())
         addr.add(nodes[0].getnewaddress())
         addr.add(nodes[0].getnewaddress())
         assert(len(addr) == 6)
         # the next one should fail
         assert_raises_jsonrpc(
             -12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
 
         # refill keypool with three new addresses
         nodes[0].walletpassphrase('test', 1)
         nodes[0].keypoolrefill(3)
 
         # test walletpassphrase timeout
         time.sleep(1.1)
         assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 0)
 
         # drain them by mining
         nodes[0].generate(1)
         nodes[0].generate(1)
         nodes[0].generate(1)
         assert_raises_jsonrpc(-12, "Keypool ran out", nodes[0].generate, 1)
 
         nodes[0].walletpassphrase('test', 100)
         nodes[0].keypoolrefill(100)
         wi = nodes[0].getwalletinfo()
         assert_equal(wi['keypoolsize_hd_internal'], 100)
         assert_equal(wi['keypoolsize'], 100)
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = False
         self.num_nodes = 1
 
 
 if __name__ == '__main__':
     KeyPoolTest().main()
diff --git a/test/functional/listtransactions.py b/test/functional/listtransactions.py
index e64a89651..73e62e63a 100755
--- a/test/functional/listtransactions.py
+++ b/test/functional/listtransactions.py
@@ -1,110 +1,110 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 # Exercise the listtransactions API
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 from test_framework.mininode import CTransaction, COIN
 from io import BytesIO
 
 
 def txFromHex(hexstring):
     tx = CTransaction()
     f = BytesIO(hex_str_to_bytes(hexstring))
     tx.deserialize(f)
     return tx
 
 
 class ListTransactionsTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 4
         self.setup_clean_chain = False
 
     def setup_nodes(self):
         # This test requires mocktime
         enable_mocktime()
-        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
+        self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir)
 
     def run_test(self):
         # Simple send, 0 to 1:
         txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
         self.sync_all()
         assert_array_result(self.nodes[0].listtransactions(),
                             {"txid": txid},
                             {"category": "send", "account": "", "amount": Decimal("-0.1"), "confirmations": 0})
         assert_array_result(self.nodes[1].listtransactions(),
                             {"txid": txid},
                             {"category": "receive", "account": "", "amount": Decimal("0.1"), "confirmations": 0})
         # mine a block, confirmations should change:
         self.nodes[0].generate(1)
         self.sync_all()
         assert_array_result(self.nodes[0].listtransactions(),
                             {"txid": txid},
                             {"category": "send", "account": "", "amount": Decimal("-0.1"), "confirmations": 1})
         assert_array_result(self.nodes[1].listtransactions(),
                             {"txid": txid},
                             {"category": "receive", "account": "", "amount": Decimal("0.1"), "confirmations": 1})
 
         # send-to-self:
         txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
         assert_array_result(self.nodes[0].listtransactions(),
                             {"txid": txid, "category": "send"},
                             {"amount": Decimal("-0.2")})
         assert_array_result(self.nodes[0].listtransactions(),
                             {"txid": txid, "category": "receive"},
                             {"amount": Decimal("0.2")})
 
         # sendmany from node1: twice to self, twice to node2:
         send_to = {self.nodes[0].getnewaddress(): 0.11,
                    self.nodes[1].getnewaddress(): 0.22,
                    self.nodes[0].getaccountaddress("from1"): 0.33,
                    self.nodes[1].getaccountaddress("toself"): 0.44}
         txid = self.nodes[1].sendmany("", send_to)
         self.sync_all()
         assert_array_result(self.nodes[1].listtransactions(),
                             {"category": "send", "amount": Decimal("-0.11")},
                             {"txid": txid})
         assert_array_result(self.nodes[0].listtransactions(),
                             {"category": "receive", "amount": Decimal("0.11")},
                             {"txid": txid})
         assert_array_result(self.nodes[1].listtransactions(),
                             {"category": "send", "amount": Decimal("-0.22")},
                             {"txid": txid})
         assert_array_result(self.nodes[1].listtransactions(),
                             {"category": "receive", "amount": Decimal("0.22")},
                             {"txid": txid})
         assert_array_result(self.nodes[1].listtransactions(),
                             {"category": "send", "amount": Decimal("-0.33")},
                             {"txid": txid})
         assert_array_result(self.nodes[0].listtransactions(),
                             {"category": "receive", "amount": Decimal("0.33")},
                             {"txid": txid, "account": "from1"})
         assert_array_result(self.nodes[1].listtransactions(),
                             {"category": "send", "amount": Decimal("-0.44")},
                             {"txid": txid, "account": ""})
         assert_array_result(self.nodes[1].listtransactions(),
                             {"category": "receive", "amount": Decimal("0.44")},
                             {"txid": txid, "account": "toself"})
 
         multisig = self.nodes[1].createmultisig(
             1, [self.nodes[1].getnewaddress()])
         self.nodes[0].importaddress(
             multisig["redeemScript"], "watchonly", False, True)
         txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
         self.nodes[1].generate(1)
         self.sync_all()
         assert(
             len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
         assert_array_result(
             self.nodes[0].listtransactions("watchonly", 100, 0, True),
             {"category": "receive", "amount": Decimal("0.1")},
             {"txid": txid, "account": "watchonly"})
 
 
 if __name__ == '__main__':
     ListTransactionsTest().main()
diff --git a/test/functional/maxuploadtarget.py b/test/functional/maxuploadtarget.py
index 53331c87f..953cc709d 100755
--- a/test/functional/maxuploadtarget.py
+++ b/test/functional/maxuploadtarget.py
@@ -1,191 +1,191 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 '''
 Test behavior of -maxuploadtarget.
 
 * Verify that getdata requests for old blocks (>1week) are dropped
 if uploadtarget has been reached.
 * Verify that getdata requests for recent blocks are respected even
 if uploadtarget has been reached.
 * Verify that the upload counters are reset after 24 hours.
 '''
 from collections import defaultdict
 import time
 
 from test_framework.mininode import *
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 from test_framework.cdefs import LEGACY_MAX_BLOCK_SIZE
 
 
 class TestNode(NodeConnCB):
 
     def __init__(self):
         super().__init__()
         self.block_receive_map = defaultdict(int)
 
     def on_inv(self, conn, message):
         pass
 
     def on_block(self, conn, message):
         message.block.calc_sha256()
         self.block_receive_map[message.block.sha256] += 1
 
 
 class MaxUploadTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 1
         # Start a node with maxuploadtarget of 200 MB (/24h)
         self.extra_args = [["-maxuploadtarget=200"]]
 
         # Cache for utxos, as the listunspent may take a long time later in the
         # test
         self.utxo_cache = []
 
     def run_test(self):
         # Before we connect anything, we first set the time on the node
         # to be in the past, otherwise things break because the CNode
         # time counters can't be reset backward after initialization
         old_time = int(time.time() - 2 * 60 * 60 * 24 * 7)
         self.nodes[0].setmocktime(old_time)
 
         # Generate some old blocks
         self.nodes[0].generate(130)
 
         # test_nodes[0] will only request old blocks
         # test_nodes[1] will only request new blocks
         # test_nodes[2] will test resetting the counters
         test_nodes = []
         connections = []
 
         for i in range(3):
             test_nodes.append(TestNode())
             connections.append(
                 NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
             test_nodes[i].add_connection(connections[i])
 
         # Start up network handling in another thread
         NetworkThread().start()
         [x.wait_for_verack() for x in test_nodes]
 
         # Test logic begins here
 
         # Now mine a big block
         mine_large_block(self.nodes[0], self.utxo_cache)
 
         # Store the hash; we'll request this later
         big_old_block = self.nodes[0].getbestblockhash()
         old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
         big_old_block = int(big_old_block, 16)
 
         # Advance to two days ago
         self.nodes[0].setmocktime(int(time.time()) - 2 * 60 * 60 * 24)
 
         # Mine one more block, so that the prior block looks old
         mine_large_block(self.nodes[0], self.utxo_cache)
 
         # We'll be requesting this new block too
         big_new_block = self.nodes[0].getbestblockhash()
         big_new_block = int(big_new_block, 16)
 
         # test_nodes[0] will test what happens if we just keep requesting the
         # same big old block too many times (expect: disconnect)
 
         getdata_request = msg_getdata()
         getdata_request.inv.append(CInv(2, big_old_block))
 
         max_bytes_per_day = 200 * 1024 * 1024
         daily_buffer = 144 * LEGACY_MAX_BLOCK_SIZE
         max_bytes_available = max_bytes_per_day - daily_buffer
         success_count = max_bytes_available // old_block_size
 
         # 144MB will be reserved for relaying new blocks, so expect this to
         # succeed for ~70 tries.
         for i in range(success_count):
             test_nodes[0].send_message(getdata_request)
             test_nodes[0].sync_with_ping()
             assert_equal(test_nodes[0].block_receive_map[big_old_block], i + 1)
 
         assert_equal(len(self.nodes[0].getpeerinfo()), 3)
         # At most a couple more tries should succeed (depending on how long
         # the test has been running so far).
         for i in range(3):
             test_nodes[0].send_message(getdata_request)
         test_nodes[0].wait_for_disconnect()
         assert_equal(len(self.nodes[0].getpeerinfo()), 2)
         self.log.info(
             "Peer 0 disconnected after downloading old block too many times")
 
         # Requesting the current block on test_nodes[1] should succeed indefinitely,
         # even when over the max upload target.
         # We'll try 200 times
         getdata_request.inv = [CInv(2, big_new_block)]
         for i in range(200):
             test_nodes[1].send_message(getdata_request)
             test_nodes[1].sync_with_ping()
             assert_equal(test_nodes[1].block_receive_map[big_new_block], i + 1)
 
         self.log.info("Peer 1 able to repeatedly download new block")
 
         # But if test_nodes[1] tries for an old block, it gets disconnected
         # too.
         getdata_request.inv = [CInv(2, big_old_block)]
         test_nodes[1].send_message(getdata_request)
         test_nodes[1].wait_for_disconnect()
         assert_equal(len(self.nodes[0].getpeerinfo()), 1)
 
         self.log.info("Peer 1 disconnected after trying to download old block")
 
         self.log.info("Advancing system time on node to clear counters...")
 
         # If we advance the time by 24 hours, then the counters should reset,
         # and test_nodes[2] should be able to retrieve the old block.
         self.nodes[0].setmocktime(int(time.time()))
         test_nodes[2].sync_with_ping()
         test_nodes[2].send_message(getdata_request)
         test_nodes[2].sync_with_ping()
         assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)
 
         self.log.info("Peer 2 able to download old block")
 
         [c.disconnect_node() for c in connections]
 
         # stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
         self.log.info("Restarting nodes with -whitelist=127.0.0.1")
-        stop_node(self.nodes[0], 0)
-        self.nodes[0] = start_node(0, self.options.tmpdir, [
-                                   "-whitelist=127.0.0.1", "-maxuploadtarget=1"])
+        self.stop_node(0)
+        self.nodes[0] = self.start_node(0, self.options.tmpdir, [
+                                        "-whitelist=127.0.0.1", "-maxuploadtarget=1"])
 
         # recreate/reconnect a test node
         test_nodes = [TestNode()]
         connections = [NodeConn('127.0.0.1', p2p_port(
             0), self.nodes[0], test_nodes[0])]
         test_nodes[0].add_connection(connections[0])
 
         NetworkThread().start()  # Start up network handling in another thread
         test_nodes[0].wait_for_verack()
 
         # retrieve 20 blocks which should be enough to break the 1MB limit
         getdata_request.inv = [CInv(2, big_new_block)]
         for i in range(20):
             test_nodes[0].send_message(getdata_request)
             test_nodes[0].sync_with_ping()
             assert_equal(test_nodes[0].block_receive_map[big_new_block], i + 1)
 
         getdata_request.inv = [CInv(2, big_old_block)]
         test_nodes[0].send_and_ping(getdata_request)
         # node is still connected because of the whitelist
         assert_equal(len(self.nodes[0].getpeerinfo()), 1)
 
         self.log.info(
             "Peer still connected after trying to download old block (whitelisted)")
 
 
 if __name__ == '__main__':
     MaxUploadTest().main()
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index 98517ccaf..fbd20c85e 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -1,98 +1,98 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2017 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test mempool persistence.
 
 By default, bitcoind will dump mempool on shutdown and
 then reload it on startup. This can be overridden with
 the -persistmempool=0 command line option.
 
 Test is as follows:
 
   - start node0, node1 and node2. node1 has -persistmempool=0
   - create 5 transactions on node2 to its own address. Note that these
     are not sent to node0 or node1 addresses because we don't want
     them to be saved in the wallet.
   - check that node0 and node1 have 5 transactions in their mempools
   - shutdown all nodes.
   - startup node0. Verify that it still has 5 transactions
     in its mempool. Shutdown node0. This tests that by default the
     mempool is persistent.
   - startup node1. Verify that its mempool is empty. Shutdown node1.
     This tests that with -persistmempool=0, the mempool is not
     dumped to disk when the node is shut down.
   - Restart node0 with -persistmempool=0. Verify that its mempool is
     empty. Shutdown node0. This tests that with -persistmempool=0,
     the mempool is not loaded from disk on start up.
   - Restart node0 with -persistmempool. Verify that it has 5
     transactions in its mempool. This tests that -persistmempool=0
     does not overwrite a previously valid mempool stored on disk.
 
 """
 import time
 
 from test_framework.mininode import wait_until
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 
 class MempoolPersistTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         # We need 3 nodes for this test. Node1 does not have a persistent mempool.
         self.num_nodes = 3
         self.setup_clean_chain = False
         self.extra_args = [[], ["-persistmempool=0"], []]
 
     def run_test(self):
         chain_height = self.nodes[0].getblockcount()
         assert_equal(chain_height, 200)
 
         self.log.debug("Mine a single block to get out of IBD")
         self.nodes[0].generate(1)
         self.sync_all()
 
         self.log.debug("Send 5 transactions from node2 (to its own address)")
         for i in range(5):
             self.nodes[2].sendtoaddress(
                 self.nodes[2].getnewaddress(), Decimal("10"))
         self.sync_all()
 
         self.log.debug(
             "Verify that node0 and node1 have 5 transactions in their mempools")
         assert_equal(len(self.nodes[0].getrawmempool()), 5)
         assert_equal(len(self.nodes[1].getrawmempool()), 5)
 
         self.log.debug(
             "Stop-start node0 and node1. Verify that node0 has the transactions in its mempool and node1 does not.")
-        stop_nodes(self.nodes)
+        self.stop_nodes()
         self.nodes = []
-        self.nodes.append(start_node(0, self.options.tmpdir))
-        self.nodes.append(start_node(1, self.options.tmpdir))
+        self.nodes.append(self.start_node(0, self.options.tmpdir))
+        self.nodes.append(self.start_node(1, self.options.tmpdir))
         # Give bitcoind a second to reload the mempool
         time.sleep(1)
         assert wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
         assert_equal(len(self.nodes[1].getrawmempool()), 0)
 
         self.log.debug(
             "Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
-        stop_nodes(self.nodes)
+        self.stop_nodes()
         self.nodes = []
-        self.nodes.append(start_node(
+        self.nodes.append(self.start_node(
             0, self.options.tmpdir, ["-persistmempool=0"]))
         # Give bitcoind a second to reload the mempool
         time.sleep(1)
         assert_equal(len(self.nodes[0].getrawmempool()), 0)
 
         self.log.debug(
             "Stop-start node0. Verify that it has the transactions in its mempool.")
-        stop_nodes(self.nodes)
+        self.stop_nodes()
         self.nodes = []
-        self.nodes.append(start_node(0, self.options.tmpdir))
+        self.nodes.append(self.start_node(0, self.options.tmpdir))
         assert wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
 
 
 if __name__ == '__main__':
     MempoolPersistTest().main()
diff --git a/test/functional/net.py b/test/functional/net.py
index 15d19dcee..f4e835c2a 100755
--- a/test/functional/net.py
+++ b/test/functional/net.py
@@ -1,97 +1,96 @@
 #!/usr/bin/env python3
 # Copyright (c) 2017 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test RPC calls related to net.
 
 Tests correspond to code in rpc/net.cpp.
 """
 
 import time
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
     assert_equal,
     assert_raises_jsonrpc,
     connect_nodes_bi,
-    p2p_port,
-)
+    p2p_port)
 
 
 class NetTest(BitcoinTestFramework):
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 2
 
     def run_test(self):
         self._test_connection_count()
         self._test_getnettotals()
         self._test_getnetworkinginfo()
         self._test_getaddednodeinfo()
 
     def _test_connection_count(self):
         # connect_nodes_bi connects each node to the other
         assert_equal(self.nodes[0].getconnectioncount(), 2)
 
     def _test_getnettotals(self):
         # check that getnettotals totalbytesrecv and totalbytessent
         # are consistent with getpeerinfo
         peer_info = self.nodes[0].getpeerinfo()
         assert_equal(len(peer_info), 2)
         net_totals = self.nodes[0].getnettotals()
         assert_equal(sum([peer['bytesrecv'] for peer in peer_info]),
                      net_totals['totalbytesrecv'])
         assert_equal(sum([peer['bytessent'] for peer in peer_info]),
                      net_totals['totalbytessent'])
         # test getnettotals and getpeerinfo by doing a ping
         # the bytes sent/received should change
         # note ping and pong are 32 bytes each
         self.nodes[0].ping()
         time.sleep(0.1)
         peer_info_after_ping = self.nodes[0].getpeerinfo()
         net_totals_after_ping = self.nodes[0].getnettotals()
         for before, after in zip(peer_info, peer_info_after_ping):
             assert_equal(before['bytesrecv_per_msg']['pong'] +
                          32, after['bytesrecv_per_msg']['pong'])
             assert_equal(before['bytessent_per_msg']['ping'] +
                          32, after['bytessent_per_msg']['ping'])
         assert_equal(net_totals['totalbytesrecv'] + 32 * 2,
                      net_totals_after_ping['totalbytesrecv'])
         assert_equal(net_totals['totalbytessent'] + 32 * 2,
                      net_totals_after_ping['totalbytessent'])
 
     def _test_getnetworkinginfo(self):
         assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
         assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
 
         self.nodes[0].setnetworkactive(False)
         assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
         timeout = 3
         while self.nodes[0].getnetworkinfo()['connections'] != 0:
             # Wait a bit for all sockets to close
             assert timeout > 0, 'not all connections closed in time'
             timeout -= 0.1
             time.sleep(0.1)
 
         self.nodes[0].setnetworkactive(True)
         connect_nodes_bi(self.nodes, 0, 1)
         assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
         assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
 
     def _test_getaddednodeinfo(self):
         assert_equal(self.nodes[0].getaddednodeinfo(), [])
         # add a node (node2) to node0
         ip_port = "127.0.0.1:{}".format(p2p_port(2))
         self.nodes[0].addnode(ip_port, 'add')
         # check that the node has indeed been added
         added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
         assert_equal(len(added_nodes), 1)
         assert_equal(added_nodes[0]['addednode'], ip_port)
         # check that a non-existent node returns an error
         assert_raises_jsonrpc(-24, "Node has not been added",
                               self.nodes[0].getaddednodeinfo, '1.1.1.1')
 
 
 if __name__ == '__main__':
     NetTest().main()
diff --git a/test/functional/p2p-versionbits-warning.py b/test/functional/p2p-versionbits-warning.py
index ed9c97c1a..fffbd82b7 100755
--- a/test/functional/p2p-versionbits-warning.py
+++ b/test/functional/p2p-versionbits-warning.py
@@ -1,144 +1,146 @@
 #!/usr/bin/env python3
 # Copyright (c) 2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.mininode import *
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 import re
 import time
 from test_framework.blocktools import create_block, create_coinbase
 
 '''
 Test version bits' warning system.
 
 Generate chains with block versions that appear to be signalling unknown
 soft-forks, and test that warning alerts are generated.
 '''
 
 VB_PERIOD = 144  # versionbits period length for regtest
 VB_THRESHOLD = 108  # versionbits activation threshold for regtest
 VB_TOP_BITS = 0x20000000
 VB_UNKNOWN_BIT = 27  # Choose a bit unassigned to any deployment
 
 WARN_UNKNOWN_RULES_MINED = "Unknown block versions being mined! It's possible unknown rules are in effect"
 WARN_UNKNOWN_RULES_ACTIVE = "unknown new rules activated (versionbit {})".format(
     VB_UNKNOWN_BIT)
 VB_PATTERN = re.compile("^Warning.*versionbit")
 
+
 class TestNode(NodeConnCB):
     def on_inv(self, conn, message):
         pass
 
+
 class VersionBitsWarningTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 1
 
     def setup_network(self):
         self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
         # Open and close to create zero-length file
         with open(self.alert_filename, 'w', encoding='utf8') as _:
             pass
         self.extra_args = [
             ["-alertnotify=echo %s >> \"" + self.alert_filename + "\""]]
         self.setup_nodes()
 
     # Send numblocks blocks via peer with nVersionToUse set.
     def send_blocks_with_version(self, peer, numblocks, nVersionToUse):
         tip = self.nodes[0].getbestblockhash()
         height = self.nodes[0].getblockcount()
         block_time = self.nodes[0].getblockheader(tip)["time"] + 1
         tip = int(tip, 16)
 
         for _ in range(numblocks):
             block = create_block(tip, create_coinbase(height + 1), block_time)
             block.nVersion = nVersionToUse
             block.solve()
             peer.send_message(msg_block(block))
             block_time += 1
             height += 1
             tip = block.sha256
         peer.sync_with_ping()
 
     def test_versionbits_in_alert_file(self):
         with open(self.alert_filename, 'r', encoding='utf8') as f:
             alert_text = f.read()
         assert(VB_PATTERN.match(alert_text))
 
     def run_test(self):
         # Setup the p2p connection and start up the network thread.
         test_node = TestNode()
 
         connections = []
         connections.append(
             NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
         test_node.add_connection(connections[0])
 
         NetworkThread().start()  # Start up network handling in another thread
 
         # Test logic begins here
         test_node.wait_for_verack()
 
         # 1. Have the node mine one period worth of blocks
         self.nodes[0].generate(VB_PERIOD)
 
         # 2. Now build one period of blocks on the tip, with < VB_THRESHOLD
         # blocks signaling some unknown bit.
         nVersion = VB_TOP_BITS | (1 << VB_UNKNOWN_BIT)
         self.send_blocks_with_version(test_node, VB_THRESHOLD - 1, nVersion)
 
         # Fill rest of period with regular version blocks
         self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD + 1)
         # Check that we're not getting any versionbit-related errors in
         # get*info()
         assert(not VB_PATTERN.match(self.nodes[0].getinfo()["errors"]))
         assert(not VB_PATTERN.match(self.nodes[0].getmininginfo()["errors"]))
         assert(not VB_PATTERN.match(
             self.nodes[0].getnetworkinfo()["warnings"]))
 
         # 3. Now build one period of blocks with >= VB_THRESHOLD blocks signaling
         # some unknown bit
         self.send_blocks_with_version(test_node, VB_THRESHOLD, nVersion)
         self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD)
         # Might not get a versionbits-related alert yet, as we should
         # have gotten a different alert due to more than 51/100 blocks
         # being of unexpected version.
         # Check that get*info() shows some kind of error.
         assert(WARN_UNKNOWN_RULES_MINED in self.nodes[0].getinfo()["errors"])
         assert(WARN_UNKNOWN_RULES_MINED in self.nodes[
                0].getmininginfo()["errors"])
         assert(WARN_UNKNOWN_RULES_MINED in self.nodes[
                0].getnetworkinfo()["warnings"])
 
         # Mine a period worth of expected blocks so the generic block-version warning
         # is cleared, and restart the node. This should move the versionbit state
         # to ACTIVE.
         self.nodes[0].generate(VB_PERIOD)
-        stop_nodes(self.nodes)
+        self.stop_nodes()
         # Empty out the alert file
         with open(self.alert_filename, 'w', encoding='utf8') as _:
             pass
-        self.nodes = start_nodes(
+        self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, self.extra_args)
 
         # Connecting one block should be enough to generate an error.
         self.nodes[0].generate(1)
         assert(WARN_UNKNOWN_RULES_ACTIVE in self.nodes[0].getinfo()["errors"])
-        assert(WARN_UNKNOWN_RULES_ACTIVE in self.nodes[
-               0].getmininginfo()["errors"])
-        assert(WARN_UNKNOWN_RULES_ACTIVE in self.nodes[
-               0].getnetworkinfo()["warnings"])
-        stop_nodes(self.nodes)
+        assert(
+            WARN_UNKNOWN_RULES_ACTIVE in self.nodes[0].getmininginfo()["errors"])
+        assert(WARN_UNKNOWN_RULES_ACTIVE in self.nodes[0].getnetworkinfo()[
+               "warnings"])
+        self.stop_nodes()
         self.test_versionbits_in_alert_file()
 
         # Test framework expects the node to still be running...
-        self.nodes = start_nodes(
+        self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, self.extra_args)
 
 
 if __name__ == '__main__':
     VersionBitsWarningTest().main()
diff --git a/test/functional/proxy_test.py b/test/functional/proxy_test.py
index 27b1a0e1b..1a0f09613 100755
--- a/test/functional/proxy_test.py
+++ b/test/functional/proxy_test.py
@@ -1,220 +1,220 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
-import socket
-import os
-
-from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    PORT_MIN,
-    PORT_RANGE,
-    start_nodes,
-    assert_equal,
-)
-from test_framework.netutil import test_ipv6_local
 '''
 Test plan:
 - Start bitcoind's with different proxy configurations
 - Use addnode to initiate connections
 - Verify that proxies are connected to, and the right connection command is given
 - Proxy configurations to test on bitcoind side:
     - `-proxy` (proxy everything)
     - `-onion` (proxy just onions)
     - `-proxyrandomize` Circuit randomization
 - Proxy configurations to test on proxy side,
     - support no authentication (other proxy)
     - support no authentication + user/pass authentication (Tor)
     - proxy on IPv6
 
 - Create various proxies (as threads)
 - Create bitcoinds that connect to them
 - Manipulate the bitcoinds using addnode (onetry) and observe effects
 
 addnode connect to IPv4
 addnode connect to IPv6
 addnode connect to onion
 addnode connect to generic DNS name
 '''
 
+import socket
+import os
+
+from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+    PORT_MIN,
+    PORT_RANGE,
+    assert_equal,
+)
+from test_framework.netutil import test_ipv6_local
+
 RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE  # Start after p2p and rpc ports
 
 
 class ProxyTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 4
         self.setup_clean_chain = False
 
     def setup_nodes(self):
         self.have_ipv6 = test_ipv6_local()
         # Create two proxies on different ports
         # ... one unauthenticated
         self.conf1 = Socks5Configuration()
         self.conf1.addr = ('127.0.0.1', RANGE_BEGIN + (os.getpid() % 1000))
         self.conf1.unauth = True
         self.conf1.auth = False
         # ... one supporting authenticated and unauthenticated (Tor)
         self.conf2 = Socks5Configuration()
         self.conf2.addr = (
             '127.0.0.1', RANGE_BEGIN + 1000 + (os.getpid() % 1000))
         self.conf2.unauth = True
         self.conf2.auth = True
         if self.have_ipv6:
             # ... one on IPv6 with similar configuration
             self.conf3 = Socks5Configuration()
             self.conf3.af = socket.AF_INET6
             self.conf3.addr = (
                 '::1', RANGE_BEGIN + 2000 + (os.getpid() % 1000))
             self.conf3.unauth = True
             self.conf3.auth = True
         else:
             self.log.info("Warning: testing without local IPv6 support")
 
         self.serv1 = Socks5Server(self.conf1)
         self.serv1.start()
         self.serv2 = Socks5Server(self.conf2)
         self.serv2.start()
         if self.have_ipv6:
             self.serv3 = Socks5Server(self.conf3)
             self.serv3.start()
 
         # Note: proxies are not used to connect to local nodes
         # this is because the proxy to use is based on CService.GetNetwork(),
         # which returns NET_UNROUTABLE for localhost
         args = [
             ['-listen', '-proxy=%s:%i' %
                 (self.conf1.addr), '-proxyrandomize=1'],
             ['-listen', '-proxy=%s:%i' %
                 (self.conf1.addr), '-onion=%s:%i' % (self.conf2.addr), '-proxyrandomize=0'],
             ['-listen', '-proxy=%s:%i' %
                 (self.conf2.addr), '-proxyrandomize=1'],
             []
         ]
         if self.have_ipv6:
             args[3] = ['-listen', '-proxy=[%s]:%i' %
                        (self.conf3.addr), '-proxyrandomize=0', '-noonion']
-        self.nodes = start_nodes(
+        self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, extra_args=args)
 
     def node_test(self, node, proxies, auth, test_onion=True):
         rv = []
         # Test: outgoing IPv4 connection through node
         node.addnode("15.61.23.23:1234", "onetry")
         cmd = proxies[0].queue.get()
         assert(isinstance(cmd, Socks5Command))
         # Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME,
         # even if connecting directly to IPv4/IPv6
         assert_equal(cmd.atyp, AddressType.DOMAINNAME)
         assert_equal(cmd.addr, b"15.61.23.23")
         assert_equal(cmd.port, 1234)
         if not auth:
             assert_equal(cmd.username, None)
             assert_equal(cmd.password, None)
         rv.append(cmd)
 
         if self.have_ipv6:
             # Test: outgoing IPv6 connection through node
             node.addnode(
                 "[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
             cmd = proxies[1].queue.get()
             assert(isinstance(cmd, Socks5Command))
             # Note: bitcoind's SOCKS5 implementation only sends atyp
             # DOMAINNAME, even if connecting directly to IPv4/IPv6
             assert_equal(cmd.atyp, AddressType.DOMAINNAME)
             assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
             assert_equal(cmd.port, 5443)
             if not auth:
                 assert_equal(cmd.username, None)
                 assert_equal(cmd.password, None)
             rv.append(cmd)
 
         if test_onion:
             # Test: outgoing onion connection through node
             node.addnode("bitcoinostk4e4re.onion:8333", "onetry")
             cmd = proxies[2].queue.get()
             assert(isinstance(cmd, Socks5Command))
             assert_equal(cmd.atyp, AddressType.DOMAINNAME)
             assert_equal(cmd.addr, b"bitcoinostk4e4re.onion")
             assert_equal(cmd.port, 8333)
             if not auth:
                 assert_equal(cmd.username, None)
                 assert_equal(cmd.password, None)
             rv.append(cmd)
 
         # Test: outgoing DNS name connection through node
         node.addnode("node.noumenon:8333", "onetry")
         cmd = proxies[3].queue.get()
         assert(isinstance(cmd, Socks5Command))
         assert_equal(cmd.atyp, AddressType.DOMAINNAME)
         assert_equal(cmd.addr, b"node.noumenon")
         assert_equal(cmd.port, 8333)
         if not auth:
             assert_equal(cmd.username, None)
             assert_equal(cmd.password, None)
         rv.append(cmd)
 
         return rv
 
     def run_test(self):
         # basic -proxy
         self.node_test(
             self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)
 
         # -proxy plus -onion
         self.node_test(
             self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)
 
         # -proxy plus -onion, -proxyrandomize
         rv = self.node_test(
             self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
         # Check that credentials as used for -proxyrandomize connections are
         # unique
         credentials = set((x.username, x.password) for x in rv)
         assert_equal(len(credentials), len(rv))
 
         if self.have_ipv6:
             # proxy on IPv6 localhost
             self.node_test(
                 self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False, False)
 
         def networks_dict(d):
             r = {}
             for x in d['networks']:
                 r[x['name']] = x
             return r
 
         # test RPC getnetworkinfo
         n0 = networks_dict(self.nodes[0].getnetworkinfo())
         for net in ['ipv4', 'ipv6', 'onion']:
             assert_equal(n0[net]['proxy'], '%s:%i' % (self.conf1.addr))
             assert_equal(n0[net]['proxy_randomize_credentials'], True)
         assert_equal(n0['onion']['reachable'], True)
 
         n1 = networks_dict(self.nodes[1].getnetworkinfo())
         for net in ['ipv4', 'ipv6']:
             assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr))
             assert_equal(n1[net]['proxy_randomize_credentials'], False)
         assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
         assert_equal(n1['onion']['proxy_randomize_credentials'], False)
         assert_equal(n1['onion']['reachable'], True)
 
         n2 = networks_dict(self.nodes[2].getnetworkinfo())
         for net in ['ipv4', 'ipv6', 'onion']:
             assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
             assert_equal(n2[net]['proxy_randomize_credentials'], True)
         assert_equal(n2['onion']['reachable'], True)
 
         if self.have_ipv6:
             n3 = networks_dict(self.nodes[3].getnetworkinfo())
             for net in ['ipv4', 'ipv6']:
                 assert_equal(n3[net]['proxy'], '[%s]:%i' % (self.conf3.addr))
                 assert_equal(n3[net]['proxy_randomize_credentials'], False)
             assert_equal(n3['onion']['reachable'], False)
 
 
 if __name__ == '__main__':
     ProxyTest().main()
diff --git a/test/functional/pruning.py b/test/functional/pruning.py
index 052a04bd8..287925c8d 100755
--- a/test/functional/pruning.py
+++ b/test/functional/pruning.py
@@ -1,516 +1,503 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 #
 # Test pruning code
 # ********
 # WARNING:
 # This test uses 4GB of disk space.
 # This test takes 30 mins or more (up to 2 hours)
 # ********
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 import time
 import os
 
 MIN_BLOCKS_TO_KEEP = 288
 
 # Rescans start at the earliest block up to 2 hours before a key timestamp, so
 # the manual prune RPC avoids pruning blocks in the same window to be
 # compatible with pruning based on key creation time.
 TIMESTAMP_WINDOW = 2 * 60 * 60
 
 
 def calc_usage(blockdir):
     return sum(os.path.getsize(blockdir + f) for f in os.listdir(blockdir) if os.path.isfile(blockdir + f)) / (1024. * 1024.)
 
 
 class PruneTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 6
 
         # Create nodes 0 and 1 to mine.
         # Create node 2 to test pruning.
         self.full_node_default_args = ["-maxreceivebuffer=20000", "-blockmaxsize=999000", "-checkblocks=5",
                                        "-limitdescendantcount=100", "-limitdescendantsize=5000", "-limitancestorcount=100", "-limitancestorsize=5000"]
         # Create nodes 3 and 4 to test manual pruning (they will be re-started with manual pruning later)
         # Create nodes 5 to test wallet in prune mode, but do not connect
         self.extra_args = [self.full_node_default_args,
                            self.full_node_default_args,
                            ["-maxreceivebuffer=20000", "-prune=550"],
                            ["-maxreceivebuffer=20000", "-blockmaxsize=999000"],
                            ["-maxreceivebuffer=20000", "-blockmaxsize=999000"],
                            ["-prune=550"]]
 
     def setup_network(self):
         self.setup_nodes()
 
         self.prunedir = self.options.tmpdir + "/node2/regtest/blocks/"
 
         connect_nodes(self.nodes[0], 1)
         connect_nodes(self.nodes[1], 2)
         connect_nodes(self.nodes[2], 0)
         connect_nodes(self.nodes[0], 3)
         connect_nodes(self.nodes[0], 4)
         sync_blocks(self.nodes[0:5])
 
     def create_big_chain(self):
         # Start by creating some coinbases we can spend later
         self.nodes[1].generate(200)
         sync_blocks(self.nodes[0:2])
         self.nodes[0].generate(150)
         # Then mine enough full blocks to create more than 550MiB of data
         for i in range(645):
             mine_large_block(self.nodes[0], self.utxo_cache_0)
 
         sync_blocks(self.nodes[0:5])
 
     def test_height_min(self):
         if not os.path.isfile(self.prunedir + "blk00000.dat"):
             raise AssertionError("blk00000.dat is missing, pruning too early")
         self.log.info("Success")
         self.log.info("Though we're already using more than 550MiB, current usage: %d" %
                       calc_usage(self.prunedir))
         self.log.info(
             "Mining 25 more blocks should cause the first block file to be pruned")
         # Pruning doesn't run until we're allocating another chunk, 20 full
         # blocks past the height cutoff will ensure this
         for i in range(25):
             mine_large_block(self.nodes[0], self.utxo_cache_0)
 
         waitstart = time.time()
         while os.path.isfile(self.prunedir + "blk00000.dat"):
             time.sleep(0.1)
             if time.time() - waitstart > 30:
                 raise AssertionError(
                     "blk00000.dat not pruned when it should be")
 
         self.log.info("Success")
         usage = calc_usage(self.prunedir)
         self.log.info("Usage should be below target: %d" % usage)
         if (usage > 550):
             raise AssertionError("Pruning target not being met")
 
     def create_chain_with_staleblocks(self):
         # Create stale blocks in manageable sized chunks
         self.log.info(
             "Mine 24 (stale) blocks on Node 1, followed by 25 (main chain) block reorg from Node 0, for 12 rounds")
 
         for j in range(12):
             # Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain
             # Node 2 stays connected, so it hears about the stale blocks and then reorg's when node0 reconnects
             # Stopping node 0 also clears its mempool, so it doesn't have
             # node1's transactions to accidentally mine
             self.stop_node(0)
-            self.nodes[0] = start_node(
+            self.nodes[0] = self.start_node(
                 0, self.options.tmpdir, self.full_node_default_args, timewait=900)
             # Mine 24 blocks in node 1
             for i in range(24):
                 if j == 0:
                     mine_large_block(self.nodes[1], self.utxo_cache_1)
                 else:
                     # Add node1's wallet transactions back to the mempool, to
                     # avoid the mined blocks from being too small.
                     self.nodes[1].resendwallettransactions()
                     # tx's already in mempool from previous disconnects
                     self.nodes[1].generate(1)
 
             # Reorg back with 25 block chain from node 0
             for i in range(25):
                 mine_large_block(self.nodes[0], self.utxo_cache_0)
 
             # Create connections in the order so both nodes can see the reorg
             # at the same time
             connect_nodes(self.nodes[1], 0)
             connect_nodes(self.nodes[2], 0)
             sync_blocks(self.nodes[0:3])
 
         self.log.info("Usage can be over target because of high stale rate: %d" %
                       calc_usage(self.prunedir))
 
     def reorg_test(self):
         # Node 1 will mine a 300 block chain starting 287 blocks back from Node
         # 0 and Node 2's tip. This will cause Node 2 to do a reorg requiring
         # 288 blocks of undo data to the reorg_test chain. Reboot node 1 to
         # clear its mempool (hopefully make the invalidate faster). Lower the
         # block max size so we don't keep mining all our big mempool
         # transactions (from disconnected blocks)
         self.stop_node(1)
-        self.nodes[1] = start_node(
-            1, self.options.tmpdir, ["-maxreceivebuffer=20000",
-                                     "-blockmaxsize=5000",
-                                     "-checkblocks=5",
-                                     "-disablesafemode"],
-            timewait=900)
+        self.nodes[1] = self.start_node(1, self.options.tmpdir, [
+                                        "-maxreceivebuffer=20000", "-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"], timewait=900)
 
         height = self.nodes[1].getblockcount()
         self.log.info("Current block height: %d" % height)
 
         invalidheight = height - 287
         badhash = self.nodes[1].getblockhash(invalidheight)
         self.log.info("Invalidating block %s at height %d" %
                       (badhash, invalidheight))
         self.nodes[1].invalidateblock(badhash)
 
         # We've now switched to our previously mined-24 block fork on node 1, but thats not what we want.
         # So invalidate that fork as well, until we're on the same chain as
         # node 0/2 (but at an ancestor 288 blocks ago)
         mainchainhash = self.nodes[0].getblockhash(invalidheight - 1)
         curhash = self.nodes[1].getblockhash(invalidheight - 1)
         while curhash != mainchainhash:
             self.nodes[1].invalidateblock(curhash)
             curhash = self.nodes[1].getblockhash(invalidheight - 1)
 
         assert(self.nodes[1].getblockcount() == invalidheight - 1)
         self.log.info("New best height: %d" % self.nodes[1].getblockcount())
 
         # Reboot node1 to clear those giant tx's from mempool
         self.stop_node(1)
-        self.nodes[1] = start_node(
-            1, self.options.tmpdir, ["-maxreceivebuffer=20000",
-                                     "-blockmaxsize=5000",
-                                     "-checkblocks=5",
-                                     "-disablesafemode",
-                                     "-blockmaxsize=1000000"],
-            timewait=900)
+        self.nodes[1] = self.start_node(1, self.options.tmpdir, [
+                                        "-maxreceivebuffer=20000", "-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"], timewait=900)
 
         self.log.info("Generating new longer chain of 300 more blocks")
         self.nodes[1].generate(300)
 
         self.log.info("Reconnect nodes")
         connect_nodes(self.nodes[0], 1)
         connect_nodes(self.nodes[2], 1)
         sync_blocks(self.nodes[0:3], timeout=120)
 
         self.log.info("Verify height on node 2: %d" %
                       self.nodes[2].getblockcount())
         self.log.info("Usage possibly still high bc of stale blocks in block files: %d" %
                       calc_usage(self.prunedir))
 
         self.log.info(
             "Mine 220 more blocks so we have requisite history (some blocks will be big and cause pruning of previous chain)")
 
         # Get node0's wallet transactions back in its mempool, to avoid the
         # mined blocks from being too small.
         self.nodes[0].resendwallettransactions()
 
         for i in range(22):
             # This can be slow, so do this in multiple RPC calls to avoid
             # RPC timeouts.
             # node 0 has many large tx's in its mempool from the disconnects
             self.nodes[0].generate(10)
         sync_blocks(self.nodes[0:3], timeout=300)
 
         usage = calc_usage(self.prunedir)
         self.log.info("Usage should be below target: %d" % usage)
         if (usage > 550):
             raise AssertionError("Pruning target not being met")
 
         return invalidheight, badhash
 
     def reorg_back(self):
         # Verify that a block on the old main chain fork has been pruned away
         assert_raises_jsonrpc(
             -1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
         self.log.info("Will need to redownload block %d" % self.forkheight)
 
         # Verify that we have enough history to reorg back to the fork point.
         # Although this is more than 288 blocks, because this chain was written
         # more recently and only its other 299 small and 220 large block are in
         # the block files after it, its expected to still be retained.
         self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight))
 
         first_reorg_height = self.nodes[2].getblockcount()
         curchainhash = self.nodes[2].getblockhash(self.mainchainheight)
         self.nodes[2].invalidateblock(curchainhash)
         goalbestheight = self.mainchainheight
         goalbesthash = self.mainchainhash2
 
         # As of 0.10 the current block download logic is not able to reorg to
         # the original chain created in create_chain_with_stale_blocks because
         # it doesn't know of any peer thats on that chain from which to
         # redownload its missing blocks. Invalidate the reorg_test chain in
         # node 0 as well, it can successfully switch to the original chain
         # because it has all the block data. However it must mine enough blocks
         # to have a more work chain than the reorg_test chain in order to
         # trigger node 2's block download logic. At this point node 2 is within
         # 288 blocks of the fork point so it will preserve its ability to
         # reorg.
         if self.nodes[2].getblockcount() < self.mainchainheight:
             blocks_to_mine = first_reorg_height + 1 - self.mainchainheight
             self.log.info(
                 "Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed: %d" % blocks_to_mine)
             self.nodes[0].invalidateblock(curchainhash)
             assert(self.nodes[0].getblockcount() == self.mainchainheight)
             assert(self.nodes[0].getbestblockhash() == self.mainchainhash2)
             goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1]
             goalbestheight = first_reorg_height + 1
 
         self.log.info(
             "Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
         waitstart = time.time()
         while self.nodes[2].getblockcount() < goalbestheight:
             time.sleep(0.1)
             if time.time() - waitstart > 900:
                 raise AssertionError("Node 2 didn't reorg to proper height")
         assert(self.nodes[2].getbestblockhash() == goalbesthash)
         # Verify we can now have the data for a block previously pruned
         assert(self.nodes[2].getblock(
             self.forkhash)["height"] == self.forkheight)
 
     def manual_test(self, node_number, use_timestamp):
         # at this point, node has 995 blocks and has not yet run in prune mode
-        node = self.nodes[node_number] = start_node(
-            node_number, self.options.tmpdir,
-            ["-blockmaxsize=1000000"], timewait=900)
+        node = self.nodes[node_number] = self.start_node(
+            node_number, self.options.tmpdir, timewait=900)
         assert_equal(node.getblockcount(), 995)
         assert_raises_jsonrpc(
             -1, "not in prune mode", node.pruneblockchain, 500)
         self.stop_node(node_number)
 
         # now re-start in manual pruning mode
-        node = self.nodes[node_number] = start_node(
-            node_number, self.options.tmpdir,
-            ["-prune=1", "-blockmaxsize=1000000"], timewait=900)
+        node = self.nodes[node_number] = self.start_node(
+            node_number, self.options.tmpdir, ["-prune=1"], timewait=900)
         assert_equal(node.getblockcount(), 995)
 
         def height(index):
             if use_timestamp:
                 return node.getblockheader(node.getblockhash(index))["time"] + TIMESTAMP_WINDOW
             else:
                 return index
 
         def prune(index, expected_ret=None):
             ret = node.pruneblockchain(height(index))
             # Check the return value. When use_timestamp is True, just check
             # that the return value is less than or equal to the expected
             # value, because when more than one block is generated per second,
             # a timestamp will not be granular enough to uniquely identify an
             # individual block.
             if expected_ret is None:
                 expected_ret = index
             if use_timestamp:
                 assert_greater_than(ret, 0)
                 assert_greater_than(expected_ret + 1, ret)
             else:
                 assert_equal(ret, expected_ret)
 
         def has_block(index):
             return os.path.isfile(self.options.tmpdir + "/node{}/regtest/blocks/blk{:05}.dat".format(node_number, index))
 
         # should not prune because chain tip of node 3 (995) < PruneAfterHeight
         # (1000)
         assert_raises_jsonrpc(
             -1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
 
         # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
         node.generate(6)
         assert_equal(node.getblockchaininfo()["blocks"], 1001)
 
         # negative heights should raise an exception
         assert_raises_jsonrpc(-8, "Negative", node.pruneblockchain, -10)
 
         # height=100 too low to prune first block file so this is a no-op
         prune(100)
         if not has_block(0):
             raise AssertionError(
                 "blk00000.dat is missing when should still be there")
 
         # Does nothing
         node.pruneblockchain(height(0))
         if not has_block(0):
             raise AssertionError(
                 "blk00000.dat is missing when should still be there")
 
         # height=500 should prune first file
         prune(500)
         if has_block(0):
             raise AssertionError(
                 "blk00000.dat is still there, should be pruned by now")
         if not has_block(1):
             raise AssertionError(
                 "blk00001.dat is missing when should still be there")
 
         # height=650 should prune second file
         prune(650)
         if has_block(1):
             raise AssertionError(
                 "blk00001.dat is still there, should be pruned by now")
 
         # height=1000 should not prune anything more, because tip-288 is in
         # blk00002.dat.
         prune(1000, 1001 - MIN_BLOCKS_TO_KEEP)
         if not has_block(2):
             raise AssertionError(
                 "blk00002.dat is still there, should be pruned by now")
 
         # advance the tip so blk00002.dat and blk00003.dat can be pruned (the
         # last 288 blocks should now be in blk00004.dat)
         node.generate(288)
         prune(1000)
         if has_block(2):
             raise AssertionError(
                 "blk00002.dat is still there, should be pruned by now")
         if has_block(3):
             raise AssertionError(
                 "blk00003.dat is still there, should be pruned by now")
 
         # stop node, start back up with auto-prune at 550MB, make sure still
         # runs
         self.stop_node(node_number)
-        self.nodes[node_number] = start_node(
-            node_number, self.options.tmpdir,
-            ["-prune=550", "-blockmaxsize=1000000"], timewait=900)
+        self.nodes[node_number] = self.start_node(
+            node_number, self.options.tmpdir, ["-prune=550"], timewait=900)
 
         self.log.info("Success")
 
     def wallet_test(self):
         # check that the pruning node's wallet is still in good shape
         self.log.info("Stop and start pruning node to trigger wallet rescan")
         self.stop_node(2)
-        start_node(2, self.options.tmpdir, ["-prune=550"])
+        self.start_node(2, self.options.tmpdir, ["-prune=550"])
         self.log.info("Success")
 
         # check that wallet loads loads successfully when restarting a pruned node after IBD.
         # this was reported to fail in #7494.
         self.log.info("Syncing node 5 to test wallet")
         connect_nodes(self.nodes[0], 5)
         nds = [self.nodes[0], self.nodes[5]]
         sync_blocks(nds, wait=5, timeout=300)
-        # Stop and start to trigger rescan
-        self.stop_node(5)
-        start_node(5, self.options.tmpdir, ["-prune=550"])
+        self.stop_node(5)  # stop and start to trigger rescan
+        self.start_node(5, self.options.tmpdir, ["-prune=550"])
         self.log.info("Success")
 
     def run_test(self):
         self.log.info(
             "Warning! This test requires 4GB of disk space and takes over 30 mins (up to 2 hours)")
         self.log.info("Mining a big blockchain of 995 blocks")
 
         # Determine default relay fee
         self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
 
         # Cache for utxos, as the listunspent may take a long time later in the
         # test
         self.utxo_cache_0 = []
         self.utxo_cache_1 = []
 
         self.create_big_chain()
         # Chain diagram key:
         # *   blocks on main chain
         # +,&,$,@ blocks on other forks
         # X   invalidated block
         # N1  Node 1
         #
         # Start by mining a simple chain that all nodes have
         # N0=N1=N2 **...*(995)
 
         # stop manual-pruning node with 995 blocks
         self.stop_node(3)
         self.stop_node(4)
 
         self.log.info(
             "Check that we haven't started pruning yet because we're below PruneAfterHeight")
         self.test_height_min()
         # Extend this chain past the PruneAfterHeight
         # N0=N1=N2 **...*(1020)
 
         self.log.info(
             "Check that we'll exceed disk space target if we have a very high stale block rate")
         self.create_chain_with_staleblocks()
         # Disconnect N0
         # And mine a 24 block chain on N1 and a separate 25 block chain on N0
         # N1=N2 **...*+...+(1044)
         # N0    **...**...**(1045)
         #
         # reconnect nodes causing reorg on N1 and N2
         # N1=N2 **...*(1020) *...**(1045)
         #                   \
         #                    +...+(1044)
         #
         # repeat this process until you have 12 stale forks hanging off the
         # main chain on N1 and N2
         # N0    *************************...***************************(1320)
         #
         # N1=N2 **...*(1020) *...**(1045) *..         ..**(1295) *...**(1320)
         #                   \            \                      \
         #                    +...+(1044)  &..                    $...$(1319)
 
         # Save some current chain state for later use
         self.mainchainheight = self.nodes[2].getblockcount()  # 1320
         self.mainchainhash2 = self.nodes[2].getblockhash(self.mainchainheight)
 
         self.log.info("Check that we can survive a 288 block reorg still")
         (self.forkheight, self.forkhash) = self.reorg_test()  # (1033, )
         # Now create a 288 block reorg by mining a longer chain on N1
         # First disconnect N1
         # Then invalidate 1033 on main chain and 1032 on fork so height is 1032 on main chain
         # N1   **...*(1020) **...**(1032)X..
         #                  \
         #                   ++...+(1031)X..
         #
         # Now mine 300 more blocks on N1
         # N1    **...*(1020) **...**(1032) @@...@(1332)
         #                 \               \
         #                  \               X...
         #                   \                 \
         #                    ++...+(1031)X..   ..
         #
         # Reconnect nodes and mine 220 more blocks on N1
         # N1    **...*(1020) **...**(1032) @@...@@@(1552)
         #                 \               \
         #                  \               X...
         #                   \                 \
         #                    ++...+(1031)X..   ..
         #
         # N2    **...*(1020) **...**(1032) @@...@@@(1552)
         #                 \               \
         #                  \               *...**(1320)
         #                   \                 \
         #                    ++...++(1044)     ..
         #
         # N0    ********************(1032) @@...@@@(1552)
         #                                 \
         #                                  *...**(1320)
 
         self.log.info(
             "Test that we can rerequest a block we previously pruned if needed for a reorg")
         self.reorg_back()
         # Verify that N2 still has block 1033 on current chain (@), but not on main chain (*)
         # Invalidate 1033 on current chain (@) on N2 and we should be able to reorg to
         # original main chain (*), but will require redownload of some blocks
         # In order to have a peer we think we can download from, must also perform this invalidation
         # on N0 and mine a new longest chain to trigger.
         # Final result:
         # N0    ********************(1032) **...****(1553)
         #                                 \
         #                                  X@...@@@(1552)
         #
         # N2    **...*(1020) **...**(1032) **...****(1553)
         #                 \               \
         #                  \               X@...@@@(1552)
         #                   \
         #                    +..
         #
         # N1 doesn't change because 1033 on main chain (*) is invalid
 
         self.log.info("Test manual pruning with block indices")
         self.manual_test(3, use_timestamp=False)
 
         self.log.info("Test manual pruning with timestamps")
         self.manual_test(4, use_timestamp=True)
 
         self.log.info("Test wallet re-scan")
         self.wallet_test()
 
         self.log.info("Done")
 
 
 if __name__ == '__main__':
     PruneTest().main()
diff --git a/test/functional/receivedby.py b/test/functional/receivedby.py
index ef6c0b12a..6f9358807 100755
--- a/test/functional/receivedby.py
+++ b/test/functional/receivedby.py
@@ -1,165 +1,165 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 # Exercise the listreceivedbyaddress API
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 
 def get_sub_array_from_array(object_array, to_match):
     '''
     Finds and returns a sub array from an array of arrays.
     to_match should be a unique idetifier of a sub array
     '''
     for item in object_array:
         all_match = True
         for key, value in to_match.items():
             if item[key] != value:
                 all_match = False
         if not all_match:
             continue
         return item
     return []
 
 
 class ReceivedByTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 4
         self.setup_clean_chain = False
 
     def setup_nodes(self):
         # This test requires mocktime
         enable_mocktime()
-        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
+        self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir)
 
     def run_test(self):
         '''
         listreceivedbyaddress Test
         '''
         # Send from node 0 to 1
         addr = self.nodes[1].getnewaddress()
         txid = self.nodes[0].sendtoaddress(addr, 0.1)
         self.sync_all()
 
         # Check not listed in listreceivedbyaddress because has 0 confirmations
         assert_array_result(self.nodes[1].listreceivedbyaddress(),
                             {"address": addr},
                             {},
                             True)
         # Bury Tx under 10 block so it will be returned by
         # listreceivedbyaddress
         self.nodes[1].generate(10)
         self.sync_all()
         assert_array_result(self.nodes[1].listreceivedbyaddress(),
                             {"address": addr},
                             {"address": addr, "account": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]})
         # With min confidence < 10
         assert_array_result(self.nodes[1].listreceivedbyaddress(5),
                             {"address": addr},
                             {"address": addr, "account": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]})
         # With min confidence > 10, should not find Tx
         assert_array_result(
             self.nodes[1].listreceivedbyaddress(11), {"address": addr}, {}, True)
 
         # Empty Tx
         addr = self.nodes[1].getnewaddress()
         assert_array_result(self.nodes[1].listreceivedbyaddress(0, True),
                             {"address": addr},
                             {"address": addr, "account": "", "amount": 0, "confirmations": 0, "txids": []})
 
         '''
             getreceivedbyaddress Test
         '''
         # Send from node 0 to 1
         addr = self.nodes[1].getnewaddress()
         txid = self.nodes[0].sendtoaddress(addr, 0.1)
         self.sync_all()
 
         # Check balance is 0 because of 0 confirmations
         balance = self.nodes[1].getreceivedbyaddress(addr)
         if balance != Decimal("0.0"):
             raise AssertionError(
                 "Wrong balance returned by getreceivedbyaddress, %0.2f" % (balance))
 
         # Check balance is 0.1
         balance = self.nodes[1].getreceivedbyaddress(addr, 0)
         if balance != Decimal("0.1"):
             raise AssertionError(
                 "Wrong balance returned by getreceivedbyaddress, %0.2f" % (balance))
 
         # Bury Tx under 10 block so it will be returned by the default
         # getreceivedbyaddress
         self.nodes[1].generate(10)
         self.sync_all()
         balance = self.nodes[1].getreceivedbyaddress(addr)
         if balance != Decimal("0.1"):
             raise AssertionError(
                 "Wrong balance returned by getreceivedbyaddress, %0.2f" % (balance))
 
         '''
             listreceivedbyaccount + getreceivedbyaccount Test
         '''
         # set pre-state
         addrArr = self.nodes[1].getnewaddress()
         account = self.nodes[1].getaccount(addrArr)
         received_by_account_json = get_sub_array_from_array(
             self.nodes[1].listreceivedbyaccount(), {"account": account})
         if len(received_by_account_json) == 0:
             raise AssertionError("No accounts found in node")
         balance_by_account = self.nodes[1].getreceivedbyaccount(account)
 
         txid = self.nodes[0].sendtoaddress(addr, 0.1)
         self.sync_all()
 
         # listreceivedbyaccount should return received_by_account_json because
         # of 0 confirmations
         assert_array_result(self.nodes[1].listreceivedbyaccount(),
                             {"account": account},
                             received_by_account_json)
 
         # getreceivedbyaddress should return same balance because of 0
         # confirmations
         balance = self.nodes[1].getreceivedbyaccount(account)
         if balance != balance_by_account:
             raise AssertionError(
                 "Wrong balance returned by getreceivedbyaccount, %0.2f" % (balance))
 
         self.nodes[1].generate(10)
         self.sync_all()
         # listreceivedbyaccount should return updated account balance
         assert_array_result(self.nodes[1].listreceivedbyaccount(),
                             {"account": account},
                             {"account": received_by_account_json["account"], "amount": (received_by_account_json["amount"] + Decimal("0.1"))})
 
         # getreceivedbyaddress should return updates balance
         balance = self.nodes[1].getreceivedbyaccount(account)
         if balance != balance_by_account + Decimal("0.1"):
             raise AssertionError(
                 "Wrong balance returned by getreceivedbyaccount, %0.2f" % (balance))
 
         # Create a new account named "mynewaccount" that has a 0 balance
         self.nodes[1].getaccountaddress("mynewaccount")
         received_by_account_json = get_sub_array_from_array(
             self.nodes[1].listreceivedbyaccount(0, True), {"account": "mynewaccount"})
         if len(received_by_account_json) == 0:
             raise AssertionError("No accounts found in node")
 
         # Test includeempty of listreceivedbyaccount
         if received_by_account_json["amount"] != Decimal("0.0"):
             raise AssertionError("Wrong balance returned by listreceivedbyaccount, %0.2f" %
                                  (received_by_account_json["amount"]))
 
         # Test getreceivedbyaccount for 0 amount accounts
         balance = self.nodes[1].getreceivedbyaccount("mynewaccount")
         if balance != Decimal("0.0"):
             raise AssertionError(
                 "Wrong balance returned by getreceivedbyaccount, %0.2f" % (balance))
 
 
 if __name__ == '__main__':
     ReceivedByTest().main()
diff --git a/test/functional/reindex.py b/test/functional/reindex.py
index d4844862f..1718de90e 100755
--- a/test/functional/reindex.py
+++ b/test/functional/reindex.py
@@ -1,47 +1,42 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 #
 # Test -reindex and -reindex-chainstate with CheckBlockIndex
 #
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    start_nodes,
-    stop_nodes,
-    assert_equal,
-)
+from test_framework.util import assert_equal
 import time
 
 
 class ReindexTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 1
 
     def reindex(self, justchainstate=False):
         self.nodes[0].generate(3)
         blockcount = self.nodes[0].getblockcount()
-        stop_nodes(self.nodes)
-        extra_args = [[
-            "-reindex-chainstate" if justchainstate else "-reindex",
-            "-checkblockindex=1"]]
-        self.nodes = start_nodes(
+        self.stop_nodes()
+        extra_args = [
+            ["-reindex-chainstate" if justchainstate else "-reindex", "-checkblockindex=1"]]
+        self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, extra_args)
         while self.nodes[0].getblockcount() < blockcount:
             time.sleep(0.1)
         assert_equal(self.nodes[0].getblockcount(), blockcount)
         self.log.info("Success")
 
     def run_test(self):
         self.reindex(False)
         self.reindex(True)
         self.reindex(False)
         self.reindex(True)
 
 
 if __name__ == '__main__':
     ReindexTest().main()
diff --git a/test/functional/rpcbind_test.py b/test/functional/rpcbind_test.py
index d076df25e..1310cfe6b 100755
--- a/test/functional/rpcbind_test.py
+++ b/test/functional/rpcbind_test.py
@@ -1,118 +1,118 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 # Test for -rpcbind, as well as -rpcallowip and -rpcconnect
 
 from test_framework.test_framework import BitcoinTestFramework, SkipTest
 from test_framework.util import *
 from test_framework.netutil import *
 
 
 class RPCBindTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 1
 
     def setup_network(self):
         pass
 
     def setup_nodes(self):
         pass
 
     def run_bind_test(self, allow_ips, connect_to, addresses, expected):
         '''
         Start a node with requested rpcallowip and rpcbind parameters,
         then try to connect, and check if the set of bound addresses
         matches the expected set.
         '''
         expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
         base_args = ['-disablewallet', '-nolisten']
         if allow_ips:
             base_args += ['-rpcallowip=' + x for x in allow_ips]
         binds = ['-rpcbind=' + addr for addr in addresses]
-        self.nodes = start_nodes(
-            self.num_nodes, self.options.tmpdir, [base_args + binds], connect_to)
+        self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, [
+                                      base_args + binds], connect_to)
         pid = bitcoind_processes[0].pid
         assert_equal(set(get_bind_addrs(pid)), set(expected))
-        stop_nodes(self.nodes)
+        self.stop_nodes()
 
     def run_allowip_test(self, allow_ips, rpchost, rpcport):
         '''
         Start a node with rpcallow IP, and request getnetworkinfo
         at a non-localhost IP.
         '''
-        base_args = ['-disablewallet', '-nolisten'] + [
-            '-rpcallowip=' + x for x in allow_ips]
-        self.nodes = start_nodes(
+        base_args = ['-disablewallet', '-nolisten'] + \
+            ['-rpcallowip=' + x for x in allow_ips]
+        self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, [base_args])
         # connect to node through non-loopback interface
         node = get_rpc_proxy(rpc_url(0, "%s:%d" % (rpchost, rpcport)), 0)
         node.getnetworkinfo()
-        stop_nodes(self.nodes)
+        self.stop_nodes()
 
     def run_test(self):
         # due to OS-specific network stats queries, this test works only on Linux
         if not sys.platform.startswith('linux'):
             raise SkipTest("This test can only be run on linux.")
         # find the first non-loopback interface for testing
         non_loopback_ip = None
         for name, ip in all_interfaces():
             if ip != '127.0.0.1':
                 non_loopback_ip = ip
                 break
         if non_loopback_ip is None:
             raise SkipTest(
                 "This test requires at least one non-loopback IPv4 interface.")
         try:
             s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
             s.connect(("::1", 1))
             s.close
         except OSError:
             raise SkipTest("This test requires IPv6 support.")
 
         self.log.info("Using interface %s for testing" % non_loopback_ip)
 
         defaultport = rpc_port(0)
 
         # check default without rpcallowip (IPv4 and IPv6 localhost)
         self.run_bind_test(None, '127.0.0.1', [],
                            [('127.0.0.1', defaultport), ('::1', defaultport)])
         # check default with rpcallowip (IPv6 any)
         self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
                            [('::0', defaultport)])
         # check only IPv4 localhost (explicit)
         self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
                            [('127.0.0.1', defaultport)])
         # check only IPv4 localhost (explicit) with alternative port
         self.run_bind_test(
             ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
             [('127.0.0.1', 32171)])
         # check only IPv4 localhost (explicit) with multiple alternative ports
         # on same host
         self.run_bind_test(
             ['127.0.0.1'], '127.0.0.1:32171', [
                 '127.0.0.1:32171', '127.0.0.1:32172'],
             [('127.0.0.1', 32171), ('127.0.0.1', 32172)])
         # check only IPv6 localhost (explicit)
         self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
                            [('::1', defaultport)])
         # check both IPv4 and IPv6 localhost (explicit)
         self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
                            [('127.0.0.1', defaultport), ('::1', defaultport)])
         # check only non-loopback interface
         self.run_bind_test(
             [non_loopback_ip], non_loopback_ip, [non_loopback_ip],
             [(non_loopback_ip, defaultport)])
 
         # Check that with invalid rpcallowip, we are denied
         self.run_allowip_test([non_loopback_ip], non_loopback_ip, defaultport)
         assert_raises_jsonrpc(-342, "non-JSON HTTP response with '403 Forbidden' from server",
                               self.run_allowip_test, ['1.1.1.1'], non_loopback_ip, defaultport)
 
 
 if __name__ == '__main__':
     RPCBindTest().main()
diff --git a/test/functional/smartfees.py b/test/functional/smartfees.py
index be08a8eb0..fd0a9c540 100755
--- a/test/functional/smartfees.py
+++ b/test/functional/smartfees.py
@@ -1,295 +1,293 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 #
 # Test fee estimation code
 #
 
 from collections import OrderedDict
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 # Construct 2 trivial P2SH's and the ScriptSigs that spend them
 # So we can create many many transactions without needing to spend
 # time signing.
 P2SH_1 = "2MySexEGVzZpRgNQ1JdjdP5bRETznm3roQ2"  # P2SH of "OP_1 OP_DROP"
 P2SH_2 = "2NBdpwq8Aoo1EEKEXPNrKvr5xQr3M9UfcZA"  # P2SH of "OP_2 OP_DROP"
 # Associated ScriptSig's to spend satisfy P2SH_1 and P2SH_2
 # 4 bytes of OP_TRUE and push 2-byte redeem script of "OP_1 OP_DROP" or
 # "OP_2 OP_DROP"
 SCRIPT_SIG = ["0451025175", "0451025275"]
 
 global log
 
 
 def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment):
     '''
     Create and send a transaction with a random fee.
     The transaction pays to a trivial P2SH script, and assumes that its inputs
     are of the same form.
     The function takes a list of confirmed outputs and unconfirmed outputs
     and attempts to use the confirmed list first for its inputs.
     It adds the newly created outputs to the unconfirmed list.
     Returns (raw transaction, fee)
     '''
     # It's best to exponentially distribute our random fees
     # because the buckets are exponentially spaced.
     # Exponentially distributed from 1-128 * fee_increment
     rand_fee = float(fee_increment) * (1.1892**random.randint(0, 28))
     # Total fee ranges from min_fee to min_fee + 127*fee_increment
     fee = min_fee - fee_increment + satoshi_round(rand_fee)
     inputs = []
     total_in = Decimal("0.00000000")
     while total_in <= (amount + fee) and len(conflist) > 0:
         t = conflist.pop(0)
         total_in += t["amount"]
         inputs.append({"txid": t["txid"], "vout": t["vout"]})
     if total_in <= amount + fee:
         while total_in <= (amount + fee) and len(unconflist) > 0:
             t = unconflist.pop(0)
             total_in += t["amount"]
             inputs.append({"txid": t["txid"], "vout": t["vout"]})
         if total_in <= amount + fee:
             raise RuntimeError(
                 "Insufficient funds: need %d, have %d" % (amount + fee, total_in))
     outputs = {}
     outputs = OrderedDict([(P2SH_1, total_in - amount - fee),
                            (P2SH_2, amount)])
     rawtx = from_node.createrawtransaction(inputs, outputs)
     # createrawtransaction constructs a transaction that is ready to be signed.
     # These transactions don't need to be signed, but we still have to insert the ScriptSig
     # that will satisfy the ScriptPubKey.
     completetx = rawtx[0:10]
     inputnum = 0
     for inp in inputs:
         completetx += rawtx[10 + 82 * inputnum:82 + 82 * inputnum]
         completetx += SCRIPT_SIG[inp["vout"]]
         completetx += rawtx[84 + 82 * inputnum:92 + 82 * inputnum]
         inputnum += 1
     completetx += rawtx[10 + 82 * inputnum:]
     txid = from_node.sendrawtransaction(completetx, True)
     unconflist.append(
         {"txid": txid, "vout": 0, "amount": total_in - amount - fee})
     unconflist.append({"txid": txid, "vout": 1, "amount": amount})
 
     return (completetx, fee)
 
 
 def split_inputs(from_node, txins, txouts, initial_split=False):
     '''
     We need to generate a lot of very small inputs so we can generate a ton of transactions
     and they will have low priority.
     This function takes an input from txins, and creates and sends a transaction
     which splits the value into 2 outputs which are appended to txouts.
     '''
     prevtxout = txins.pop()
     inputs = []
     inputs.append({"txid": prevtxout["txid"], "vout": prevtxout["vout"]})
     half_change = satoshi_round(prevtxout["amount"] / 2)
     rem_change = prevtxout["amount"] - half_change - Decimal("0.00001000")
     outputs = OrderedDict([(P2SH_1, half_change), (P2SH_2, rem_change)])
     rawtx = from_node.createrawtransaction(inputs, outputs)
     # If this is the initial split we actually need to sign the transaction
     # Otherwise we just need to insert the property ScriptSig
     if (initial_split):
         completetx = from_node.signrawtransaction(
             rawtx, None, None, "ALL|FORKID")["hex"]
     else:
         completetx = rawtx[0:82] + SCRIPT_SIG[prevtxout["vout"]] + rawtx[84:]
     txid = from_node.sendrawtransaction(completetx, True)
     txouts.append({"txid": txid, "vout": 0, "amount": half_change})
     txouts.append({"txid": txid, "vout": 1, "amount": rem_change})
 
 
 def check_estimates(node, fees_seen, max_invalid, print_estimates=True):
     '''
     This function calls estimatefee and verifies that the estimates
     meet certain invariants.
     '''
     all_estimates = [node.estimatefee(i) for i in range(1, 26)]
     if print_estimates:
         log.info([str(all_estimates[e - 1])
                   for e in [1, 2, 3, 6, 15, 25]])
     delta = 1.0e-6  # account for rounding error
     last_e = max(fees_seen)
     for e in [x for x in all_estimates if x >= 0]:
         # Estimates should be within the bounds of what transactions fees
         # actually were:
         if float(e) + delta < min(fees_seen) or float(e) - delta > max(fees_seen):
             raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
                                  % (float(e), min(fees_seen), max(fees_seen)))
         # Estimates should be monotonically decreasing
         if float(e) - delta > last_e:
             raise AssertionError("Estimated fee (%f) larger than last fee (%f) for lower number of confirms"
                                  % (float(e), float(last_e)))
         last_e = e
     valid_estimate = False
     invalid_estimates = 0
     for i, e in enumerate(all_estimates):  # estimate is for i+1
         if e >= 0:
             valid_estimate = True
             # estimatesmartfee should return the same result
             assert_equal(node.estimatesmartfee(i + 1)["feerate"], e)
 
         else:
             invalid_estimates += 1
 
             # estimatesmartfee should still be valid
             approx_estimate = node.estimatesmartfee(i + 1)["feerate"]
             answer_found = node.estimatesmartfee(i + 1)["blocks"]
             assert(approx_estimate > 0)
             assert(answer_found > i + 1)
 
             # Once we're at a high enough confirmation count that we can give an estimate
             # We should have estimates for all higher confirmation counts
             if valid_estimate:
                 raise AssertionError(
                     "Invalid estimate appears at higher confirm count than valid estimate")
 
     # Check on the expected number of different confirmation counts
     # that we might not have valid estimates for
     if invalid_estimates > max_invalid:
         raise AssertionError(
             "More than (%d) invalid estimates" % (max_invalid))
     return all_estimates
 
 
 class EstimateFeeTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 3
         self.setup_clean_chain = False
 
     def setup_network(self):
         '''
         We'll setup the network to have 3 nodes that all mine with different parameters.
         But first we need to use one node to create a lot of small low priority outputs
         which we will use to generate our transactions.
         '''
         self.nodes = []
         # Use node0 to mine blocks for input splitting
-        self.nodes.append(
-            start_node(0, self.options.tmpdir, ["-maxorphantx=1000",
-                                                "-whitelist=127.0.0.1"]))
+        self.nodes.append(self.start_node(0, self.options.tmpdir, ["-maxorphantx=1000",
+                                                                   "-whitelist=127.0.0.1"]))
 
         self.log.info("This test is time consuming, please be patient")
         self.log.info(
             "Splitting inputs to small size so we can generate low priority tx's")
         self.txouts = []
         self.txouts2 = []
         # Split a coinbase into two transaction puzzle outputs
         split_inputs(self.nodes[0], self.nodes[
                      0].listunspent(0), self.txouts, True)
 
         # Mine
         while (len(self.nodes[0].getrawmempool()) > 0):
             self.nodes[0].generate(1)
 
         # Repeatedly split those 2 outputs, doubling twice for each rep
         # Use txouts to monitor the available utxo, since these won't be
         # tracked in wallet
         reps = 0
         while (reps < 5):
             # Double txouts to txouts2
             while (len(self.txouts) > 0):
                 split_inputs(self.nodes[0], self.txouts, self.txouts2)
             while (len(self.nodes[0].getrawmempool()) > 0):
                 self.nodes[0].generate(1)
             # Double txouts2 to txouts
             while (len(self.txouts2) > 0):
                 split_inputs(self.nodes[0], self.txouts2, self.txouts)
             while (len(self.nodes[0].getrawmempool()) > 0):
                 self.nodes[0].generate(1)
             reps += 1
         self.log.info("Finished splitting")
 
         # Now we can connect the other nodes, didn't want to connect them earlier
         # so the estimates would not be affected by the splitting transactions
         # Node1 mines small blocks but that are bigger than the expected transaction rate,
         # and allows free transactions.
         # NOTE: the CreateNewBlock code starts counting block size at 1,000 bytes,
         # (17k is room enough for 110 or so transactions)
-        self.nodes.append(start_node(1, self.options.tmpdir,
-                                     ["-blockprioritypercentage=9", "-blockmaxsize=17000",
-                                      "-maxorphantx=1000"]))
+        self.nodes.append(self.start_node(1, self.options.tmpdir,
+                                          ["-blockmaxsize=17000", "-maxorphantx=1000"]))
         connect_nodes(self.nodes[1], 0)
 
         # Node2 is a stingy miner, that
         # produces too small blocks (room for only 55 or so transactions)
         node2args = ["-blockprioritypercentage=0",
                      "-blockmaxsize=8000", "-maxorphantx=1000"]
 
-        self.nodes.append(start_node(2, self.options.tmpdir, node2args))
+        self.nodes.append(self.start_node(2, self.options.tmpdir, node2args))
         connect_nodes(self.nodes[0], 2)
         connect_nodes(self.nodes[2], 1)
 
         self.sync_all()
 
     def transact_and_mine(self, numblocks, mining_node):
         min_fee = Decimal("0.00001")
         # We will now mine numblocks blocks generating on average 100 transactions between each block
         # We shuffle our confirmed txout set before each set of transactions
         # small_txpuzzle_randfee will use the transactions that have inputs already in the chain when possible
         # resorting to tx's that depend on the mempool when those run out
         for i in range(numblocks):
             random.shuffle(self.confutxo)
             for j in range(random.randrange(100 - 50, 100 + 50)):
                 from_index = random.randint(1, 2)
                 (txhex, fee) = small_txpuzzle_randfee(self.nodes[
                     from_index], self.confutxo,
                     self.memutxo, Decimal("0.005"), min_fee, min_fee)
                 tx_kbytes = (len(txhex) // 2) / 1000.0
                 self.fees_per_kb.append(float(fee) / tx_kbytes)
             sync_mempools(self.nodes[0:3], wait=.1)
             mined = mining_node.getblock(
                 mining_node.generate(1)[0], True)["tx"]
             sync_blocks(self.nodes[0:3], wait=.1)
             # update which txouts are confirmed
             newmem = []
             for utx in self.memutxo:
                 if utx["txid"] in mined:
                     self.confutxo.append(utx)
                 else:
                     newmem.append(utx)
             self.memutxo = newmem
 
     def run_test(self):
         # Make log handler available to helper functions
         global log
         log = self.log
 
         self.fees_per_kb = []
         self.memutxo = []
         self.confutxo = self.txouts  # Start with the set of confirmed txouts after splitting
         self.log.info("Will output estimates for 1/2/3/6/15/25 blocks")
 
         for i in range(2):
             self.log.info(
                 "Creating transactions and mining them with a block size that can't keep up")
             # Create transactions and mine 10 small blocks with node 2, but
             # create txs faster than we can mine
             self.transact_and_mine(10, self.nodes[2])
             check_estimates(self.nodes[1], self.fees_per_kb, 14)
 
             self.log.info(
                 "Creating transactions and mining them at a block size that is just big enough")
             # Generate transactions while mining 10 more blocks, this time with node1
             # which mines blocks with capacity just above the rate that
             # transactions are being created
             self.transact_and_mine(10, self.nodes[1])
             check_estimates(self.nodes[1], self.fees_per_kb, 2)
 
         # Finish by mining a normal-sized block:
         while len(self.nodes[1].getrawmempool()) > 0:
             self.nodes[1].generate(1)
 
         sync_blocks(self.nodes[0:3], wait=.1)
         self.log.info("Final estimates after emptying mempools")
         check_estimates(self.nodes[1], self.fees_per_kb, 2)
 
 
 if __name__ == '__main__':
     EstimateFeeTest().main()
diff --git a/test/functional/wallet-accounts.py b/test/functional/wallet-accounts.py
index 9331ae926..a4a817bda 100755
--- a/test/functional/wallet-accounts.py
+++ b/test/functional/wallet-accounts.py
@@ -1,91 +1,86 @@
 #!/usr/bin/env python3
 # Copyright (c) 2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    start_nodes,
-    start_node,
-    assert_equal,
-    connect_nodes_bi,
-)
+from test_framework.util import assert_equal
 
 
 class WalletAccountsTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [[]]
 
     def run_test(self):
         node = self.nodes[0]
         # Check that there's no UTXO on any of the nodes
         assert_equal(len(node.listunspent()), 0)
 
         node.generate(101)
 
         assert_equal(node.getbalance(), 50)
 
         accounts = ["a", "b", "c", "d", "e"]
         amount_to_send = 1.0
         account_addresses = dict()
         for account in accounts:
             address = node.getaccountaddress(account)
             account_addresses[account] = address
 
             node.getnewaddress(account)
             assert_equal(node.getaccount(address), account)
             assert(address in node.getaddressesbyaccount(account))
 
             node.sendfrom("", address, amount_to_send)
 
         node.generate(1)
 
         for i in range(len(accounts)):
             from_account = accounts[i]
             to_account = accounts[(i + 1) % len(accounts)]
             to_address = account_addresses[to_account]
             node.sendfrom(from_account, to_address, amount_to_send)
 
         node.generate(1)
 
         for account in accounts:
             address = node.getaccountaddress(account)
             assert(address != account_addresses[account])
             assert_equal(node.getreceivedbyaccount(account), 2)
             node.move(account, "", node.getbalance(account))
 
         node.generate(101)
 
         expected_account_balances = {"": 5200}
         for account in accounts:
             expected_account_balances[account] = 0
 
         assert_equal(node.listaccounts(), expected_account_balances)
 
         assert_equal(node.getbalance(""), 5200)
 
         for account in accounts:
             address = node.getaccountaddress("")
             node.setaccount(address, account)
             assert(address in node.getaddressesbyaccount(account))
             assert(address not in node.getaddressesbyaccount(""))
 
         for account in accounts:
             addresses = []
             for x in range(10):
                 addresses.append(node.getnewaddress())
             multisig_address = node.addmultisigaddress(5, addresses, account)
             node.sendfrom("", multisig_address, 50)
 
         node.generate(101)
 
         for account in accounts:
             assert_equal(node.getbalance(account), 50)
 
 
 if __name__ == '__main__':
     WalletAccountsTest().main()
diff --git a/test/functional/wallet-dump.py b/test/functional/wallet-dump.py
index d0f4b95b3..5ab082658 100755
--- a/test/functional/wallet-dump.py
+++ b/test/functional/wallet-dump.py
@@ -1,116 +1,116 @@
 #!/usr/bin/env python3
 # Copyright (c) 2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
-    start_nodes, start_node, assert_equal, bitcoind_processes)
+from test_framework.util import (assert_equal, bitcoind_processes)
 
 
 def read_dump(file_name, addrs, hd_master_addr_old):
     """
     Read the given dump, count the addrs that match, count change and reserve.
     Also check that the old hd_master is inactive
     """
     with open(file_name, encoding='utf8') as inputfile:
         found_addr = 0
         found_addr_chg = 0
         found_addr_rsv = 0
         hd_master_addr_ret = None
         for line in inputfile:
             # only read non comment lines
             if line[0] != "#" and len(line) > 10:
                 # split out some data
                 key_label, comment = line.split("#")
                 # key = key_label.split(" ")[0]
                 keytype = key_label.split(" ")[2]
                 if len(comment) > 1:
                     addr_keypath = comment.split(" addr=")[1]
                     addr = addr_keypath.split(" ")[0]
                     keypath = None
                     if keytype == "inactivehdmaster=1":
                         # ensure the old master is still available
                         assert(hd_master_addr_old == addr)
                     elif keytype == "hdmaster=1":
                         # ensure we have generated a new hd master key
                         assert(hd_master_addr_old != addr)
                         hd_master_addr_ret = addr
                     else:
                         keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
 
                     # count key types
                     for addrObj in addrs:
                         if addrObj['address'] == addr and addrObj['hdkeypath'] == keypath and keytype == "label=":
                             found_addr += 1
                             break
                         elif keytype == "change=1":
                             found_addr_chg += 1
                             break
                         elif keytype == "reserve=1":
                             found_addr_rsv += 1
                             break
         return found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret
 
 
 class WalletDumpTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = False
         self.num_nodes = 1
         self.extra_args = [["-keypool=90"]]
 
     def setup_network(self, split=False):
         # Use 1 minute timeout because the initial getnewaddress RPC can take
         # longer than the default 30 seconds due to an expensive
         # CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
         # the test often takes even longer.
-        self.nodes = start_nodes(
+        self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, self.extra_args, timewait=60)
 
     def run_test(self):
         tmpdir = self.options.tmpdir
 
         # generate 20 addresses to compare against the dump
         test_addr_count = 20
         addrs = []
         for i in range(0, test_addr_count):
             addr = self.nodes[0].getnewaddress()
             vaddr = self.nodes[0].validateaddress(
                 addr)  # required to get hd keypath
             addrs.append(vaddr)
         # Should be a no-op:
         self.nodes[0].keypoolrefill()
 
         # dump unencrypted wallet
         self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
 
         found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
             read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None)
         assert_equal(found_addr, test_addr_count)
         # all keys must be in the dump
         assert_equal(found_addr_chg, 50)  # 50 blocks where mined
         # 90 keys plus 100% internal keys
         assert_equal(found_addr_rsv, 90 * 2)
 
         # encrypt wallet, restart, unlock and dump
         self.nodes[0].encryptwallet('test')
         bitcoind_processes[0].wait()
-        self.nodes[0] = start_node(0, self.options.tmpdir, self.extra_args[0])
+        self.nodes[0] = self.start_node(
+            0, self.options.tmpdir, self.extra_args[0])
         self.nodes[0].walletpassphrase('test', 10)
         # Should be a no-op:
         self.nodes[0].keypoolrefill()
         self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")
 
         found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
             read_dump(
                 tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc)
         assert_equal(found_addr, test_addr_count)
         # old reserve keys are marked as change now
         assert_equal(found_addr_chg, 90 * 2 + 50)
         assert_equal(found_addr_rsv, 90 * 2)
 
 
 if __name__ == '__main__':
     WalletDumpTest().main()
diff --git a/test/functional/wallet-hd.py b/test/functional/wallet-hd.py
index 6cd2bd413..f2e1de50f 100755
--- a/test/functional/wallet-hd.py
+++ b/test/functional/wallet-hd.py
@@ -1,116 +1,116 @@
 #!/usr/bin/env python3
 # Copyright (c) 2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import (
-    start_nodes,
-    start_node,
     assert_equal,
     connect_nodes_bi,
     assert_start_raises_init_error
 )
 import os
 import shutil
 
 
 class WalletHDTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 2
         self.extra_args = [['-usehd=0'], ['-usehd=1', '-keypool=0']]
 
     def run_test(self):
         tmpdir = self.options.tmpdir
 
         # Make sure can't switch off usehd after wallet creation
         self.stop_node(1)
         assert_start_raises_init_error(1, self.options.tmpdir, [
                                        '-usehd=0'], 'already existing HD wallet')
-        self.nodes[1] = start_node(1, self.options.tmpdir, self.extra_args[1])
+        self.nodes[1] = self.start_node(
+            1, self.options.tmpdir, self.extra_args[1])
         connect_nodes_bi(self.nodes, 0, 1)
 
         # Make sure we use hd, keep masterkeyid
         masterkeyid = self.nodes[1].getwalletinfo()['hdmasterkeyid']
         assert_equal(len(masterkeyid), 40)
 
         # create an internal key
         change_addr = self.nodes[1].getrawchangeaddress()
         change_addrV = self.nodes[1].validateaddress(change_addr)
         # first internal child key
         assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'")
 
         # Import a non-HD private key in the HD wallet
         non_hd_add = self.nodes[0].getnewaddress()
         self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
 
         # This should be enough to keep the master key and the non-HD key
         self.nodes[1].backupwallet(tmpdir + "/hd.bak")
         # self.nodes[1].dumpwallet(tmpdir + "/hd.dump")
 
         # Derive some HD addresses and remember the last
         # Also send funds to each add
         self.nodes[0].generate(101)
         hd_add = None
         num_hd_adds = 300
         for i in range(num_hd_adds):
             hd_add = self.nodes[1].getnewaddress()
             hd_info = self.nodes[1].validateaddress(hd_add)
             assert_equal(hd_info["hdkeypath"], "m/0'/0'/" + str(i + 1) + "'")
             assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
             self.nodes[0].sendtoaddress(hd_add, 1)
             self.nodes[0].generate(1)
         self.nodes[0].sendtoaddress(non_hd_add, 1)
         self.nodes[0].generate(1)
 
         # create an internal key (again)
         change_addr = self.nodes[1].getrawchangeaddress()
         change_addrV = self.nodes[1].validateaddress(change_addr)
         # second internal child key
         assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'")
 
         self.sync_all()
         assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
 
         self.log.info("Restore backup ...")
         self.stop_node(1)
         os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
-        shutil.copyfile(
-            tmpdir + "/hd.bak", tmpdir + "/node1/regtest/wallet.dat")
-        self.nodes[1] = start_node(1, self.options.tmpdir, self.extra_args[1])
-        # connect_nodes_bi(self.nodes, 0, 1)
+        shutil.copyfile(tmpdir + "/hd.bak", tmpdir +
+                        "/node1/regtest/wallet.dat")
+        self.nodes[1] = self.start_node(
+            1, self.options.tmpdir, self.extra_args[1])
+        #connect_nodes_bi(self.nodes, 0, 1)
 
         # Assert that derivation is deterministic
         hd_add_2 = None
         for _ in range(num_hd_adds):
             hd_add_2 = self.nodes[1].getnewaddress()
             hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
             assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(_ + 1) + "'")
             assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
         assert_equal(hd_add, hd_add_2)
 
         # Needs rescan
         self.stop_node(1)
-        self.nodes[1] = start_node(
+        self.nodes[1] = self.start_node(
             1, self.options.tmpdir, self.extra_args[1] + ['-rescan'])
-        # connect_nodes_bi(self.nodes, 0, 1)
+        #connect_nodes_bi(self.nodes, 0, 1)
         assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
 
         # send a tx and make sure its using the internal chain for the changeoutput
         txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
         outs = self.nodes[1].decoderawtransaction(
             self.nodes[1].gettransaction(txid)['hex'])['vout']
         keypath = ""
         for out in outs:
             if out['value'] != 1:
                 keypath = self.nodes[1].validateaddress(
                     out['scriptPubKey']['addresses'][0])['hdkeypath']
 
         assert_equal(keypath[0:7], "m/0'/1'")
 
 
 if __name__ == '__main__':
     WalletHDTest().main()
diff --git a/test/functional/wallet.py b/test/functional/wallet.py
index 6b36539fd..8a14fff8e 100755
--- a/test/functional/wallet.py
+++ b/test/functional/wallet.py
@@ -1,435 +1,435 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 
 class WalletTest (BitcoinTestFramework):
 
     def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
         """Return curr_balance after asserting the fee was in range"""
         fee = balance_with_fee - curr_balance
         assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
         return curr_balance
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 4
         self.extra_args = [['-usehd={:d}'.format(i % 2 == 0)]
                            for i in range(4)]
 
     def setup_network(self):
-        self.nodes = start_nodes(3, self.options.tmpdir, self.extra_args[:3])
+        self.nodes = self.start_nodes(
+            3, self.options.tmpdir, self.extra_args[:3])
         connect_nodes_bi(self.nodes, 0, 1)
         connect_nodes_bi(self.nodes, 1, 2)
         connect_nodes_bi(self.nodes, 0, 2)
         self.sync_all()
 
     def run_test(self):
 
         # Check that there's no UTXO on none of the nodes
         assert_equal(len(self.nodes[0].listunspent()), 0)
         assert_equal(len(self.nodes[1].listunspent()), 0)
         assert_equal(len(self.nodes[2].listunspent()), 0)
 
         self.log.info("Mining blocks...")
 
         self.nodes[0].generate(1)
 
         walletinfo = self.nodes[0].getwalletinfo()
         assert_equal(walletinfo['immature_balance'], 50)
         assert_equal(walletinfo['balance'], 0)
 
         self.sync_all()
         self.nodes[1].generate(101)
         self.sync_all()
 
         assert_equal(self.nodes[0].getbalance(), 50)
         assert_equal(self.nodes[1].getbalance(), 50)
         assert_equal(self.nodes[2].getbalance(), 0)
 
         # Check that only first and second nodes have UTXOs
         assert_equal(len(self.nodes[0].listunspent()), 1)
         assert_equal(len(self.nodes[1].listunspent()), 1)
         assert_equal(len(self.nodes[2].listunspent()), 0)
 
         # Send 21 BTC from 0 to 2 using sendtoaddress call.
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
         self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
 
         walletinfo = self.nodes[0].getwalletinfo()
         assert_equal(walletinfo['immature_balance'], 0)
 
         # Have node0 mine a block, thus it will collect its own fee.
         self.nodes[0].generate(1)
         self.sync_all()
 
         # Exercise locking of unspent outputs
         unspent_0 = self.nodes[2].listunspent()[0]
         unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
         self.nodes[2].lockunspent(False, [unspent_0])
         assert_raises_jsonrpc(-4, "Insufficient funds",
                               self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
         assert_equal([unspent_0], self.nodes[2].listlockunspent())
         self.nodes[2].lockunspent(True, [unspent_0])
         assert_equal(len(self.nodes[2].listlockunspent()), 0)
 
         # Have node1 generate 100 blocks (so node0 can recover the fee)
         self.nodes[1].generate(100)
         self.sync_all()
 
         # node0 should end up with 100 btc in block rewards plus fees, but
         # minus the 21 plus fees sent to node2
         assert_equal(self.nodes[0].getbalance(), 100 - 21)
         assert_equal(self.nodes[2].getbalance(), 21)
 
         # Node0 should have two unspent outputs.
         # Create a couple of transactions to send them to node2, submit them through
         # node1, and make sure both node0 and node2 pick them up properly:
         node0utxos = self.nodes[0].listunspent(1)
         assert_equal(len(node0utxos), 2)
 
         # create both transactions
         txns_to_send = []
         for utxo in node0utxos:
             inputs = []
             outputs = {}
             inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
             outputs[self.nodes[2].getnewaddress("from1")] = utxo["amount"] - 3
             raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
             txns_to_send.append(
                 self.nodes[0].signrawtransaction(raw_tx, None, None, "ALL|FORKID"))
 
         # Have node 1 (miner) send the transactions
         self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
         self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
 
         # Have node1 mine a block to confirm transactions:
         self.nodes[1].generate(1)
         self.sync_all()
 
         assert_equal(self.nodes[0].getbalance(), 0)
         assert_equal(self.nodes[2].getbalance(), 94)
         assert_equal(self.nodes[2].getbalance("from1"), 94 - 21)
 
         # Send 10 BTC normal
         address = self.nodes[0].getnewaddress("test")
         fee_per_byte = Decimal('0.001') / 1000
         self.nodes[2].settxfee(fee_per_byte * 1000)
         txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
         self.nodes[2].generate(1)
         self.sync_all()
         node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal(
             '84'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
         assert_equal(self.nodes[0].getbalance(), Decimal('10'))
 
         # Send 10 BTC with subtract fee from amount
         txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
         self.nodes[2].generate(1)
         self.sync_all()
         node_2_bal -= Decimal('10')
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
         node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal(
             '20'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
 
         # Sendmany 10 BTC
         txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [])
         self.nodes[2].generate(1)
         self.sync_all()
         node_0_bal += Decimal('10')
         node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal(
             '10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
         assert_equal(self.nodes[0].getbalance(), node_0_bal)
 
         # Sendmany 10 BTC with subtract fee from amount
         txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [address])
         self.nodes[2].generate(1)
         self.sync_all()
         node_2_bal -= Decimal('10')
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
         node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal(
             '10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
 
         # Test ResendWalletTransactions:
         # Create a couple of transactions, then start up a fourth
         # node (nodes[3]) and ask nodes[0] to rebroadcast.
         # EXPECT: nodes[3] should have those transactions in its mempool.
         txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
         txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
         sync_mempools(self.nodes)
 
-        self.nodes.append(
-            start_node(3, self.options.tmpdir, self.extra_args[3]))
+        self.nodes.append(self.start_node(
+            3, self.options.tmpdir, self.extra_args[3]))
         connect_nodes_bi(self.nodes, 0, 3)
         sync_blocks(self.nodes)
 
         relayed = self.nodes[0].resendwallettransactions()
         assert_equal(set(relayed), {txid1, txid2})
         sync_mempools(self.nodes)
 
         assert(txid1 in self.nodes[3].getrawmempool())
 
         # Exercise balance rpcs
         assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1)
         assert_equal(self.nodes[0].getunconfirmedbalance(), 1)
 
         # check if we can list zero value tx as available coins
         # 1. create rawtx
         # 2. hex-changed one output to 0.0
         # 3. sign and send
         # 4. check if recipient (node0) can list the zero value tx
         usp = self.nodes[1].listunspent()
         inputs = [{"txid": usp[0]['txid'], "vout":usp[0]['vout']}]
         outputs = {
             self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11}
 
         rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace(
             "c0833842", "00000000")  # replace 11.11 with 0.0 (int32)
         decRawTx = self.nodes[1].decoderawtransaction(rawTx)
         signedRawTx = self.nodes[
             1].signrawtransaction(rawTx, None, None, "ALL|FORKID")
         decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
         zeroValueTxid = decRawTx['txid']
         sendResp = self.nodes[1].sendrawtransaction(signedRawTx['hex'])
 
         self.sync_all()
         self.nodes[1].generate(1)  # mine a block
         self.sync_all()
 
         unspentTxs = self.nodes[
             0].listunspent()  # zero value tx must be in listunspents output
         found = False
         for uTx in unspentTxs:
             if uTx['txid'] == zeroValueTxid:
                 found = True
                 assert_equal(uTx['amount'], Decimal('0'))
         assert(found)
 
         # do some -walletbroadcast tests
-        stop_nodes(self.nodes)
-        extra_args = [["-walletbroadcast=0"] for i in range(3)]
-        self.nodes = start_nodes(3, self.options.tmpdir, extra_args)
+        self.stop_nodes()
+        self.nodes = self.start_nodes(3, self.options.tmpdir, [
+                                      ["-walletbroadcast=0"], ["-walletbroadcast=0"], ["-walletbroadcast=0"]])
         connect_nodes_bi(self.nodes, 0, 1)
         connect_nodes_bi(self.nodes, 1, 2)
         connect_nodes_bi(self.nodes, 0, 2)
         self.sync_all()
 
         txIdNotBroadcasted = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), 2)
         txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
         # mine a block, tx should not be in there
         self.nodes[1].generate(1)
         self.sync_all()
         # should not be changed because tx was not broadcasted
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
 
         # now broadcast from another node, mine a block, sync, and check the
         # balance
         self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex'])
         self.nodes[1].generate(1)
         self.sync_all()
         node_2_bal += 2
         txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
 
         # create another tx
         txIdNotBroadcasted = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), 2)
 
         # restart the nodes with -walletbroadcast=1
-        stop_nodes(self.nodes)
-        self.nodes = start_nodes(3, self.options.tmpdir)
+        self.stop_nodes()
+        self.nodes = self.start_nodes(3, self.options.tmpdir)
         connect_nodes_bi(self.nodes, 0, 1)
         connect_nodes_bi(self.nodes, 1, 2)
         connect_nodes_bi(self.nodes, 0, 2)
         sync_blocks(self.nodes)
 
         self.nodes[0].generate(1)
         sync_blocks(self.nodes)
         node_2_bal += 2
 
         # tx should be added to balance because after restarting the nodes tx
         # should be broadcastet
         assert_equal(self.nodes[2].getbalance(), node_2_bal)
 
         # send a tx with value in a string (PR#6380 +)
         txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
         txObj = self.nodes[0].gettransaction(txId)
         assert_equal(txObj['amount'], Decimal('-2'))
 
         txId = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), "0.0001")
         txObj = self.nodes[0].gettransaction(txId)
         assert_equal(txObj['amount'], Decimal('-0.0001'))
 
         # check if JSON parser can handle scientific notation in strings
         txId = self.nodes[0].sendtoaddress(
             self.nodes[2].getnewaddress(), "1e-4")
         txObj = self.nodes[0].gettransaction(txId)
         assert_equal(txObj['amount'], Decimal('-0.0001'))
 
         # This will raise an exception because the amount type is wrong
         assert_raises_jsonrpc(-3, "Invalid amount",
                               self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")
 
         # This will raise an exception since generate does not accept a string
         assert_raises_jsonrpc(-1, "not an integer",
                               self.nodes[0].generate, "2")
 
         # Import address and private key to check correct behavior of spendable unspents
         # 1. Send some coins to generate new UTXO
         address_to_import = self.nodes[2].getnewaddress()
         txid = self.nodes[0].sendtoaddress(address_to_import, 1)
         self.nodes[0].generate(1)
         self.sync_all()
 
         # 2. Import address from node2 to node1
         self.nodes[1].importaddress(address_to_import)
 
         # 3. Validate that the imported address is watch-only on node1
         assert(self.nodes[1].validateaddress(address_to_import)["iswatchonly"])
 
         # 4. Check that the unspents after import are not spendable
         assert_array_result(self.nodes[1].listunspent(),
                             {"address": address_to_import},
                             {"spendable": False})
 
         # 5. Import private key of the previously imported address on node1
         priv_key = self.nodes[2].dumpprivkey(address_to_import)
         self.nodes[1].importprivkey(priv_key)
 
         # 6. Check that the unspents are now spendable on node1
         assert_array_result(self.nodes[1].listunspent(),
                             {"address": address_to_import},
                             {"spendable": True})
 
         # Mine a block from node0 to an address from node1
         cbAddr = self.nodes[1].getnewaddress()
         blkHash = self.nodes[0].generatetoaddress(1, cbAddr)[0]
         cbTxId = self.nodes[0].getblock(blkHash)['tx'][0]
         self.sync_all()
 
         # Check that the txid and balance is found by node1
         self.nodes[1].gettransaction(cbTxId)
 
         # check if wallet or blockchain maintenance changes the balance
         self.sync_all()
         blocks = self.nodes[0].generate(2)
         self.sync_all()
         balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
         block_count = self.nodes[0].getblockcount()
 
         # Check modes:
         #   - True: unicode escaped as \u....
         #   - False: unicode directly as UTF-8
         for mode in [True, False]:
             self.nodes[0].ensure_ascii = mode
             # unicode check: Basic Multilingual Plane, Supplementary Plane
             # respectively
             for s in [u'рыба', u'𝅘𝅥𝅯']:
                 addr = self.nodes[0].getaccountaddress(s)
                 label = self.nodes[0].getaccount(addr)
                 assert_equal(label, s)
                 assert(s in self.nodes[0].listaccounts().keys())
         self.nodes[0].ensure_ascii = True  # restore to default
 
         # maintenance tests
         maintenance = [
             '-rescan',
             '-reindex',
             '-zapwallettxes=1',
             '-zapwallettxes=2',
             # disabled until issue is fixed: https://github.com/bitcoin/bitcoin/issues/7463
             # '-salvagewallet',
         ]
         chainlimit = 6
         for m in maintenance:
             self.log.info("check " + m)
-            stop_nodes(self.nodes)
+            self.stop_nodes()
             # set lower ancestor limit for later
-            self.nodes = start_nodes(3, self.options.tmpdir,
-                                     [[m, "-limitancestorcount=" + str(chainlimit)]] * 3)
+            self.nodes = self.start_nodes(3, self.options.tmpdir, [
+                                          [m, "-limitancestorcount=" + str(chainlimit)]] * 3)
             while m == '-reindex' and [block_count] * 3 != [self.nodes[i].getblockcount() for i in range(3)]:
                 # reindex will leave rpc warm up "early"; Wait for it to finish
                 time.sleep(0.1)
             assert_equal(
                 balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
 
         # Exercise listsinceblock with the last two blocks
         coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
         assert_equal(coinbase_tx_1["lastblock"], blocks[1])
         assert_equal(len(coinbase_tx_1["transactions"]), 1)
         assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
         assert_equal(
             len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0)
 
         # ==Check that wallet prefers to use coins that don't exceed mempool li
 
         # Get all non-zero utxos together
         chain_addrs = [
             self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()]
         singletxid = self.nodes[0].sendtoaddress(
             chain_addrs[0], self.nodes[0].getbalance(), "", "", True)
         self.nodes[0].generate(1)
         node0_balance = self.nodes[0].getbalance()
         # Split into two chains
         rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {
                                                    chain_addrs[0]: node0_balance / 2 - Decimal('0.01'), chain_addrs[1]: node0_balance / 2 - Decimal('0.01')})
         signedtx = self.nodes[0].signrawtransaction(
             rawtx, None, None, "ALL|FORKID")
         singletxid = self.nodes[0].sendrawtransaction(signedtx["hex"])
         self.nodes[0].generate(1)
 
         # Make a long chain of unconfirmed payments without hitting mempool limit
         # Each tx we make leaves only one output of change on a chain 1 longer
         # Since the amount to send is always much less than the outputs, we only ever need one output
         # So we should be able to generate exactly chainlimit txs for each
         # original output
         sending_addr = self.nodes[1].getnewaddress()
         txid_list = []
         for i in range(chainlimit * 2):
             txid_list.append(
                 self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001')))
         assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2)
         assert_equal(len(txid_list), chainlimit * 2)
 
         # Without walletrejectlongchains, we will still generate a txid
         # The tx will be stored in the wallet but not accepted to the mempool
         extra_txid = self.nodes[0].sendtoaddress(
             sending_addr, Decimal('0.0001'))
         assert(extra_txid not in self.nodes[0].getrawmempool())
         assert(extra_txid in [tx["txid"]
                               for tx in self.nodes[0].listtransactions()])
         self.nodes[0].abandontransaction(extra_txid)
         total_txs = len(self.nodes[0].listtransactions("*", 99999))
 
         # Try with walletrejectlongchains
-        # Double chain limit but require combining inputs, so we pass
-        # SelectCoinsMinConf
-        stop_node(self.nodes[0], 0)
-        self.nodes[0] = start_node(0, self.options.tmpdir, [
-                                   "-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)])
+        # Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
+        self.stop_node(0)
+        self.nodes[0] = self.start_node(0, self.options.tmpdir, [
+                                        "-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)])
 
         # wait for loadmempool
         timeout = 10
         while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2):
             time.sleep(0.5)
             timeout -= 0.5
         assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2)
 
         node0_balance = self.nodes[0].getbalance()
         # With walletrejectlongchains we will not create the tx and store it in our wallet.
         assert_raises_jsonrpc(-4, "Transaction has too long of a mempool chain",
                               self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01'))
 
         # Verify nothing new in wallet
         assert_equal(
             total_txs, len(self.nodes[0].listtransactions("*", 99999)))
 
 
 if __name__ == '__main__':
     WalletTest().main()
diff --git a/test/functional/walletbackup.py b/test/functional/walletbackup.py
index 049172f42..068e3f862 100755
--- a/test/functional/walletbackup.py
+++ b/test/functional/walletbackup.py
@@ -1,215 +1,215 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 """
 Exercise the wallet backup code.  Ported from walletbackup.sh.
 
 Test case is:
 4 nodes. 1 2 and 3 send transactions between each other,
 fourth node is a miner.
 1 2 3 each mine a block to start, then
 Miner creates 100 blocks so 1 2 3 each have 50 mature
 coins to spend.
 Then 5 iterations of 1/2/3 sending coins amongst
 themselves to get transactions in the wallets,
 and the miner mining one block.
 
 Wallets are backed up using dumpwallet/backupwallet.
 Then 5 more iterations of transactions and mining a block.
 
 Miner then generates 101 more blocks, so any
 transaction fees paid mature.
 
 Sanity check:
   Sum(1,2,3,4 balances) == 114*50
 
 1/2/3 are shutdown, and their wallets erased.
 Then restore using wallet.dat backup. And
 confirm 1/2/3/4 balances are same as before.
 
 Shutdown again, restore using importwallet,
 and confirm again balances are correct.
 """
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 from random import randint
 
 
 class WalletBackupTest(BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 4
         # nodes 1, 2,3 are spenders, let's give them a keypool=100
         self.extra_args = [
             ["-keypool=100"], ["-keypool=100"], ["-keypool=100"], []]
 
     # This mirrors how the network was setup in the bash test
     def setup_network(self):
         self.setup_nodes()
         connect_nodes(self.nodes[0], 3)
         connect_nodes(self.nodes[1], 3)
         connect_nodes(self.nodes[2], 3)
         connect_nodes(self.nodes[2], 0)
         self.sync_all()
 
     def one_send(self, from_node, to_address):
         if (randint(1, 2) == 1):
             amount = Decimal(randint(1, 10)) / Decimal(10)
             self.nodes[from_node].sendtoaddress(to_address, amount)
 
     def do_one_round(self):
         a0 = self.nodes[0].getnewaddress()
         a1 = self.nodes[1].getnewaddress()
         a2 = self.nodes[2].getnewaddress()
 
         self.one_send(0, a1)
         self.one_send(0, a2)
         self.one_send(1, a0)
         self.one_send(1, a2)
         self.one_send(2, a0)
         self.one_send(2, a1)
 
         # Have the miner (node3) mine a block.
         # Must sync mempools before mining.
         sync_mempools(self.nodes)
         self.nodes[3].generate(1)
         sync_blocks(self.nodes)
 
     # As above, this mirrors the original bash test.
     def start_three(self):
-        self.nodes[0] = start_node(0, self.options.tmpdir)
-        self.nodes[1] = start_node(1, self.options.tmpdir)
-        self.nodes[2] = start_node(2, self.options.tmpdir)
+        self.nodes[0] = self.start_node(0, self.options.tmpdir)
+        self.nodes[1] = self.start_node(1, self.options.tmpdir)
+        self.nodes[2] = self.start_node(2, self.options.tmpdir)
         connect_nodes(self.nodes[0], 3)
         connect_nodes(self.nodes[1], 3)
         connect_nodes(self.nodes[2], 3)
         connect_nodes(self.nodes[2], 0)
 
     def stop_three(self):
-        stop_node(self.nodes[0], 0)
-        stop_node(self.nodes[1], 1)
-        stop_node(self.nodes[2], 2)
+        self.stop_node(0)
+        self.stop_node(1)
+        self.stop_node(2)
 
     def erase_three(self):
         os.remove(self.options.tmpdir + "/node0/regtest/wallet.dat")
         os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
         os.remove(self.options.tmpdir + "/node2/regtest/wallet.dat")
 
     def run_test(self):
         self.log.info("Generating initial blockchain")
         self.nodes[0].generate(1)
         sync_blocks(self.nodes)
         self.nodes[1].generate(1)
         sync_blocks(self.nodes)
         self.nodes[2].generate(1)
         sync_blocks(self.nodes)
         self.nodes[3].generate(100)
         sync_blocks(self.nodes)
 
         assert_equal(self.nodes[0].getbalance(), 50)
         assert_equal(self.nodes[1].getbalance(), 50)
         assert_equal(self.nodes[2].getbalance(), 50)
         assert_equal(self.nodes[3].getbalance(), 0)
 
         self.log.info("Creating transactions")
         # Five rounds of sending each other transactions.
         for i in range(5):
             self.do_one_round()
 
         self.log.info("Backing up")
         tmpdir = self.options.tmpdir
         self.nodes[0].backupwallet(tmpdir + "/node0/wallet.bak")
         self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.dump")
         self.nodes[1].backupwallet(tmpdir + "/node1/wallet.bak")
         self.nodes[1].dumpwallet(tmpdir + "/node1/wallet.dump")
         self.nodes[2].backupwallet(tmpdir + "/node2/wallet.bak")
         self.nodes[2].dumpwallet(tmpdir + "/node2/wallet.dump")
 
         self.log.info("More transactions")
         for i in range(5):
             self.do_one_round()
 
         # Generate 101 more blocks, so any fees paid mature
         self.nodes[3].generate(101)
         self.sync_all()
 
         balance0 = self.nodes[0].getbalance()
         balance1 = self.nodes[1].getbalance()
         balance2 = self.nodes[2].getbalance()
         balance3 = self.nodes[3].getbalance()
         total = balance0 + balance1 + balance2 + balance3
 
         # At this point, there are 214 blocks (103 for setup, then 10 rounds, then 101.)
         # 114 are mature, so the sum of all wallets should be 114 * 50 = 5700.
         assert_equal(total, 5700)
 
         #
         # Test restoring spender wallets from backups
         #
         self.log.info("Restoring using wallet.dat")
         self.stop_three()
         self.erase_three()
 
         # Start node2 with no chain
         shutil.rmtree(self.options.tmpdir + "/node2/regtest/blocks")
         shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")
 
         # Restore wallets from backup
         shutil.copyfile(
             tmpdir + "/node0/wallet.bak", tmpdir + "/node0/regtest/wallet.dat")
         shutil.copyfile(
             tmpdir + "/node1/wallet.bak", tmpdir + "/node1/regtest/wallet.dat")
         shutil.copyfile(
             tmpdir + "/node2/wallet.bak", tmpdir + "/node2/regtest/wallet.dat")
 
         self.log.info("Re-starting nodes")
         self.start_three()
         sync_blocks(self.nodes)
 
         assert_equal(self.nodes[0].getbalance(), balance0)
         assert_equal(self.nodes[1].getbalance(), balance1)
         assert_equal(self.nodes[2].getbalance(), balance2)
 
         self.log.info("Restoring using dumped wallet")
         self.stop_three()
         self.erase_three()
 
         # start node2 with no chain
         shutil.rmtree(self.options.tmpdir + "/node2/regtest/blocks")
         shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")
 
         self.start_three()
 
         assert_equal(self.nodes[0].getbalance(), 0)
         assert_equal(self.nodes[1].getbalance(), 0)
         assert_equal(self.nodes[2].getbalance(), 0)
 
         self.nodes[0].importwallet(tmpdir + "/node0/wallet.dump")
         self.nodes[1].importwallet(tmpdir + "/node1/wallet.dump")
         self.nodes[2].importwallet(tmpdir + "/node2/wallet.dump")
 
         sync_blocks(self.nodes)
 
         assert_equal(self.nodes[0].getbalance(), balance0)
         assert_equal(self.nodes[1].getbalance(), balance1)
         assert_equal(self.nodes[2].getbalance(), balance2)
 
         # Backup to source wallet file must fail
         sourcePaths = [
             tmpdir + "/node0/regtest/wallet.dat",
             tmpdir + "/node0/./regtest/wallet.dat",
             tmpdir + "/node0/regtest/",
             tmpdir + "/node0/regtest"]
 
         for sourcePath in sourcePaths:
             assert_raises_jsonrpc(-4, "backup failed",
                                   self.nodes[0].backupwallet, sourcePath)
 
 
 if __name__ == '__main__':
     WalletBackupTest().main()
diff --git a/test/functional/zapwallettxes.py b/test/functional/zapwallettxes.py
index 2aaae88ed..e7056d38d 100755
--- a/test/functional/zapwallettxes.py
+++ b/test/functional/zapwallettxes.py
@@ -1,77 +1,75 @@
 #!/usr/bin/env python3
 # Copyright (c) 2014-2016 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 
 
 class ZapWalletTXesTest (BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 3
 
     def setup_network(self):
         super().setup_network()
         connect_nodes_bi(self.nodes, 0, 2)
 
     def run_test(self):
         self.log.info("Mining blocks...")
         self.nodes[0].generate(1)
         self.sync_all()
         self.nodes[1].generate(101)
         self.sync_all()
 
         assert_equal(self.nodes[0].getbalance(), 50)
 
         txid0 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
         txid1 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
         self.sync_all()
         self.nodes[0].generate(1)
         self.sync_all()
 
         txid2 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
         txid3 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
 
         tx0 = self.nodes[0].gettransaction(txid0)
         assert_equal(tx0['txid'], txid0)  # tx0 must be available (confirmed)
 
         tx1 = self.nodes[0].gettransaction(txid1)
         assert_equal(tx1['txid'], txid1)  # tx1 must be available (confirmed)
 
         tx2 = self.nodes[0].gettransaction(txid2)
         assert_equal(tx2['txid'], txid2)  # tx2 must be available (unconfirmed)
 
         tx3 = self.nodes[0].gettransaction(txid3)
         assert_equal(tx3['txid'], txid3)  # tx3 must be available (unconfirmed)
 
         # restart bitcoind
-        self.nodes[0].stop()
-        bitcoind_processes[0].wait()
-        self.nodes[0] = start_node(0, self.options.tmpdir)
+        self.stop_node(0)
+        self.nodes[0] = self.start_node(0, self.options.tmpdir)
 
         tx3 = self.nodes[0].gettransaction(txid3)
         assert_equal(tx3['txid'], txid3)  # tx must be available (unconfirmed)
 
-        self.nodes[0].stop()
-        bitcoind_processes[0].wait()
+        self.stop_node(0)
 
         # restart bitcoind with zapwallettxes
-        self.nodes[0] = start_node(
+        self.nodes[0] = self.start_node(
             0, self.options.tmpdir, ["-zapwallettxes=1"])
 
         assert_raises(JSONRPCException, self.nodes[0].gettransaction, [txid3])
         # there must be a expection because the unconfirmed wallettx0 must be
         # gone by now
 
         tx0 = self.nodes[0].gettransaction(txid0)
         assert_equal(tx0['txid'], txid0)
         # tx0 (confirmed) must still be available because it was
         # confirmed
 
 
 if __name__ == '__main__':
     ZapWalletTXesTest().main()
diff --git a/test/functional/zmq_test.py b/test/functional/zmq_test.py
index 74999741c..162ee97dd 100755
--- a/test/functional/zmq_test.py
+++ b/test/functional/zmq_test.py
@@ -1,130 +1,131 @@
 #!/usr/bin/env python3
 # Copyright (c) 2015-2017 The Bitcoin Core developers
 # Copyright (c) 2017 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 """Test the ZMQ API."""
 import configparser
 import os
 import struct
 
 from test_framework.test_framework import BitcoinTestFramework, SkipTest
 from test_framework.util import (
     assert_equal,
     bytes_to_hex_str,
     start_nodes,
 )
 
 
 class ZMQTest (BitcoinTestFramework):
 
     def __init__(self):
         super().__init__()
         self.num_nodes = 2
 
     def setup_nodes(self):
         # Try to import python3-zmq. Skip this test if the import fails.
         try:
             import zmq
         except ImportError:
             raise SkipTest("python3-zmq module not available.")
 
         # Check that bitcoin has been built with ZMQ enabled
         config = configparser.ConfigParser()
         if not self.options.configfile:
             self.options.configfile = os.path.dirname(
                 __file__) + "/../config.ini"
         config.read_file(open(self.options.configfile))
 
         if not config["components"].getboolean("ENABLE_ZMQ"):
             raise SkipTest("bitcoind has not been built with zmq enabled.")
 
         self.zmqContext = zmq.Context()
         self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
         self.zmqSubSocket.set(zmq.RCVTIMEO, 60000)
         self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashblock")
         self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtx")
+
         ip_address = "tcp://127.0.0.1:28332"
         self.zmqSubSocket.connect(ip_address)
         extra_args = [['-zmqpubhashtx=%s' %
                        ip_address, '-zmqpubhashblock=%s' % ip_address], []]
-        self.nodes = start_nodes(
+        self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, extra_args)
 
     def run_test(self):
         try:
             self._zmq_test()
         finally:
             # Destroy the zmq context
             self.log.debug("Destroying zmq context")
             self.zmqContext.destroy(linger=None)
 
     def _zmq_test(self):
         genhashes = self.nodes[0].generate(1)
         self.sync_all()
 
         self.log.info("Wait for tx")
         msg = self.zmqSubSocket.recv_multipart()
         topic = msg[0]
         assert_equal(topic, b"hashtx")
         body = msg[1]
         msgSequence = struct.unpack('<I', msg[-1])[-1]
         # Must be sequence 0 on hashtx
         assert_equal(msgSequence, 0)
 
         self.log.info("Wait for block")
         msg = self.zmqSubSocket.recv_multipart()
         topic = msg[0]
         body = msg[1]
         msgSequence = struct.unpack('<I', msg[-1])[-1]
 
         # Must be sequence 0 on hashblock
         assert_equal(msgSequence, 0)
         blkhash = bytes_to_hex_str(body)
 
         # blockhash from generate must be equal to the hash received over zmq
         assert_equal(genhashes[0], blkhash)
 
         self.log.info("Generate 10 blocks (and 10 coinbase txes)")
         n = 10
         genhashes = self.nodes[1].generate(n)
         self.sync_all()
 
         zmqHashes = []
         blockcount = 0
         for x in range(n * 2):
             msg = self.zmqSubSocket.recv_multipart()
             topic = msg[0]
             body = msg[1]
             if topic == b"hashblock":
                 zmqHashes.append(bytes_to_hex_str(body))
                 msgSequence = struct.unpack('<I', msg[-1])[-1]
                 assert_equal(msgSequence, blockcount + 1)
                 blockcount += 1
 
         for x in range(n):
             # blockhash from generate must be equal to the hash
             # received over zmq
             assert_equal(genhashes[x], zmqHashes[x])
 
         # Test tx from a second node
         hashRPC = self.nodes[1].sendtoaddress(
             self.nodes[0].getnewaddress(), 1.0)
         self.sync_all()
 
         # Now we should receive a zmq msg because the tx was broadcast
         msg = self.zmqSubSocket.recv_multipart()
         topic = msg[0]
         body = msg[1]
         assert_equal(topic, b"hashtx")
         hashZMQ = bytes_to_hex_str(body)
         msgSequence = struct.unpack('<I', msg[-1])[-1]
         assert_equal(msgSequence, blockcount + 1)
 
         # txid from sendtoaddress must be equal to the hash received over zmq
         assert_equal(hashRPC, hashZMQ)
 
 
 if __name__ == '__main__':
     ZMQTest().main()