diff --git a/test/functional/maxuploadtarget.py b/test/functional/maxuploadtarget.py
--- a/test/functional/maxuploadtarget.py
+++ b/test/functional/maxuploadtarget.py
@@ -19,6 +19,7 @@
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
 from test_framework.cdefs import LEGACY_MAX_BLOCK_SIZE
+from test_framework.blocktools import mine_big_block
 
 
 class TestNode(NodeConnCB):
@@ -75,7 +76,7 @@
         # Test logic begins here
 
         # Now mine a big block
-        mine_large_block(self.nodes[0], self.utxo_cache)
+        mine_big_block(self.nodes[0], self.utxo_cache)
 
         # Store the hash; we'll request this later
         big_old_block = self.nodes[0].getbestblockhash()
@@ -86,7 +87,7 @@
         self.nodes[0].setmocktime(int(time.time()) - 2 * 60 * 60 * 24)
 
         # Mine one more block, so that the prior block looks old
-        mine_large_block(self.nodes[0], self.utxo_cache)
+        mine_big_block(self.nodes[0], self.utxo_cache)
 
         # We'll be requesting this new block too
         big_new_block = self.nodes[0].getbestblockhash()
diff --git a/test/functional/pruning.py b/test/functional/pruning.py
--- a/test/functional/pruning.py
+++ b/test/functional/pruning.py
@@ -13,6 +13,8 @@
 
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import *
+from test_framework.blocktools import mine_big_block
+
 import time
 import os
 
@@ -69,7 +71,7 @@
         self.nodes[0].generate(150)
         # Then mine enough full blocks to create more than 550MiB of data
         for i in range(645):
-            mine_large_block(self.nodes[0], self.utxo_cache_0)
+            mine_big_block(self.nodes[0], self.utxo_cache_0)
 
         sync_blocks(self.nodes[0:5])
 
@@ -84,7 +86,7 @@
         # Pruning doesn't run until we're allocating another chunk, 20 full
         # blocks past the height cutoff will ensure this
         for i in range(25):
-            mine_large_block(self.nodes[0], self.utxo_cache_0)
+            mine_big_block(self.nodes[0], self.utxo_cache_0)
 
         waitstart = time.time()
         while os.path.isfile(self.prunedir + "blk00000.dat"):
@@ -114,7 +116,7 @@
             # Mine 24 blocks in node 1
             for i in range(24):
                 if j == 0:
-                    mine_large_block(self.nodes[1], self.utxo_cache_1)
+                    mine_big_block(self.nodes[1], self.utxo_cache_1)
                 else:
                     # Add node1's wallet transactions back to the mempool, to
                     # avoid the mined blocks from being too small.
@@ -124,7 +126,7 @@
 
             # Reorg back with 25 block chain from node 0
             for i in range(25):
-                mine_large_block(self.nodes[0], self.utxo_cache_0)
+                mine_big_block(self.nodes[0], self.utxo_cache_0)
 
             # Create connections in the order so both nodes can see the reorg
             # at the same time
diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py
--- a/test/functional/test_framework/blocktools.py
+++ b/test/functional/test_framework/blocktools.py
@@ -134,6 +134,21 @@
     return utxos
 
 
+def mine_big_block(node, utxos=None):
+    # Each transaction is roughly 66kB in size, and 14 of them
+    # together come close to the 1MB block limit.
+    num = 14
+    utxos = utxos if utxos is not None else []
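+    # Refill the utxo cache from the node's wallet if it holds too few spendable outputs.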
+    if len(utxos) < num:
+        utxos.clear()
+        utxos.extend(node.listunspent())
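+    # The final argument is send_big_transactions' fee_multiplier (the removed helper paid 100 * relayfee).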
+    send_big_transactions(node, utxos, num, 100)
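+    # Mine a single block, which should pick up the freshly created large transactions from the mempool.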
+    node.generate(1)
+
+
 def send_big_transactions(node, utxos, num, fee_multiplier):
     from .cashaddr import decode
     txids = []
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -628,17 +628,3 @@
         txid = node.sendrawtransaction(signresult["hex"], True)
         txids.append(txid)
     return txids
-
-
-def mine_large_block(node, utxos=None):
-    # generate a 66k transaction,
-    # and 14 of them is close to the 1MB block limit
-    num = 14
-    txouts = gen_return_txouts()
-    utxos = utxos if utxos is not None else []
-    if len(utxos) < num:
-        utxos.clear()
-        utxos.extend(node.listunspent())
-    fee = 100 * node.getnetworkinfo()["relayfee"]
-    create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
-    node.generate(1)