D13191.diff
diff --git a/test/functional/feature_addrman.py b/test/functional/feature_addrman.py
--- a/test/functional/feature_addrman.py
+++ b/test/functional/feature_addrman.py
@@ -86,13 +86,13 @@
"Check that addrman from future is overwritten with new addrman")
self.stop_node(0)
write_addrman(peers_dat, lowest_compatible=111)
- assert_equal(os.path.exists(peers_dat + ".bak"), False)
+ assert_equal(os.path.exists(f"{peers_dat}.bak"), False)
with self.nodes[0].assert_debug_log([
f'Creating new peers.dat because the file version was not compatible ("{peers_dat}"). Original backed up to peers.dat.bak',
]):
self.start_node(0)
assert_equal(self.nodes[0].getnodeaddresses(), [])
- assert_equal(os.path.exists(peers_dat + ".bak"), True)
+ assert_equal(os.path.exists(f"{peers_dat}.bak"), True)
self.log.info("Check that corrupt addrman cannot be read (EOF)")
self.stop_node(0)
diff --git a/test/functional/feature_assumevalid.py b/test/functional/feature_assumevalid.py
--- a/test/functional/feature_assumevalid.py
+++ b/test/functional/feature_assumevalid.py
@@ -153,8 +153,8 @@
# Start node1 and node2 with assumevalid so they accept a block with a
# bad signature.
- self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)])
- self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)])
+ self.start_node(1, extra_args=[f"-assumevalid={hex(block102.sha256)}"])
+ self.start_node(2, extra_args=[f"-assumevalid={hex(block102.sha256)}"])
p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py
--- a/test/functional/feature_block.py
+++ b/test/functional/feature_block.py
@@ -113,7 +113,7 @@
# Allow the block to mature
blocks = []
for i in range(NUM_BUFFER_BLOCKS_TO_GENERATE):
- blocks.append(self.next_block("maturitybuffer.{}".format(i)))
+ blocks.append(self.next_block(f"maturitybuffer.{i}"))
self.save_spendable_output()
self.send_blocks(blocks)
@@ -149,7 +149,7 @@
self.log.info(
"Reject block with invalid tx: %s",
TxTemplate.__name__)
- blockname = "for_invalid.{}".format(TxTemplate.__name__)
+ blockname = f"for_invalid.{TxTemplate.__name__}"
badblock = self.next_block(blockname)
badtx = template.get_tx()
if TxTemplate != invalid_txs.InputMissing:
@@ -1116,11 +1116,11 @@
self.move_tip(88)
blocks2 = []
for i in range(89, LARGE_REORG_SIZE + 89):
- blocks2.append(self.next_block("alt" + str(i)))
+ blocks2.append(self.next_block(f"alt{i}"))
self.send_blocks(blocks2, False, force_send=True)
# extend alt chain to trigger re-org
- block = self.next_block("alt" + str(chain1_tip + 1))
+ block = self.next_block(f"alt{chain1_tip + 1}")
self.send_blocks([block], True, timeout=2440)
# ... and re-org back to the first chain
@@ -1226,13 +1226,12 @@
# save the current tip so it can be spent by a later block
def save_spendable_output(self):
- self.log.debug("saving spendable output {}".format(self.tip.vtx[0]))
+ self.log.debug(f"saving spendable output {self.tip.vtx[0]}")
self.spendable_outputs.append(self.tip)
# get an output that we previously marked as spendable
def get_spendable_output(self):
- self.log.debug("getting spendable output {}".format(
- self.spendable_outputs[0].vtx[0]))
+ self.log.debug(f"getting spendable output {self.spendable_outputs[0].vtx[0]}")
return self.spendable_outputs.pop(0).vtx[0]
# move the tip back to a previous block
diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py
--- a/test/functional/feature_csv_activation.py
+++ b/test/functional/feature_csv_activation.py
@@ -625,7 +625,7 @@
# Generate 9 blocks then spend in the 10th
block = self.nodes[0].getbestblockhash()
self.last_block_time += 600
- self.tip = int("0x" + block, 0)
+ self.tip = int(f"0x{block}", 0)
self.tipheight += 1
# Test #122
self.send_blocks(self.generate_blocks(9))
@@ -656,7 +656,7 @@
# Generate 8 blocks then spend in the 9th (9 * 600 > 10 * 512)
block = self.nodes[0].getbestblockhash()
self.last_block_time += 600
- self.tip = int("0x" + block, 0)
+ self.tip = int(f"0x{block}", 0)
self.tipheight += 1
# Test #125
self.send_blocks(self.generate_blocks(8))
diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py
--- a/test/functional/feature_dersig.py
+++ b/test/functional/feature_dersig.py
@@ -52,7 +52,7 @@
def run_test(self):
peer = self.nodes[0].add_p2p_connection(P2PInterface())
- self.log.info("Mining {} blocks".format(DERSIG_HEIGHT - 1))
+ self.log.info(f"Mining {DERSIG_HEIGHT - 1} blocks")
self.coinbase_txids = [self.nodes[0].getblock(
b)['tx'][0] for b in self.generate(self.nodes[0], DERSIG_HEIGHT - 1)]
self.nodeaddress = self.nodes[0].getnewaddress()
diff --git a/test/functional/feature_init.py b/test/functional/feature_init.py
--- a/test/functional/feature_init.py
+++ b/test/functional/feature_init.py
@@ -103,7 +103,7 @@
for target_file in target_files:
self.log.info(f"Tweaking file to ensure failure {target_file}")
- bak_path = str(target_file) + ".bak"
+ bak_path = f"{target_file}.bak"
target_file.rename(bak_path)
# TODO: at some point, we should test perturbing the files instead
@@ -128,7 +128,7 @@
)
for target_file in target_files:
- bak_path = str(target_file) + ".bak"
+ bak_path = f"{target_file}.bak"
self.log.debug(
f"Restoring file from {bak_path} and restarting")
Path(bak_path).rename(target_file)
diff --git a/test/functional/feature_logging.py b/test/functional/feature_logging.py
--- a/test/functional/feature_logging.py
+++ b/test/functional/feature_logging.py
@@ -29,7 +29,7 @@
# test alternative log file name outside datadir
tempname = os.path.join(self.options.tmpdir, "foo.log")
- self.restart_node(0, ["-debuglogfile={}".format(tempname)])
+ self.restart_node(0, [f"-debuglogfile={tempname}"])
assert os.path.isfile(tempname)
# check that invalid log (relative) will cause error
@@ -38,13 +38,13 @@
self.stop_node(0)
exp_stderr = r"Error: Could not open debug log file \S+$"
self.nodes[0].assert_start_raises_init_error(
- ["-debuglogfile={}".format(invalidname)], exp_stderr, match=ErrorMatch.FULL_REGEX)
+ [f"-debuglogfile={invalidname}"], exp_stderr, match=ErrorMatch.FULL_REGEX)
assert not os.path.isfile(os.path.join(invdir, "foo.log"))
# check that invalid log (relative) works after path exists
self.stop_node(0)
os.mkdir(invdir)
- self.start_node(0, ["-debuglogfile={}".format(invalidname)])
+ self.start_node(0, [f"-debuglogfile={invalidname}"])
assert os.path.isfile(os.path.join(invdir, "foo.log"))
# check that invalid log (absolute) will cause error
@@ -52,13 +52,13 @@
invdir = os.path.join(self.options.tmpdir, "foo")
invalidname = os.path.join(invdir, "foo.log")
self.nodes[0].assert_start_raises_init_error(
- ["-debuglogfile={}".format(invalidname)], exp_stderr, match=ErrorMatch.FULL_REGEX)
+ [f"-debuglogfile={invalidname}"], exp_stderr, match=ErrorMatch.FULL_REGEX)
assert not os.path.isfile(os.path.join(invdir, "foo.log"))
# check that invalid log (absolute) works after path exists
self.stop_node(0)
os.mkdir(invdir)
- self.start_node(0, ["-debuglogfile={}".format(invalidname)])
+ self.start_node(0, [f"-debuglogfile={invalidname}"])
assert os.path.isfile(os.path.join(invdir, "foo.log"))
# check that -nodebuglogfile disables logging
@@ -69,7 +69,7 @@
assert not os.path.isfile(default_log_path)
# just sanity check no crash here
- self.restart_node(0, ["-debuglogfile={}".format(os.devnull)])
+ self.restart_node(0, [f"-debuglogfile={os.devnull}"])
if __name__ == '__main__':
diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py
--- a/test/functional/feature_minchainwork.py
+++ b/test/functional/feature_minchainwork.py
@@ -47,15 +47,13 @@
# minchainwork is exceeded
starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work
self.log.info(
- "Testing relay across node {} (minChainWork = {})".format(
- 1, self.node_min_work[1]))
+ f"Testing relay across node 1 (minChainWork = {self.node_min_work[1]})")
starting_blockcount = self.nodes[2].getblockcount()
num_blocks_to_generate = int(
(self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
- self.log.info("Generating {} blocks on node0".format(
- num_blocks_to_generate))
+ self.log.info(f"Generating {num_blocks_to_generate} blocks on node0")
hashes = self.generate(
self.nodes[0],
num_blocks_to_generate,
@@ -71,8 +69,7 @@
time.sleep(3)
self.log.info("Verifying node 2 has no more blocks than before")
- self.log.info("Blockcounts: {}".format(
- [n.getblockcount() for n in self.nodes]))
+ self.log.info(f"Blockcounts: {[n.getblockcount() for n in self.nodes]}")
# Node2 shouldn't have any new headers yet, because node1 should not
# have relayed anything.
assert_equal(len(self.nodes[2].getchaintips()), 1)
@@ -94,8 +91,7 @@
# continue the test.
self.sync_all()
- self.log.info("Blockcounts: {}".format(
- [n.getblockcount() for n in self.nodes]))
+ self.log.info(f"Blockcounts: {[n.getblockcount() for n in self.nodes]}")
if __name__ == '__main__':
diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py
--- a/test/functional/feature_notifications.py
+++ b/test/functional/feature_notifications.py
@@ -20,7 +20,7 @@
def notify_outputname(walletname, txid):
- return txid if os.name == 'nt' else '{}_{}'.format(walletname, txid)
+ return txid if os.name == 'nt' else f'{walletname}_{txid}'
class NotificationsTest(BitcoinTestFramework):
@@ -44,9 +44,9 @@
# -alertnotify and -blocknotify on node0, walletnotify on node1
self.extra_args = [["-alertnotify=echo > {}".format(
os.path.join(self.alertnotify_dir, '%s')),
- "-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s'))],
+ f"-blocknotify=echo > {os.path.join(self.blocknotify_dir, '%s')}"],
["-rescan",
- "-walletnotify=echo > {}".format(os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s')))]]
+ f"-walletnotify=echo > {os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s'))}"]]
self.wallet_names = [self.default_wallet_name, self.wallet]
super().setup_network()
diff --git a/test/functional/feature_proxy.py b/test/functional/feature_proxy.py
--- a/test/functional/feature_proxy.py
+++ b/test/functional/feature_proxy.py
@@ -128,7 +128,7 @@
rv = []
addr = "15.61.23.23:1234"
self.log.debug(
- "Test: outgoing IPv4 connection through node for address {}".format(addr))
+ f"Test: outgoing IPv4 connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[0].queue.get()
assert isinstance(cmd, Socks5Command)
@@ -146,7 +146,7 @@
if self.have_ipv6:
addr = "[1233:3432:2434:2343:3234:2345:6546:4534]:5443"
self.log.debug(
- "Test: outgoing IPv6 connection through node for address {}".format(addr))
+ f"Test: outgoing IPv6 connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[1].queue.get()
assert isinstance(cmd, Socks5Command)
@@ -164,7 +164,7 @@
if test_onion:
addr = "bitcoinostk4e4re.onion:8333"
self.log.debug(
- "Test: outgoing onion connection through node for address {}".format(addr))
+ f"Test: outgoing onion connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[2].queue.get()
assert isinstance(cmd, Socks5Command)
@@ -179,7 +179,7 @@
addr = "node.noumenon:8333"
self.log.debug(
- "Test: outgoing DNS name connection through node for address {}".format(addr))
+ f"Test: outgoing DNS name connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[3].queue.get()
assert isinstance(cmd, Socks5Command)
@@ -242,11 +242,9 @@
n1 = networks_dict(self.nodes[1].getnetworkinfo())
assert_equal(NETWORKS, n1.keys())
for net in ['ipv4', 'ipv6']:
- assert_equal(n1[net]['proxy'], '{}:{}'.format(
- self.conf1.addr[0], self.conf1.addr[1]))
+ assert_equal(n1[net]['proxy'], f'{self.conf1.addr[0]}:{self.conf1.addr[1]}')
assert_equal(n1[net]['proxy_randomize_credentials'], False)
- assert_equal(n1['onion']['proxy'], '{}:{}'.format(
- self.conf2.addr[0], self.conf2.addr[1]))
+ assert_equal(n1['onion']['proxy'], f'{self.conf2.addr[0]}:{self.conf2.addr[1]}')
assert_equal(n1['onion']['proxy_randomize_credentials'], False)
assert_equal(n1['onion']['reachable'], True)
assert_equal(n1['i2p']['proxy'], '{}:{}'.format(*self.i2p_sam))
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py
--- a/test/functional/feature_pruning.py
+++ b/test/functional/feature_pruning.py
@@ -152,7 +152,7 @@
self.log.info("Success")
usage = calc_usage(self.prunedir)
- self.log.info("Usage should be below target: {}".format(usage))
+ self.log.info(f"Usage should be below target: {usage}")
assert_greater_than(550, usage)
def create_chain_with_staleblocks(self):
@@ -187,12 +187,11 @@
# 288 blocks of undo data to the reorg_test chain.
height = self.nodes[1].getblockcount()
- self.log.info("Current block height: {}".format(height))
+ self.log.info(f"Current block height: {height}")
self.forkheight = height - 287
self.forkhash = self.nodes[1].getblockhash(self.forkheight)
- self.log.info("Invalidating block {} at height {}".format(
- self.forkhash, self.forkheight))
+ self.log.info(f"Invalidating block {self.forkhash} at height {self.forkheight}")
self.nodes[1].invalidateblock(self.forkhash)
# We've now switched to our previously mined-24 block fork on node 1, but that's not what we want.
@@ -205,8 +204,7 @@
curhash = self.nodes[1].getblockhash(self.forkheight - 1)
assert self.nodes[1].getblockcount() == self.forkheight - 1
- self.log.info("New best height: {}".format(
- self.nodes[1].getblockcount()))
+ self.log.info(f"New best height: {self.nodes[1].getblockcount()}")
# Disconnect node1 and generate the new chain
self.disconnect_nodes(0, 1)
@@ -220,8 +218,7 @@
self.connect_nodes(1, 2)
self.sync_blocks(self.nodes[0:3], timeout=120)
- self.log.info("Verify height on node 2: {}".format(
- self.nodes[2].getblockcount()))
+ self.log.info(f"Verify height on node 2: {self.nodes[2].getblockcount()}")
self.log.info("Usage possibly still high because of stale blocks in block files: {}".format(
calc_usage(self.prunedir)))
@@ -232,7 +229,7 @@
self.sync_blocks(self.nodes[0:3], timeout=120)
usage = calc_usage(self.prunedir)
- self.log.info("Usage should be below target: {}".format(usage))
+ self.log.info(f"Usage should be below target: {usage}")
assert_greater_than(550, usage)
def reorg_back(self):
@@ -242,7 +239,7 @@
with self.nodes[2].assert_debug_log(expected_msgs=['block verification stopping at height', '(pruning, no data)']):
self.nodes[2].verifychain(checklevel=4, nblocks=0)
self.log.info(
- "Will need to redownload block {}".format(self.forkheight))
+ f"Will need to redownload block {self.forkheight}")
# Verify that we have enough history to reorg back to the fork point.
# Although this is more than 288 blocks, because this chain was written
@@ -314,7 +311,7 @@
def has_block(index):
return os.path.isfile(os.path.join(
- self.nodes[node_number].datadir, self.chain, "blocks", "blk{:05}.dat".format(index)))
+ self.nodes[node_number].datadir, self.chain, "blocks", f"blk{index:05}.dat"))
# should not prune because chain tip of node 3 (995) < PruneAfterHeight
# (1000)
diff --git a/test/functional/feature_settings.py b/test/functional/feature_settings.py
--- a/test/functional/feature_settings.py
+++ b/test/functional/feature_settings.py
@@ -92,7 +92,7 @@
with altsettings.open("w") as fp:
fp.write('{"key": "value"}')
with node.assert_debug_log(expected_msgs=['Setting file arg: key = "value"']):
- self.start_node(0, extra_args=["-settings={}".format(altsettings)])
+ self.start_node(0, extra_args=[f"-settings={altsettings}"])
self.stop_node(0)
diff --git a/test/functional/feature_uaclient.py b/test/functional/feature_uaclient.py
--- a/test/functional/feature_uaclient.py
+++ b/test/functional/feature_uaclient.py
@@ -54,17 +54,17 @@
self.stop_node(0)
expected = r"Error: Total length of network version string \([0-9]+\) exceeds maximum length \([0-9]+\)\. Reduce the number or size of uacomments\."
self.nodes[0].assert_start_raises_init_error(
- ["-uaclientname=" + "a" * 256], expected, match=ErrorMatch.FULL_REGEX)
+ [f"-uaclientname={'a' * 256}"], expected, match=ErrorMatch.FULL_REGEX)
self.log.info("test -uaclientversion max length")
expected = r"Error: Total length of network version string \([0-9]+\) exceeds maximum length \([0-9]+\)\. Reduce the number or size of uacomments\."
self.nodes[0].assert_start_raises_init_error(
- ["-uaclientversion=" + "a" * 256], expected, match=ErrorMatch.FULL_REGEX)
+ [f"-uaclientversion={'a' * 256}"], expected, match=ErrorMatch.FULL_REGEX)
self.log.info("test -uaclientname and -uaclientversion max length")
expected = r"Error: Total length of network version string \([0-9]+\) exceeds maximum length \([0-9]+\)\. Reduce the number or size of uacomments\."
self.nodes[0].assert_start_raises_init_error(
- ["-uaclientname=" + "a" * 128, "-uaclientversion=" + "a" * 128], expected, match=ErrorMatch.FULL_REGEX)
+ [f"-uaclientname={'a' * 128}", f"-uaclientversion={'a' * 128}"], expected, match=ErrorMatch.FULL_REGEX)
self.log.info(
"test -uaclientname and -uaclientversion invalid characters")
@@ -73,20 +73,20 @@
expected = r"Error: -uaclientname \(" + \
re.escape(invalid_char) + r"\) contains invalid characters\."
self.nodes[0].assert_start_raises_init_error(
- ["-uaclientname=" + invalid_char],
+ [f"-uaclientname={invalid_char}"],
expected, match=ErrorMatch.FULL_REGEX)
# for client version
expected = r"Error: -uaclientversion \(" + \
re.escape(invalid_char) + r"\) contains invalid characters\."
self.nodes[0].assert_start_raises_init_error(
- ["-uaclientversion=" + invalid_char],
+ [f"-uaclientversion={invalid_char}"],
expected, match=ErrorMatch.FULL_REGEX)
# for both
expected = r"Error: -uaclientname \(" + \
re.escape(invalid_char) + r"\) contains invalid characters\."
self.nodes[0].assert_start_raises_init_error(
- ["-uaclientname=" + invalid_char,
- "-uaclientversion=" + invalid_char],
+ [f"-uaclientname={invalid_char}",
+ f"-uaclientversion={invalid_char}"],
expected, match=ErrorMatch.FULL_REGEX)
diff --git a/test/functional/feature_uacomment.py b/test/functional/feature_uacomment.py
--- a/test/functional/feature_uacomment.py
+++ b/test/functional/feature_uacomment.py
@@ -29,14 +29,14 @@
self.stop_node(0)
expected = r"Error: Total length of network version string \([0-9]+\) exceeds maximum length \(256\). Reduce the number or size of uacomments."
self.nodes[0].assert_start_raises_init_error(
- ["-uacomment=" + 'a' * 256], expected, match=ErrorMatch.FULL_REGEX)
+ [f"-uacomment={'a' * 256}"], expected, match=ErrorMatch.FULL_REGEX)
self.log.info("test -uacomment unsafe characters")
for unsafe_char in ['/', ':', '(', ')', '₿', '🏃']:
expected = r"Error: User Agent comment \(" + re.escape(
unsafe_char) + r"\) contains unsafe characters."
self.nodes[0].assert_start_raises_init_error(
- ["-uacomment=" + unsafe_char], expected, match=ErrorMatch.FULL_REGEX)
+ [f"-uacomment={unsafe_char}"], expected, match=ErrorMatch.FULL_REGEX)
if __name__ == '__main__':
Attached To: D13191: test: use f-strings in feature_* tests (part 3, with flynt)
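
For reference only (not part of the diff): a minimal, hypothetical Python sketch of the mechanical rewrite flynt applies throughout this change, turning str.format() calls and "+" concatenation into equivalent f-strings. The names and values below are illustrative and not taken from the test files.

# Illustrative values only; not taken from the functional tests above.
peers_dat = "/tmp/regtest/peers.dat"
num_blocks_to_generate = 42

# Before: str.format() and "+" concatenation, as on the removed (-) lines.
msg_old = "Generating {} blocks on node0".format(num_blocks_to_generate)
bak_old = peers_dat + ".bak"

# After: equivalent f-strings, as on the added (+) lines.
msg_new = f"Generating {num_blocks_to_generate} blocks on node0"
bak_new = f"{peers_dat}.bak"

# The rewrite is purely syntactic; the resulting strings are identical.
assert msg_old == msg_new
assert bak_old == bak_new

Per the revision title, the actual conversion was produced with the flynt tool (its invocation is not shown in the diff); the sketch above only demonstrates the before/after equivalence.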