diff --git a/cmake/utils/junit-reports-merge.py b/cmake/utils/junit-reports-merge.py index 3b92b89a9..982e02c70 100755 --- a/cmake/utils/junit-reports-merge.py +++ b/cmake/utils/junit-reports-merge.py @@ -1,132 +1,132 @@ #!/usr/bin/env python3 # Copyright (c) 2020 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. import datetime import fcntl import os import sys import xml.etree.ElementTree as ET class TestSuite: def __init__(self, name, report_dir): self.name = name self.test_cases = {} self.report_file = os.path.join(report_dir, f'{self.name}.xml') def add_test_case(self, test_case): self.test_cases[test_case.test_id] = test_case def get_failed_tests(self): return [t for t in self.test_cases.values() if not t.test_success] def dump(self): # Calculate test suite duration as the sum of all test case durations duration = round(sum([ float(t.node.get('time', 0.0)) for t in self.test_cases.values() ]), 3) test_suite = ET.Element( 'testsuite', { 'name': self.name, 'id': '0', 'timestamp': datetime.datetime.now().isoformat('T'), 'time': str(duration), 'tests': str(len(self.test_cases)), 'failures': str(len(self.get_failed_tests())), } ) for test_case in self.test_cases.values(): test_suite.append(test_case.node) report_dir = os.path.dirname(self.report_file) os.makedirs(report_dir, exist_ok=True) ET.ElementTree(test_suite).write( self.report_file, 'UTF-8', xml_declaration=True, ) def load(self): tree = ET.parse(self.report_file) xml_root = tree.getroot() assert xml_root.tag == 'testsuite' assert self.name == xml_root.get('name') for test_case in xml_root.findall('testcase'): self.add_test_case(TestCase(test_case)) class TestCase: def __init__(self, node): self.node = node self.test_success = self.node.find('failure') is None def __getattr__(self, attribute): if attribute == 'test_id': return f"{self.classname}/{self.name}" return self.node.attrib[attribute] class Lock: def __init__(self, suite, lock_dir): self.lock_file = os.path.join(lock_dir, f'{suite}.lock') def __enter__(self): os.makedirs(os.path.dirname(self.lock_file), exist_ok=True) self.fd = open(self.lock_file, 'w', encoding='utf-8') fcntl.lockf(self.fd, fcntl.LOCK_EX) - def __exit__(self, type, value, traceback): + def __exit__(self, exception_type, exception_value, traceback): fcntl.lockf(self.fd, fcntl.LOCK_UN) self.fd.close() def main(report_dir, lock_dir, suite, test): junit = f'{suite}-{test}.xml' if not os.path.isfile(junit): return 0 tree = ET.parse(junit) # Junit root can be a single test suite or multiple test suites. The # latter case is unsupported.
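# For reference, a minimal report this script accepts looks like this
# (hypothetical content, for illustration only):
#
#   <?xml version="1.0" encoding="UTF-8"?>
#   <testsuite name="check-bitcoin" tests="2" failures="1" time="0.5">
#     <testcase classname="wallet_tests" name="create" time="0.2"/>
#     <testcase classname="wallet_tests" name="send" time="0.3">
#       <failure>assertion failed</failure>
#     </testcase>
#   </testsuite>
#
# A <testsuites> root wrapping several suites is the unsupported case
# rejected just below.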
xml_root = tree.getroot() if xml_root.tag != 'testsuite': raise AssertionError( "The parser only supports a single test suite per report") test_suite_name = xml_root.get('name') lock = Lock(suite, lock_dir) with lock: test_suite = TestSuite(test_suite_name, report_dir) if os.path.isfile(test_suite.report_file): test_suite.load() for child in xml_root: if child.tag != 'testcase' or (child.find('skipped') is not None): continue test_suite.add_test_case(TestCase(child)) test_suite.dump() sys.exit( 1 if test in [case.classname for case in test_suite.get_failed_tests()] else 0 ) main( report_dir=sys.argv[1], lock_dir=sys.argv[2], suite=sys.argv[3], test=sys.argv[4], ) diff --git a/contrib/buildbot/test/test_phabricator.py b/contrib/buildbot/test/test_phabricator.py index f5f431176..cfa15f301 100755 --- a/contrib/buildbot/test/test_phabricator.py +++ b/contrib/buildbot/test/test_phabricator.py @@ -1,553 +1,553 @@ #!/usr/bin/env python3 # # Copyright (c) 2020 The Bitcoin ABC developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. import os import test.mocks.phabricator import unittest from base64 import b64encode import mock from phabricator_wrapper import BITCOIN_ABC_PROJECT_PHID, BITCOIN_ABC_REPO from build import BuildStatus, BuildTarget class PhabricatorTests(unittest.TestCase): def setUp(self): self.phab = test.mocks.phabricator.instance() def tearDown(self): pass def test_get_project_members(self): self.phab.project.search.return_value = test.mocks.phabricator.Result([ { "id": 1, "type": "PROJ", "phid": BITCOIN_ABC_PROJECT_PHID, "attachments": { "members": { "members": [ { "phid": "PHID-USER-usernumber1" }, { "phid": "PHID-USER-usernumber2" }, { "phid": "PHID-USER-usernumber3" }, ] } } } ]) abc_members = self.phab.get_project_members(BITCOIN_ABC_PROJECT_PHID) self.phab.project.search.assert_called_with( constraints={ "phids": [BITCOIN_ABC_PROJECT_PHID], }, attachments={ "members": True, }, ) self.assertEqual( abc_members, [ "PHID-USER-usernumber1", "PHID-USER-usernumber2", "PHID-USER-usernumber3", ] ) def test_get_latest_diff_staging_ref(self): revision_PHID = "PHID-DREV-987645" def assert_diff_searched_called(): return self.phab.differential.diff.search.assert_called_with( constraints={ "revisionPHIDs": [revision_PHID], }, order="newest" ) # No diff associated to the revision ref = self.phab.get_latest_diff_staging_ref(revision_PHID) assert_diff_searched_called() self.assertEqual(ref, "") # 2 diffs associated with the revision. Ordering is guaranteed by the # "order" request parameter. self.phab.differential.diff.search.return_value = test.mocks.phabricator.Result([ { "id": 42, "type": "DIFF", "phid": "PHID-DIFF-123456", }, { "id": 41, "type": "DIFF", "phid": "PHID-DIFF-abcdef", }, ]) ref = self.phab.get_latest_diff_staging_ref(revision_PHID) assert_diff_searched_called() self.assertEqual(ref, "refs/tags/phabricator/diff/42") def test_get_current_user_phid(self): user_PHID = "PHID-USER-foobarbaz" self.phab.user.whoami.return_value = { "phid": user_PHID, "userName": "foo", "realName": "Foo Bar", } # The whoami result should be cached. Call the method a few times and # check the call occurs once and the result is always as expected. 
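# A minimal sketch of the memoization being exercised here (an assumed
# shape, not necessarily the wrapper's verbatim implementation):
#
#   def get_current_user_phid(self):
#       if self._current_user_phid is None:
#           self._current_user_phid = self.user.whoami()["phid"]
#       return self._current_user_phid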
for i in range(10): phid = self.phab.get_current_user_phid() self.phab.user.whoami.assert_called_once() self.assertEqual(phid, user_PHID) def test_getRevisionAuthor(self): self.phab.differential.revision.search.return_value = test.mocks.phabricator.Result([{ 'fields': { 'authorPHID': 'PHID-USER-2345', }, }]) expectedAuthor = { "phid": 'PHID-USER-2345', } self.phab.user.search.return_value = test.mocks.phabricator.Result([ expectedAuthor]) actualAuthor = self.phab.getRevisionAuthor('D1234') self.assertEqual(actualAuthor, expectedAuthor) def test_getAuthorSlackUsername(self): self.assertEqual("", self.phab.getAuthorSlackUsername({})) self.assertEqual("", self.phab.getAuthorSlackUsername({'fields': {}})) self.assertEqual("test-slack-name", self.phab.getAuthorSlackUsername({ 'fields': { 'custom.abc:slack-username': 'test-slack-name', 'username': 'test-username', }, })) self.assertEqual("test-username", self.phab.getAuthorSlackUsername({ 'fields': { 'username': 'test-username', }, })) def test_user_roles(self): user_PHID = "PHID-USER-abcdef" def assert_user_search_called(): return self.phab.user.search.assert_called_with( constraints={ "phids": [user_PHID], } ) # User not found user_roles = self.phab.get_user_roles(user_PHID) assert_user_search_called() self.assertEqual(user_roles, []) # User found self.phab.user.search.return_value = test.mocks.phabricator.Result([ { "id": 1, "type": "USER", "phid": user_PHID, "fields": { "username": "foobar", "realName": "Foo Bar", "roles": [ "admin", "verified", "approved", "activated", ], "dateCreated": 0, "dateModified": 0, "custom.abc:slack-username": "Foobar", }, }, ]) user_roles = self.phab.get_user_roles(user_PHID) assert_user_search_called() self.assertEqual( user_roles, [ "admin", "verified", "approved", "activated", ] ) # If more than 1 user is returned (should never occur), check no role is # returned to prevent privilege exploits. 
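# A plausible fail-closed shape for get_user_roles (a sketch under that
# assumption, not the wrapper's verbatim code): only trust a unique match.
#
#   data = self.user.search(constraints={"phids": [user_PHID]}).data
#   return data[0]["fields"]["roles"] if len(data) == 1 else []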
self.phab.user.search.return_value = test.mocks.phabricator.Result([ { "id": 1, "type": "USER", "phid": user_PHID, "fields": { "roles": [ "verified", ], }, }, { "id": 2, "type": "USER", "phid": user_PHID, "fields": { "roles": [ "admin", ], }, }, ]) user_roles = self.phab.get_user_roles(user_PHID) assert_user_search_called() self.assertEqual(user_roles, []) def test_get_laster_master_commit_hash(self): with self.assertRaises(AssertionError): self.phab.get_latest_master_commit_hash() self.phab.diffusion.commit.search.return_value = test.mocks.phabricator.Result([ { "id": 1234, "type": "CMIT", "phid": "PHID-CMIT-abcdef", "fields": { "identifier": "0000000000000000000000000000000123456789", "repositoryPHID": "PHID-REPO-abcrepo", }, } ]) commit_hash = self.phab.get_latest_master_commit_hash() self.phab.diffusion.commit.search.assert_called_with( constraints={ "repositories": [BITCOIN_ABC_REPO], }, limit=1, ) self.assertEqual( commit_hash, "0000000000000000000000000000000123456789") def test_get_revision_changed_files(self): self.phab.differential.getcommitpaths.return_value = [ "file1", "dir/file2", ] self.assertEqual( self.phab.get_revision_changed_files(1234), [ "file1", "dir/file2", ]) def test_get_file_content_from_master(self): commit_hash = "0000000000000000000000000000000123456789" file_phid = "PHID-FILE-somefile" path = "some/file/" self.phab.get_latest_master_commit_hash = mock.Mock() self.phab.get_latest_master_commit_hash.return_value = commit_hash self.phab.diffusion.browsequery = mock.Mock() - def configure_browsequery(file_path=path, hash="abcdef"): + def configure_browsequery(file_path=path, file_hash="abcdef"): self.phab.diffusion.browsequery.return_value = { "paths": [ { "fullPath": "some/file/1", "hash": "1234" }, { "fullPath": "some/file/2", "hash": "5678" }, { "fullPath": file_path, - "hash": hash + "hash": file_hash }, ] } def assert_diffusion_browsequery_called(): self.phab.get_latest_master_commit_hash.assert_called() self.phab.diffusion.browsequery.assert_called_with( path=os.path.dirname(path) or None, commit=commit_hash, repository=BITCOIN_ABC_REPO, branch="master", ) def configure_file_content_query( file_phid=file_phid, too_slow=False, too_huge=False): output = { "tooSlow": too_slow, "tooHuge": too_huge, } if file_phid is not None: output["filePHID"] = file_phid self.phab.diffusion.filecontentquery.return_value = output def assert_file_commit_and_file_searched(): self.phab.get_latest_master_commit_hash.assert_called() self.phab.diffusion.filecontentquery.assert_called_with( path=path, commit=commit_hash, timeout=5, byteLimit=1024 * 1024, repository=BITCOIN_ABC_REPO, branch="master", ) # Browse query failure self.phab.diffusion.browsequery.return_value = {} with self.assertRaisesRegex(AssertionError, "File .+ not found in master"): self.phab.get_file_content_from_master(path) assert_diffusion_browsequery_called() # Browse query returns no file self.phab.diffusion.browsequery.return_value = {'paths': []} with self.assertRaisesRegex(AssertionError, "File .+ not found in master"): self.phab.get_file_content_from_master(path) assert_diffusion_browsequery_called() # Browse query failed to find our file configure_browsequery(file_path='something/else') with self.assertRaisesRegex(AssertionError, "File .+ not found in master"): self.phab.get_file_content_from_master(path) assert_diffusion_browsequery_called() configure_browsequery() # Missing file PHID configure_file_content_query(file_phid=None) with self.assertRaisesRegex(AssertionError, "File .+ not found in master"): 
self.phab.get_file_content_from_master(path) assert_file_commit_and_file_searched() # Too long configure_file_content_query(too_slow=True) with self.assertRaisesRegex(AssertionError, "is oversized or took too long to download"): self.phab.get_file_content_from_master(path) assert_file_commit_and_file_searched() # Too huge configure_file_content_query(too_huge=True) with self.assertRaisesRegex(AssertionError, "is oversized or took too long to download"): self.phab.get_file_content_from_master(path) assert_file_commit_and_file_searched() # Check the file content can be retrieved expected_content = b'Some nice content' result = test.mocks.phabricator.Result([]) result.response = b64encode(expected_content) self.phab.file.download.return_value = result configure_file_content_query() file_content = self.phab.get_file_content_from_master(path) assert_file_commit_and_file_searched() self.phab.file.download.assert_called_with(phid=file_phid) self.assertEqual(file_content, expected_content) # With later calls the content is returned directly thanks to the cache self.phab.diffusion.filecontentquery.reset_mock() self.phab.file.download.reset_mock() for i in range(10): file_content = self.phab.get_file_content_from_master(path) self.assertEqual(file_content, expected_content) self.phab.diffusion.filecontentquery.assert_not_called() self.phab.file.download.assert_not_called() # If the master commit changes, the file content is still valid in cache # as long as its file hash is unchanged for i in range(10): commit_hash = str(int(commit_hash) + 1) self.phab.get_latest_master_commit_hash.return_value = commit_hash file_content = self.phab.get_file_content_from_master(path) self.assertEqual(file_content, expected_content) self.phab.diffusion.filecontentquery.assert_not_called() self.phab.file.download.assert_not_called() # But if the file hash changes, the file content needs to be updated... - configure_browsequery(hash="defghi") + configure_browsequery(file_hash="defghi") file_content = self.phab.get_file_content_from_master(path) assert_file_commit_and_file_searched() self.phab.file.download.assert_called_with(phid=file_phid) self.assertEqual(file_content, expected_content) # ... only once. self.phab.diffusion.filecontentquery.reset_mock() self.phab.file.download.reset_mock() for i in range(10): file_content = self.phab.get_file_content_from_master(path) self.assertEqual(file_content, expected_content) self.phab.diffusion.filecontentquery.assert_not_called() self.phab.file.download.assert_not_called() def test_set_text_panel_content(self): panel_id = 42 panel_content = "My wonderful panel content" self.phab.dashboard.panel.edit.return_value = { "error": None, "errorMessage": None, "response": { "object": { "id": panel_id, "phid": "PHID-DSHP-123456789", "transactions": [ { "phid": "PHID-XACT-DSHP-abcdefghi" } ] } } } def call_set_text_panel_content(): self.phab.set_text_panel_content(panel_id, panel_content) self.phab.dashboard.panel.edit.assert_called_with( objectIdentifier=panel_id, transactions=[ { "type": "text", "value": panel_content } ] ) # Happy path call_set_text_panel_content() # Error self.phab.dashboard.panel.edit.return_value["error"] = "You shall not pass !" 
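# Conduit replies carry "error"/"errorMessage" fields next to the
# payload; the wrapper is expected to raise as soon as "error" is not
# None, which is what the next assertion checks.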
with self.assertRaisesRegex(AssertionError, "Failed to edit panel"): call_set_text_panel_content() def test_update_build_target_status(self): build_target = BuildTarget("PHID-HMBT-1234") # With no builds queued, default to pass self.phab.update_build_target_status(build_target) self.phab.harbormaster.sendmessage.assert_called_with( receiver=build_target.phid, type="pass") # Queue a build build_target.queue_build("build-1", "build-name") self.phab.update_build_target_status(build_target) self.phab.harbormaster.sendmessage.assert_called_with( receiver=build_target.phid, type="work") # Test various statuses self.phab.update_build_target_status( build_target, "build-1", BuildStatus.Queued) self.phab.harbormaster.sendmessage.assert_called_with( receiver=build_target.phid, type="work") self.phab.update_build_target_status( build_target, "build-1", BuildStatus.Running) self.phab.harbormaster.sendmessage.assert_called_with( receiver=build_target.phid, type="work") self.phab.update_build_target_status( build_target, "build-1", BuildStatus.Failure) self.phab.harbormaster.sendmessage.assert_called_with( receiver=build_target.phid, type="fail") self.phab.update_build_target_status( build_target, "build-1", BuildStatus.Success) self.phab.harbormaster.sendmessage.assert_called_with( receiver=build_target.phid, type="pass") def test_get_object_token(self): user_PHID = "PHID-USER-foobarbaz" self.phab.user.whoami.return_value = { "phid": user_PHID, } object_PHID = "PHID-DREV-abcdef" def assert_token_given_called(): self.phab.token.given.assert_called_with( authorPHIDs=[user_PHID], objectPHIDs=[object_PHID], tokenPHIDs=[], ) # There is no token for this object self.phab.token.given.return_value = [] token = self.phab.get_object_token(object_PHID) assert_token_given_called() self.assertEqual(token, "") # There is exactly 1 token for this object self.phab.token.given.return_value = [ { "authorPHID": user_PHID, "objectPHID": object_PHID, "tokenPHID": "PHID-TOKN-like-1", "dateCreated": 0, }, ] token = self.phab.get_object_token(object_PHID) assert_token_given_called() self.assertEqual(token, "PHID-TOKN-like-1") # If there is more than a single token, only the first one is returned self.phab.token.given.return_value = [ { "authorPHID": user_PHID, "objectPHID": object_PHID, "tokenPHID": "PHID-TOKN-like-1", "dateCreated": 0, }, { "authorPHID": user_PHID, "objectPHID": object_PHID, "tokenPHID": "PHID-TOKN-like-2", "dateCreated": 1, }, ] token = self.phab.get_object_token(object_PHID) assert_token_given_called() self.assertEqual(token, "PHID-TOKN-like-1") def test_set_object_token(self): object_PHID = "PHID-DREV-abcdef" def assert_token_give_called(token_PHID): self.phab.token.give.assert_called_with( objectPHID=object_PHID, tokenPHID=token_PHID, ) # Rescind any previously awarded token self.phab.set_object_token(object_PHID) assert_token_give_called("") token_PHID = "PHID-TOKN-like-1" self.phab.set_object_token(object_PHID, token_PHID) assert_token_give_called(token_PHID) if __name__ == '__main__': unittest.main() diff --git a/test/functional/feature_addrman.py b/test/functional/feature_addrman.py index d01c962aa..6f58a91bc 100755 --- a/test/functional/feature_addrman.py +++ b/test/functional/feature_addrman.py @@ -1,161 +1,161 @@ #!/usr/bin/env python3 # Copyright (c) 2021 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test addrman functionality""" import os import struct from test_framework.messages import hash256, ser_uint256 from test_framework.test_framework import BitcoinTestFramework from test_framework.test_node import ErrorMatch from test_framework.util import assert_equal def serialize_addrman( *, - format=1, + addrman_format=1, lowest_compatible=4, net_magic=b"\xfa\xbf\xb5\xda", bucket_key=1, len_new=None, len_tried=None, mock_checksum=None, ): new = [] tried = [] INCOMPATIBILITY_BASE = 32 r = net_magic - r += struct.pack("B", format) + r += struct.pack("B", addrman_format) r += struct.pack("B", INCOMPATIBILITY_BASE + lowest_compatible) r += ser_uint256(bucket_key) r += struct.pack("i", len_new or len(new)) r += struct.pack("i", len_tried or len(tried)) ADDRMAN_NEW_BUCKET_COUNT = 1 << 10 r += struct.pack("i", ADDRMAN_NEW_BUCKET_COUNT ^ (1 << 30)) for _ in range(ADDRMAN_NEW_BUCKET_COUNT): r += struct.pack("i", 0) checksum = hash256(r) r += mock_checksum or checksum return r def write_addrman(peers_dat, **kwargs): with open(peers_dat, "wb") as f: f.write(serialize_addrman(**kwargs)) class AddrmanTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 def run_test(self): peers_dat = os.path.join( self.nodes[0].datadir, self.chain, "peers.dat") def init_error(reason): return ( f"Error: Invalid or corrupt peers.dat \\({reason}\\). If you believe this " f"is a bug, please report it to {self.config['environment']['PACKAGE_BUGREPORT']}. " f'As a workaround, you can move the file \\("{peers_dat}"\\) out of the way \\(rename, ' "move, or delete\\) to have a new one created on the next start." ) self.log.info("Check that mocked addrman is valid") self.stop_node(0) write_addrman(peers_dat) with self.nodes[0].assert_debug_log(["Loaded 0 addresses from peers.dat"]): self.start_node(0, extra_args=["-checkaddrman=1"]) assert_equal(self.nodes[0].getnodeaddresses(), []) self.log.info( "Check that addrman with negative lowest_compatible cannot be read") self.stop_node(0) write_addrman(peers_dat, lowest_compatible=-32) self.nodes[0].assert_start_raises_init_error( expected_msg=init_error( "Corrupted addrman database: The compat value \\(0\\) is lower " "than the expected minimum value 32.: (.+)" ), match=ErrorMatch.FULL_REGEX, ) self.log.info( "Check that addrman from future is overwritten with new addrman") self.stop_node(0) write_addrman(peers_dat, lowest_compatible=111) assert_equal(os.path.exists(f"{peers_dat}.bak"), False) with self.nodes[0].assert_debug_log([ f'Creating new peers.dat because the file version was not compatible ("{peers_dat}"). 
Original backed up to peers.dat.bak', ]): self.start_node(0) assert_equal(self.nodes[0].getnodeaddresses(), []) assert_equal(os.path.exists(f"{peers_dat}.bak"), True) self.log.info("Check that corrupt addrman cannot be read (EOF)") self.stop_node(0) with open(peers_dat, "wb") as f: f.write(serialize_addrman()[:-1]) self.nodes[0].assert_start_raises_init_error( expected_msg=init_error("CAutoFile::read: end of file.*"), match=ErrorMatch.FULL_REGEX, ) self.log.info("Check that corrupt addrman cannot be read (magic)") self.stop_node(0) write_addrman(peers_dat, net_magic=b"\xde\xad\xbe\xef") self.nodes[0].assert_start_raises_init_error( expected_msg=init_error("Invalid network magic number"), match=ErrorMatch.FULL_REGEX, ) self.log.info("Check that corrupt addrman cannot be read (checksum)") self.stop_node(0) write_addrman(peers_dat, mock_checksum=b"ab" * 32) self.nodes[0].assert_start_raises_init_error( expected_msg=init_error("Checksum mismatch, data corrupted"), match=ErrorMatch.FULL_REGEX, ) self.log.info("Check that corrupt addrman cannot be read (len_tried)") self.stop_node(0) write_addrman(peers_dat, len_tried=-1) self.nodes[0].assert_start_raises_init_error( expected_msg=init_error( "Corrupt AddrMan serialization: nTried=-1, should be in \\[0, 16384\\]:.*"), match=ErrorMatch.FULL_REGEX, ) self.log.info("Check that corrupt addrman cannot be read (len_new)") self.stop_node(0) write_addrman(peers_dat, len_new=-1) self.nodes[0].assert_start_raises_init_error( expected_msg=init_error( "Corrupt AddrMan serialization: nNew=-1, should be in \\[0, 65536\\]:.*"), match=ErrorMatch.FULL_REGEX, ) self.log.info( "Check that corrupt addrman cannot be read (failed check)") self.stop_node(0) write_addrman(peers_dat, bucket_key=0) self.nodes[0].assert_start_raises_init_error( expected_msg=init_error( "Corrupt data. Consistency check failed with code -16: .*"), match=ErrorMatch.FULL_REGEX, ) self.log.info("Check that missing addrman is recreated") self.stop_node(0) os.remove(peers_dat) with self.nodes[0].assert_debug_log([ f'Creating peers.dat because the file was not found ("{peers_dat}")', ]): self.start_node(0) assert_equal(self.nodes[0].getnodeaddresses(), []) if __name__ == "__main__": AddrmanTest().main() diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py index bffbe9668..645a20db0 100755 --- a/test/functional/feature_csv_activation.py +++ b/test/functional/feature_csv_activation.py @@ -1,673 +1,673 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test activation of the first version bits soft fork. 
This soft fork will activate the following BIPS: BIP 68 - nSequence relative lock times BIP 112 - CHECKSEQUENCEVERIFY BIP 113 - MedianTimePast semantics for nLockTime regtest lock-in with 108/144 block signalling activation after a further 144 blocks mine 82 blocks whose coinbases will be used to generate inputs for our tests mine 489 blocks and seed block chain with the 82 inputs we will use for our tests at height 572 mine 3 blocks and verify still at LOCKED_IN and test that enforcement has not triggered mine 1 block and test that enforcement has triggered (which triggers ACTIVE) Test BIP 113 is enforced Mine 4 blocks so next height is 580 and test BIP 68 is enforced for time and height Mine 1 block so next height is 581 and test BIP 68 now passes time but not height Mine 1 block so next height is 582 and test BIP 68 now passes time and height Test that BIP 112 is enforced Various transactions will be used to test that the BIPs' rules are not enforced before the soft fork activates And that after the soft fork activates transactions pass and fail as they should according to the rules. For each BIP, transactions of versions 1 and 2 will be tested. ---------------- BIP 113: bip113tx - modify the nLocktime variable BIP 68: bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below BIP 112: bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP bip112tx_special - test negative argument to OP_CSV """ import time from decimal import Decimal from itertools import product from test_framework.blocktools import ( create_block, create_coinbase, make_conform_to_ctor, ) from test_framework.messages import XEC, CTransaction, FromHex, ToHex from test_framework.p2p import P2PDataStore from test_framework.script import OP_CHECKSEQUENCEVERIFY, OP_DROP, OP_TRUE, CScript from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_tx from test_framework.util import assert_equal BASE_RELATIVE_LOCKTIME = 10 SEQ_DISABLE_FLAG = 1 << 31 SEQ_RANDOM_HIGH_BIT = 1 << 25 SEQ_TYPE_FLAG = 1 << 22 SEQ_RANDOM_LOW_BIT = 1 << 18 def relative_locktime(sdf, srhb, stf, srlb): """Returns a locktime with certain bits set.""" locktime = BASE_RELATIVE_LOCKTIME if sdf: locktime |= SEQ_DISABLE_FLAG if srhb: locktime |= SEQ_RANDOM_HIGH_BIT if stf: locktime |= SEQ_TYPE_FLAG if srlb: locktime |= SEQ_RANDOM_LOW_BIT return locktime def all_rlt_txs(txs): return [tx['tx'] for tx in txs] def get_csv_status(node): height = node.getblockchaininfo()['blocks'] return height >= 576 def create_transaction(node, txid, to_address, *, amount): inputs = [{"txid": txid, "vout": 0}] outputs = {to_address: amount} rawtx = node.createrawtransaction(inputs, outputs) tx = FromHex(CTransaction(), rawtx) return tx def sign_transaction(node, unsignedtx): rawtx = ToHex(unsignedtx) signresult = node.signrawtransactionwithwallet(rawtx) tx = FromHex(CTransaction(), signresult['hex']) return tx def spend_tx(node, prev_tx, address): spendtx = create_transaction( node, prev_tx.hash, address, amount=(prev_tx.vout[0].nValue - 1000) / XEC) spendtx.nVersion = prev_tx.nVersion
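# Inheriting the parent's nVersion matters here: BIP68 relative lock
# times are only enforced for transaction versions >= 2, so a v2 parent
# must be spent by a v2 child for the sequence checks to stay in play.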
pad_tx(spendtx) spendtx.rehash() return spendtx -def create_bip112special(node, input, txversion, address): +def create_bip112special(node, txid, txversion, address): tx = create_transaction( - node, input, address, amount=Decimal("49980000")) + node, txid, address, amount=Decimal("49980000")) tx.nVersion = txversion tx.vout[0].scriptPubKey = CScript( [-1, OP_CHECKSEQUENCEVERIFY, OP_DROP, OP_TRUE]) tx.rehash() signtx = sign_transaction(node, tx) signtx.rehash() return signtx def send_generic_input_tx(node, coinbases, address): return node.sendrawtransaction(ToHex(sign_transaction(node, create_transaction( node, node.getblock(coinbases.pop())['tx'][0], address, amount=Decimal("49990000"))))) def create_bip68txs(node, bip68inputs, txversion, address, locktime_delta=0): """Returns a list of bip68 transactions with different bits set.""" txs = [] assert len(bip68inputs) >= 16 for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)): locktime = relative_locktime(sdf, srhb, stf, srlb) tx = create_transaction( node, bip68inputs[i], address, amount=Decimal("49980000")) tx.nVersion = txversion tx.vin[0].nSequence = locktime + locktime_delta tx = sign_transaction(node, tx) tx.rehash() txs.append({'tx': tx, 'sdf': sdf, 'stf': stf}) return txs def create_bip112txs(node, bip112inputs, varyOP_CSV, txversion, address, locktime_delta=0): """Returns a list of bip112 transactions with different bits set.""" txs = [] assert len(bip112inputs) >= 16 for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)): locktime = relative_locktime(sdf, srhb, stf, srlb) tx = create_transaction( node, bip112inputs[i], address, amount=Decimal("49980000")) if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta else: # vary nSequence instead, OP_CSV is fixed tx.vin[0].nSequence = locktime + locktime_delta tx.nVersion = txversion if (varyOP_CSV): tx.vout[0].scriptPubKey = CScript( [locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP, OP_TRUE]) else: tx.vout[0].scriptPubKey = CScript( [BASE_RELATIVE_LOCKTIME, OP_CHECKSEQUENCEVERIFY, OP_DROP, OP_TRUE]) tx.rehash() signtx = sign_transaction(node, tx) signtx.rehash() txs.append({'tx': signtx, 'sdf': sdf, 'stf': stf}) return txs class BIP68_112_113Test(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.extra_args = [['-whitelist=noban@127.0.0.1']] def skip_test_if_missing_module(self): self.skip_if_no_wallet() def generate_blocks(self, number): test_blocks = [] for _ in range(number): block = self.create_test_block([]) test_blocks.append(block) self.last_block_time += 600 self.tip = block.sha256 self.tipheight += 1 return test_blocks def create_test_block(self, txs, version=536870912): block = create_block(self.tip, create_coinbase( self.tipheight + 1), self.last_block_time + 600) block.nVersion = version block.vtx.extend(txs) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() return block # Create a block with given txs, and spend these txs in the same block. # Spending utxos in the same block is OK as long as nSequence is not enforced. # Otherwise a number of intermediate blocks should be generated, and this # method should not be used. 
def create_test_block_spend_utxos(self, node, txs, version=536870912): block = self.create_test_block(txs, version) block.vtx.extend([spend_tx(node, tx, self.nodeaddress) for tx in txs]) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() block.solve() return block def send_blocks(self, blocks, success=True): """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block. Call with success = False if the tip shouldn't advance to the most recent block.""" self.helper_peer.send_blocks_and_test( blocks, self.nodes[0], success=success) def run_test(self): self.helper_peer = self.nodes[0].add_p2p_connection(P2PDataStore()) self.log.info("Generate blocks in the past for coinbase outputs.") # Enough to build up to 1000 blocks 10 minutes apart without worrying # about getting into the future long_past_time = int(time.time()) - 600 * 1000 # Enough so that the generated blocks will still all be before # long_past_time self.nodes[0].setmocktime(long_past_time - 100) # 82 blocks generated for inputs self.coinbase_blocks = self.generate( self.nodes[0], 1 + 16 + 2 * 32 + 1) # Set time back to present so yielded blocks aren't in the future as # we advance last_block_time self.nodes[0].setmocktime(0) # height of the next block to build self.tipheight = 82 self.last_block_time = long_past_time self.tip = int(self.nodes[0].getbestblockhash(), 16) self.nodeaddress = self.nodes[0].getnewaddress() # CSV is not activated yet. assert_equal(get_csv_status(self.nodes[0]), False) # Generate 489 more version 4 blocks test_blocks = self.generate_blocks(489) # Test #1 self.send_blocks(test_blocks) # Still not activated. assert_equal(get_csv_status(self.nodes[0]), False) # Inputs at height = 572 # # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block) # Note we reuse inputs for v1 and v2 txs so must test these separately # 16 normal inputs bip68inputs = [] for _ in range(16): bip68inputs.append(send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress)) # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be # prepended to spending scriptSig) bip112basicinputs = [] for _ in range(2): inputs = [] for _ in range(16): inputs.append(send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress)) bip112basicinputs.append(inputs) # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP # (actually will be prepended to spending scriptSig) bip112diverseinputs = [] for _ in range(2): inputs = [] for _ in range(16): inputs.append(send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress)) bip112diverseinputs.append(inputs) # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to # spending scriptSig) bip112specialinput = send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress) # 1 normal input bip113input = send_generic_input_tx( self.nodes[0], self.coinbase_blocks, self.nodeaddress) self.nodes[0].setmocktime(self.last_block_time + 600) # 1 block generated for inputs to be in chain at height 572 inputblockhash = self.generate(self.nodes[0], 1)[0] self.nodes[0].setmocktime(0) self.tip = int(inputblockhash, 16) self.tipheight += 1 self.last_block_time += 600 assert_equal(len(self.nodes[0].getblock( inputblockhash, True)["tx"]), 82 + 1) # 2 more version 4 blocks test_blocks = self.generate_blocks(2) # Test #2 self.send_blocks(test_blocks) self.log.info( "Not yet activated, height = 574 (will activate for 
block 576, not 575)") assert_equal(get_csv_status(self.nodes[0]), False) # Test both version 1 and version 2 transactions for all tests # BIP113 test transaction will be modified before each use to # put in appropriate block time bip113tx_v1 = create_transaction( self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49980000")) bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE bip113tx_v1.nVersion = 1 bip113tx_v2 = create_transaction( self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49980000")) bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE bip113tx_v2.nVersion = 2 # For BIP68 test all 16 relative sequence locktimes bip68txs_v1 = create_bip68txs( self.nodes[0], bip68inputs, 1, self.nodeaddress) bip68txs_v2 = create_bip68txs( self.nodes[0], bip68inputs, 2, self.nodeaddress) # For BIP112 test: # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs bip112txs_vary_nSequence_v1 = create_bip112txs( self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress) bip112txs_vary_nSequence_v2 = create_bip112txs( self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress) # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs bip112txs_vary_nSequence_9_v1 = create_bip112txs( self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1) bip112txs_vary_nSequence_9_v2 = create_bip112txs( self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1) # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV # OP_DROP inputs bip112txs_vary_OP_CSV_v1 = create_bip112txs( self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress) bip112txs_vary_OP_CSV_v2 = create_bip112txs( self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress) # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV # OP_DROP inputs bip112txs_vary_OP_CSV_9_v1 = create_bip112txs( self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1) bip112txs_vary_OP_CSV_9_v2 = create_bip112txs( self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1) # -1 OP_CSV OP_DROP input bip112tx_special_v1 = create_bip112special( self.nodes[0], bip112specialinput, 1, self.nodeaddress) bip112tx_special_v2 = create_bip112special( self.nodes[0], bip112specialinput, 2, self.nodeaddress) self.log.info("TESTING") self.log.info("Pre-Soft Fork Tests. 
All txs should pass.") self.log.info("Test version 1 txs") success_txs = [] # add BIP113 tx and -1 CSV tx # = MTP of prior block (not <) but < time put on current block bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) success_txs.append(bip113signed1) success_txs.append(bip112tx_special_v1) success_txs.append( spend_tx(self.nodes[0], bip112tx_special_v1, self.nodeaddress)) # add BIP 68 txs success_txs.extend(all_rlt_txs(bip68txs_v1)) # add BIP 112 with seq=10 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_nSequence_v1)]) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_OP_CSV_v1)]) # try BIP 112 with seq=9 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_nSequence_9_v1)]) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_OP_CSV_9_v1)]) # Test #3 self.send_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("Test version 2 txs") success_txs = [] # add BIP113 tx and -1 CSV tx # = MTP of prior block (not <) but < time put on current block bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) success_txs.append(bip113signed2) success_txs.append(bip112tx_special_v2) success_txs.append( spend_tx(self.nodes[0], bip112tx_special_v2, self.nodeaddress)) # add BIP 68 txs success_txs.extend(all_rlt_txs(bip68txs_v2)) # add BIP 112 with seq=10 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_nSequence_v2)]) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_OP_CSV_v2)]) # try BIP 112 with seq=9 txs success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_nSequence_9_v2)]) success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2)) success_txs.extend([spend_tx(self.nodes[0], tx, self.nodeaddress) for tx in all_rlt_txs(bip112txs_vary_OP_CSV_9_v2)]) # Test #4 self.send_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # 1 more version 4 block to get us to height 575 so the fork should # now be active for the next block test_blocks = self.generate_blocks(1) # Test #5 self.send_blocks(test_blocks) assert_equal(get_csv_status(self.nodes[0]), False) self.generate(self.nodes[0], 1) assert_equal(get_csv_status(self.nodes[0]), True) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("Post-Soft Fork Tests.") self.log.info("BIP 113 tests") # BIP 113 tests should now fail regardless of version number # if nLockTime isn't satisfied by new rules # = MTP of prior block (not <) but < time put on current block bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) # = MTP of prior block 
(not <) but < time put on current block bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) for bip113tx in [bip113signed1, bip113signed2]: # Test #6, Test #7 self.send_blocks( [self.create_test_block([bip113tx])], success=False) # BIP 113 tests should now pass if the locktime is < MTP # < MTP of prior block bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) # < MTP of prior block bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) for bip113tx in [bip113signed1, bip113signed2]: # Test #8, Test #9 self.send_blocks([self.create_test_block([bip113tx])]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Next block height = 580 after 4 blocks of random version test_blocks = self.generate_blocks(4) # Test #10 self.send_blocks(test_blocks) self.log.info("BIP 68 tests") self.log.info("Test version 1 txs - all should still pass") success_txs = [] success_txs.extend(all_rlt_txs(bip68txs_v1)) # Test #11 self.send_blocks([self.create_test_block(success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("Test version 2 txs") # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']] self.send_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # All txs without flag fail as we are at delta height = 8 < 10 and # delta time = 8 * 600 < 10 * 512 bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']] for tx in bip68timetxs: # Test #13 - Test #16 self.send_blocks([self.create_test_block([tx])], success=False) bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']] for tx in bip68heighttxs: # Test #17 - Test #20 self.send_blocks([self.create_test_block([tx])], success=False) # Advance one block to 581 test_blocks = self.generate_blocks(1) # Test #21 self.send_blocks(test_blocks,) # Height txs should fail and time txs should now pass 9 * 600 > 10 * # 512 bip68success_txs.extend(bip68timetxs) # Test #22 self.send_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) for tx in bip68heighttxs: # Test #23 - Test #26 self.send_blocks([self.create_test_block([tx])], success=False) # Advance one block to 582 test_blocks = self.generate_blocks(1) # Test #27 self.send_blocks(test_blocks) # All BIP 68 txs should pass bip68success_txs.extend(bip68heighttxs) # Test #28 self.send_blocks([self.create_test_block(bip68success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) self.log.info("BIP 112 tests") self.log.info("Test version 1 txs") # -1 OP_CSV tx should fail # Test #29 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [bip112tx_special_v1])], success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, # version 1 txs should still pass success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']] success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']] # Test #30 self.send_blocks( [self.create_test_block_spend_utxos(self.nodes[0], success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, # version 1 txs should now fail fail_txs = 
all_rlt_txs(bip112txs_vary_nSequence_v1) fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1) fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']] fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']] for tx in fail_txs: # Test #31 - Test #78 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [tx])], success=False) self.log.info("Test version 2 txs") # -1 OP_CSV tx should fail # Test #79 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [bip112tx_special_v2])], success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, # version 2 txs should pass success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']] success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']] # Test #80 self.send_blocks( [self.create_test_block_spend_utxos(self.nodes[0], success_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all # remaining txs ## # All txs with nSequence 9 should fail either due to earlier mismatch # or failing the CSV check fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2) fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']] for tx in fail_txs: # Test #81 - Test #104 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [tx])], success=False) # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']] for tx in fail_txs: # Test #105 - Test #112 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [tx])], success=False) # If sequencelock types mismatch, tx should fail fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']] fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']] for tx in fail_txs: # Test #113 - Test #120 self.send_blocks([self.create_test_block_spend_utxos( self.nodes[0], [tx])], success=False) # Remaining txs should pass, just test masking works properly success_txs = [ tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']] success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']] # Test #121 self.send_blocks([self.create_test_block(success_txs)]) # Spending the previous block utxos requires a difference of 10 blocks (nSequence = 10). # Generate 9 blocks then spend in the 10th block = self.nodes[0].getbestblockhash() self.last_block_time += 600 self.tip = int(f"0x{block}", 0) self.tipheight += 1 # Test #122 self.send_blocks(self.generate_blocks(9)) spend_txs = [] for tx in success_txs: raw_tx = spend_tx(self.nodes[0], tx, self.nodeaddress) raw_tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME raw_tx.rehash() spend_txs.append(raw_tx) # Test #123 self.send_blocks([self.create_test_block(spend_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Additional test, of checking that comparison of two time types works # properly time_txs = [] for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]: signtx = sign_transaction(self.nodes[0], tx) time_txs.append(signtx) # Test #124 self.send_blocks([self.create_test_block(time_txs)]) # Spending the previous block utxos requires a block time difference of # at least 10 * 512s (nSequence = 10).
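# (With SEQ_TYPE_FLAG set, the 16-bit lock value counts units of 512
# seconds, so nSequence = 10 demands 10 * 512 = 5120s; 9 blocks spaced
# 600s apart advance the chain clock by 9 * 600 = 5400s, just enough.)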
# Generate 8 blocks then spend in the 9th (9 * 600 > 10 * 512) block = self.nodes[0].getbestblockhash() self.last_block_time += 600 self.tip = int(f"0x{block}", 0) self.tipheight += 1 # Test #125 self.send_blocks(self.generate_blocks(8)) spend_txs = [] for tx in time_txs: raw_tx = spend_tx(self.nodes[0], tx, self.nodeaddress) raw_tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG raw_tx.rehash() spend_txs.append(raw_tx) # Test #126 self.send_blocks([self.create_test_block(spend_txs)]) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # TODO: Test empty stack fails if __name__ == '__main__': BIP68_112_113Test().main() diff --git a/test/functional/interface_bitcoin_cli.py b/test/functional/interface_bitcoin_cli.py index 505a14cee..65b67b937 100755 --- a/test/functional/interface_bitcoin_cli.py +++ b/test/functional/interface_bitcoin_cli.py @@ -1,336 +1,336 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test bitcoin-cli""" from decimal import Decimal from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_process_error, assert_raises_rpc_error, get_auth_cookie, ) # The block reward of coinbaseoutput.nValue (50) BTC/block matures after # COINBASE_MATURITY (100) blocks. Therefore, after mining 101 blocks we expect # node 0 to have a balance of (BLOCKS - COINBASE_MATURITY) * 50 BTC/block. BLOCKS = 101 BALANCE = (BLOCKS - 100) * 50000000 JSON_PARSING_ERROR = 'error: Error parsing JSON: foo' BLOCKS_VALUE_OF_ZERO = 'error: the first argument (number of blocks to generate, default: 1) must be an integer value greater than zero' TOO_MANY_ARGS = 'error: too many arguments (maximum 2 for nblocks and maxtries)' WALLET_NOT_LOADED = 'Requested wallet does not exist or is not loaded' WALLET_NOT_SPECIFIED = 'Wallet file not specified' class TestBitcoinCli(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def skip_test_if_missing_module(self): self.skip_if_no_cli() def run_test(self): """Main test logic""" self.generate(self.nodes[0], BLOCKS) self.log.info( "Compare responses from getblockchaininfo RPC and `bitcoin-cli getblockchaininfo`") cli_response = self.nodes[0].cli.getblockchaininfo() rpc_response = self.nodes[0].getblockchaininfo() assert_equal(cli_response, rpc_response) user, password = get_auth_cookie(self.nodes[0].datadir, self.chain) self.log.info("Test -stdinrpcpass option") assert_equal(BLOCKS, self.nodes[0].cli( - f'-rpcuser={user}', '-stdinrpcpass', input=password).getblockcount()) + f'-rpcuser={user}', '-stdinrpcpass', cli_input=password).getblockcount()) assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli( - f'-rpcuser={user}', '-stdinrpcpass', input="foo").echo) + f'-rpcuser={user}', '-stdinrpcpass', cli_input="foo").echo) self.log.info("Test -stdin and -stdinrpcpass") assert_equal(["foo", "bar"], self.nodes[0].cli(f'-rpcuser={user}', - '-stdin', '-stdinrpcpass', input=f"{password}\nfoo\nbar").echo()) + '-stdin', '-stdinrpcpass', cli_input=f"{password}\nfoo\nbar").echo()) assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli( - f'-rpcuser={user}', '-stdin', '-stdinrpcpass', input="foo").echo) + f'-rpcuser={user}', '-stdin', '-stdinrpcpass', cli_input="foo").echo) self.log.info("Test connecting to a non-existing 
server") assert_raises_process_error( 1, "Could not connect to the server", self.nodes[0].cli('-rpcport=1').echo) self.log.info("Test connecting with non-existing RPC cookie file") assert_raises_process_error(1, "Could not locate RPC credentials", self.nodes[0].cli( '-rpccookiefile=does-not-exist', '-rpcpassword=').echo) self.log.info("Test -getinfo with arguments fails") assert_raises_process_error( 1, "-getinfo takes no arguments", self.nodes[0].cli('-getinfo').help) self.log.info( "Test -getinfo returns expected network and blockchain info") if self.is_wallet_compiled(): self.nodes[0].encryptwallet(password) cli_get_info = self.nodes[0].cli().send_cli('-getinfo') network_info = self.nodes[0].getnetworkinfo() blockchain_info = self.nodes[0].getblockchaininfo() assert_equal(cli_get_info['version'], network_info['version']) assert_equal(cli_get_info['blocks'], blockchain_info['blocks']) assert_equal(cli_get_info['headers'], blockchain_info['headers']) assert_equal(cli_get_info['timeoffset'], network_info['timeoffset']) assert_equal( cli_get_info['connections'], { 'in': network_info['connections_in'], 'out': network_info['connections_out'], 'total': network_info['connections'] } ) assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy']) assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty']) assert_equal(cli_get_info['chain'], blockchain_info['chain']) if self.is_wallet_compiled(): self.log.info( "Test -getinfo and bitcoin-cli getwalletinfo return expected wallet info") assert_equal(cli_get_info['balance'], BALANCE) assert 'balances' not in cli_get_info.keys() wallet_info = self.nodes[0].getwalletinfo() assert_equal( cli_get_info['keypoolsize'], wallet_info['keypoolsize']) assert_equal( cli_get_info['unlocked_until'], wallet_info['unlocked_until']) assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee']) assert_equal(cli_get_info['relayfee'], network_info['relayfee']) assert_equal(self.nodes[0].cli.getwalletinfo(), wallet_info) # Setup to test -getinfo, -generate, and -rpcwallet= with multiple # wallets. wallets = [self.default_wallet_name, 'Encrypted', 'secret'] amounts = [ BALANCE + Decimal('9999995.50'), Decimal(9000000), Decimal(31000000)] self.nodes[0].createwallet(wallet_name=wallets[1]) self.nodes[0].createwallet(wallet_name=wallets[2]) w1 = self.nodes[0].get_wallet_rpc(wallets[0]) w2 = self.nodes[0].get_wallet_rpc(wallets[1]) w3 = self.nodes[0].get_wallet_rpc(wallets[2]) rpcwallet2 = f'-rpcwallet={wallets[1]}' rpcwallet3 = f'-rpcwallet={wallets[2]}' w1.walletpassphrase(password, self.rpc_timeout) w2.encryptwallet(password) w1.sendtoaddress(w2.getnewaddress(), amounts[1]) w1.sendtoaddress(w3.getnewaddress(), amounts[2]) # Mine a block to confirm; adds a block reward (50 BTC) to the # default wallet. 
self.generate(self.nodes[0], 1) self.log.info( "Test -getinfo with multiple wallets and -rpcwallet returns specified wallet balance") for i in range(len(wallets)): cli_get_info = self.nodes[0].cli( '-getinfo', f'-rpcwallet={wallets[i]}').send_cli() assert 'balances' not in cli_get_info.keys() assert_equal(cli_get_info['balance'], amounts[i]) self.log.info( "Test -getinfo with multiple wallets and " "-rpcwallet=non-existing-wallet returns no balances") cli_get_info_keys = self.nodes[0].cli( '-getinfo', '-rpcwallet=does-not-exist').send_cli().keys() assert 'balance' not in cli_get_info_keys assert 'balances' not in cli_get_info_keys self.log.info( "Test -getinfo with multiple wallets returns all loaded " "wallet names and balances") assert_equal(set(self.nodes[0].listwallets()), set(wallets)) cli_get_info = self.nodes[0].cli('-getinfo').send_cli() assert 'balance' not in cli_get_info.keys() assert_equal(cli_get_info['balances'], dict(zip(wallets, amounts))) # Unload the default wallet and re-verify. self.nodes[0].unloadwallet(wallets[0]) assert wallets[0] not in self.nodes[0].listwallets() cli_get_info = self.nodes[0].cli('-getinfo').send_cli() assert 'balance' not in cli_get_info.keys() assert_equal(cli_get_info['balances'], dict(zip(wallets[1:], amounts[1:]))) self.log.info( "Test -getinfo after unloading all wallets except a " "non-default one returns its balance") self.nodes[0].unloadwallet(wallets[2]) assert_equal(self.nodes[0].listwallets(), [wallets[1]]) cli_get_info = self.nodes[0].cli('-getinfo').send_cli() assert 'balances' not in cli_get_info.keys() assert_equal(cli_get_info['balance'], amounts[1]) self.log.info( "Test -getinfo with -rpcwallet=remaining-non-default-wallet" " returns only its balance") cli_get_info = self.nodes[0].cli('-getinfo', rpcwallet2).send_cli() assert 'balances' not in cli_get_info.keys() assert_equal(cli_get_info['balance'], amounts[1]) self.log.info( "Test -getinfo with -rpcwallet=unloaded wallet returns" " no balances") cli_get_info_keys = self.nodes[0].cli( '-getinfo', rpcwallet3).send_cli().keys() assert 'balance' not in cli_get_info_keys assert 'balances' not in cli_get_info_keys # Test bitcoin-cli -generate. 
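# Note: -generate is client-side convenience; the CLI issues
# getnewaddress and then generatetoaddress nblocks [maxtries] under the
# hood, which is why each result carries an "address" and a "blocks" key.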
n1 = 3 n2 = 4 w2.walletpassphrase(password, self.rpc_timeout) blocks = self.nodes[0].getblockcount() self.log.info('Test -generate with no args') generate = self.nodes[0].cli('-generate').send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), 1) assert_equal(self.nodes[0].getblockcount(), blocks + 1) self.log.info('Test -generate with bad args') assert_raises_process_error( 1, JSON_PARSING_ERROR, self.nodes[0].cli( '-generate', 'foo').echo) assert_raises_process_error( 1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli( '-generate', 0).echo) assert_raises_process_error( 1, TOO_MANY_ARGS, self.nodes[0].cli( '-generate', 1, 2, 3).echo) self.log.info('Test -generate with nblocks') generate = self.nodes[0].cli('-generate', n1).send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), n1) assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1) self.log.info('Test -generate with nblocks and maxtries') generate = self.nodes[0].cli('-generate', n2, 1000000).send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), n2) assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1 + n2) self.log.info('Test -generate -rpcwallet in single-wallet mode') generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), 1) assert_equal(self.nodes[0].getblockcount(), blocks + 2 + n1 + n2) self.log.info( 'Test -generate -rpcwallet=unloaded wallet raises RPC error') assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate').echo) assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 'foo').echo) assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 0).echo) assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 1, 2, 3).echo) # Test bitcoin-cli -generate with -rpcwallet in multiwallet mode. 
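# With several wallets loaded there is no implicit address source:
# -generate must name a wallet via -rpcwallet, and the bare form is
# expected to fail with WALLET_NOT_SPECIFIED (-19) at the end of this
# block.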
self.nodes[0].loadwallet(wallets[2]) n3 = 4 n4 = 10 blocks = self.nodes[0].getblockcount() self.log.info('Test -generate -rpcwallet with no args') generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), 1) assert_equal(self.nodes[0].getblockcount(), blocks + 1) self.log.info('Test -generate -rpcwallet with bad args') assert_raises_process_error( 1, JSON_PARSING_ERROR, self.nodes[0].cli( rpcwallet2, '-generate', 'foo').echo) assert_raises_process_error( 1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli( rpcwallet2, '-generate', 0).echo) assert_raises_process_error( 1, TOO_MANY_ARGS, self.nodes[0].cli( rpcwallet2, '-generate', 1, 2, 3).echo) self.log.info('Test -generate -rpcwallet with nblocks') generate = self.nodes[0].cli( rpcwallet2, '-generate', n3).send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), n3) assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3) self.log.info( 'Test -generate -rpcwallet with nblocks and maxtries') generate = self.nodes[0].cli( rpcwallet2, '-generate', n4, 1000000).send_cli() assert_equal(set(generate.keys()), {'address', 'blocks'}) assert_equal(len(generate["blocks"]), n4) assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3 + n4) self.log.info( 'Test -generate without -rpcwallet in multiwallet mode raises RPC error') assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate').echo) assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 'foo').echo) assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 0).echo) assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 1, 2, 3).echo) else: self.log.info( "*** Wallet not compiled; cli getwalletinfo and -getinfo wallet tests skipped") # maintain block parity with the wallet_compiled conditional branch self.generate(self.nodes[0], 25) self.log.info("Test -version with node stopped") self.stop_node(0) cli_response = self.nodes[0].cli().send_cli('-version') assert ( f"{self.config['environment']['PACKAGE_NAME']} RPC client version" in cli_response ) self.log.info( "Test -rpcwait option successfully waits for RPC connection") # Start node without RPC connection. self.nodes[0].start() # ensure cookie file is available to avoid race condition self.nodes[0].wait_for_cookie_credentials() blocks = self.nodes[0].cli('-rpcwait').send_cli('getblockcount') self.nodes[0].wait_for_rpc_connection() assert_equal(blocks, BLOCKS + 25) if __name__ == '__main__': TestBitcoinCli().main() diff --git a/test/functional/rpc_getblockfrompeer.py b/test/functional/rpc_getblockfrompeer.py index 5e2794801..9b7590ba0 100755 --- a/test/functional/rpc_getblockfrompeer.py +++ b/test/functional/rpc_getblockfrompeer.py @@ -1,94 +1,94 @@ #!/usr/bin/env python3 # Copyright (c) 2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test the getblockfrompeer RPC.""" from test_framework.authproxy import JSONRPCException from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error class GetBlockFromPeerTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.extra_args = [[], ["-noparkdeepreorg"]] def setup_network(self): self.setup_nodes() # Don't connect the nodes initially - def check_for_block(self, hash): + def check_for_block(self, blockhash): try: - self.nodes[0].getblock(hash) + self.nodes[0].getblock(blockhash) return True except JSONRPCException: return False def run_test(self): self.log.info("Mine 4 blocks on Node 0") self.generate(self.nodes[0], 4, sync_fun=self.no_op) assert_equal(self.nodes[0].getblockcount(), 204) self.log.info("Mine competing 3 blocks on Node 1") self.generate(self.nodes[1], 3, sync_fun=self.no_op) assert_equal(self.nodes[1].getblockcount(), 203) short_tip = self.nodes[1].getbestblockhash() self.log.info("Connect nodes to sync headers") self.connect_nodes(0, 1) self.sync_blocks() self.log.info( "Node 0 should only have the header for node 1's block 3") x = next( filter( lambda x: x['hash'] == short_tip, self.nodes[0].getchaintips())) assert_equal(x['status'], "headers-only") assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, short_tip) self.log.info("Fetch block from node 1") peers = self.nodes[0].getpeerinfo() assert_equal(len(peers), 1) peer_0_peer_1_id = peers[0]["id"] self.log.info("Arguments must be sensible") assert_raises_rpc_error(-8, "hash must be of length 64 (not 4, for '1234')", self.nodes[0].getblockfrompeer, "1234", 0) self.log.info("We must already have the header") assert_raises_rpc_error(-1, "Block header missing", self.nodes[0].getblockfrompeer, "00" * 32, 0) self.log.info("Non-existent peer generates error") assert_raises_rpc_error(-1, "Peer does not exist", self.nodes[0].getblockfrompeer, short_tip, peer_0_peer_1_id + 1) self.log.info("Successful fetch") result = self.nodes[0].getblockfrompeer(short_tip, peer_0_peer_1_id) self.wait_until(lambda: self.check_for_block(short_tip), timeout=1) assert_equal(result, {}) self.log.info("Don't fetch blocks we already have") assert_raises_rpc_error(-1, "Block already downloaded", self.nodes[0].getblockfrompeer, short_tip, peer_0_peer_1_id) if __name__ == '__main__': GetBlockFromPeerTest().main() diff --git a/test/functional/test_framework/address.py b/test/functional/test_framework/address.py index e1228e4d0..0f801e32c 100644 --- a/test/functional/test_framework/address.py +++ b/test/functional/test_framework/address.py @@ -1,137 +1,137 @@ #!/usr/bin/env python3 # Copyright (c) 2016-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Encode and decode BASE58, P2PKH and P2SH addresses.""" import unittest from .script import OP_TRUE, CScript, CScriptOp, hash160, hash256 from .util import assert_equal ADDRESS_ECREG_UNSPENDABLE = 'ecregtest:qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqcrl5mqkt' ADDRESS_ECREG_UNSPENDABLE_DESCRIPTOR = 'addr(ecregtest:qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqcrl5mqkt)#u6xx93xc' # Coins sent to this address can be spent with a scriptSig of just OP_TRUE ADDRESS_ECREG_P2SH_OP_TRUE = 'ecregtest:prdpw30fk4ym6zl6rftfjuw806arpn26fvkgfu97xt' P2SH_OP_TRUE = CScript.fromhex( 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87') SCRIPTSIG_OP_TRUE = CScriptOp.encode_op_pushdata(CScript([OP_TRUE])) chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' def byte_to_base58(b, version): result = '' # prepend version b = bytes([version]) + b # append checksum b += hash256(b)[:4] value = int.from_bytes(b, 'big') while value > 0: result = chars[value % 58] + result value //= 58 while b[0] == 0: result = chars[0] + result b = b[1:] return result def base58_to_byte(s): """Converts a base58-encoded string to its data and version. Throws if the base58 checksum is invalid.""" if not s: return b'' n = 0 for c in s: n *= 58 assert c in chars digit = chars.index(c) n += digit h = f'{n:x}' if len(h) % 2: h = f"0{h}" res = n.to_bytes((n.bit_length() + 7) // 8, 'big') pad = 0 for c in s: if c == chars[0]: pad += 1 else: break res = b'\x00' * pad + res # Assert if the checksum is invalid assert_equal(hash256(res[:-4])[:4], res[-4:]) return res[1:-4], int(res[0]) -def keyhash_to_p2pkh(hash, main=False): - assert (len(hash) == 20) +def keyhash_to_p2pkh(keyhash, main=False): + assert (len(keyhash) == 20) version = 0 if main else 111 - return byte_to_base58(hash, version) + return byte_to_base58(keyhash, version) -def scripthash_to_p2sh(hash, main=False): - assert (len(hash) == 20) +def scripthash_to_p2sh(scripthash, main=False): + assert (len(scripthash) == 20) version = 5 if main else 196 - return byte_to_base58(hash, version) + return byte_to_base58(scripthash, version) def key_to_p2pkh(key, main=False): key = check_key(key) return keyhash_to_p2pkh(hash160(key), main) def script_to_p2sh(script, main=False): script = check_script(script) return scripthash_to_p2sh(hash160(script), main) def check_key(key): if (isinstance(key, str)): key = bytes.fromhex(key) # Assuming this is hex string if (isinstance(key, bytes) and (len(key) == 33 or len(key) == 65)): return key assert False def check_script(script): if (isinstance(script, str)): script = bytes.fromhex(script) # Assuming this is hex string if (isinstance(script, bytes) or isinstance(script, CScript)): return script assert False class TestFrameworkScript(unittest.TestCase): def test_base58encodedecode(self): def check_base58(data, version): self.assertEqual( base58_to_byte(byte_to_base58(data, version)), (data, version)) check_base58( bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05'), 111) check_base58( bytes.fromhex('3a0b05f4d7f66c3ba7009f453530296c845cc9cf'), 111) check_base58( bytes.fromhex('41c1eaf111802559bad61b60d62b1f897c63928a'), 111) check_base58( bytes.fromhex('0041c1eaf111802559bad61b60d62b1f897c63928a'), 111) check_base58( bytes.fromhex('000041c1eaf111802559bad61b60d62b1f897c63928a'), 111) check_base58( bytes.fromhex('00000041c1eaf111802559bad61b60d62b1f897c63928a'), 111) check_base58( bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05'), 0) check_base58( bytes.fromhex('3a0b05f4d7f66c3ba7009f453530296c845cc9cf'), 0) check_base58( 
bytes.fromhex('41c1eaf111802559bad61b60d62b1f897c63928a'), 0) check_base58( bytes.fromhex('0041c1eaf111802559bad61b60d62b1f897c63928a'), 0) check_base58( bytes.fromhex('000041c1eaf111802559bad61b60d62b1f897c63928a'), 0) check_base58( bytes.fromhex('00000041c1eaf111802559bad61b60d62b1f897c63928a'), 0) diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py index 25fe1dcb3..6dacefa93 100755 --- a/test/functional/test_framework/messages.py +++ b/test/functional/test_framework/messages.py @@ -1,2307 +1,2307 @@ #!/usr/bin/env python3 # Copyright (c) 2010 ArtForz -- public domain half-a-node # Copyright (c) 2012 Jeff Garzik # Copyright (c) 2010-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Bitcoin test framework primitive and message structures CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....: data structures that should map to corresponding structures in bitcoin/primitives msg_block, msg_tx, msg_headers, etc.: data structures that represent network messages ser_*, deser_*: functions that handle serialization/deserialization. Classes use __slots__ to ensure extraneous attributes aren't accidentally added by tests, compromising their intended effect. """ import copy import hashlib import random import socket import struct import time import unittest from base64 import b64decode, b64encode from enum import IntEnum from io import BytesIO from typing import List from test_framework.siphash import siphash256 from test_framework.util import assert_equal, uint256_hex MAX_LOCATOR_SZ = 101 MAX_BLOCK_BASE_SIZE = 1000000 MAX_BLOOM_FILTER_SIZE = 36000 MAX_BLOOM_HASH_FUNCS = 50 # 1,000,000 XEC in satoshis (legacy BCHA) COIN = 100000000 # 1 XEC in satoshis XEC = 100 MAX_MONEY = 21000000 * COIN # Maximum length of incoming protocol messages MAX_PROTOCOL_MESSAGE_LENGTH = 2 * 1024 * 1024 MAX_HEADERS_RESULTS = 2000 # Number of headers sent in one getheaders result MAX_INV_SIZE = 50000 # Maximum number of entries in an 'inv' protocol message NODE_NETWORK = (1 << 0) NODE_GETUTXO = (1 << 1) NODE_BLOOM = (1 << 2) # NODE_WITNESS = (1 << 3) # NODE_XTHIN = (1 << 4) # removed in v0.22.12 NODE_COMPACT_FILTERS = (1 << 6) NODE_NETWORK_LIMITED = (1 << 10) NODE_AVALANCHE = (1 << 24) MSG_TX = 1 MSG_BLOCK = 2 MSG_FILTERED_BLOCK = 3 MSG_CMPCT_BLOCK = 4 MSG_AVA_PROOF = 0x1f000001 MSG_TYPE_MASK = 0xffffffff >> 2 FILTER_TYPE_BASIC = 0 # Serialization/deserialization tools def sha256(s): return hashlib.new('sha256', s).digest() def hash256(s): return sha256(sha256(s)) def ser_compact_size(size): r = b"" if size < 253: r = struct.pack("B", size) elif size < 0x10000: r = struct.pack("<BH", 253, size) elif size < 0x100000000: r = struct.pack("<BI", 254, size) else: r = struct.pack("<BQ", 255, size) return r def deser_compact_size(f): nit = struct.unpack("<B", f.read(1))[0] if nit == 253: nit = struct.unpack("<H", f.read(2))[0] elif nit == 254: nit = struct.unpack("<I", f.read(4))[0] elif nit == 255: nit = struct.unpack("<Q", f.read(8))[0] return nit def deser_string(f): nit = deser_compact_size(f) return f.read(nit) def ser_string(s): return ser_compact_size(len(s)) + s def deser_uint256(f): r = 0 for i in range(8): t = struct.unpack("<I", f.read(4))[0] r += t << (i * 32) return r def ser_uint256(u): rs = b"" for _ in range(8): rs += struct.pack("<I", u & 0xFFFFFFFF) u >>= 32 return rs def uint256_from_str(s): r = 0 t = struct.unpack("<IIIIIIII", s[:32]) for i in range(8): r += t[i] << (i * 32) return r def uint256_from_compact(c): nbytes = (c >> 24) & 0xFF v = (c & 0xFFFFFF) << (8 * (nbytes - 3)) return v # deser_function_name: Allow for an alternate deserialization function on the # entries in the vector. def deser_vector(f, c, deser_function_name=None): nit = deser_compact_size(f) r = [] for _ in range(nit): t = c() if deser_function_name: getattr(t, deser_function_name)(f) else: t.deserialize(f) r.append(t) return r # ser_function_name: Allow for an alternate serialization function on the # entries in the vector.
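# Worked example (values chosen for illustration) of the compact-size prefix
# the vector helpers below rely on: lengths under 253 use a single byte, and
# larger lengths are escaped with 253/254/255 plus a little-endian integer:
#
#     ser_compact_size(252)       == b'\xfc'                  # one byte
#     ser_compact_size(253)       == b'\xfd\xfd\x00'          # 0xfd + uint16
#     ser_compact_size(0x10000)   == b'\xfe\x00\x00\x01\x00'  # 0xfe + uint32
#     deser_compact_size(BytesIO(b'\xfd\xfd\x00')) == 253     # round-trips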
def ser_vector(v, ser_function_name=None): r = ser_compact_size(len(v)) for i in v: if ser_function_name: r += getattr(i, ser_function_name)() else: r += i.serialize() return r def deser_uint256_vector(f): nit = deser_compact_size(f) r = [] for _ in range(nit): t = deser_uint256(f) r.append(t) return r def ser_uint256_vector(v): r = ser_compact_size(len(v)) for i in v: r += ser_uint256(i) return r def deser_string_vector(f): nit = deser_compact_size(f) r = [] for _ in range(nit): t = deser_string(f) r.append(t) return r def ser_string_vector(v): r = ser_compact_size(len(v)) for sv in v: r += ser_string(sv) return r def FromHex(obj, hex_string): """Deserialize from a hex string representation (eg from RPC)""" obj.deserialize(BytesIO(bytes.fromhex(hex_string))) return obj def ToHex(obj): """Convert a binary-serializable object to hex (eg for submission via RPC)""" return obj.serialize().hex() # Objects that map to bitcoind objects, which can be serialized/deserialized class CAddress: __slots__ = ("net", "ip", "nServices", "port", "time") # see https://github.com/bitcoin/bips/blob/master/bip-0155.mediawiki NET_IPV4 = 1 ADDRV2_NET_NAME = { NET_IPV4: "IPv4" } ADDRV2_ADDRESS_LENGTH = { NET_IPV4: 4 } def __init__(self): self.time = 0 self.nServices = 1 self.net = self.NET_IPV4 self.ip = "0.0.0.0" self.port = 0 def deserialize(self, f, *, with_time=True): """Deserialize from addrv1 format (pre-BIP155)""" if with_time: # VERSION messages serialize CAddress objects without time self.time = struct.unpack("<I", f.read(4))[0] self.nServices = struct.unpack("<Q", f.read(8))[0] # We only support IPv4 which means skip 12 bytes and read the next 4 # as IPv4 address. f.read(12) self.ip = socket.inet_ntoa(f.read(4)) self.port = struct.unpack(">H", f.read(2))[0] def serialize(self, *, with_time=True): """Serialize in addrv1 format (pre-BIP155)""" assert self.net == self.NET_IPV4 r = b"" if with_time: # VERSION messages serialize CAddress objects without time r += struct.pack("<I", self.time) r += struct.pack("<Q", self.nServices) r += b"\x00" * 10 + b"\xff" * 2 r += socket.inet_aton(self.ip) r += struct.pack(">H", self.port) return r def deserialize_v2(self, f): """Deserialize from addrv2 format (BIP155)""" self.time = struct.unpack("<I", f.read(4))[0] self.nServices = deser_compact_size(f) self.net = struct.unpack("B", f.read(1))[0] assert self.net == self.NET_IPV4 address_length = deser_compact_size(f) assert address_length == self.ADDRV2_ADDRESS_LENGTH[self.net] self.ip = socket.inet_ntoa(f.read(4)) self.port = struct.unpack(">H", f.read(2))[0] def serialize_v2(self): """Serialize in addrv2 format (BIP155)""" assert self.net == self.NET_IPV4 r = b"" r += struct.pack("<I", self.time) r += ser_compact_size(self.nServices) r += struct.pack("B", self.net) r += ser_compact_size(self.ADDRV2_ADDRESS_LENGTH[self.net]) r += socket.inet_aton(self.ip) r += struct.pack(">H", self.port) return r def __repr__(self): return ( f"CAddress(nServices={self.nServices} net={self.ADDRV2_NET_NAME[self.net]} " f"addr={self.ip} port={self.port})" ) class CInv: __slots__ = ("hash", "type") typemap = { 0: "Error", MSG_TX: "TX", MSG_BLOCK: "Block", MSG_FILTERED_BLOCK: "filtered Block", MSG_CMPCT_BLOCK: "CompactBlock", MSG_AVA_PROOF: "avalanche proof", } def __init__(self, t=0, h=0): self.type = t self.hash = h def deserialize(self, f): self.type = struct.unpack(" MAX_MONEY: return False return True def __repr__(self): return ( f"CTransaction(nVersion={self.nVersion} vin={self.vin!r} " f"vout={self.vout!r} nLockTime={self.nLockTime})" ) class CBlockHeader: __slots__ = ("hash", "hashMerkleRoot", "hashPrevBlock", "nBits", "nNonce", "nTime", "nVersion", "sha256") def __init__(self, header=None): if header is None: self.set_null() else: self.nVersion = header.nVersion self.hashPrevBlock = header.hashPrevBlock self.hashMerkleRoot = header.hashMerkleRoot self.nTime = header.nTime self.nBits = header.nBits self.nNonce = header.nNonce self.sha256 = header.sha256 self.hash = header.hash self.calc_sha256() def set_null(self): self.nVersion = 1 self.hashPrevBlock = 0 self.hashMerkleRoot = 0 self.nTime = 0 self.nBits = 0 self.nNonce = 0 self.sha256 = None self.hash = None def deserialize(self, f): self.nVersion = struct.unpack("<i", f.read(4))[0] self.hashPrevBlock = deser_uint256(f) self.hashMerkleRoot = deser_uint256(f) self.nTime = struct.unpack("<I", f.read(4))[0] self.nBits = struct.unpack("<I", f.read(4))[0] self.nNonce = struct.unpack("<I", f.read(4))[0] self.sha256 = None self.hash = None def serialize(self): r = b"" r += struct.pack("<i", self.nVersion) r += ser_uint256(self.hashPrevBlock) r += ser_uint256(self.hashMerkleRoot) r += struct.pack("<I", self.nTime) r += struct.pack("<I", self.nBits) r += struct.pack("<I", self.nNonce) return r def calc_sha256(self): if self.sha256 is None: r = b"" r += struct.pack("<i", self.nVersion) r += ser_uint256(self.hashPrevBlock) r += ser_uint256(self.hashMerkleRoot) r += struct.pack("<I", self.nTime) r += struct.pack("<I", self.nBits) r += struct.pack("<I", self.nNonce) self.sha256 = uint256_from_str(hash256(r)) self.hash = hash256(r)[::-1].hex() def rehash(self): self.sha256 = None self.calc_sha256() return self.sha256 def __repr__(self): return ( f"CBlockHeader(nVersion={self.nVersion} " f"hashPrevBlock={uint256_hex(self.hashPrevBlock)} " f"hashMerkleRoot={uint256_hex(self.hashMerkleRoot)} " f"nTime={self.nTime} nBits={self.nBits:08x} nNonce={self.nNonce:08x})" ) BLOCK_HEADER_SIZE = len(CBlockHeader().serialize()) assert_equal(BLOCK_HEADER_SIZE, 80) class CBlock(CBlockHeader): __slots__ = ("vtx",) def __init__(self, header=None): super().__init__(header) self.vtx = [] def deserialize(self, f): super().deserialize(f) self.vtx = deser_vector(f, CTransaction) def serialize(self): r = b"" r += super().serialize() r += ser_vector(self.vtx) return r # Calculate the merkle root given a vector of transaction hashes @classmethod def get_merkle_root(cls, hashes): while len(hashes) > 1: newhashes = [] for i in range(0, len(hashes), 2): i2 = min(i + 1, len(hashes) - 1) newhashes.append(hash256(hashes[i] + hashes[i2])) hashes = newhashes return
uint256_from_str(hashes[0]) def calc_merkle_root(self): hashes = [] for tx in self.vtx: tx.calc_sha256() hashes.append(ser_uint256(tx.sha256)) return self.get_merkle_root(hashes) def is_valid(self): self.calc_sha256() target = uint256_from_compact(self.nBits) if self.sha256 > target: return False for tx in self.vtx: if not tx.is_valid(): return False if self.calc_merkle_root() != self.hashMerkleRoot: return False return True def solve(self): self.rehash() target = uint256_from_compact(self.nBits) while self.sha256 > target: self.nNonce += 1 self.rehash() def __repr__(self): return ( f"CBlock(nVersion={self.nVersion} " f"hashPrevBlock={uint256_hex(self.hashPrevBlock)} " f"hashMerkleRoot={uint256_hex(self.hashMerkleRoot)} " f"nTime={self.nTime} nBits={self.nBits:08x} " f"nNonce={self.nNonce:08x} vtx={self.vtx!r})" ) class PrefilledTransaction: __slots__ = ("index", "tx") def __init__(self, index=0, tx=None): self.index = index self.tx = tx def deserialize(self, f): self.index = deser_compact_size(f) self.tx = CTransaction() self.tx.deserialize(f) def serialize(self): r = b"" r += ser_compact_size(self.index) r += self.tx.serialize() return r def __repr__(self): return f"PrefilledTransaction(index={self.index}, tx={self.tx!r})" # This is what we send on the wire, in a cmpctblock message. class P2PHeaderAndShortIDs: __slots__ = ("header", "nonce", "prefilled_txn", "prefilled_txn_length", "shortids", "shortids_length") def __init__(self): self.header = CBlockHeader() self.nonce = 0 self.shortids_length = 0 self.shortids = [] self.prefilled_txn_length = 0 self.prefilled_txn = [] def deserialize(self, f): self.header.deserialize(f) self.nonce = struct.unpack("> 1 self.pubkey = deser_string(f) def serialize(self) -> bytes: r = self.utxo.serialize() height_ser = self.height << 1 | int(self.is_coinbase) r += struct.pack(' bytes: return self.stake.serialize() + self.sig class AvalancheProof: __slots__ = ( "sequence", "expiration", "master", "stakes", "payout_script", "signature", "limited_proofid", "proofid") def __init__(self, sequence=0, expiration=0, master=b"", signed_stakes=None, payout_script=b"", signature=b""): self.sequence: int = sequence self.expiration: int = expiration self.master: bytes = master self.stakes: List[AvalancheSignedStake] = signed_stakes or [ AvalancheSignedStake()] self.payout_script = payout_script self.signature = signature self.limited_proofid: int = None self.proofid: int = None self.compute_proof_id() def compute_proof_id(self): """Compute Bitcoin's 256-bit hash (double SHA-256) of the serialized proof data. 
""" ss = struct.pack(" int: return uint256_from_str(hash256( ser_uint256(self.limited_proofid) + ser_string(self.proof_master))) def deserialize(self, f): self.limited_proofid = deser_uint256(f) self.proof_master = deser_string(f) self.levels = deser_vector(f, AvalancheDelegationLevel) self.proofid = self.compute_proofid() def serialize(self): r = b"" r += ser_uint256(self.limited_proofid) r += ser_string(self.proof_master) r += ser_vector(self.levels) return r def __repr__(self): return f"AvalancheDelegation(" \ f"limitedProofId={uint256_hex(self.limited_proofid)}, " \ f"proofMaster={self.proof_master.hex()}, " \ f"proofid={uint256_hex(self.proofid)}, " \ f"levels={self.levels})" def getid(self): h = ser_uint256(self.proofid) for level in self.levels: h = hash256(h + ser_string(level.pubkey)) return h class AvalancheHello: __slots__ = ("delegation", "sig") def __init__(self, delegation=AvalancheDelegation(), sig=b"\0" * 64): self.delegation = delegation self.sig = sig def deserialize(self, f): self.delegation.deserialize(f) self.sig = f.read(64) def serialize(self): r = b"" r += self.delegation.serialize() r += self.sig return r def __repr__(self): return f"AvalancheHello(delegation={self.delegation!r}, sig={self.sig})" def get_sighash(self, node): b = self.delegation.getid() b += struct.pack(" class msg_headers: __slots__ = ("headers",) msgtype = b"headers" def __init__(self, headers=None): self.headers = headers if headers is not None else [] def deserialize(self, f): # comment in bitcoind indicates these should be deserialized as blocks blocks = deser_vector(f, CBlock) for x in blocks: self.headers.append(CBlockHeader(x)) def serialize(self): blocks = [CBlock(x) for x in self.headers] return ser_vector(blocks) def __repr__(self): return f"msg_headers(headers={self.headers!r})" class msg_merkleblock: __slots__ = ("merkleblock",) msgtype = b"merkleblock" def __init__(self, merkleblock=None): if merkleblock is None: self.merkleblock = CMerkleBlock() else: self.merkleblock = merkleblock def deserialize(self, f): self.merkleblock.deserialize(f) def serialize(self): return self.merkleblock.serialize() def __repr__(self): return f"msg_merkleblock(merkleblock={self.merkleblock!r})" class msg_filterload: __slots__ = ("data", "nHashFuncs", "nTweak", "nFlags") msgtype = b"filterload" def __init__(self, data=b'00', nHashFuncs=0, nTweak=0, nFlags=0): self.data = data self.nHashFuncs = nHashFuncs self.nTweak = nTweak self.nFlags = nFlags def deserialize(self, f): self.data = deser_string(f) self.nHashFuncs = struct.unpack(" {sys.argv[0]}") self.coverage_dir = coverage_dir self.cwd = cwd self.descriptors = descriptors if extra_conf is not None: append_config(datadir, extra_conf) # Most callers will just need to add extra args to the default list # below. # For those callers that need more flexibility, they can access the # default args using the provided facilities. # Note that common args are set in the config file (see # initialize_datadir) self.extra_args = extra_args # Configuration for logging is set as command-line args rather than in the bitcoin.conf file. # This means that starting a bitcoind using the temp dir to debug a failed test won't # spam debug.log. 
self.default_args = [ "-datadir=" + self.datadir, "-logtimemicros", "-logthreadnames", "-logsourcelocations", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-uacomment=" + self.name ] if use_valgrind: default_suppressions_file = os.path.join( os.path.dirname(os.path.realpath(__file__)), "..", "..", "..", "contrib", "valgrind.supp") suppressions_file = os.getenv("VALGRIND_SUPPRESSIONS_FILE", default_suppressions_file) self.binary = "valgrind" self.bitcoind_args = [bitcoind] + self.default_args self.default_args = [f"--suppressions={suppressions_file}", "--gen-suppressions=all", "--exit-on-first-error=yes", "--error-exitcode=1", "--quiet"] + self.bitcoind_args if emulator is not None: if not os.path.isfile(emulator): raise FileNotFoundError( f"Emulator '{emulator}' could not be found.") self.emulator = emulator if use_cli and not os.path.isfile(bitcoin_cli): raise FileNotFoundError( f"Binary '{bitcoin_cli}' could not be found.\nTry setting it manually:\n" f"\tBITCOINCLI= {sys.argv[0]}") self.cli = TestNodeCLI(bitcoin_cli, self.datadir, self.emulator) self.use_cli = use_cli self.start_perf = start_perf self.running = False self.process = None self.rpc_connected = False self.rpc = None self.url = None self.relay_fee_cache = None self.log = logging.getLogger(f'TestFramework.node{i}') # Whether to kill the node when this object goes away self.cleanup_on_exit = True # Cache perf subprocesses here by their data output filename. self.perf_subprocesses = {} self.p2ps = [] self.timeout_factor = timeout_factor AddressKeyPair = collections.namedtuple( 'AddressKeyPair', ['address', 'key']) PRIV_KEYS = [ # address , privkey AddressKeyPair( 'mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z', 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'), AddressKeyPair( 'msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg', 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'), AddressKeyPair( 'mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP', 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'), AddressKeyPair( 'mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR', 'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'), AddressKeyPair( 'msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws', 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'), AddressKeyPair( 'n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi', 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'), AddressKeyPair( 'myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6', 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'), AddressKeyPair( 'mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8', 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'), AddressKeyPair( 'mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg', 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'), AddressKeyPair( 'mq4fBNdckGtvY2mijd9am7DRsbRB4KjUkf', 'cN55daf1HotwBAgAKWVgDcoppmUNDtQSfb7XLutTLeAgVc3u8hik'), AddressKeyPair( 'mpFAHDjX7KregM3rVotdXzQmkbwtbQEnZ6', 'cT7qK7g1wkYEMvKowd2ZrX1E5f6JQ7TM246UfqbCiyF7kZhorpX3'), AddressKeyPair( 'mzRe8QZMfGi58KyWCse2exxEFry2sfF2Y7', 'cPiRWE8KMjTRxH1MWkPerhfoHFn5iHPWVK5aPqjW8NxmdwenFinJ'), ] def get_deterministic_priv_key(self): """Return a deterministic priv key in base58, that only depends on the node's index""" num_keys = len(self.PRIV_KEYS) assert self.index < num_keys, \ f"Only {num_keys} keys are defined, please extend TestNode.PRIV_KEYS if " \ f"more are needed." 
return self.PRIV_KEYS[self.index] def _node_msg(self, msg: str) -> str: """Return a modified msg that identifies this node by its index as a debugging aid.""" return f"[node {self.index}] {msg}" def _raise_assertion_error(self, msg: str): """Raise an AssertionError with msg modified to identify this node.""" raise AssertionError(self._node_msg(msg)) def __del__(self): # Ensure that we don't leave any bitcoind processes lying around after # the test ends if self.process and self.cleanup_on_exit: # Should only happen on test failure # Avoid using logger, as that may have already been shut down when # this destructor is called. print(self._node_msg("Cleaning up leftover process")) self.process.kill() def __getattr__(self, name): """Dispatches any unrecognised messages to the RPC connection or a CLI instance.""" if self.use_cli: return getattr( RPCOverloadWrapper(self.cli, True, self.descriptors), name) else: assert self.rpc is not None, self._node_msg( "Error: RPC not initialized") assert self.rpc_connected, self._node_msg( "Error: No RPC connection") return getattr( RPCOverloadWrapper(self.rpc, descriptors=self.descriptors), name) def clear_default_args(self): self.default_args.clear() def extend_default_args(self, args): self.default_args.extend(args) def remove_default_args(self, args): for rm_arg in args: # Remove all occurrences of rm_arg in self.default_args: # - if the arg is a flag (-flag), then the names must match # - if the arg is a value (-key=value) then the name must start # with "-key=" (the '=' char is there to avoid removing a # "-key_suffix" arg when "-key" is the argument to remove). self.default_args = [def_arg for def_arg in self.default_args if rm_arg != def_arg and not def_arg.startswith(rm_arg + '=')] def start(self, extra_args=None, *, cwd=None, stdout=None, stderr=None, **kwargs): """Start the node.""" if extra_args is None: extra_args = self.extra_args # Add a new stdout and stderr file each time bitcoind is started if stderr is None: stderr = tempfile.NamedTemporaryFile( dir=self.stderr_dir, delete=False) if stdout is None: stdout = tempfile.NamedTemporaryFile( dir=self.stdout_dir, delete=False) self.stderr = stderr self.stdout = stdout if cwd is None: cwd = self.cwd # Delete any existing cookie file -- if such a file exists (eg due to # unclean shutdown), it will get overwritten anyway by bitcoind, and # potentially interfere with our attempt to authenticate delete_cookie_file(self.datadir, self.chain) # add environment variable LIBC_FATAL_STDERR_=1 so that libc errors are # written to stderr and not the terminal subp_env = dict(os.environ, LIBC_FATAL_STDERR_="1") p_args = [self.binary] + self.default_args + extra_args if self.emulator is not None: p_args = [self.emulator] + p_args self.process = subprocess.Popen( p_args, env=subp_env, stdout=stdout, stderr=stderr, cwd=cwd, **kwargs) self.running = True self.log.debug("bitcoind started, waiting for RPC to come up") if self.start_perf: self._start_perf() def wait_for_rpc_connection(self): """Sets up an RPC connection to the bitcoind process.
Returns False if unable to connect.""" # Poll at a rate of four times per second poll_per_s = 4 for _ in range(poll_per_s * self.rpc_timeout): if self.process.poll() is not None: raise FailedToStartError(self._node_msg( f'bitcoind exited with status {self.process.returncode} during ' f'initialization')) try: rpc = get_rpc_proxy( rpc_url( self.datadir, self.chain, self.host, self.rpc_port), self.index, # Shorter timeout to allow for one retry in case of # ETIMEDOUT timeout=self.rpc_timeout // 2, coveragedir=self.coverage_dir ) rpc.getblockcount() # If the call to getblockcount() succeeds then the RPC # connection is up wait_until_helper(lambda: rpc.getmempoolinfo()['loaded'], timeout_factor=self.timeout_factor) # Wait for the node to finish reindex, block import, and # loading the mempool. Usually importing happens fast or # even "immediate" when the node is started. However, there # is no guarantee and sometimes ThreadImport might finish # later. This is going to cause intermittent test failures, # because generally the tests assume the node is fully # ready after being started. # # For example, the node will reject block messages from p2p # when it is still importing with the error "Unexpected # block message received" # # The wait is done here to make tests as robust as possible # and prevent racy tests and intermittent failures as much # as possible. Some tests might not need this, but the # overhead is trivial, and the added guarantees are worth # the minimal performance cost. self.log.debug("RPC successfully started") if self.use_cli: return self.rpc = rpc self.rpc_connected = True self.url = self.rpc.url return except JSONRPCException as e: # Initialization phase # -28 RPC in warmup # -342 Service unavailable, RPC server started but is shutting down due to error if e.error['code'] != -28 and e.error['code'] != -342: raise # unknown JSON RPC exception except ConnectionResetError: # This might happen when the RPC server is in warmup, but shut down before the call to getblockcount # succeeds. Try again to properly raise the FailedToStartError pass except OSError as e: if e.errno == errno.ETIMEDOUT: # Treat identical to ConnectionResetError pass elif e.errno == errno.ECONNREFUSED: # Port not yet open? pass else: # unknown OS error raise except ValueError as e: # cookie file not found and no rpcuser or rpcpassword; # bitcoind is still starting if "No RPC credentials" not in str(e): raise time.sleep(1.0 / poll_per_s) self._raise_assertion_error( f"Unable to connect to bitcoind after {self.rpc_timeout}s") def wait_for_cookie_credentials(self): """Ensures auth cookie credentials can be read, e.g. for testing CLI with -rpcwait before RPC connection is up.""" self.log.debug("Waiting for cookie credentials") # Poll at a rate of four times per second. 
poll_per_s = 4 for _ in range(poll_per_s * self.rpc_timeout): try: get_auth_cookie(self.datadir, self.chain) self.log.debug("Cookie credentials successfully retrieved") return except ValueError: # cookie file not found and no rpcuser or rpcpassword; # bitcoind is still starting so we continue polling until # RPC credentials are retrieved pass time.sleep(1.0 / poll_per_s) self._raise_assertion_error( f"Unable to retrieve cookie credentials after {self.rpc_timeout}s") def generate(self, nblocks, maxtries=1000000, **kwargs): self.log.debug( "TestNode.generate() dispatches `generate` call to `generatetoaddress`") return self.generatetoaddress( nblocks=nblocks, address=self.get_deterministic_priv_key().address, maxtries=maxtries, **kwargs) def generateblock(self, *args, invalid_call, **kwargs): assert not invalid_call return self.__getattr__('generateblock')(*args, **kwargs) def generatetoaddress(self, *args, invalid_call, **kwargs): assert not invalid_call return self.__getattr__('generatetoaddress')(*args, **kwargs) def generatetodescriptor(self, *args, invalid_call, **kwargs): assert not invalid_call return self.__getattr__('generatetodescriptor')(*args, **kwargs) def buildavalancheproof(self, sequence: int, expiration: int, master: str, stakes: List[Dict[str, Any]], payoutAddress: Optional[str] = ADDRESS_ECREG_UNSPENDABLE) -> str: return self.__getattr__('buildavalancheproof')( sequence=sequence, expiration=expiration, master=master, stakes=stakes, payoutAddress=payoutAddress, ) def get_wallet_rpc(self, wallet_name): if self.use_cli: return RPCOverloadWrapper( self.cli(f"-rpcwallet={wallet_name}"), True, self.descriptors) else: assert self.rpc is not None, self._node_msg( "Error: RPC not initialized") assert self.rpc_connected, self._node_msg( "Error: RPC not connected") wallet_path = f"wallet/{urllib.parse.quote(wallet_name)}" return RPCOverloadWrapper(self.rpc / wallet_path, descriptors=self.descriptors) def stop_node(self, expected_stderr='', *, wait=0, wait_until_stopped=True): """Stop the node.""" if not self.running: return self.log.debug("Stopping node") try: self.stop(wait=wait) except http.client.CannotSendRequest: self.log.exception("Unable to stop node.") # If there are any running perf processes, stop them. for profile_name in tuple(self.perf_subprocesses.keys()): self._stop_perf(profile_name) # Check that stderr is as expected self.stderr.seek(0) stderr = self.stderr.read().decode('utf-8').strip() if stderr != expected_stderr: raise AssertionError( f"Unexpected stderr {stderr} != {expected_stderr}") self.stdout.close() self.stderr.close() del self.p2ps[:] if wait_until_stopped: self.wait_until_stopped() def is_node_stopped(self): """Checks whether the node has stopped. Returns True if the node has stopped. False otherwise. This method is responsible for freeing resources (self.process).""" if not self.running: return True return_code = self.process.poll() if return_code is None: return False # process has stopped. Assert that it didn't return an error code. 
assert return_code == 0, self._node_msg( f"Node returned non-zero exit code ({return_code}) when stopping") self.running = False self.process = None self.rpc_connected = False self.rpc = None self.log.debug("Node stopped") return True def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT): wait_until_helper( self.is_node_stopped, timeout=timeout, timeout_factor=self.timeout_factor) @property def chain_path(self) -> Path: return Path(self.datadir) / self.chain @property def debug_log_path(self) -> Path: return self.chain_path / 'debug.log' def debug_log_bytes(self) -> int: with open(self.debug_log_path, encoding='utf-8') as dl: dl.seek(0, 2) return dl.tell() @contextlib.contextmanager def assert_debug_log(self, expected_msgs, unexpected_msgs=None, timeout=2): """Assert that some debug messages are present within some timeout. Unexpected debug messages may be optionally provided to fail a test if they appear before expected messages. Note: expected_msgs must always be non-empty even if the goal is to check for unexpected_msgs. This provides a bounded scenario such that "we expect to reach some target resulting in expected_msgs without seeing unexpected_msgs". Otherwise, we are testing that something never happens, which is fundamentally not robust test logic. """ if not expected_msgs: raise AssertionError("Expected debug messages is empty") if unexpected_msgs is None: unexpected_msgs = [] time_end = time.time() + timeout * self.timeout_factor prev_size = self.debug_log_bytes() yield while True: found = True with open(self.debug_log_path, encoding='utf-8') as dl: dl.seek(prev_size) log = dl.read() print_log = " - " + "\n - ".join(log.splitlines()) for unexpected_msg in unexpected_msgs: if re.search(re.escape(unexpected_msg), log, flags=re.MULTILINE): self._raise_assertion_error( f'Unexpected message "{unexpected_msg}" partially matches ' f'log:\n\n{print_log}\n\n') for expected_msg in expected_msgs: if re.search(re.escape(expected_msg), log, flags=re.MULTILINE) is None: found = False if found: return if time.time() >= time_end: break time.sleep(0.05) self._raise_assertion_error( f'Expected messages "{expected_msgs}" does not partially match ' f'log:\n\n{print_log}\n\n') @contextlib.contextmanager def wait_for_debug_log( self, expected_msgs: List[bytes], timeout=60, interval=0.05, chatty_callable=None): """ Block until we see all the debug log messages or until we exceed the timeout. If a chatty_callable is provided, it is repeated at every iteration. """ time_end = time.time() + timeout * self.timeout_factor prev_size = self.debug_log_bytes() yield while True: found = True if chatty_callable is not None: # Ignore the chatty_callable returned value, as we are only # interested in the debug log content here. chatty_callable() with open(self.debug_log_path, "rb") as dl: dl.seek(prev_size) log = dl.read() for expected_msg in expected_msgs: if expected_msg not in log: found = False if found: return if time.time() >= time_end: print_log = " - " + "\n - ".join( line.decode() for line in log.splitlines()) break time.sleep(interval) self._raise_assertion_error( f'Expected messages "{str(expected_msgs)}" does not partially match ' f'log:\n\n{print_log}\n\n') @contextlib.contextmanager def profile_with_perf(self, profile_name: str): """ Context manager that allows easy profiling of node activity using `perf`. See `test/functional/README.md` for details on perf usage. Args: profile_name: This string will be appended to the profile data filename generated by perf.
""" subp = self._start_perf(profile_name) yield if subp: self._stop_perf(profile_name) def _start_perf(self, profile_name=None): """Start a perf process to profile this node. Returns the subprocess running perf.""" subp = None def test_success(cmd): return subprocess.call( # shell=True required for pipe use below cmd, shell=True, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL) == 0 if not sys.platform.startswith('linux'): self.log.warning( "Can't profile with perf; only availabe on Linux platforms") return None if not test_success('which perf'): self.log.warning( "Can't profile with perf; must install perf-tools") return None if not test_success( f'readelf -S {shlex.quote(self.binary)} | grep .debug_str'): self.log.warning( "perf output won't be very useful without debug symbols compiled into bitcoind") output_path = tempfile.NamedTemporaryFile( dir=self.datadir, prefix=f"{profile_name or 'test'}.perf.data.", delete=False, ).name cmd = [ 'perf', 'record', '-g', # Record the callgraph. # Compatibility for gcc's --fomit-frame-pointer. '--call-graph', 'dwarf', '-F', '101', # Sampling frequency in Hz. '-p', str(self.process.pid), '-o', output_path, ] subp = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.perf_subprocesses[profile_name] = subp return subp def _stop_perf(self, profile_name): """Stop (and pop) a perf subprocess.""" subp = self.perf_subprocesses.pop(profile_name) output_path = subp.args[subp.args.index('-o') + 1] subp.terminate() subp.wait(timeout=10) stderr = subp.stderr.read().decode() if 'Consider tweaking /proc/sys/kernel/perf_event_paranoid' in stderr: self.log.warning( "perf couldn't collect data! Try " "'sudo sysctl -w kernel.perf_event_paranoid=-1'") else: report_cmd = f"perf report -i {output_path}" self.log.info(f"See perf output by running '{report_cmd}'") def assert_start_raises_init_error( self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs): """Attempt to start the node and expect it to raise an error. extra_args: extra arguments to pass through to bitcoind expected_msg: regex that stderr should match when bitcoind fails Will throw if bitcoind starts without an error. 
Will throw if an expected_msg is provided and it does not match bitcoind's stderr.""" with tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) as log_stderr, \ tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) as log_stdout: try: self.start(extra_args, stdout=log_stdout, stderr=log_stderr, *args, **kwargs) ret = self.process.wait(timeout=self.rpc_timeout) self.log.debug(self._node_msg( f'bitcoind exited with status {ret} during initialization')) self.running = False self.process = None # Check stderr for expected message if expected_msg is not None: log_stderr.seek(0) stderr = log_stderr.read().decode('utf-8').strip() if match == ErrorMatch.PARTIAL_REGEX: if re.search(expected_msg, stderr, flags=re.MULTILINE) is None: self._raise_assertion_error( f'Expected message "{expected_msg}" does not partially ' f'match stderr:\n"{stderr}"') elif match == ErrorMatch.FULL_REGEX: if re.fullmatch(expected_msg, stderr) is None: self._raise_assertion_error( f'Expected message "{expected_msg}" does not fully ' f'match stderr:\n"{stderr}"') elif match == ErrorMatch.FULL_TEXT: if expected_msg != stderr: self._raise_assertion_error( f'Expected message "{expected_msg}" does not fully ' f'match stderr:\n"{stderr}"') except subprocess.TimeoutExpired: self.process.kill() self.running = False self.process = None assert_msg = f'bitcoind should have exited within {self.rpc_timeout}s ' if expected_msg is None: assert_msg += "with an error" else: assert_msg += "with expected error " + expected_msg self._raise_assertion_error(assert_msg) def relay_fee(self, cached=True): if not self.relay_fee_cache or not cached: self.relay_fee_cache = self.getnetworkinfo()["relayfee"] return self.relay_fee_cache def calculate_fee(self, tx): """ Estimate the necessary fees (in sats) for an unsigned CTransaction assuming: - the current relayfee on node - all inputs are compressed-key p2pkh, and will be signed ecdsa or schnorr - all inputs currently unsigned (empty scriptSig) """ billable_size_estimate = tx.billable_size() # Add some padding for signatures / public keys # 107 = length of PUSH(longest_sig = 72 bytes), PUSH(pubkey = 33 bytes) billable_size_estimate += len(tx.vin) * 107 # relay_fee gives a value in XEC per kB. return int(self.relay_fee() / 1000 * billable_size_estimate * XEC) def calculate_fee_from_txid(self, txid): ctx = FromHex(CTransaction(), self.getrawtransaction(txid)) return self.calculate_fee(ctx) def add_p2p_connection(self, p2p_conn, *, wait_for_verack=True, **kwargs): """Add an inbound p2p connection to the node. This method adds the p2p connection to the self.p2ps list and also returns the connection to the caller.""" if 'dstport' not in kwargs: kwargs['dstport'] = p2p_port(self.index) if 'dstaddr' not in kwargs: kwargs['dstaddr'] = '127.0.0.1' p2p_conn.peer_connect( **kwargs, net=self.chain, timeout_factor=self.timeout_factor)() self.p2ps.append(p2p_conn) p2p_conn.wait_until( lambda: p2p_conn.is_connected, check_connected=False) if wait_for_verack: # Wait for the node to send us the version and verack p2p_conn.wait_for_verack() # At this point we have sent our version message and received the version and verack, however the full node # has not yet received the verack from us (in reply to their version). So, the connection is not yet fully # established (fSuccessfullyConnected). # # This shouldn't lead to any issues when sending messages, since the verack will be in-flight before the # message we send. However, it might lead to races where we are expecting to receive a message. E.g.
a # transaction that will be added to the mempool as soon as we return here. # # So syncing here is redundant when we only want to send a message, but the cost is low (a few milliseconds) # in comparison to the upside of making tests less fragile and # unexpected intermittent errors less likely. p2p_conn.sync_with_ping() # Consistency check that the Bitcoin ABC has received our user agent # string. This checks the node's newest peer. It could be racy if # another Bitcoin ABC node has connected since we opened our # connection, but we don't expect that to happen. assert_equal(self.getpeerinfo()[-1]['subver'], P2P_SUBVERSION) return p2p_conn def add_outbound_p2p_connection( self, p2p_conn, *, p2p_idx, connection_type="outbound-full-relay", **kwargs): """Add an outbound p2p connection from node. Must be an "outbound-full-relay", "block-relay-only", "addr-fetch", "feeler" or "avalanche" connection. This method adds the p2p connection to the self.p2ps list and returns the connection to the caller. """ def addconnection_callback(address, port): self.log.debug( f"Connecting to {address}:{port} {connection_type}") self.addconnection(f'{address}:{port}', connection_type) p2p_conn.peer_accept_connection( connect_cb=addconnection_callback, connect_id=p2p_idx + 1, net=self.chain, timeout_factor=self.timeout_factor, **kwargs)() if connection_type == "feeler": # feeler connections are closed as soon as the node receives a # `version` message p2p_conn.wait_until( lambda: p2p_conn.message_count["version"] == 1, check_connected=False) p2p_conn.wait_until( lambda: not p2p_conn.is_connected, check_connected=False) else: p2p_conn.wait_for_connect() self.p2ps.append(p2p_conn) p2p_conn.wait_for_verack() p2p_conn.sync_with_ping() return p2p_conn def num_test_p2p_connections(self): """Return number of test framework p2p connections to the node.""" return len([peer for peer in self.getpeerinfo() if peer['subver'] == P2P_SUBVERSION]) def disconnect_p2ps(self): """Close all p2p connections to the node.""" for p in self.p2ps: p.peer_disconnect() del self.p2ps[:] wait_until_helper(lambda: self.num_test_p2p_connections() == 0, timeout_factor=self.timeout_factor) class TestNodeCLIAttr: def __init__(self, cli, command): self.cli = cli self.command = command def __call__(self, *args, **kwargs): return self.cli.send_cli(self.command, *args, **kwargs) def get_request(self, *args, **kwargs): return lambda: self(*args, **kwargs) def arg_to_cli(arg): if isinstance(arg, bool): return str(arg).lower() elif arg is None: return 'null' elif isinstance(arg, dict) or isinstance(arg, list): return json.dumps(arg, default=EncodeDecimal) else: return str(arg) class TestNodeCLI: """Interface to bitcoin-cli for an individual node""" def __init__(self, binary, datadir, emulator=None): self.options = [] self.binary = binary self.datadir = datadir self.input = None self.log = logging.getLogger('TestFramework.bitcoincli') self.emulator = emulator - def __call__(self, *options, input=None): + def __call__(self, *options, cli_input=None): # TestNodeCLI is callable with bitcoin-cli command-line options cli = TestNodeCLI(self.binary, self.datadir, self.emulator) cli.options = [str(o) for o in options] - cli.input = input + cli.input = cli_input return cli def __getattr__(self, command): return TestNodeCLIAttr(self, command) def batch(self, requests): results = [] for request in requests: try: results.append({"result": request()}) except JSONRPCException as e: results.append({"error": e}) return results def send_cli(self, command=None, *args, 
**kwargs): """Run bitcoin-cli command. Deserializes returned string as python object.""" pos_args = [arg_to_cli(arg) for arg in args] named_args = [str(key) + "=" + arg_to_cli(value) for (key, value) in kwargs.items()] assert not (pos_args and named_args), \ "Cannot use positional arguments and named arguments in the same " \ "bitcoin-cli call" p_args = [self.binary, "-datadir=" + self.datadir] + self.options if named_args: p_args += ["-named"] if command is not None: p_args += [command] p_args += pos_args + named_args self.log.debug(f"Running bitcoin-cli {p_args[2:]}") if self.emulator is not None: p_args = [self.emulator] + p_args process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) cli_stdout, cli_stderr = process.communicate(input=self.input) returncode = process.poll() if returncode: match = re.match( r'error code: ([-0-9]+)\nerror message:\n(.*)', cli_stderr) if match: code, message = match.groups() raise JSONRPCException({"code": int(code), "message": message}) # Ignore cli_stdout, raise with cli_stderr raise subprocess.CalledProcessError( returncode, self.binary, output=cli_stderr) try: return json.loads(cli_stdout, parse_float=decimal.Decimal) except (json.JSONDecodeError, decimal.InvalidOperation): return cli_stdout.rstrip("\n") class RPCOverloadWrapper: def __init__(self, rpc, cli=False, descriptors=False): self.rpc = rpc self.is_cli = cli self.descriptors = descriptors def __getattr__(self, name): return getattr(self.rpc, name) def createwallet(self, wallet_name, disable_private_keys=None, blank=None, passphrase='', avoid_reuse=None, descriptors=None, load_on_startup=None): if descriptors is None: descriptors = self.descriptors return self.__getattr__('createwallet')( wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors, load_on_startup) def importprivkey(self, privkey, label=None, rescan=None): wallet_info = self.getwalletinfo() if 'descriptors' not in wallet_info or ( 'descriptors' in wallet_info and not wallet_info['descriptors']): return self.__getattr__('importprivkey')(privkey, label, rescan) desc = descsum_create('combo(' + privkey + ')') req = [{ 'desc': desc, 'timestamp': 0 if rescan else 'now', 'label': label if label else '' }] import_res = self.importdescriptors(req) if not import_res[0]['success']: raise JSONRPCException(import_res[0]['error']) def addmultisigaddress(self, nrequired, keys, label=None): wallet_info = self.getwalletinfo() if 'descriptors' not in wallet_info or ( 'descriptors' in wallet_info and not wallet_info['descriptors']): return self.__getattr__('addmultisigaddress')( nrequired, keys, label) cms = self.createmultisig(nrequired, keys) req = [{ 'desc': cms['descriptor'], 'timestamp': 0, 'label': label if label else '' }] import_res = self.importdescriptors(req) if not import_res[0]['success']: raise JSONRPCException(import_res[0]['error']) return cms def importpubkey(self, pubkey, label=None, rescan=None): wallet_info = self.getwalletinfo() if 'descriptors' not in wallet_info or ( 'descriptors' in wallet_info and not wallet_info['descriptors']): return self.__getattr__('importpubkey')(pubkey, label, rescan) desc = descsum_create('combo(' + pubkey + ')') req = [{ 'desc': desc, 'timestamp': 0 if rescan else 'now', 'label': label if label else '' }] import_res = self.importdescriptors(req) if not import_res[0]['success']: raise JSONRPCException(import_res[0]['error']) def importaddress(self, address, label=None, rescan=None, p2sh=None): wallet_info 
= self.getwalletinfo() if 'descriptors' not in wallet_info or ( 'descriptors' in wallet_info and not wallet_info['descriptors']): return self.__getattr__('importaddress')( address, label, rescan, p2sh) is_hex = False try: int(address, 16) is_hex = True desc = descsum_create('raw(' + address + ')') except BaseException: desc = descsum_create('addr(' + address + ')') reqs = [{ 'desc': desc, 'timestamp': 0 if rescan else 'now', 'label': label if label else '' }] if is_hex and p2sh: reqs.append({ 'desc': descsum_create('p2sh(raw(' + address + '))'), 'timestamp': 0 if rescan else 'now', 'label': label if label else '' }) import_res = self.importdescriptors(reqs) for res in import_res: if not res['success']: raise JSONRPCException(res['error'])
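# Usage sketch for the wrappers above (illustrative; 'node' and the address
# are placeholders): with a descriptor wallet, a legacy call such as
#
#     node.importaddress('ecregtest:qq...')
#
# is translated into a descriptor import along the lines of
#
#     node.importdescriptors([{'desc': descsum_create('addr(ecregtest:qq...)'),
#                              'timestamp': 'now',
#                              'label': ''}])
#
# so tests written against legacy wallets run unchanged on descriptor wallets.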