diff --git a/.arclint b/.arclint --- a/.arclint +++ b/.arclint @@ -19,11 +19,7 @@ "autopep8": { "type": "autopep8", "version": ">=1.3.4", - "include": "(^contrib/.*\\.py$)", - "exclude": [ - "(^contrib/gitian-builder/)", - "(^contrib/apple-sdk-tools/)" - ], + "include": "(^contrib/buildbot/.*\\.py$)", "flags": [ "--aggressive", "--ignore=W503,W504", @@ -37,7 +33,7 @@ "exclude": [ "(^contrib/gitian-builder/)", "(^contrib/apple-sdk-tools/)", - "(^contrib/)" + "(^contrib/buildbot/)" ], "flags": [ "--preview" diff --git a/contrib/devtools/chainparams/generate_chainparams_constants.py b/contrib/devtools/chainparams/generate_chainparams_constants.py --- a/contrib/devtools/chainparams/generate_chainparams_constants.py +++ b/contrib/devtools/chainparams/generate_chainparams_constants.py @@ -2,7 +2,7 @@ # Copyright (c) 2019 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -''' +""" Script to generate list of chainparams constants (ie. assumevalid and minimum chainwork). @@ -20,14 +20,14 @@ The outputted constants should be pasted into `src/chainparamsconstants.h`. -''' +""" import os import sys def process_constants(indir, file_name): - with open(os.path.join(indir, file_name), 'r', encoding="utf8") as f: + with open(os.path.join(indir, file_name), "r", encoding="utf8") as f: constants = f.readlines() # Ensure only the expected number of lines are read from the file @@ -38,12 +38,13 @@ def main(): if len(sys.argv) != 2: - print(f'Usage: {sys.argv[0]} ', file=sys.stderr) + print(f"Usage: {sys.argv[0]} ", file=sys.stderr) sys.exit(1) indir = sys.argv[1] - print('''\ + print( + """\ /** * @{} by contrib/devtools/chainparams/generate_chainparams_constants.py */ @@ -61,13 +62,14 @@ const uint64_t TESTNET_ASSUMED_BLOCKCHAIN_SIZE = {}; const uint64_t TESTNET_ASSUMED_CHAINSTATE_SIZE = {}; }} // namespace ChainParamsConstants -'''.format( - # 'generated' is split out so this file is not identified as generated. - "generated", - *process_constants(indir, 'chainparams_main.txt'), - *process_constants(indir, 'chainparams_test.txt')) +""".format( + # 'generated' is split out so this file is not identified as generated. + "generated", + *process_constants(indir, "chainparams_main.txt"), + *process_constants(indir, "chainparams_test.txt"), + ) ) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/contrib/devtools/chainparams/make_chainparams.py b/contrib/devtools/chainparams/make_chainparams.py --- a/contrib/devtools/chainparams/make_chainparams.py +++ b/contrib/devtools/chainparams/make_chainparams.py @@ -10,7 +10,7 @@ import sys from enum import Enum -sys.path.append('../../../test/functional/test_framework') +sys.path.append("../../../test/functional/test_framework") from authproxy import AuthServiceProxy # noqa: E402 @@ -25,28 +25,28 @@ def get_chainparams(rpc_caller, block): # Fetch initial chain info chaininfo = rpc_caller.getblockchaininfo() - if chaininfo['chain'] == 'main': + if chaininfo["chain"] == "main": chain = Chain.MainNet else: chain = Chain.TestNet # Use highest valid chainwork. This doesn't need to match the block hash # used by assume valid. 
-    chainwork = chaininfo['chainwork']
-    if not re.match('^[0-9a-z]{64}$', chainwork):
+    chainwork = chaininfo["chainwork"]
+    if not re.match("^[0-9a-z]{64}$", chainwork):
         raise Exception("Chain work is not a valid uint256 hex value.")
 
     # Default to N blocks from the chain tip, depending on which chain we're on
     if not block:
-        block = chaininfo['blocks']
+        block = chaininfo["blocks"]
         if chain == Chain.MainNet:
             block -= 10
         else:
             block -= 2000
         block = str(block)
 
-    if not re.match('^[0-9a-z]{64}$', block):
-        if re.match('^[0-9]*$', block):
+    if not re.match("^[0-9a-z]{64}$", block):
+        if re.match("^[0-9]*$", block):
             # Fetch block hash using block height
             block = rpc_caller.getblockhash(int(block))
         else:
@@ -57,37 +57,54 @@
     rpc_caller.getblockheader(block)
 
     # Block size on disk (in GB) with some margin for growth
-    diskSizeBlocks = str(
-        int(math.ceil(chaininfo['size_on_disk'] / GIGABYTE * 1.3)))
+    diskSizeBlocks = str(int(math.ceil(chaininfo["size_on_disk"] / GIGABYTE * 1.3)))
 
     # Chainstate size on disk (in GB) with some margin for growth
     utxos = rpc_caller.gettxoutsetinfo()
-    diskSizeChainstate = str(
-        int(math.ceil(utxos['disk_size'] / GIGABYTE * 1.3)))
+    diskSizeChainstate = str(int(math.ceil(utxos["disk_size"] / GIGABYTE * 1.3)))
 
     return (block, chainwork, diskSizeBlocks, diskSizeChainstate)
 
 
 def main(args):
-    return "\n".join(get_chainparams(args['rpc'], args['block']))
+    return "\n".join(get_chainparams(args["rpc"], args["block"]))
 
 
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description=(
-        "Make chainparams file.\n"
-        "Prerequisites: RPC access to a bitcoind node.\n\n"),
-        formatter_class=argparse.RawTextHelpFormatter)
-    parser.add_argument('--address', '-a', default="127.0.0.1:8332",
-                        help="Node address for making RPC calls.\n"
-                        "The chain (MainNet or TestNet) will be automatically detected.\n"
-                        "Default: '127.0.0.1:8332'")
-    parser.add_argument('--block', '-b',
-                        help="The block hash or height to use for fetching chainparams.\n"
-                        "MainNet default: 10 blocks from the chain tip."
-                        "TestNet default: 2000 blocks from the chain tip.")
-    parser.add_argument('--config', '-c', default="~/.bitcoin/bitcoin.conf",
-                        help="Path to bitcoin.conf for RPC authentication arguments (rpcuser & rpcpassword).\n"
-                        "Default: ~/.bitcoin/bitcoin.conf")
+    parser = argparse.ArgumentParser(
+        description=(
+            "Make chainparams file.\nPrerequisites: RPC access to a bitcoind node.\n\n"
+        ),
+        formatter_class=argparse.RawTextHelpFormatter,
+    )
+    parser.add_argument(
+        "--address",
+        "-a",
+        default="127.0.0.1:8332",
+        help=(
+            "Node address for making RPC calls.\n"
+            "The chain (MainNet or TestNet) will be automatically detected.\n"
+            "Default: '127.0.0.1:8332'"
+        ),
+    )
+    parser.add_argument(
+        "--block",
+        "-b",
+        help=(
+            "The block hash or height to use for fetching chainparams.\n"
+            "MainNet default: 10 blocks from the chain tip.\n"
+            "TestNet default: 2000 blocks from the chain tip."
+ ), + ) + parser.add_argument( + "--config", + "-c", + default="~/.bitcoin/bitcoin.conf", + help=( + "Path to bitcoin.conf for RPC authentication arguments (rpcuser &" + " rpcpassword).\nDefault: ~/.bitcoin/bitcoin.conf" + ), + ) args = parser.parse_args() args.config = os.path.expanduser(args.config) @@ -95,7 +112,7 @@ user = None password = None if os.path.isfile(args.config): - with open(args.config, 'r', encoding='utf8') as f: + with open(args.config, "r", encoding="utf8") as f: for line in f: if line.startswith("rpcuser="): # Ensure that there is only one rpcuser line @@ -113,7 +130,8 @@ raise ValueError("Config is missing rpcpassword") args.rpc = AuthServiceProxy( - service_url=f'http://{user}:{password}@{args.address}', timeout=1200) + service_url=f"http://{user}:{password}@{args.address}", timeout=1200 + ) output = main(vars(args)) if output: print(output) diff --git a/contrib/devtools/chainparams/test_make_chainparams.py b/contrib/devtools/chainparams/test_make_chainparams.py --- a/contrib/devtools/chainparams/test_make_chainparams.py +++ b/contrib/devtools/chainparams/test_make_chainparams.py @@ -9,8 +9,17 @@ class MockRPC: - def __init__(self, test, chain, numBlocks, expectedBlock, - blockHash, chainWork, blockchainSize, chainstateSize): + def __init__( + self, + test, + chain, + numBlocks, + expectedBlock, + blockHash, + chainWork, + blockchainSize, + chainstateSize, + ): self.test = test self.chain = chain self.numBlocks = numBlocks @@ -25,7 +34,9 @@ "chain": self.chain, "blocks": self.numBlocks, "headers": self.numBlocks, - "bestblockhash": "0000000000000000039c96605d7fca74a5e185ea5634198346e9e07fd235b666", + "bestblockhash": ( + "0000000000000000039c96605d7fca74a5e185ea5634198346e9e07fd235b666" + ), "difficulty": 274412074285.6605, "mediantime": 1562168718, "verificationprogress": 0.9999958005632363, @@ -42,40 +53,60 @@ # Tests should always request the right block height. Even though a # real node will rarely raise an exception for this call, we are # more strict during testing. - self.test.assertEqual(block, self.expectedBlock, "Called 'getblockhash {}' when expected was 'getblockhash {}'".format( - block, self.expectedBlock)) + self.test.assertEqual( + block, + self.expectedBlock, + "Called 'getblockhash {}' when expected was 'getblockhash {}'".format( + block, self.expectedBlock + ), + ) return self.blockHash def getblockheader(self, blockHash): # Make sure to raise an exception in the same way a real node would # when calling 'getblockheader' on a block hash that is not part of # the chain. 
- self.test.assertEqual(blockHash, self.blockHash, "Called 'getblockheader {}' when expected was 'getblockheader {}'".format( - blockHash, self.blockHash)) + self.test.assertEqual( + blockHash, + self.blockHash, + "Called 'getblockheader {}' when expected was 'getblockheader {}'".format( + blockHash, self.blockHash + ), + ) return { "hash": blockHash, "confirmations": 1, "height": 591463, "version": 536870912, "versionHex": "20000000", - "merkleroot": "51c898f034b6c5a5513a7c35912e86d009188311e550bb3096e04afb11f40aba", + "merkleroot": ( + "51c898f034b6c5a5513a7c35912e86d009188311e550bb3096e04afb11f40aba" + ), "time": 1563212034, "mediantime": 1563208994, "nonce": 3501699724, "bits": "18040cd6", "difficulty": 271470800310.0635, - "chainwork": "000000000000000000000000000000000000000000f4c5e639fa012518a48a57", - "previousblockhash": "00000000000000000307b45e4a6cf8d49e70b9012ea1d72a5ce334a4213f66bd", + "chainwork": ( + "000000000000000000000000000000000000000000f4c5e639fa012518a48a57" + ), + "previousblockhash": ( + "00000000000000000307b45e4a6cf8d49e70b9012ea1d72a5ce334a4213f66bd" + ), } def gettxoutsetinfo(self): return { "height": 636013, - "bestblock": "00000000000000000250a6ab6c6c4778086807f5b39910a8c108efa511282280", + "bestblock": ( + "00000000000000000250a6ab6c6c4778086807f5b39910a8c108efa511282280" + ), "transactions": 19360831, "txouts": 42145889, "bogosize": 3187119531, - "hash_serialized": "1b1cc457771e8b6f849ac21c4da43ebe5c614df9e61a943252978437ad774ce5", + "hash_serialized": ( + "1b1cc457771e8b6f849ac21c4da43ebe5c614df9e61a943252978437ad774ce5" + ), "disk_size": self.chainstateSize, "total_amount": 18412423.42452419, } @@ -84,8 +115,18 @@ class MockFailRPC(MockRPC): # Provides a fail counter to fail after the Nth RPC command - def __init__(self, test, chain, numBlocks, expectedBlock, blockHash, - chainWork, blockchainSize, chainstateSize, failCounter): + def __init__( + self, + test, + chain, + numBlocks, + expectedBlock, + blockHash, + chainWork, + blockchainSize, + chainstateSize, + failCounter, + ): super().__init__( test, chain, @@ -94,7 +135,8 @@ blockHash, chainWork, blockchainSize, - chainstateSize) + chainstateSize, + ) self.failCounter = failCounter def checkFailCounter(self): @@ -121,7 +163,7 @@ return super().gettxoutsetinfo() -def CheckMockFailure(test, args, errorMessage='error code: -99'): +def CheckMockFailure(test, args, errorMessage="error code: -99"): with test.assertRaises(Exception) as context: GenerateChainParams(args) test.assertIn(errorMessage, str(context.exception)) @@ -131,89 +173,128 @@ maxDiff = None def setUp(self): - self.blockHash1 = '0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8' - self.chainWork1 = '000000000000000000000000000000000000000000f2537ccf2e07bbe15e70e1' - self.blockHash2 = '0000000000000298a9fa227f0ec32f2b7585f3e64c8b3369e7f8b4fd8ea3d836' - self.chainWork2 = '00000000000000000000000000000000000000000000004fdb4795a837f19671' + self.blockHash1 = ( + "0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8" + ) + self.chainWork1 = ( + "000000000000000000000000000000000000000000f2537ccf2e07bbe15e70e1" + ) + self.blockHash2 = ( + "0000000000000298a9fa227f0ec32f2b7585f3e64c8b3369e7f8b4fd8ea3d836" + ) + self.chainWork2 = ( + "00000000000000000000000000000000000000000000004fdb4795a837f19671" + ) def test_happy_path_mainnet(self): mockRPC = MockRPC( test=self, - chain='main', + chain="main", numBlocks=123000, expectedBlock=122990, blockHash=self.blockHash1, chainWork=self.chainWork1, blockchainSize=160111222333, - 
chainstateSize=2000111222) + chainstateSize=2000111222, + ) args = { - 'rpc': mockRPC, - 'block': None, + "rpc": mockRPC, + "block": None, } - self.assertEqual(GenerateChainParams(args), "\n".join([ - "0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8", - "000000000000000000000000000000000000000000f2537ccf2e07bbe15e70e1", - "194", "3"])) + self.assertEqual( + GenerateChainParams(args), + "\n".join( + [ + "0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8", + "000000000000000000000000000000000000000000f2537ccf2e07bbe15e70e1", + "194", + "3", + ] + ), + ) def test_happy_path_testnet(self): mockRPC = MockRPC( test=self, - chain='test', + chain="test", numBlocks=234000, expectedBlock=232000, blockHash=self.blockHash1, chainWork=self.chainWork1, blockchainSize=50111222333, - chainstateSize=1000111222) + chainstateSize=1000111222, + ) args = { - 'rpc': mockRPC, - 'block': None, + "rpc": mockRPC, + "block": None, } - self.assertEqual(GenerateChainParams(args), "\n".join([ - "0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8", - "000000000000000000000000000000000000000000f2537ccf2e07bbe15e70e1", - "61", "2"])) + self.assertEqual( + GenerateChainParams(args), + "\n".join( + [ + "0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8", + "000000000000000000000000000000000000000000f2537ccf2e07bbe15e70e1", + "61", + "2", + ] + ), + ) def test_specific_block(self): mockRPC = MockRPC( test=self, - chain='main', + chain="main", numBlocks=123000, expectedBlock=122990, blockHash=self.blockHash1, chainWork=self.chainWork1, blockchainSize=160111222333, - chainstateSize=2000111222) + chainstateSize=2000111222, + ) args = { - 'rpc': mockRPC, - 'block': self.blockHash1, + "rpc": mockRPC, + "block": self.blockHash1, } - self.assertEqual(GenerateChainParams(args), "\n".join([ - "0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8", - "000000000000000000000000000000000000000000f2537ccf2e07bbe15e70e1", - "194", "3"])) + self.assertEqual( + GenerateChainParams(args), + "\n".join( + [ + "0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8", + "000000000000000000000000000000000000000000f2537ccf2e07bbe15e70e1", + "194", + "3", + ] + ), + ) def test_wrong_chain(self): mockRPC = MockRPC( test=self, - chain='main', + chain="main", numBlocks=123000, expectedBlock=122990, blockHash=self.blockHash1, chainWork=self.chainWork1, blockchainSize=160111222333, - chainstateSize=2000111222) + chainstateSize=2000111222, + ) args = { - 'rpc': mockRPC, - 'block': self.blockHash2, + "rpc": mockRPC, + "block": self.blockHash2, } CheckMockFailure( - self, args, "expected was 'getblockheader 0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8'") + self, + args, + ( + "expected was 'getblockheader" + " 0000000000000000003ef673ae12bc6017481830d37b9c52ce1e79c080e812b8'" + ), + ) def test_bitcoin_cli_failures_testnet(self): - for chain in ['main', 'test']: + for chain in ["main", "test"]: expectedBlock = 133990 - if chain == 'test': + if chain == "test": expectedBlock = 132000 for failCounter in range(4): @@ -226,10 +307,11 @@ chainWork=self.chainWork1, failCounter=failCounter, blockchainSize=160111222333, - chainstateSize=2000111222) + chainstateSize=2000111222, + ) argsFail = { - 'rpc': mockFailRPC, - 'block': None, + "rpc": mockFailRPC, + "block": None, } CheckMockFailure(self, argsFail) diff --git a/contrib/devtools/circular-dependencies.py b/contrib/devtools/circular-dependencies.py --- a/contrib/devtools/circular-dependencies.py +++ 
b/contrib/devtools/circular-dependencies.py @@ -8,16 +8,14 @@ from typing import Dict, List, Set MAPPING = { - 'core_read.cpp': 'core_io.cpp', - 'core_write.cpp': 'core_io.cpp', + "core_read.cpp": "core_io.cpp", + "core_write.cpp": "core_io.cpp", } # Directories with header-based modules, where the assumption that .cpp files # define functions and variables declared in corresponding .h files is # incorrect. -HEADER_MODULE_PATHS = [ - 'interfaces/' -] +HEADER_MODULE_PATHS = ["interfaces/"] def module_name(path): @@ -52,13 +50,17 @@ # TODO: implement support for multiple include directories for arg in sorted(files.keys()): module = files[arg] - with open(arg, 'r', encoding="utf8") as f: + with open(arg, "r", encoding="utf8") as f: for line in f: match = RE.match(line) if match: include = match.group(1) included_module = module_name(include) - if included_module is not None and included_module in deps and included_module != module: + if ( + included_module is not None + and included_module in deps + and included_module != module + ): deps[module].add(included_module) # Loop to find the shortest (remaining) circular dependency @@ -79,8 +81,9 @@ break # If module is in its own transitive closure, it's a circular # dependency; check if it is the shortest - if module in closure and (shortest_cycle is None or len( - closure[module]) + 1 < len(shortest_cycle)): + if module in closure and ( + shortest_cycle is None or len(closure[module]) + 1 < len(shortest_cycle) + ): shortest_cycle = [module] + closure[module] if shortest_cycle is None: break diff --git a/contrib/devtools/copyright_header.py b/contrib/devtools/copyright_header.py --- a/contrib/devtools/copyright_header.py +++ b/contrib/devtools/copyright_header.py @@ -17,19 +17,18 @@ EXCLUDE = [ # auto generated: - 'src/qt/bitcoinstrings.cpp', - 'src/chainparamsseeds.h', + "src/qt/bitcoinstrings.cpp", + "src/chainparamsseeds.h", # other external copyrights: - 'src/reverse_iterator.h', - 'src/test/fuzz/FuzzedDataProvider.h', - 'src/tinyformat.h', - 'src/bench/nanobench.h', - 'test/functional/test_framework/bignum.py', + "src/reverse_iterator.h", + "src/test/fuzz/FuzzedDataProvider.h", + "src/tinyformat.h", + "src/bench/nanobench.h", + "test/functional/test_framework/bignum.py", # python init: - '*__init__.py', + "*__init__.py", ] -EXCLUDE_COMPILED = re.compile( - '|'.join([fnmatch.translate(m) for m in EXCLUDE])) +EXCLUDE_COMPILED = re.compile("|".join([fnmatch.translate(m) for m in EXCLUDE])) EXCLUDE_DIRS = [ # git subtrees @@ -39,31 +38,31 @@ "src/univalue/", ] -INCLUDE = ['*.h', '*.cpp', '*.cc', '*.c', '*.mm', '*.py', '*.sh', - '*.bash-completion'] -INCLUDE_COMPILED = re.compile( - '|'.join([fnmatch.translate(m) for m in INCLUDE])) +INCLUDE = ["*.h", "*.cpp", "*.cc", "*.c", "*.mm", "*.py", "*.sh", "*.bash-completion"] +INCLUDE_COMPILED = re.compile("|".join([fnmatch.translate(m) for m in INCLUDE])) def applies_to_file(filename): for excluded_dir in EXCLUDE_DIRS: if filename.startswith(excluded_dir): return False - return ((EXCLUDE_COMPILED.match(filename) is None) - and (INCLUDE_COMPILED.match(filename) is not None)) + return (EXCLUDE_COMPILED.match(filename) is None) and ( + INCLUDE_COMPILED.match(filename) is not None + ) + ########################################################################## # obtain list of files in repo according to INCLUDE and EXCLUDE ########################################################################## -GIT_LS_CMD = 'git ls-files --full-name'.split(' ') -GIT_TOPLEVEL_CMD = 'git rev-parse --show-toplevel'.split(' 
') +GIT_LS_CMD = "git ls-files --full-name".split(" ") +GIT_TOPLEVEL_CMD = "git rev-parse --show-toplevel".split(" ") def call_git_ls(base_directory): out = subprocess.check_output([*GIT_LS_CMD, base_directory]) - return [f for f in out.decode("utf-8").split('\n') if f != ''] + return [f for f in out.decode("utf-8").split("\n") if f != ""] def call_git_toplevel(): @@ -75,30 +74,35 @@ "Returns an array of absolute paths to any project files in the base_directory that pass the include/exclude filters" root = call_git_toplevel() filenames = call_git_ls(base_directory) - return sorted([os.path.join(root, filename) for filename in filenames if - applies_to_file(filename)]) + return sorted( + [ + os.path.join(root, filename) + for filename in filenames + if applies_to_file(filename) + ] + ) + ########################################################################## # define and compile regexes for the patterns we are looking for ########################################################################## -COPYRIGHT_WITH_C = r'Copyright \(c\)' -COPYRIGHT_WITHOUT_C = 'Copyright' -ANY_COPYRIGHT_STYLE = f'({COPYRIGHT_WITH_C}|{COPYRIGHT_WITHOUT_C})' +COPYRIGHT_WITH_C = r"Copyright \(c\)" +COPYRIGHT_WITHOUT_C = "Copyright" +ANY_COPYRIGHT_STYLE = f"({COPYRIGHT_WITH_C}|{COPYRIGHT_WITHOUT_C})" YEAR = "20[0-9][0-9]" -YEAR_RANGE = f'({YEAR})(-{YEAR})?' -YEAR_LIST = f'({YEAR})(, {YEAR})+' -ANY_YEAR_STYLE = f'({YEAR_RANGE}|{YEAR_LIST})' -ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = ( - f"{ANY_COPYRIGHT_STYLE} {ANY_YEAR_STYLE}") +YEAR_RANGE = f"({YEAR})(-{YEAR})?" +YEAR_LIST = f"({YEAR})(, {YEAR})+" +ANY_YEAR_STYLE = f"({YEAR_RANGE}|{YEAR_LIST})" +ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = f"{ANY_COPYRIGHT_STYLE} {ANY_YEAR_STYLE}" ANY_COPYRIGHT_COMPILED = re.compile(ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE) def compile_copyright_regex(copyright_style, year_style, name): - return re.compile(f'{copyright_style} {year_style},? {name}') + return re.compile(f"{copyright_style} {year_style},? 
{name}") EXPECTED_HOLDER_NAMES = [ @@ -125,13 +129,15 @@ WITHOUT_C_STYLE_COMPILED = {} for holder_name in EXPECTED_HOLDER_NAMES: - DOMINANT_STYLE_COMPILED[holder_name] = ( - compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_RANGE, holder_name)) - YEAR_LIST_STYLE_COMPILED[holder_name] = ( - compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_LIST, holder_name)) - WITHOUT_C_STYLE_COMPILED[holder_name] = ( - compile_copyright_regex(COPYRIGHT_WITHOUT_C, ANY_YEAR_STYLE, - holder_name)) + DOMINANT_STYLE_COMPILED[holder_name] = compile_copyright_regex( + COPYRIGHT_WITH_C, YEAR_RANGE, holder_name + ) + YEAR_LIST_STYLE_COMPILED[holder_name] = compile_copyright_regex( + COPYRIGHT_WITH_C, YEAR_LIST, holder_name + ) + WITHOUT_C_STYLE_COMPILED[holder_name] = compile_copyright_regex( + COPYRIGHT_WITHOUT_C, ANY_YEAR_STYLE, holder_name + ) ########################################################################## # search file contents for copyright message of particular category @@ -156,47 +162,52 @@ match = WITHOUT_C_STYLE_COMPILED[holder_name].search(contents) return match is not None + ########################################################################## # get file info ########################################################################## def read_file(filename): - return open(filename, 'r', encoding="utf8").read() + return open(filename, "r", encoding="utf8").read() def gather_file_info(filename): info = {} - info['filename'] = filename + info["filename"] = filename c = read_file(filename) - info['contents'] = c + info["contents"] = c - info['all_copyrights'] = get_count_of_copyrights_of_any_style_any_holder(c) + info["all_copyrights"] = get_count_of_copyrights_of_any_style_any_holder(c) - info['classified_copyrights'] = 0 - info['dominant_style'] = {} - info['year_list_style'] = {} - info['without_c_style'] = {} + info["classified_copyrights"] = 0 + info["dominant_style"] = {} + info["year_list_style"] = {} + info["without_c_style"] = {} for holder_name in EXPECTED_HOLDER_NAMES: - has_dominant_style = ( - file_has_dominant_style_copyright_for_holder(c, holder_name)) - has_year_list_style = ( - file_has_year_list_style_copyright_for_holder(c, holder_name)) - has_without_c_style = ( - file_has_without_c_style_copyright_for_holder(c, holder_name)) - info['dominant_style'][holder_name] = has_dominant_style - info['year_list_style'][holder_name] = has_year_list_style - info['without_c_style'][holder_name] = has_without_c_style + has_dominant_style = file_has_dominant_style_copyright_for_holder( + c, holder_name + ) + has_year_list_style = file_has_year_list_style_copyright_for_holder( + c, holder_name + ) + has_without_c_style = file_has_without_c_style_copyright_for_holder( + c, holder_name + ) + info["dominant_style"][holder_name] = has_dominant_style + info["year_list_style"][holder_name] = has_year_list_style + info["without_c_style"][holder_name] = has_without_c_style if has_dominant_style or has_year_list_style or has_without_c_style: - info['classified_copyrights'] = info['classified_copyrights'] + 1 + info["classified_copyrights"] = info["classified_copyrights"] + 1 return info + ########################################################################## # report execution ########################################################################## -SEPARATOR = '-'.join(['' for _ in range(80)]) +SEPARATOR = "-".join(["" for _ in range(80)]) def print_filenames(filenames, verbose): @@ -208,72 +219,91 @@ def print_report(file_infos, verbose): print(SEPARATOR) - examined = [i['filename'] for i in 
file_infos] - print("{} files examined according to INCLUDE and EXCLUDE fnmatch rules".format( - len(examined))) + examined = [i["filename"] for i in file_infos] + print( + f"{len(examined)} files examined according to INCLUDE and EXCLUDE fnmatch rules" + ) print_filenames(examined, verbose) print(SEPARATOR) - print('') - zero_copyrights = [i['filename'] for i in file_infos if - i['all_copyrights'] == 0] + print("") + zero_copyrights = [i["filename"] for i in file_infos if i["all_copyrights"] == 0] print(f"{len(zero_copyrights):4d} with zero copyrights") print_filenames(zero_copyrights, verbose) - one_copyright = [i['filename'] for i in file_infos if - i['all_copyrights'] == 1] + one_copyright = [i["filename"] for i in file_infos if i["all_copyrights"] == 1] print(f"{len(one_copyright):4d} with one copyright") print_filenames(one_copyright, verbose) - two_copyrights = [i['filename'] for i in file_infos if - i['all_copyrights'] == 2] + two_copyrights = [i["filename"] for i in file_infos if i["all_copyrights"] == 2] print(f"{len(two_copyrights):4d} with two copyrights") print_filenames(two_copyrights, verbose) - three_copyrights = [i['filename'] for i in file_infos if - i['all_copyrights'] == 3] + three_copyrights = [i["filename"] for i in file_infos if i["all_copyrights"] == 3] print(f"{len(three_copyrights):4d} with three copyrights") print_filenames(three_copyrights, verbose) - four_or_more_copyrights = [i['filename'] for i in file_infos if - i['all_copyrights'] >= 4] + four_or_more_copyrights = [ + i["filename"] for i in file_infos if i["all_copyrights"] >= 4 + ] print(f"{len(four_or_more_copyrights):4d} with four or more copyrights") print_filenames(four_or_more_copyrights, verbose) - print('') + print("") print(SEPARATOR) - print('Copyrights with dominant style:\ne.g. "Copyright (c)" and ' - '"" or "-":\n') + print( + 'Copyrights with dominant style:\ne.g. "Copyright (c)" and ' + '"" or "-":\n' + ) for holder_name in EXPECTED_HOLDER_NAMES: - dominant_style = [i['filename'] for i in file_infos if - i['dominant_style'][holder_name]] + dominant_style = [ + i["filename"] for i in file_infos if i["dominant_style"][holder_name] + ] if len(dominant_style) > 0: - print("{:4d} with '{}'".format( - len(dominant_style), holder_name.replace('\n', '\\n'))) + print( + "{:4d} with '{}'".format( + len(dominant_style), holder_name.replace("\n", "\\n") + ) + ) print_filenames(dominant_style, verbose) - print('') + print("") print(SEPARATOR) - print('Copyrights with year list style:\ne.g. "Copyright (c)" and ' - '", , ...":\n') + print( + 'Copyrights with year list style:\ne.g. "Copyright (c)" and ' + '", , ...":\n' + ) for holder_name in EXPECTED_HOLDER_NAMES: - year_list_style = [i['filename'] for i in file_infos if - i['year_list_style'][holder_name]] + year_list_style = [ + i["filename"] for i in file_infos if i["year_list_style"][holder_name] + ] if len(year_list_style) > 0: - print("{:4d} with '{}'".format( - len(year_list_style), holder_name.replace('\n', '\\n'))) + print( + "{:4d} with '{}'".format( + len(year_list_style), holder_name.replace("\n", "\\n") + ) + ) print_filenames(year_list_style, verbose) - print('') + print("") print(SEPARATOR) - print('Copyrights with no "(c)" style:\ne.g. "Copyright" and "" or ' - '"-":\n') + print( + 'Copyrights with no "(c)" style:\ne.g. 
"Copyright" and "" or ' + '"-":\n' + ) for holder_name in EXPECTED_HOLDER_NAMES: - without_c_style = [i['filename'] for i in file_infos if - i['without_c_style'][holder_name]] + without_c_style = [ + i["filename"] for i in file_infos if i["without_c_style"][holder_name] + ] if len(without_c_style) > 0: - print("{:4d} with '{}'".format( - len(without_c_style), holder_name.replace('\n', '\\n'))) + print( + "{:4d} with '{}'".format( + len(without_c_style), holder_name.replace("\n", "\\n") + ) + ) print_filenames(without_c_style, verbose) - print('') + print("") print(SEPARATOR) - unclassified_copyrights = [i['filename'] for i in file_infos if - i['classified_copyrights'] < i['all_copyrights']] + unclassified_copyrights = [ + i["filename"] + for i in file_infos + if i["classified_copyrights"] < i["all_copyrights"] + ] print(f"{len(unclassified_copyrights)} with unexpected copyright holder names") print_filenames(unclassified_copyrights, verbose) print(SEPARATOR) @@ -284,6 +314,7 @@ file_infos = [gather_file_info(f) for f in filenames] print_report(file_infos, verbose) + ########################################################################## # report cmd ########################################################################## @@ -312,13 +343,14 @@ if len(argv) == 3: verbose = False - elif argv[3] == 'verbose': + elif argv[3] == "verbose": verbose = True else: sys.exit(f"*** unknown argument: {argv[2]}") exec_report(base_directory, verbose) + ########################################################################## # query git for year of last change ########################################################################## @@ -328,8 +360,8 @@ def call_git_log(filename): - out = subprocess.check_output((GIT_LOG_CMD.format(filename)).split(' ')) - return out.decode("utf-8").split('\n') + out = subprocess.check_output((GIT_LOG_CMD.format(filename)).split(" ")) + return out.decode("utf-8").split("\n") def get_git_change_years(filename): @@ -337,43 +369,45 @@ if len(git_log_lines) == 0: return [datetime.date.today().year] # timestamp is in ISO 8601 format. e.g. "2016-09-05 14:25:32 -0600" - return [line.split(' ')[0].split('-')[0] for line in git_log_lines] + return [line.split(" ")[0].split("-")[0] for line in git_log_lines] def get_most_recent_git_change_year(filename): return max(get_git_change_years(filename)) + ########################################################################## # read and write to file ########################################################################## def read_file_lines(filename): - f = open(filename, 'r', encoding="utf8") + f = open(filename, "r", encoding="utf8") file_lines = f.readlines() f.close() return file_lines def write_file_lines(filename, file_lines): - f = open(filename, 'w', encoding="utf8") - f.write(''.join(file_lines)) + f = open(filename, "w", encoding="utf8") + f.write("".join(file_lines)) f.close() + ########################################################################## # update header years execution ########################################################################## -COPYRIGHT = r'Copyright \(c\)' +COPYRIGHT = r"Copyright \(c\)" YEAR = "20[0-9][0-9]" -YEAR_RANGE = f'({YEAR})(-{YEAR})?' -HOLDER = 'The Bitcoin developers' -UPDATEABLE_LINE_COMPILED = re.compile( - ' '.join([COPYRIGHT, YEAR_RANGE, HOLDER])) +YEAR_RANGE = f"({YEAR})(-{YEAR})?" 
+HOLDER = "The Bitcoin developers" +UPDATEABLE_LINE_COMPILED = re.compile(" ".join([COPYRIGHT, YEAR_RANGE, HOLDER])) DISTRIBUTION_LINE = re.compile( - r"Distributed under the MIT software license, see the accompanying") + r"Distributed under the MIT software license, see the accompanying" +) def get_updatable_copyright_line(file_lines): @@ -395,7 +429,7 @@ def parse_year_range(year_range): - year_split = year_range.split('-') + year_split = year_range.split("-") start_year = year_split[0] if len(year_split) == 1: return start_year, start_year @@ -409,21 +443,25 @@ def create_updated_copyright_line(line, last_git_change_year): - copyright_splitter = 'Copyright (c) ' + copyright_splitter = "Copyright (c) " copyright_split = line.split(copyright_splitter) # Preserve characters on line that are ahead of the start of the copyright # notice - they are part of the comment block and vary from file-to-file. before_copyright = copyright_split[0] after_copyright = copyright_split[1] - space_split = after_copyright.split(' ') + space_split = after_copyright.split(" ") year_range = space_split[0] start_year, end_year = parse_year_range(year_range) if end_year == last_git_change_year: return line - return (before_copyright + copyright_splitter - + year_range_to_str(start_year, last_git_change_year) + ' ' - + ' '.join(space_split[1:])) + return ( + before_copyright + + copyright_splitter + + year_range_to_str(start_year, last_git_change_year) + + " " + + " ".join(space_split[1:]) + ) def update_updatable_copyright(filename): @@ -439,14 +477,14 @@ return file_lines[index] = new_line write_file_lines(filename, file_lines) - print_file_action_message(filename, - f"Copyright updated! -> {last_git_change_year}") + print_file_action_message(filename, f"Copyright updated! -> {last_git_change_year}") def exec_update_header_year(base_directory): for filename in get_filenames_to_examine(base_directory): update_updatable_copyright(filename) + ########################################################################## # update cmd ########################################################################## @@ -495,38 +533,40 @@ sys.exit(f"*** bad base_directory: {base_directory}") exec_update_header_year(base_directory) + ########################################################################## # inserted copyright header format ########################################################################## def get_header_lines(header, start_year, end_year): - lines = header.split('\n')[1:-1] + lines = header.split("\n")[1:-1] lines[0] = lines[0].format(year_range_to_str(start_year, end_year)) - return [line + '\n' for line in lines] + return [line + "\n" for line in lines] -CPP_HEADER = ''' +CPP_HEADER = """ // Copyright (c) {} The Bitcoin developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. -''' +""" def get_cpp_header_lines_to_insert(start_year, end_year): return reversed(get_header_lines(CPP_HEADER, start_year, end_year)) -SCRIPT_HEADER = ''' +SCRIPT_HEADER = """ # Copyright (c) {} The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
-''' +""" def get_script_header_lines_to_insert(start_year, end_year): return reversed(get_header_lines(SCRIPT_HEADER, start_year, end_year)) + ########################################################################## # query git for year of last change ########################################################################## @@ -536,6 +576,7 @@ years = get_git_change_years(filename) return min(years), max(years) + ########################################################################## # check for existing ABC copyright ########################################################################## @@ -545,6 +586,7 @@ index, _ = get_updatable_copyright_line(file_lines) return index is not None + ########################################################################## # insert header execution ########################################################################## @@ -555,7 +597,7 @@ return False if len(file_lines[0]) <= 2: return False - return file_lines[0][:2] == '#!' + return file_lines[0][:2] == "#!" def insert_script_header(filename, file_lines, start_year, end_year): @@ -574,7 +616,7 @@ def insert_cpp_header(filename, file_lines, start_year, end_year): - file_lines.insert(0, '\n') + file_lines.insert(0, "\n") header_lines = get_cpp_header_lines_to_insert(start_year, end_year) insert_idx = find_distribution_line_index(file_lines) if insert_idx is not None: @@ -588,13 +630,14 @@ def exec_insert_header(filename, style): file_lines = read_file_lines(filename) if file_already_has_bitcoin_copyright(file_lines): - sys.exit(f'*** {filename} already has a copyright by The Bitcoin developers') + sys.exit(f"*** {filename} already has a copyright by The Bitcoin developers") start_year, end_year = get_git_change_year_range(filename) - if style in ['python', 'shell']: + if style in ["python", "shell"]: insert_script_header(filename, file_lines, start_year, end_year) else: insert_cpp_header(filename, file_lines, start_year, end_year) + ########################################################################## # insert cmd ########################################################################## @@ -634,17 +677,18 @@ if not os.path.isfile(filename): sys.exit(f"*** bad filename: {filename}") _, extension = os.path.splitext(filename) - if extension not in ['.h', '.cpp', '.cc', '.c', '.py', '.sh']: + if extension not in [".h", ".cpp", ".cc", ".c", ".py", ".sh"]: sys.exit(f"*** cannot insert for file extension {extension}") - if extension == '.py': - style = 'python' - elif extension == '.sh': - style = 'shell' + if extension == ".py": + style = "python" + elif extension == ".sh": + style = "shell" else: - style = 'cpp' + style = "cpp" exec_insert_header(filename, style) + ########################################################################## # UI ########################################################################## @@ -665,7 +709,7 @@ To see subcommand usage, run them without arguments. 
""" -SUBCOMMANDS = ['report', 'update', 'insert'] +SUBCOMMANDS = ["report", "update", "insert"] if __name__ == "__main__": if len(sys.argv) == 1: @@ -673,9 +717,9 @@ subcommand = sys.argv[1] if subcommand not in SUBCOMMANDS: sys.exit(USAGE) - if subcommand == 'report': + if subcommand == "report": report_cmd(sys.argv) - elif subcommand == 'update': + elif subcommand == "update": update_cmd(sys.argv) - elif subcommand == 'insert': + elif subcommand == "insert": insert_cmd(sys.argv) diff --git a/contrib/devtools/optimize-pngs.py b/contrib/devtools/optimize-pngs.py --- a/contrib/devtools/optimize-pngs.py +++ b/contrib/devtools/optimize-pngs.py @@ -2,10 +2,10 @@ # Copyright (c) 2014-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -''' +""" Run this script every time you change one of the png files. Using pngcrush, it will optimize the png files, remove various color profiles, remove ancillary chunks (alla) and text chunks (text). #pngcrush -brute -ow -rem gAMA -rem cHRM -rem iCCP -rem sRGB -rem alla -rem text -''' +""" import hashlib import os import subprocess @@ -16,24 +16,25 @@ def file_hash(filename): - '''Return hash of raw file contents''' - with open(filename, 'rb') as f: + """Return hash of raw file contents""" + with open(filename, "rb") as f: return hashlib.sha256(f.read()).hexdigest() def content_hash(filename): - '''Return hash of RGBA contents of image''' + """Return hash of RGBA contents of image""" i = Image.open(filename) - i = i.convert('RGBA') + i = i.convert("RGBA") data = i.tobytes() return hashlib.sha256(data).hexdigest() -pngcrush = 'pngcrush' -git = 'git' +pngcrush = "pngcrush" +git = "git" folders = ["src/qt/res/animation", "src/qt/res/icons", "share/pixmaps"] basePath = subprocess.check_output( - [git, 'rev-parse', '--show-toplevel'], universal_newlines=True, encoding='utf8').rstrip('\n') + [git, "rev-parse", "--show-toplevel"], universal_newlines=True, encoding="utf8" +).rstrip("\n") totalSaveBytes = 0 noHashChange = True @@ -42,47 +43,95 @@ absFolder = os.path.join(basePath, folder) for file in os.listdir(absFolder): extension = os.path.splitext(file)[1] - if extension.lower() == '.png': - print(f"optimizing {file}...", end=' ') + if extension.lower() == ".png": + print(f"optimizing {file}...", end=" ") file_path = os.path.join(absFolder, file) - fileMetaMap = {'file': file, 'osize': os.path.getsize( - file_path), 'sha256Old': file_hash(file_path)} - fileMetaMap['contentHashPre'] = content_hash(file_path) + fileMetaMap = { + "file": file, + "osize": os.path.getsize(file_path), + "sha256Old": file_hash(file_path), + } + fileMetaMap["contentHashPre"] = content_hash(file_path) try: - subprocess.call([pngcrush, "-brute", "-ow", "-rem", "gAMA", "-rem", "cHRM", "-rem", "iCCP", "-rem", "sRGB", - "-rem", "alla", "-rem", "text", file_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + subprocess.call( + [ + pngcrush, + "-brute", + "-ow", + "-rem", + "gAMA", + "-rem", + "cHRM", + "-rem", + "iCCP", + "-rem", + "sRGB", + "-rem", + "alla", + "-rem", + "text", + file_path, + ], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) except OSError: print("pngcrush is not installed, aborting...") sys.exit(0) # verify if "Not a PNG file" in subprocess.check_output( - [pngcrush, "-n", "-v", file_path], stderr=subprocess.STDOUT, universal_newlines=True, encoding='utf8'): - print("PNG file " + file + - " is corrupted after crushing, check out 
pngcursh version")
+                print(
+                    "PNG file "
+                    + file
+                    + " is corrupted after crushing, check out pngcrush version"
+                )
                 sys.exit(1)
-            fileMetaMap['sha256New'] = file_hash(file_path)
-            fileMetaMap['contentHashPost'] = content_hash(file_path)
+            fileMetaMap["sha256New"] = file_hash(file_path)
+            fileMetaMap["contentHashPost"] = content_hash(file_path)
 
-            if fileMetaMap['contentHashPre'] != fileMetaMap['contentHashPost']:
-                print("Image contents of PNG file " + file +
-                      " before and after crushing don't match")
+            if fileMetaMap["contentHashPre"] != fileMetaMap["contentHashPost"]:
+                print(
+                    "Image contents of PNG file "
+                    + file
+                    + " before and after crushing don't match"
+                )
                 sys.exit(1)
-            fileMetaMap['psize'] = os.path.getsize(file_path)
+            fileMetaMap["psize"] = os.path.getsize(file_path)
             outputArray.append(fileMetaMap)
             print("done")
 
 print("summary:\n+++++++++++++++++")
 for fileDict in outputArray:
-    oldHash = fileDict['sha256Old']
-    newHash = fileDict['sha256New']
-    totalSaveBytes += fileDict['osize'] - fileDict['psize']
+    oldHash = fileDict["sha256Old"]
+    newHash = fileDict["sha256New"]
+    totalSaveBytes += fileDict["osize"] - fileDict["psize"]
     noHashChange = noHashChange and (oldHash == newHash)
-    print(fileDict['file'] + "\n size diff from: " + str(fileDict['osize']) + " to: " +
-          str(fileDict['psize']) + "\n old sha256: " + oldHash + "\n new sha256: " + newHash + "\n")
+    print(
+        fileDict["file"]
+        + "\n size diff from: "
+        + str(fileDict["osize"])
+        + " to: "
+        + str(fileDict["psize"])
+        + "\n old sha256: "
+        + oldHash
+        + "\n new sha256: "
+        + newHash
+        + "\n"
+    )
 
-print("completed. Checksum stable: " + str(noHashChange) +
-      ". Total reduction: " + str(totalSaveBytes) + " bytes")
+print(
+    "completed. Checksum stable: "
+    + str(noHashChange)
+    + ". Total reduction: "
+    + str(totalSaveBytes)
+    + " bytes"
+)
diff --git a/contrib/devtools/pixie.py b/contrib/devtools/pixie.py
--- a/contrib/devtools/pixie.py
+++ b/contrib/devtools/pixie.py
@@ -2,9 +2,9 @@
 # Copyright (c) 2020 Wladimir J. van der Laan
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
-'''
+"""
 Compact, self-contained ELF implementation for bitcoin-abc security checks. 
-''' +""" import struct import types from typing import Dict, List, Optional, Tuple, Union @@ -38,18 +38,18 @@ SHT_STRTAB = 3 SHT_DYNAMIC = 6 SHT_DYNSYM = 11 -SHT_GNU_verneed = 0x6ffffffe -SHT_GNU_versym = 0x6fffffff +SHT_GNU_verneed = 0x6FFFFFFE +SHT_GNU_versym = 0x6FFFFFFF # relevant values for p_type PT_LOAD = 1 -PT_GNU_STACK = 0x6474e551 -PT_GNU_RELRO = 0x6474e552 +PT_GNU_STACK = 0x6474E551 +PT_GNU_RELRO = 0x6474E552 # relevant values for p_flags -PF_X = (1 << 0) -PF_W = (1 << 1) -PF_R = (1 << 2) +PF_X = 1 << 0 +PF_W = 1 << 1 +PF_R = 1 << 2 # relevant values for d_tag DT_NEEDED = 1 @@ -66,76 +66,91 @@ class ELFRecord(types.SimpleNamespace): - '''Unified parsing for ELF records.''' + """Unified parsing for ELF records.""" - def __init__(self, data: bytes, offset: int, eh: 'ELFHeader', - total_size: Optional[int]) -> None: + def __init__( + self, data: bytes, offset: int, eh: "ELFHeader", total_size: Optional[int] + ) -> None: hdr_struct = self.STRUCT[eh.ei_class][0][eh.ei_data] if total_size is not None and hdr_struct.size > total_size: raise ValueError( - f'{self.__class__.__name__} header size too small ({total_size} < {hdr_struct.size})') - for field, value in zip(self.STRUCT[eh.ei_class][1], hdr_struct.unpack( - data[offset:offset + hdr_struct.size])): + f"{self.__class__.__name__} header size too small ({total_size} <" + f" {hdr_struct.size})" + ) + for field, value in zip( + self.STRUCT[eh.ei_class][1], + hdr_struct.unpack(data[offset : offset + hdr_struct.size]), + ): setattr(self, field, value) def BiStruct(chars: str) -> Dict[int, struct.Struct]: - '''Compile a struct parser for both endians.''' + """Compile a struct parser for both endians.""" return { - ELFDATA2LSB: struct.Struct('<' + chars), - ELFDATA2MSB: struct.Struct('>' + chars), + ELFDATA2LSB: struct.Struct("<" + chars), + ELFDATA2MSB: struct.Struct(">" + chars), } class ELFHeader(ELFRecord): FIELDS = [ - 'e_type', - 'e_machine', - 'e_version', - 'e_entry', - 'e_phoff', - 'e_shoff', - 'e_flags', - 'e_ehsize', - 'e_phentsize', - 'e_phnum', - 'e_shentsize', - 'e_shnum', - 'e_shstrndx'] + "e_type", + "e_machine", + "e_version", + "e_entry", + "e_phoff", + "e_shoff", + "e_flags", + "e_ehsize", + "e_phentsize", + "e_phnum", + "e_shentsize", + "e_shnum", + "e_shstrndx", + ] STRUCT = { - ELFCLASS32: (BiStruct('HHIIIIIHHHHHH'), FIELDS), - ELFCLASS64: (BiStruct('HHIQQQIHHHHHH'), FIELDS), + ELFCLASS32: (BiStruct("HHIIIIIHHHHHH"), FIELDS), + ELFCLASS64: (BiStruct("HHIQQQIHHHHHH"), FIELDS), } def __init__(self, data: bytes, offset: int) -> None: - self.e_ident = data[offset:offset + EI_NIDENT] - if self.e_ident[0:4] != b'\x7fELF': - raise ValueError('invalid ELF magic') + self.e_ident = data[offset : offset + EI_NIDENT] + if self.e_ident[0:4] != b"\x7fELF": + raise ValueError("invalid ELF magic") self.ei_class = self.e_ident[EI_CLASS] self.ei_data = self.e_ident[EI_DATA] super().__init__(data, offset + EI_NIDENT, self, None) def __repr__(self) -> str: - return f'Header(e_ident={self.e_ident!r}, e_type={self.e_type}, e_machine={self.e_machine}, e_version={self.e_version}, e_entry={self.e_entry}, e_phoff={self.e_phoff}, e_shoff={self.e_shoff}, e_flags={self.e_flags}, e_ehsize={self.e_ehsize}, e_phentsize={self.e_phentsize}, e_phnum={self.e_phnum}, e_shentsize={self.e_shentsize}, e_shnum={self.e_shnum}, e_shstrndx={self.e_shstrndx})' + return ( + f"Header(e_ident={self.e_ident!r}, e_type={self.e_type}," + f" e_machine={self.e_machine}, e_version={self.e_version}," + f" e_entry={self.e_entry}, e_phoff={self.e_phoff}, 
e_shoff={self.e_shoff}," + f" e_flags={self.e_flags}, e_ehsize={self.e_ehsize}," + f" e_phentsize={self.e_phentsize}, e_phnum={self.e_phnum}," + f" e_shentsize={self.e_shentsize}, e_shnum={self.e_shnum}," + f" e_shstrndx={self.e_shstrndx})" + ) class Section(ELFRecord): name: Optional[bytes] = None FIELDS = [ - 'sh_name', - 'sh_type', - 'sh_flags', - 'sh_addr', - 'sh_offset', - 'sh_size', - 'sh_link', - 'sh_info', - 'sh_addralign', - 'sh_entsize'] + "sh_name", + "sh_type", + "sh_flags", + "sh_addr", + "sh_offset", + "sh_size", + "sh_link", + "sh_info", + "sh_addralign", + "sh_entsize", + ] STRUCT = { - ELFCLASS32: (BiStruct('IIIIIIIIII'), FIELDS), - ELFCLASS64: (BiStruct('IIQQQQIIQQ'), FIELDS), + ELFCLASS32: (BiStruct("IIIIIIIIII"), FIELDS), + ELFCLASS64: (BiStruct("IIQQQQIIQQ"), FIELDS), } def __init__(self, data: bytes, offset: int, eh: ELFHeader) -> None: @@ -143,75 +158,133 @@ self._data = data def __repr__(self) -> str: - return f'Section(sh_name={self.sh_name}({self.name!r}), sh_type=0x{self.sh_type:x}, sh_flags={self.sh_flags}, sh_addr=0x{self.sh_addr:x}, sh_offset=0x{self.sh_offset:x}, sh_size={self.sh_size}, sh_link={self.sh_link}, sh_info={self.sh_info}, sh_addralign={self.sh_addralign}, sh_entsize={self.sh_entsize})' + return ( + f"Section(sh_name={self.sh_name}({self.name!r})," + f" sh_type=0x{self.sh_type:x}, sh_flags={self.sh_flags}," + f" sh_addr=0x{self.sh_addr:x}, sh_offset=0x{self.sh_offset:x}," + f" sh_size={self.sh_size}, sh_link={self.sh_link}, sh_info={self.sh_info}," + f" sh_addralign={self.sh_addralign}, sh_entsize={self.sh_entsize})" + ) def contents(self) -> bytes: - '''Return section contents.''' - return self._data[self.sh_offset:self.sh_offset + self.sh_size] + """Return section contents.""" + return self._data[self.sh_offset : self.sh_offset + self.sh_size] class ProgramHeader(ELFRecord): STRUCT = { # different ELF classes have the same fields, but in a different order to # optimize space versus alignment - ELFCLASS32: (BiStruct('IIIIIIII'), ['p_type', 'p_offset', 'p_vaddr', 'p_paddr', 'p_filesz', 'p_memsz', 'p_flags', 'p_align']), - ELFCLASS64: (BiStruct('IIQQQQQQ'), ['p_type', 'p_flags', 'p_offset', 'p_vaddr', 'p_paddr', 'p_filesz', 'p_memsz', 'p_align']), + ELFCLASS32: ( + BiStruct("IIIIIIII"), + [ + "p_type", + "p_offset", + "p_vaddr", + "p_paddr", + "p_filesz", + "p_memsz", + "p_flags", + "p_align", + ], + ), + ELFCLASS64: ( + BiStruct("IIQQQQQQ"), + [ + "p_type", + "p_flags", + "p_offset", + "p_vaddr", + "p_paddr", + "p_filesz", + "p_memsz", + "p_align", + ], + ), } def __init__(self, data: bytes, offset: int, eh: ELFHeader) -> None: super().__init__(data, offset, eh, eh.e_phentsize) def __repr__(self) -> str: - return f'ProgramHeader(p_type={self.p_type}, p_offset={self.p_offset}, p_vaddr={self.p_vaddr}, p_paddr={self.p_paddr}, p_filesz={self.p_filesz}, p_memsz={self.p_memsz}, p_flags={self.p_flags}, p_align={self.p_align})' + return ( + f"ProgramHeader(p_type={self.p_type}, p_offset={self.p_offset}," + f" p_vaddr={self.p_vaddr}, p_paddr={self.p_paddr}," + f" p_filesz={self.p_filesz}, p_memsz={self.p_memsz}," + f" p_flags={self.p_flags}, p_align={self.p_align})" + ) class Symbol(ELFRecord): STRUCT = { # different ELF classes have the same fields, but in a different order to # optimize space versus alignment - ELFCLASS32: (BiStruct('IIIBBH'), ['st_name', 'st_value', 'st_size', 'st_info', 'st_other', 'st_shndx']), - ELFCLASS64: (BiStruct('IBBHQQ'), ['st_name', 'st_info', 'st_other', 'st_shndx', 'st_value', 'st_size']), + ELFCLASS32: ( + 
BiStruct("IIIBBH"),
+            ["st_name", "st_value", "st_size", "st_info", "st_other", "st_shndx"],
+        ),
+        ELFCLASS64: (
+            BiStruct("IBBHQQ"),
+            ["st_name", "st_info", "st_other", "st_shndx", "st_value", "st_size"],
+        ),
     }
 
-    def __init__(self, data: bytes, offset: int, eh: ELFHeader,
-                 symtab: Section, strings: bytes, version: Optional[bytes]) -> None:
+    def __init__(
+        self,
+        data: bytes,
+        offset: int,
+        eh: ELFHeader,
+        symtab: Section,
+        strings: bytes,
+        version: Optional[bytes],
+    ) -> None:
         super().__init__(data, offset, eh, symtab.sh_entsize)
         self.name = _lookup_string(strings, self.st_name)
         self.version = version
 
     def __repr__(self) -> str:
-        return f'Symbol(st_name={self.st_name}({self.name!r}), st_value={self.st_value}, st_size={self.st_size}, st_info={self.st_info}, st_other={self.st_other}, st_shndx={self.st_shndx}, version={self.version!r})'
+        return (
+            f"Symbol(st_name={self.st_name}({self.name!r}), st_value={self.st_value},"
+            f" st_size={self.st_size}, st_info={self.st_info},"
+            f" st_other={self.st_other}, st_shndx={self.st_shndx},"
+            f" version={self.version!r})"
+        )
 
     @property
     def is_import(self) -> bool:
-        '''Returns whether the symbol is an imported symbol.'''
+        """Returns whether the symbol is an imported symbol."""
         return self.st_bind != STB_LOCAL and self.st_shndx == 0
 
     @property
     def is_export(self) -> bool:
-        '''Returns whether the symbol is an exported symbol.'''
+        """Returns whether the symbol is an exported symbol."""
         return self.st_bind != STB_LOCAL and self.st_shndx != 0
 
     @property
     def st_bind(self) -> int:
-        '''Returns STB_*.'''
+        """Returns STB_*."""
         return self.st_info >> 4
 
 
 class Verneed(ELFRecord):
-    DEF = (BiStruct('HHIII'), ['vn_version', 'vn_cnt', 'vn_file', 'vn_aux', 'vn_next'])
+    DEF = (BiStruct("HHIII"), ["vn_version", "vn_cnt", "vn_file", "vn_aux", "vn_next"])
     STRUCT = {ELFCLASS32: DEF, ELFCLASS64: DEF}
 
     def __init__(self, data: bytes, offset: int, eh: ELFHeader) -> None:
         super().__init__(data, offset, eh, None)
 
     def __repr__(self) -> str:
-        return f'Verneed(vn_version={self.vn_version}, vn_cnt={self.vn_cnt}, vn_file={self.vn_file}, vn_aux={self.vn_aux}, vn_next={self.vn_next})'
+        return (
+            f"Verneed(vn_version={self.vn_version}, vn_cnt={self.vn_cnt},"
+            f" vn_file={self.vn_file}, vn_aux={self.vn_aux}, vn_next={self.vn_next})"
+        )
 
 
 class Vernaux(ELFRecord):
-    DEF = (BiStruct('IHHII'), ['vna_hash', 'vna_flags',
-           'vna_other', 'vna_name', 'vna_next'])
+    DEF = (
+        BiStruct("IHHII"),
+        ["vna_hash", "vna_flags", "vna_other", "vna_name", "vna_next"],
+    )
     STRUCT = {ELFCLASS32: DEF, ELFCLASS64: DEF}
 
     def __init__(self, data: bytes, offset: int, eh: ELFHeader, strings: bytes) -> None:
@@ -219,37 +292,47 @@
         self.name = _lookup_string(strings, self.vna_name)
 
     def __repr__(self) -> str:
-        return f'Veraux(vna_hash={self.vna_hash}, vna_flags={self.vna_flags}, vna_other={self.vna_other}, vna_name={self.vna_name}({self.name!r}), vna_next={self.vna_next})'
+        return (
+            f"Vernaux(vna_hash={self.vna_hash}, vna_flags={self.vna_flags},"
+            f" vna_other={self.vna_other}, vna_name={self.vna_name}({self.name!r}),"
+            f" vna_next={self.vna_next})"
+        )
 
 
 class DynTag(ELFRecord):
     STRUCT = {
-        ELFCLASS32: (BiStruct('II'), ['d_tag', 'd_val']),
-        ELFCLASS64: (BiStruct('QQ'), ['d_tag', 'd_val']),
+        ELFCLASS32: (BiStruct("II"), ["d_tag", "d_val"]),
+        ELFCLASS64: (BiStruct("QQ"), ["d_tag", "d_val"]),
     }
 
-    def __init__(self, data: bytes, offset: int,
-                 eh: ELFHeader, section: Section) -> None:
+    def __init__(
+        self, data: bytes, offset: int, eh: ELFHeader, section: Section
+    ) -> None:
super().__init__(data, offset, eh, section.sh_entsize) def __repr__(self) -> str: - return f'DynTag(d_tag={self.d_tag}, d_val={self.d_val})' + return f"DynTag(d_tag={self.d_tag}, d_val={self.d_val})" def _lookup_string(data: bytes, index: int) -> bytes: - '''Look up string by offset in ELF string table.''' - endx = data.find(b'\x00', index) + """Look up string by offset in ELF string table.""" + endx = data.find(b"\x00", index) assert endx != -1 return data[index:endx] # .gnu_version section has a single 16-bit integer per symbol in the linked section -VERSYM_S = BiStruct('H') +VERSYM_S = BiStruct("H") -def _parse_symbol_table(section: Section, strings: bytes, eh: ELFHeader, - versym: bytes, verneed: Dict[int, bytes]) -> List[Symbol]: - '''Parse symbol table, return a list of symbols.''' +def _parse_symbol_table( + section: Section, + strings: bytes, + eh: ELFHeader, + versym: bytes, + verneed: Dict[int, bytes], +) -> List[Symbol]: + """Parse symbol table, return a list of symbols.""" data = section.contents() symbols = [] versym_iter = (verneed.get(v[0]) for v in VERSYM_S[eh.ei_data].iter_unpack(versym)) @@ -259,7 +342,7 @@ def _parse_verneed(section: Section, strings: bytes, eh: ELFHeader) -> Dict[int, bytes]: - '''Parse .gnu.version_r section, return a dictionary of {versym: 'GLIBC_...'}.''' + """Parse .gnu.version_r section, return a dictionary of {versym: 'GLIBC_...'}.""" data = section.contents() ofs = 0 result = {} @@ -280,16 +363,20 @@ return result -def _parse_dyn_tags(section: Section, strings: bytes, - eh: ELFHeader) -> List[Tuple[int, Union[bytes, int]]]: - '''Parse dynamic tags. Return array of tuples.''' +def _parse_dyn_tags( + section: Section, strings: bytes, eh: ELFHeader +) -> List[Tuple[int, Union[bytes, int]]]: + """Parse dynamic tags. Return array of tuples.""" data = section.contents() ofs = 0 result = [] for ofs in range(0, len(data), section.sh_entsize): tag = DynTag(data, ofs, eh, section) - val = _lookup_string( - strings, tag.d_val) if tag.d_tag in STRING_TAGS else tag.d_val + val = ( + _lookup_string(strings, tag.d_val) + if tag.d_tag in STRING_TAGS + else tag.d_val + ) result.append((tag.d_tag, val)) return result @@ -351,7 +438,8 @@ strtab_data = self.sections[section.sh_link].contents() versym_data = versym[idx].contents() # associated symbol version table self.dyn_symbols += _parse_symbol_table( - section, strtab_data, self.hdr, versym_data, verneed) + section, strtab_data, self.hdr, versym_data, verneed + ) def _load_dyn_tags(self) -> None: self.dyn_tags = [] @@ -369,11 +457,11 @@ ph.sections.append(section) def query_dyn_tags(self, tag_in: int) -> List[Union[int, bytes]]: - '''Return the values of all dyn tags with the specified tag.''' + """Return the values of all dyn tags with the specified tag.""" return [val for (tag, val) in self.dyn_tags if tag == tag_in] def load(filename: str) -> ELFFile: - with open(filename, 'rb') as f: + with open(filename, "rb") as f: data = f.read() return ELFFile(data) diff --git a/contrib/devtools/security-check.py b/contrib/devtools/security-check.py --- a/contrib/devtools/security-check.py +++ b/contrib/devtools/security-check.py @@ -2,11 +2,11 @@ # Copyright (c) 2015-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -''' +""" Perform basic security checks on a series of executables. Exit status will be 0 if successful, and the program will be silent. 
Otherwise the exit status will be 1 and it will log which executables failed which checks. -''' +""" import sys from typing import List, Optional @@ -15,18 +15,18 @@ def check_ELF_PIE(executable) -> bool: - ''' + """ Check for position independent executable (PIE), allowing for address space randomization. - ''' + """ elf = pixie.load(executable) return elf.hdr.e_type == pixie.ET_DYN def check_ELF_NX(executable) -> bool: - ''' + """ Check that no sections are writable and executable (including the stack) - ''' + """ elf = pixie.load(executable) have_wx = False have_gnu_stack = False @@ -40,11 +40,11 @@ def check_ELF_RELRO(executable) -> bool: - ''' + """ Check for read-only relocations. GNU_RELRO program header must exist Dynamic section must have BIND_NOW flag - ''' + """ elf = pixie.load(executable) have_gnu_relro = False for ph in elf.program_headers: @@ -68,63 +68,63 @@ def check_ELF_Canary(executable) -> bool: - ''' + """ Check for use of stack canary - ''' + """ elf = pixie.load(executable) for symbol in elf.dyn_symbols: - if symbol.name == b'__stack_chk_fail': + if symbol.name == b"__stack_chk_fail": return True return False def check_ELF_separate_code(executable): - ''' + """ Check that sections are appropriately separated in virtual memory, based on their permissions. This checks for missing -Wl,-z,separate-code and potentially other problems. - ''' + """ elf = pixie.load(executable) R = pixie.PF_R W = pixie.PF_W E = pixie.PF_X EXPECTED_FLAGS = { # Read + execute - b'.init': R | E, - b'.plt': R | E, - b'.plt.got': R | E, - b'.plt.sec': R | E, - b'.text': R | E, - b'.fini': R | E, + b".init": R | E, + b".plt": R | E, + b".plt.got": R | E, + b".plt.sec": R | E, + b".text": R | E, + b".fini": R | E, # Read-only data - b'.interp': R, - b'.note.gnu.property': R, - b'.note.gnu.build-id': R, - b'.note.ABI-tag': R, - b'.gnu.hash': R, - b'.dynsym': R, - b'.dynstr': R, - b'.gnu.version': R, - b'.gnu.version_r': R, - b'.rela.dyn': R, - b'.rela.plt': R, - b'.rodata': R, - b'.eh_frame_hdr': R, - b'.eh_frame': R, - b'.qtmetadata': R, - b'.gcc_except_table': R, - b'.stapsdt.base': R, + b".interp": R, + b".note.gnu.property": R, + b".note.gnu.build-id": R, + b".note.ABI-tag": R, + b".gnu.hash": R, + b".dynsym": R, + b".dynstr": R, + b".gnu.version": R, + b".gnu.version_r": R, + b".rela.dyn": R, + b".rela.plt": R, + b".rodata": R, + b".eh_frame_hdr": R, + b".eh_frame": R, + b".qtmetadata": R, + b".gcc_except_table": R, + b".stapsdt.base": R, # Writable data - b'.init_array': R | W, - b'.fini_array': R | W, - b'.dynamic': R | W, - b'.got': R | W, - b'.data': R | W, - b'.bss': R | W, + b".init_array": R | W, + b".fini_array": R | W, + b".dynamic": R | W, + b".got": R | W, + b".data": R | W, + b".bss": R | W, } if elf.hdr.e_machine == pixie.EM_PPC64: # .plt is RW on ppc64 even with separate-code - EXPECTED_FLAGS[b'.plt'] = R | W + EXPECTED_FLAGS[b".plt"] = R | W # For all LOAD program headers get mapping to the list of sections, # and for each section, remember the flags of the associated program header. 
flags_per_section = {} @@ -135,7 +135,7 @@ flags_per_section[section.name] = ph.p_flags # Spot-check ELF LOAD program header flags per section # If these sections exist, check them against the expected R/W/E flags - for (section, flags) in flags_per_section.items(): + for section, flags in flags_per_section.items(): if section in EXPECTED_FLAGS: if EXPECTED_FLAGS[section] != flags: return False @@ -143,113 +143,120 @@ def check_PE_DYNAMIC_BASE(executable) -> bool: - '''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)''' + """PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)""" binary = lief.parse(executable) - return lief.PE.DLL_CHARACTERISTICS.DYNAMIC_BASE in binary.optional_header.dll_characteristics_lists + return ( + lief.PE.DLL_CHARACTERISTICS.DYNAMIC_BASE + in binary.optional_header.dll_characteristics_lists + ) + # Must support high-entropy 64-bit address space layout randomization # in addition to DYNAMIC_BASE to have secure ASLR. def check_PE_HIGH_ENTROPY_VA(executable) -> bool: - '''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR''' + """PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR""" binary = lief.parse(executable) - return lief.PE.DLL_CHARACTERISTICS.HIGH_ENTROPY_VA in binary.optional_header.dll_characteristics_lists + return ( + lief.PE.DLL_CHARACTERISTICS.HIGH_ENTROPY_VA + in binary.optional_header.dll_characteristics_lists + ) def check_PE_RELOC_SECTION(executable) -> bool: - '''Check for a reloc section. This is required for functional ASLR.''' + """Check for a reloc section. This is required for functional ASLR.""" binary = lief.parse(executable) return binary.has_relocations def check_MACHO_NOUNDEFS(executable) -> bool: - ''' + """ Check for no undefined references. - ''' + """ binary = lief.parse(executable) return binary.header.has(lief.MachO.HEADER_FLAGS.NOUNDEFS) def check_MACHO_Canary(executable) -> bool: - ''' + """ Check for use of stack canary - ''' + """ binary = lief.parse(executable) - return binary.has_symbol('___stack_chk_fail') + return binary.has_symbol("___stack_chk_fail") def check_PIE(executable) -> bool: - ''' + """ Check for position independent executable (PIE), allowing for address space randomization. 
- ''' + """ binary = lief.parse(executable) return binary.is_pie def check_NX(executable) -> bool: - ''' + """ Check for no stack execution - ''' + """ binary = lief.parse(executable) return binary.has_nx CHECKS = { - 'ELF': [ - ('PIE', check_ELF_PIE), - ('NX', check_ELF_NX), - ('RELRO', check_ELF_RELRO), - ('Canary', check_ELF_Canary), - ('separate_code', check_ELF_separate_code), + "ELF": [ + ("PIE", check_ELF_PIE), + ("NX", check_ELF_NX), + ("RELRO", check_ELF_RELRO), + ("Canary", check_ELF_Canary), + ("separate_code", check_ELF_separate_code), + ], + "PE": [ + ("PIE", check_PIE), + ("DYNAMIC_BASE", check_PE_DYNAMIC_BASE), + ("HIGH_ENTROPY_VA", check_PE_HIGH_ENTROPY_VA), + ("NX", check_NX), + ("RELOC_SECTION", check_PE_RELOC_SECTION), ], - 'PE': [ - ('PIE', check_PIE), - ('DYNAMIC_BASE', check_PE_DYNAMIC_BASE), - ('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA), - ('NX', check_NX), - ('RELOC_SECTION', check_PE_RELOC_SECTION), + "MACHO": [ + ("PIE", check_PIE), + ("NOUNDEFS", check_MACHO_NOUNDEFS), + ("NX", check_NX), + ("Canary", check_MACHO_Canary), ], - 'MACHO': [ - ('PIE', check_PIE), - ('NOUNDEFS', check_MACHO_NOUNDEFS), - ('NX', check_NX), - ('Canary', check_MACHO_Canary), - ] } def identify_executable(executable) -> Optional[str]: - with open(filename, 'rb') as f: + with open(filename, "rb") as f: magic = f.read(4) - if magic.startswith(b'MZ'): - return 'PE' - elif magic.startswith(b'\x7fELF'): - return 'ELF' - elif magic.startswith(b'\xcf\xfa'): - return 'MACHO' + if magic.startswith(b"MZ"): + return "PE" + elif magic.startswith(b"\x7fELF"): + return "ELF" + elif magic.startswith(b"\xcf\xfa"): + return "MACHO" return None -if __name__ == '__main__': +if __name__ == "__main__": retval: int = 0 for filename in sys.argv[1:]: try: etype = identify_executable(filename) if etype is None: - print(f'{filename}: unknown format') + print(f"{filename}: unknown format") retval = 1 continue failed: List[str] = [] - for (name, func) in CHECKS[etype]: + for name, func in CHECKS[etype]: if not func(filename): failed.append(name) if failed: print(f"{filename}: failed {' '.join(failed)}") retval = 1 except IOError: - print(f'{filename}: cannot open') + print(f"{filename}: cannot open") retval = 1 sys.exit(retval) diff --git a/contrib/devtools/symbol-check.py b/contrib/devtools/symbol-check.py --- a/contrib/devtools/symbol-check.py +++ b/contrib/devtools/symbol-check.py @@ -2,14 +2,14 @@ # Copyright (c) 2014 Wladimir J. van der Laan # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -''' +""" A script to check that the executables produced by gitian only contain certain symbols and are only linked against allowed libraries. Example usage: find contrib/gitian-builder/build -type f -executable | xargs python3 contrib/devtools/symbol-check.py -''' +""" import subprocess import sys from typing import Optional @@ -34,46 +34,65 @@ # - libc version 2.28 (http://mirror.centos.org/centos/8-stream/AppStream/x86_64/os/Packages/) # # See https://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html for more info. 
-MAX_VERSIONS = { - 'GCC': (8, 3, 0), - 'GLIBC': (2, 27), - 'LIBATOMIC': (1, 0) -} +MAX_VERSIONS = {"GCC": (8, 3, 0), "GLIBC": (2, 27), "LIBATOMIC": (1, 0)} # See here for a description of _IO_stdin_used: # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=634261#109 # Ignore symbols that are exported as part of every executable IGNORE_EXPORTS = { - '_edata', '_end', '__end__', '_init', '__bss_start', '__bss_start__', '_bss_end__', '__bss_end__', '_fini', '_IO_stdin_used', 'stdin', 'stdout', 'stderr', + "_edata", + "_end", + "__end__", + "_init", + "__bss_start", + "__bss_start__", + "_bss_end__", + "__bss_end__", + "_fini", + "_IO_stdin_used", + "stdin", + "stdout", + "stderr", # Jemalloc exported symbols - '__malloc_hook', 'malloc', 'calloc', 'malloc_usable_size', - '__free_hook', 'free', - '__realloc_hook', 'realloc', - '__memalign_hook', 'memalign', 'posix_memalign', 'aligned_alloc', 'valloc', + "__malloc_hook", + "malloc", + "calloc", + "malloc_usable_size", + "__free_hook", + "free", + "__realloc_hook", + "realloc", + "__memalign_hook", + "memalign", + "posix_memalign", + "aligned_alloc", + "valloc", # Figure out why we get these symbols exported on xenial. - '_ZNKSt5ctypeIcE8do_widenEc', 'in6addr_any', 'optarg', - '_ZNSt16_Sp_counted_baseILN9__gnu_cxx12_Lock_policyE2EE10_M_destroyEv' + "_ZNKSt5ctypeIcE8do_widenEc", + "in6addr_any", + "optarg", + "_ZNSt16_Sp_counted_baseILN9__gnu_cxx12_Lock_policyE2EE10_M_destroyEv", } # Allowed NEEDED libraries ELF_ALLOWED_LIBRARIES = { # bitcoind and bitcoin-qt - 'libgcc_s.so.1', # GCC base support - 'libc.so.6', # C library - 'libpthread.so.0', # threading - 'libanl.so.1', # DNS resolve - 'libm.so.6', # math library - 'librt.so.1', # real-time (clock) - 'libatomic.so.1', - 'ld-linux-x86-64.so.2', # 64-bit dynamic linker - 'ld-linux.so.2', # 32-bit dynamic linker - 'ld-linux-aarch64.so.1', # 64-bit ARM dynamic linker - 'ld-linux-armhf.so.3', # 32-bit ARM dynamic linker + "libgcc_s.so.1", # GCC base support + "libc.so.6", # C library + "libpthread.so.0", # threading + "libanl.so.1", # DNS resolve + "libm.so.6", # math library + "librt.so.1", # real-time (clock) + "libatomic.so.1", + "ld-linux-x86-64.so.2", # 64-bit dynamic linker + "ld-linux.so.2", # 32-bit dynamic linker + "ld-linux-aarch64.so.1", # 64-bit ARM dynamic linker + "ld-linux-armhf.so.3", # 32-bit ARM dynamic linker # bitcoin-qt only - 'libxcb.so.1', # part of X11 - 'libfontconfig.so.1', # font support - 'libfreetype.so.6', # font parsing - 'libdl.so.2' # programming interface to dynamic linker + "libxcb.so.1", # part of X11 + "libfontconfig.so.1", # font support + "libfreetype.so.6", # font parsing + "libdl.so.2", # programming interface to dynamic linker } ARCH_MIN_GLIBC_VER = { pixie.EM_386: (2, 1), @@ -84,63 +103,64 @@ MACHO_ALLOWED_LIBRARIES = { # bitcoind and bitcoin-qt - 'libc++.1.dylib', # C++ Standard Library - 'libSystem.B.dylib', # libc, libm, libpthread, libinfo + "libc++.1.dylib", # C++ Standard Library + "libSystem.B.dylib", # libc, libm, libpthread, libinfo # bitcoin-qt only - 'AppKit', # user interface - 'ApplicationServices', # common application tasks. - 'Carbon', # deprecated c back-compat API - 'CFNetwork', # network services and changes in network configurations - 'CoreFoundation', # low level func, data types - 'CoreGraphics', # 2D rendering - 'CoreServices', # operating system services - 'CoreText', # interface for laying out text and handling fonts. - 'Foundation', # base layer functionality for apps/frameworks - 'ImageIO', # read and write image file formats. 
- 'IOKit', # user-space access to hardware devices and drivers. - 'libobjc.A.dylib', # Objective-C runtime library - 'Security', # access control and authentication - 'SystemConfiguration', # access network configuration settings + "AppKit", # user interface + "ApplicationServices", # common application tasks. + "Carbon", # deprecated c back-compat API + "CFNetwork", # network services and changes in network configurations + "CoreFoundation", # low level func, data types + "CoreGraphics", # 2D rendering + "CoreServices", # operating system services + "CoreText", # interface for laying out text and handling fonts. + "Foundation", # base layer functionality for apps/frameworks + "ImageIO", # read and write image file formats. + "IOKit", # user-space access to hardware devices and drivers. + "libobjc.A.dylib", # Objective-C runtime library + "Security", # access control and authentication + "SystemConfiguration", # access network configuration settings } PE_ALLOWED_LIBRARIES = { - 'ADVAPI32.dll', # security & registry - 'IPHLPAPI.DLL', # IP helper API - 'KERNEL32.dll', # win32 base APIs - 'msvcrt.dll', # C standard library for MSVC - 'SHELL32.dll', # shell API - 'USER32.dll', # user interface - 'WS2_32.dll', # sockets + "ADVAPI32.dll", # security & registry + "IPHLPAPI.DLL", # IP helper API + "KERNEL32.dll", # win32 base APIs + "msvcrt.dll", # C standard library for MSVC + "SHELL32.dll", # shell API + "USER32.dll", # user interface + "WS2_32.dll", # sockets # bitcoin-qt only - 'dwmapi.dll', # desktop window manager - 'CRYPT32.dll', # openssl - 'GDI32.dll', # graphics device interface - 'IMM32.dll', # input method editor - 'ole32.dll', # component object model - 'OLEAUT32.dll', # OLE Automation API - 'SHLWAPI.dll', # light weight shell API - 'UxTheme.dll', - 'VERSION.dll', # version checking - 'WINMM.dll', # WinMM audio API + "dwmapi.dll", # desktop window manager + "CRYPT32.dll", # openssl + "GDI32.dll", # graphics device interface + "IMM32.dll", # input method editor + "ole32.dll", # component object model + "OLEAUT32.dll", # OLE Automation API + "SHLWAPI.dll", # light weight shell API + "UxTheme.dll", + "VERSION.dll", # version checking + "WINMM.dll", # WinMM audio API } class CPPFilt(object): - ''' + """ Demangle C++ symbol names. Use a pipe to the 'c++filt' command. 
- ''' + """ def __init__(self): self.proc = subprocess.Popen( - determine_wellknown_cmd('CPPFILT', 'c++filt'), + determine_wellknown_cmd("CPPFILT", "c++filt"), stdin=subprocess.PIPE, stdout=subprocess.PIPE, - universal_newlines=True) + universal_newlines=True, + ) def __call__(self, mangled): - self.proc.stdin.write(mangled + '\n') + self.proc.stdin.write(mangled + "\n") self.proc.stdin.flush() return self.proc.stdout.readline().rstrip() @@ -151,15 +171,17 @@ def check_version(max_versions, version, arch) -> bool: - if '_' in version: - (lib, _, ver) = version.rpartition('_') + if "_" in version: + (lib, _, ver) = version.rpartition("_") else: lib = version - ver = '0' - ver = tuple([int(x) for x in ver.split('.')]) + ver = "0" + ver = tuple([int(x) for x in ver.split(".")]) if lib not in max_versions: return False - return ver <= max_versions[lib] or lib == 'GLIBC' and ver <= ARCH_MIN_GLIBC_VER[arch] + return ( + ver <= max_versions[lib] or lib == "GLIBC" and ver <= ARCH_MIN_GLIBC_VER[arch] + ) def check_imported_symbols(filename) -> bool: @@ -173,8 +195,9 @@ sym = symbol.name.decode() version = symbol.version.decode() if symbol.version is not None else None if version and not check_version(MAX_VERSIONS, version, elf.hdr.e_machine): - print(f'{filename}: symbol {cppfilt(sym)} from unsupported version ' - f'{version}') + print( + f"{filename}: symbol {cppfilt(sym)} from unsupported version {version}" + ) ok = False return ok @@ -189,7 +212,7 @@ sym = symbol.name.decode() if sym in IGNORE_EXPORTS: continue - print(f'{filename}: export of symbol {cppfilt(sym)} not allowed') + print(f"{filename}: export of symbol {cppfilt(sym)} not allowed") ok = False return ok @@ -200,7 +223,7 @@ for library_name in elf.query_dyn_tags(pixie.DT_NEEDED): assert isinstance(library_name, bytes) if library_name.decode() not in ELF_ALLOWED_LIBRARIES: - print(f'{filename}: NEEDED library {library_name.decode()} is not allowed') + print(f"{filename}: NEEDED library {library_name.decode()} is not allowed") ok = False return ok @@ -209,9 +232,9 @@ ok: bool = True binary = lief.parse(filename) for dylib in binary.libraries: - split = dylib.name.split('/') + split = dylib.name.split("/") if split[-1] not in MACHO_ALLOWED_LIBRARIES: - print(f'{split[-1]} is not in ALLOWED_LIBRARIES!') + print(f"{split[-1]} is not in ALLOWED_LIBRARIES!") ok = False return ok @@ -231,7 +254,7 @@ binary = lief.parse(filename) for dylib in binary.libraries: if dylib not in PE_ALLOWED_LIBRARIES: - print(f'{dylib} is not in ALLOWED_LIBRARIES!') + print(f"{dylib} is not in ALLOWED_LIBRARIES!") ok = False return ok @@ -244,53 +267,53 @@ CHECKS = { - 'ELF': [ - ('IMPORTED_SYMBOLS', check_imported_symbols), - ('EXPORTED_SYMBOLS', check_exported_symbols), - ('LIBRARY_DEPENDENCIES', check_ELF_libraries) + "ELF": [ + ("IMPORTED_SYMBOLS", check_imported_symbols), + ("EXPORTED_SYMBOLS", check_exported_symbols), + ("LIBRARY_DEPENDENCIES", check_ELF_libraries), + ], + "MACHO": [ + ("DYNAMIC_LIBRARIES", check_MACHO_libraries), + ("MIN_OS", check_MACHO_min_os), + ("SDK", check_MACHO_sdk), ], - 'MACHO': [ - ('DYNAMIC_LIBRARIES', check_MACHO_libraries), - ('MIN_OS', check_MACHO_min_os), - ('SDK', check_MACHO_sdk), + "PE": [ + ("DYNAMIC_LIBRARIES", check_PE_libraries), + ("SUBSYSTEM_VERSION", check_PE_subsystem_version), ], - 'PE': [ - ('DYNAMIC_LIBRARIES', check_PE_libraries), - ('SUBSYSTEM_VERSION', check_PE_subsystem_version), - ] } def identify_executable(filename) -> Optional[str]: - with open(filename, 'rb') as f: + with open(filename, "rb") as f: 
magic = f.read(4)
-    if magic.startswith(b'MZ'):
-        return 'PE'
-    elif magic.startswith(b'\x7fELF'):
-        return 'ELF'
-    elif magic.startswith(b'\xcf\xfa'):
-        return 'MACHO'
+    if magic.startswith(b"MZ"):
+        return "PE"
+    elif magic.startswith(b"\x7fELF"):
+        return "ELF"
+    elif magic.startswith(b"\xcf\xfa"):
+        return "MACHO"
     return None


-if __name__ == '__main__':
+if __name__ == "__main__":
    retval = 0
    for filename in sys.argv[1:]:
        try:
            etype = identify_executable(filename)
            if etype is None:
-                print(f'{filename}: unknown format')
+                print(f"{filename}: unknown format")
                retval = 1
                continue
            failed = []
-            for (name, func) in CHECKS[etype]:
+            for name, func in CHECKS[etype]:
                if not func(filename):
                    failed.append(name)
            if failed:
                print(f'{filename}: failed {" ".join(failed)}')
                retval = 1
        except IOError:
-            print(f'{filename}: cannot open')
+            print(f"{filename}: cannot open")
            retval = 1
    sys.exit(retval)
diff --git a/contrib/devtools/test-security-check.py b/contrib/devtools/test-security-check.py
--- a/contrib/devtools/test-security-check.py
+++ b/contrib/devtools/test-security-check.py
@@ -2,9 +2,9 @@
 # Copyright (c) 2015-2017 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
-'''
+"""
 Test script for security-check.py
-'''
+"""
 import os
 import subprocess
 import unittest
@@ -13,15 +13,15 @@


 def write_testcode(filename):
-    with open(filename, 'w', encoding="utf8") as f:
-        f.write('''
+    with open(filename, "w", encoding="utf8") as f:
+        f.write("""
    #include <stdio.h>
    int main()
    {
        printf("the quick brown fox jumps over the lazy god\\n");
        return 0;
    }
-    ''')
+    """)


 def clean_files(source, executable):
@@ -30,73 +30,268 @@


 def call_security_check(cc, source, executable, options):
-    subprocess.check_call([*cc, source, '-o', executable] + options)
-    p = subprocess.Popen(['./security-check.py', executable], stdout=subprocess.PIPE,
-                         stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
+    subprocess.check_call([*cc, source, "-o", executable] + options)
+    p = subprocess.Popen(
+        ["./security-check.py", executable],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        stdin=subprocess.PIPE,
+        universal_newlines=True,
+    )
    (stdout, stderr) = p.communicate()
    return (p.returncode, stdout.rstrip())


 class TestSecurityChecks(unittest.TestCase):
    def test_ELF(self):
-        source = 'test1.c'
-        executable = 'test1'
-        cc = determine_wellknown_cmd('CC', 'gcc')
+        source = "test1.c"
+        executable = "test1"
+        cc = determine_wellknown_cmd("CC", "gcc")
        write_testcode(source)

-        self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-zexecstack', '-fno-stack-protector', '-Wl,-znorelro', '-no-pie', '-fno-PIE', '-Wl,-z,separate-code']),
-                         (1, executable + ': failed PIE NX RELRO Canary'))
-        self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack', '-fno-stack-protector', '-Wl,-znorelro', '-no-pie', '-fno-PIE', '-Wl,-z,separate-code']),
-                         (1, executable + ': failed PIE RELRO Canary'))
-        self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack', '-fstack-protector-all', '-Wl,-znorelro', '-no-pie', '-fno-PIE', '-Wl,-z,separate-code']),
-                         (1, executable + ': failed PIE RELRO'))
-        self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack', '-fstack-protector-all', '-Wl,-znorelro', '-pie', '-fPIE', '-Wl,-z,separate-code']),
-                         (1, executable + ': failed RELRO'))
-        self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack', 
'-fstack-protector-all', '-Wl,-zrelro', '-Wl,-z,now', '-pie', '-fPIE', '-Wl,-z,noseparate-code']), - (1, executable + ': failed separate_code')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack', '-fstack-protector-all', '-Wl,-zrelro', '-Wl,-z,now', '-pie', '-fPIE', '-Wl,-z,separate-code']), - (0, '')) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,-zexecstack", + "-fno-stack-protector", + "-Wl,-znorelro", + "-no-pie", + "-fno-PIE", + "-Wl,-z,separate-code", + ], + ), + (1, executable + ": failed PIE NX RELRO Canary"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,-znoexecstack", + "-fno-stack-protector", + "-Wl,-znorelro", + "-no-pie", + "-fno-PIE", + "-Wl,-z,separate-code", + ], + ), + (1, executable + ": failed PIE RELRO Canary"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,-znoexecstack", + "-fstack-protector-all", + "-Wl,-znorelro", + "-no-pie", + "-fno-PIE", + "-Wl,-z,separate-code", + ], + ), + (1, executable + ": failed PIE RELRO"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,-znoexecstack", + "-fstack-protector-all", + "-Wl,-znorelro", + "-pie", + "-fPIE", + "-Wl,-z,separate-code", + ], + ), + (1, executable + ": failed RELRO"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,-znoexecstack", + "-fstack-protector-all", + "-Wl,-zrelro", + "-Wl,-z,now", + "-pie", + "-fPIE", + "-Wl,-z,noseparate-code", + ], + ), + (1, executable + ": failed separate_code"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,-znoexecstack", + "-fstack-protector-all", + "-Wl,-zrelro", + "-Wl,-z,now", + "-pie", + "-fPIE", + "-Wl,-z,separate-code", + ], + ), + (0, ""), + ) clean_files(source, executable) def test_PE(self): - source = 'test1.c' - executable = 'test1.exe' - cc = determine_wellknown_cmd('CC', 'x86_64-w64-mingw32-gcc') + source = "test1.c" + executable = "test1.exe" + cc = determine_wellknown_cmd("CC", "x86_64-w64-mingw32-gcc") write_testcode(source) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--no-nxcompat', '-Wl,--no-dynamicbase', '-Wl,--no-high-entropy-va', '-no-pie', '-fno-PIE']), - (1, executable + ': failed DYNAMIC_BASE HIGH_ENTROPY_VA NX RELOC_SECTION')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat', '-Wl,--no-dynamicbase', '-Wl,--no-high-entropy-va', '-no-pie', '-fno-PIE']), - (1, executable + ': failed DYNAMIC_BASE HIGH_ENTROPY_VA RELOC_SECTION')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat', '-Wl,--dynamicbase', '-Wl,--no-high-entropy-va', '-no-pie', '-fno-PIE']), - (1, executable + ': failed HIGH_ENTROPY_VA RELOC_SECTION')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat', '-Wl,--dynamicbase', '-Wl,--high-entropy-va', '-no-pie', '-fno-PIE']), - (1, executable + ': failed RELOC_SECTION')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat', '-Wl,--dynamicbase', '-Wl,--high-entropy-va', '-pie', '-fPIE']), - (0, '')) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,--no-nxcompat", + "-Wl,--no-dynamicbase", + "-Wl,--no-high-entropy-va", + "-no-pie", + "-fno-PIE", + ], + ), + (1, executable + ": failed DYNAMIC_BASE HIGH_ENTROPY_VA NX RELOC_SECTION"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, 
+ [ + "-Wl,--nxcompat", + "-Wl,--no-dynamicbase", + "-Wl,--no-high-entropy-va", + "-no-pie", + "-fno-PIE", + ], + ), + (1, executable + ": failed DYNAMIC_BASE HIGH_ENTROPY_VA RELOC_SECTION"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,--nxcompat", + "-Wl,--dynamicbase", + "-Wl,--no-high-entropy-va", + "-no-pie", + "-fno-PIE", + ], + ), + (1, executable + ": failed HIGH_ENTROPY_VA RELOC_SECTION"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,--nxcompat", + "-Wl,--dynamicbase", + "-Wl,--high-entropy-va", + "-no-pie", + "-fno-PIE", + ], + ), + (1, executable + ": failed RELOC_SECTION"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,--nxcompat", + "-Wl,--dynamicbase", + "-Wl,--high-entropy-va", + "-pie", + "-fPIE", + ], + ), + (0, ""), + ) clean_files(source, executable) def test_MACHO(self): - source = 'test1.c' - executable = 'test1' - cc = determine_wellknown_cmd('CC', 'clang') + source = "test1.c" + executable = "test1" + cc = determine_wellknown_cmd("CC", "clang") write_testcode(source) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie', '-Wl,-flat_namespace', '-Wl,-allow_stack_execute', '-fno-stack-protector']), - (1, executable + ': failed PIE NOUNDEFS NX Canary')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie', '-Wl,-flat_namespace', '-Wl,-allow_stack_execute', '-fstack-protector-all']), - (1, executable + ': failed PIE NOUNDEFS NX')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie', '-Wl,-flat_namespace', '-fstack-protector-all']), - (1, executable + ': failed PIE NOUNDEFS')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie', '-fstack-protector-all']), - (1, executable + ': failed PIE')) - self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-pie', '-fstack-protector-all']), - (0, '')) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,-no_pie", + "-Wl,-flat_namespace", + "-Wl,-allow_stack_execute", + "-fno-stack-protector", + ], + ), + (1, executable + ": failed PIE NOUNDEFS NX Canary"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + [ + "-Wl,-no_pie", + "-Wl,-flat_namespace", + "-Wl,-allow_stack_execute", + "-fstack-protector-all", + ], + ), + (1, executable + ": failed PIE NOUNDEFS NX"), + ) + self.assertEqual( + call_security_check( + cc, + source, + executable, + ["-Wl,-no_pie", "-Wl,-flat_namespace", "-fstack-protector-all"], + ), + (1, executable + ": failed PIE NOUNDEFS"), + ) + self.assertEqual( + call_security_check( + cc, source, executable, ["-Wl,-no_pie", "-fstack-protector-all"] + ), + (1, executable + ": failed PIE"), + ) + self.assertEqual( + call_security_check( + cc, source, executable, ["-Wl,-pie", "-fstack-protector-all"] + ), + (0, ""), + ) clean_files(source, executable) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/contrib/devtools/test-symbol-check.py b/contrib/devtools/test-symbol-check.py --- a/contrib/devtools/test-symbol-check.py +++ b/contrib/devtools/test-symbol-check.py @@ -2,9 +2,9 @@ # Copyright (c) 2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
-'''
+"""
 Test script for symbol-check.py
-'''
+"""
 import os
 import subprocess
 import unittest
@@ -14,9 +14,12 @@


 def call_symbol_check(cc: List[str], source, executable, options):
-    subprocess.run([*cc, source, '-o', executable] + options, check=True)
-    p = subprocess.run(['./contrib/devtools/symbol-check.py', executable],
-                       stdout=subprocess.PIPE, universal_newlines=True)
+    subprocess.run([*cc, source, "-o", executable] + options, check=True)
+    p = subprocess.run(
+        ["./contrib/devtools/symbol-check.py", executable],
+        stdout=subprocess.PIPE,
+        universal_newlines=True,
+    )
    os.remove(source)
    os.remove(executable)
    return (p.returncode, p.stdout.rstrip())
@@ -24,14 +27,14 @@

 class TestSymbolChecks(unittest.TestCase):
    def test_ELF(self):
-        source = 'test1.c'
-        executable = 'test1'
-        cc = determine_wellknown_cmd('CC', 'gcc')
+        source = "test1.c"
+        executable = "test1"
+        cc = determine_wellknown_cmd("CC", "gcc")

        # renameat2 was introduced in GLIBC 2.28, so is newer than the upper limit
        # of glibc for all platforms
-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+        with open(source, "w", encoding="utf8") as f:
+            f.write("""
                #define _GNU_SOURCE
                #include <stdio.h>
                #include <linux/fs.h>
@@ -44,19 +47,26 @@
                renameat2(0, "test", 0, "test_", RENAME_EXCHANGE);
                return 0;
            }
-            ''')
+            """)

-        self.assertEqual(call_symbol_check(cc, source, executable, []),
-                         (1, executable + ': symbol renameat2 from unsupported version GLIBC_2.28\n' +
-                          executable + ': failed IMPORTED_SYMBOLS'))
+        self.assertEqual(
+            call_symbol_check(cc, source, executable, []),
+            (
+                1,
+                executable
+                + ": symbol renameat2 from unsupported version GLIBC_2.28\n"
+                + executable
+                + ": failed IMPORTED_SYMBOLS",
+            ),
+        )

        # -lutil is part of the libc6 package so a safe bet that it's installed
        # it's also out of context enough that it's unlikely to ever become a real
        # dependency
-        source = 'test2.c'
-        executable = 'test2'
-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+        source = "test2.c"
+        executable = "test2"
+        with open(source, "w", encoding="utf8") as f:
+            f.write("""
                #include <utmp.h>

                int main()
@@ -64,35 +74,41 @@
                login(0);
                return 0;
            }
-            ''')
+            """)

-        self.assertEqual(call_symbol_check(cc, source, executable, ['-lutil']),
-                         (1, executable + ': NEEDED library libutil.so.1 is not allowed\n' +
-                          executable + ': failed LIBRARY_DEPENDENCIES'))
+        self.assertEqual(
+            call_symbol_check(cc, source, executable, ["-lutil"]),
+            (
+                1,
+                executable
+                + ": NEEDED library libutil.so.1 is not allowed\n"
+                + executable
+                + ": failed LIBRARY_DEPENDENCIES",
+            ),
+        )

        # finally, check a conforming file that simply uses a math function
-        source = 'test3.c'
-        executable = 'test3'
-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+        source = "test3.c"
+        executable = "test3"
+        with open(source, "w", encoding="utf8") as f:
+            f.write("""
                #include <math.h>

                int main()
                {
                    return (int)pow(2.0, 4.0);
                }
-            ''')
+            """)

-        self.assertEqual(call_symbol_check(cc, source, executable, ['-lm']),
-                         (0, ''))
+        self.assertEqual(call_symbol_check(cc, source, executable, ["-lm"]), (0, ""))

    def test_MACHO(self):
-        source = 'test1.c'
-        executable = 'test1'
-        cc = determine_wellknown_cmd('CC', 'clang')
+        source = "test1.c"
+        executable = "test1"
+        cc = determine_wellknown_cmd("CC", "clang")

-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+        with open(source, "w", encoding="utf8") as f:
+            f.write("""
                #include <expat.h>

                int main()
@@ -101,19 +117,32 @@

                    return 0;
                }
-            ''')
+            """)

        self.assertEqual(
-            call_symbol_check(cc, source, executable,
-                              ['-lexpat', '-Wl,-platform_version', 
'-Wl,macos',
-                               '-Wl,11.4', '-Wl,11.4']),
-            (1, 'libexpat.1.dylib is not in ALLOWED_LIBRARIES!\n' +
-             f'{executable}: failed DYNAMIC_LIBRARIES MIN_OS SDK'))
-
-        source = 'test2.c'
-        executable = 'test2'
-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+            call_symbol_check(
+                cc,
+                source,
+                executable,
+                [
+                    "-lexpat",
+                    "-Wl,-platform_version",
+                    "-Wl,macos",
+                    "-Wl,11.4",
+                    "-Wl,11.4",
+                ],
+            ),
+            (
+                1,
+                "libexpat.1.dylib is not in ALLOWED_LIBRARIES!\n"
+                + f"{executable}: failed DYNAMIC_LIBRARIES MIN_OS SDK",
+            ),
+        )
+
+        source = "test2.c"
+        executable = "test2"
+        with open(source, "w", encoding="utf8") as f:
+            f.write("""
                #include <CoreGraphics/CoreGraphics.h>

                int main()
@@ -121,36 +150,52 @@
                CGMainDisplayID();
                return 0;
            }
-            ''')
+            """)

        self.assertEqual(
-            call_symbol_check(cc, source, executable,
-                              ['-framework', 'CoreGraphics', '-Wl,-platform_version',
-                               '-Wl,macos', '-Wl,11.4', '-Wl,11.4']),
-            (1, f'{executable}: failed MIN_OS SDK'))
-
-        source = 'test3.c'
-        executable = 'test3'
-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+            call_symbol_check(
+                cc,
+                source,
+                executable,
+                [
+                    "-framework",
+                    "CoreGraphics",
+                    "-Wl,-platform_version",
+                    "-Wl,macos",
+                    "-Wl,11.4",
+                    "-Wl,11.4",
+                ],
+            ),
+            (1, f"{executable}: failed MIN_OS SDK"),
+        )
+
+        source = "test3.c"
+        executable = "test3"
+        with open(source, "w", encoding="utf8") as f:
+            f.write("""
                int main()
                {
                    return 0;
                }
-            ''')
+            """)

-        self.assertEqual(call_symbol_check(cc, source, executable,
-                                           ['-Wl,-platform_version', '-Wl,macos',
-                                            '-Wl,10.15', '-Wl,11.4']),
-                         (1, f'{executable}: failed SDK'))
+        self.assertEqual(
+            call_symbol_check(
+                cc,
+                source,
+                executable,
+                ["-Wl,-platform_version", "-Wl,macos", "-Wl,10.15", "-Wl,11.4"],
+            ),
+            (1, f"{executable}: failed SDK"),
+        )

    def test_PE(self):
-        source = 'test1.c'
-        executable = 'test1.exe'
-        cc = determine_wellknown_cmd('CC', 'x86_64-w64-mingw32-gcc')
+        source = "test1.c"
+        executable = "test1.exe"
+        cc = determine_wellknown_cmd("CC", "x86_64-w64-mingw32-gcc")

-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+        with open(source, "w", encoding="utf8") as f:
+            f.write("""
                #include <pdh.h>

                int main()
@@ -158,34 +203,59 @@
                PdhConnectMachineA(NULL);
                return 0;
            }
-            ''')
+            """)

        self.assertEqual(
            call_symbol_check(
-                cc, source, executable,
-                ['-lpdh', '-Wl,--major-subsystem-version', '-Wl,6',
-                 '-Wl,--minor-subsystem-version', '-Wl,1']),
-            (1, 'pdh.dll is not in ALLOWED_LIBRARIES!\n' +
-             executable + ': failed DYNAMIC_LIBRARIES'))
-
-        source = 'test2.c'
-        executable = 'test2.exe'
-
-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+                cc,
+                source,
+                executable,
+                [
+                    "-lpdh",
+                    "-Wl,--major-subsystem-version",
+                    "-Wl,6",
+                    "-Wl,--minor-subsystem-version",
+                    "-Wl,1",
+                ],
+            ),
+            (
+                1,
+                "pdh.dll is not in ALLOWED_LIBRARIES!\n"
+                + executable
+                + ": failed DYNAMIC_LIBRARIES",
+            ),
+        )
+
+        source = "test2.c"
+        executable = "test2.exe"
+
+        with open(source, "w", encoding="utf8") as f:
+            f.write("""
                int main()
                {
                    return 0;
                }
-            ''')
-
-        self.assertEqual(call_symbol_check(cc, source, executable, ['-Wl,--major-subsystem-version', '-Wl,9', '-Wl,--minor-subsystem-version', '-Wl,9']),
-                         (1, executable + ': failed SUBSYSTEM_VERSION'))
+            """)

-        source = 'test3.c'
-        executable = 'test3.exe'
-        with open(source, 'w', encoding="utf8") as f:
-            f.write('''
+        self.assertEqual(
+            call_symbol_check(
+                cc,
+                source,
+                executable,
+                [
+                    "-Wl,--major-subsystem-version",
+                    "-Wl,9",
+                    "-Wl,--minor-subsystem-version",
+                    "-Wl,9",
+                ],
+            ),
+            (1, executable + ": failed SUBSYSTEM_VERSION"),
+        )
+
+        source = 
"test3.c" + executable = "test3.exe" + with open(source, "w", encoding="utf8") as f: + f.write(""" #include int main() @@ -193,14 +263,24 @@ CoFreeUnusedLibrariesEx(0,0); return 0; } - ''') + """) self.assertEqual( - call_symbol_check(cc, source, executable, - ['-lole32', '-Wl,--major-subsystem-version', '-Wl,6', - '-Wl,--minor-subsystem-version', '-Wl,1']), - (0, '')) - - -if __name__ == '__main__': + call_symbol_check( + cc, + source, + executable, + [ + "-lole32", + "-Wl,--major-subsystem-version", + "-Wl,6", + "-Wl,--minor-subsystem-version", + "-Wl,1", + ], + ), + (0, ""), + ) + + +if __name__ == "__main__": unittest.main() diff --git a/contrib/devtools/update-translations.py b/contrib/devtools/update-translations.py --- a/contrib/devtools/update-translations.py +++ b/contrib/devtools/update-translations.py @@ -2,7 +2,7 @@ # Copyright (c) 2014 Wladimir J. van der Laan # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -''' +""" Run this script from the root of the repository to update all translations from transifex. It will do the following automatically: @@ -14,7 +14,7 @@ TODO: - auto-add new translations to the build system according to the translation process -''' +""" import io import os import re @@ -23,36 +23,34 @@ import xml.etree.ElementTree as ET # Name of transifex tool -TX = 'tx' +TX = "tx" # Name of source language file -SOURCE_LANG = 'bitcoin_en.ts' +SOURCE_LANG = "bitcoin_en.ts" # Directory with locale files -LOCALE_DIR = 'src/qt/locale' +LOCALE_DIR = "src/qt/locale" # Minimum number of messages for translation to be considered at all MIN_NUM_MESSAGES = 10 def check_at_repository_root(): - if not os.path.exists('.git'): - print('No .git directory found') - print( - 'Execute this script at the root of the repository', - file=sys.stderr) + if not os.path.exists(".git"): + print("No .git directory found") + print("Execute this script at the root of the repository", file=sys.stderr) sys.exit(1) def fetch_all_translations(): - if subprocess.call([TX, 'pull', '-f', '-a']): - print('Error while fetching translations', file=sys.stderr) + if subprocess.call([TX, "pull", "-f", "-a"]): + print("Error while fetching translations", file=sys.stderr) sys.exit(1) def find_format_specifiers(s): - '''Find all format specifiers in a string.''' + """Find all format specifiers in a string.""" pos = 0 specifiers = [] while True: - percent = s.find('%', pos) + percent = s.find("%", pos) if percent < 0: break specifiers.append(s[percent + 1]) @@ -61,11 +59,11 @@ def split_format_specifiers(specifiers): - '''Split format specifiers between numeric (Qt) and others (strprintf)''' + """Split format specifiers between numeric (Qt) and others (strprintf)""" numeric = [] other = [] for s in specifiers: - if s in {'1', '2', '3', '4', '5', '6', '7', '8', '9'}: + if s in {"1", "2", "3", "4", "5", "6", "7", "8", "9"}: numeric.append(s) else: other.append(s) @@ -84,8 +82,8 @@ def sanitize_string(s): - '''Sanitize string for printing''' - return s.replace('\n', ' ') + """Sanitize string for printing""" + return s.replace("\n", " ") def check_format_specifiers(source, translation, errors, numerus): @@ -94,43 +92,51 @@ # if this fails, go change the source as this is hacky and confusing! 
assert not (source_f[0] and source_f[1])
    try:
-        translation_f = split_format_specifiers(
-            find_format_specifiers(translation))
+        translation_f = split_format_specifiers(find_format_specifiers(translation))
    except IndexError:
-        errors.append("Parse error in translation for '{}': '{}'".format(
-            sanitize_string(source), sanitize_string(translation)))
+        errors.append(
+            "Parse error in translation for '{}': '{}'".format(
+                sanitize_string(source), sanitize_string(translation)
+            )
+        )
        return False
    else:
        if source_f != translation_f:
-            if numerus and source_f == (set(), ['n']) and translation_f == (
-                    set(), []) and translation.find('%') == -1:
+            if (
+                numerus
+                and source_f == (set(), ["n"])
+                and translation_f == (set(), [])
+                and translation.find("%") == -1
+            ):
                # Allow numerus translations to omit %n specifier (usually when
                # it only has one possible value)
                return True
-            errors.append("Mismatch between '{}' and '{}'".format(
-                sanitize_string(source), sanitize_string(translation)))
+            errors.append(
+                "Mismatch between '{}' and '{}'".format(
+                    sanitize_string(source), sanitize_string(translation)
+                )
+            )
            return False
    return True


-def all_ts_files(suffix=''):
+def all_ts_files(suffix=""):
    for filename in os.listdir(LOCALE_DIR):
        # process only language files, and do not process source language
-        if not filename.endswith(
-                '.ts' + suffix) or filename == SOURCE_LANG + suffix:
+        if not filename.endswith(".ts" + suffix) or filename == SOURCE_LANG + suffix:
            continue
        if suffix:  # remove provided suffix
-            filename = filename[0:-len(suffix)]
+            filename = filename[0 : -len(suffix)]
        filepath = os.path.join(LOCALE_DIR, filename)
        yield filename, filepath


-FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
+FIX_RE = re.compile(b"[\x00-\x09\x0b\x0c\x0e-\x1f]")


 def remove_invalid_characters(s):
-    '''Remove invalid characters from translation string'''
-    return FIX_RE.sub(b'', s)
+    """Remove invalid characters from translation string"""
+    return FIX_RE.sub(b"", s)


 # Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
@@ -140,29 +146,29 @@

 def escape_cdata(text):
    text = _orig_escape_cdata(text)
-    text = text.replace("'", '&apos;')
-    text = text.replace('"', '&quot;')
+    text = text.replace("'", "&apos;")
+    text = text.replace('"', "&quot;")
    return text


 def postprocess_translations(reduce_diff_hacks=False):
-    print('Checking and postprocessing...')
+    print("Checking and postprocessing...")

    if reduce_diff_hacks:
        global _orig_escape_cdata
        _orig_escape_cdata = ET._escape_cdata
        ET._escape_cdata = escape_cdata

-    for (filename, filepath) in all_ts_files():
-        os.rename(filepath, filepath + '.orig')
+    for filename, filepath in all_ts_files():
+        os.rename(filepath, filepath + ".orig")

    have_errors = False
-    for (filename, filepath) in all_ts_files('.orig'):
+    for filename, filepath in all_ts_files(".orig"):
        # pre-fixups to cope with transifex output
        # need to override encoding because 'utf8' is not understood only
        # 'utf-8'
-        parser = ET.XMLParser(encoding='utf-8')
-        with open(filepath + '.orig', 'rb') as f:
+        parser = ET.XMLParser(encoding="utf-8")
+        with open(filepath + ".orig", "rb") as f:
            data = f.read()
        # remove control characters; this must be done over the entire file
        # otherwise the XML parser will fail
@@ -171,15 +177,16 @@

        # iterate over all messages in file
        root = tree.getroot()
-        for context in root.findall('context'):
-            for message in context.findall('message'):
-                numerus = message.get('numerus') == 'yes'
-                source = message.find('source').text
-                translation_node = 
message.find('translation') + for context in root.findall("context"): + for message in context.findall("message"): + numerus = message.get("numerus") == "yes" + source = message.find("source").text + translation_node = message.find("translation") # pick all numerusforms if numerus: translations = [ - i.text for i in translation_node.findall('numerusform')] + i.text for i in translation_node.findall("numerusform") + ] else: translations = [translation_node.text] @@ -188,31 +195,32 @@ continue errors = [] valid = check_format_specifiers( - source, translation, errors, numerus) + source, translation, errors, numerus + ) for error in errors: - print(f'{filename}: {error}') + print(f"{filename}: {error}") if not valid: # set type to unfinished and clear string if invalid translation_node.clear() - translation_node.set('type', 'unfinished') + translation_node.set("type", "unfinished") have_errors = True # Remove location tags - for location in message.findall('location'): + for location in message.findall("location"): message.remove(location) # Remove entire message if it is an unfinished translation - if translation_node.get('type') == 'unfinished': + if translation_node.get("type") == "unfinished": context.remove(message) # check if document is (virtually) empty, and remove it if so num_messages = 0 - for context in root.findall('context'): - for message in context.findall('message'): + for context in root.findall("context"): + for message in context.findall("message"): num_messages += 1 if num_messages < MIN_NUM_MESSAGES: - print(f'Removing {filepath}, as it contains only {num_messages} messages') + print(f"Removing {filepath}, as it contains only {num_messages} messages") continue # write fixed-up tree @@ -220,17 +228,17 @@ # formatting if reduce_diff_hacks: out = io.BytesIO() - tree.write(out, encoding='utf-8') + tree.write(out, encoding="utf-8") out = out.getvalue() - out = out.replace(b' />', b'/>') - with open(filepath, 'wb') as f: + out = out.replace(b" />", b"/>") + with open(filepath, "wb") as f: f.write(out) else: - tree.write(filepath, encoding='utf-8') + tree.write(filepath, encoding="utf-8") return have_errors -if __name__ == '__main__': +if __name__ == "__main__": check_at_repository_root() fetch_all_translations() postprocess_translations() diff --git a/contrib/devtools/utils.py b/contrib/devtools/utils.py --- a/contrib/devtools/utils.py +++ b/contrib/devtools/utils.py @@ -2,9 +2,9 @@ # Copyright (c) 2021 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
-''' +""" Common utility functions -''' +""" import os import shutil import sys @@ -16,7 +16,7 @@ maybe_which = shutil.which(progname) if maybe_env: # Well-known vars are often meant to be word-split - return maybe_env.split(' ') + return maybe_env.split(" ") elif maybe_which: return [maybe_which] else: diff --git a/contrib/gitian-build.py b/contrib/gitian-build.py --- a/contrib/gitian-build.py +++ b/contrib/gitian-build.py @@ -12,181 +12,397 @@ def setup(): global args, workdir - programs = ['ruby', 'git', 'apt-cacher-ng', 'make', 'wget'] + programs = ["ruby", "git", "apt-cacher-ng", "make", "wget"] if args.kvm: - programs += ['python-vm-builder', 'qemu-kvm', 'qemu-utils'] + programs += ["python-vm-builder", "qemu-kvm", "qemu-utils"] elif args.docker: - dockers = ['docker.io', 'docker-ce'] + dockers = ["docker.io", "docker-ce"] for i in dockers: - return_code = subprocess.call( - ['sudo', 'apt-get', 'install', '-qq', i]) + return_code = subprocess.call(["sudo", "apt-get", "install", "-qq", i]) if return_code == 0: break if return_code != 0: - print('Cannot find any way to install docker', file=sys.stderr) + print("Cannot find any way to install docker", file=sys.stderr) exit(1) else: - programs += ['lxc', 'debootstrap'] - subprocess.check_call(['sudo', 'apt-get', 'install', '-qq'] + programs) - if not os.path.isdir('gitian-builder'): + programs += ["lxc", "debootstrap"] + subprocess.check_call(["sudo", "apt-get", "install", "-qq"] + programs) + if not os.path.isdir("gitian-builder"): subprocess.check_call( - ['git', 'clone', 'https://github.com/devrandom/gitian-builder.git']) - if not os.path.isdir('bitcoin-abc'): + ["git", "clone", "https://github.com/devrandom/gitian-builder.git"] + ) + if not os.path.isdir("bitcoin-abc"): subprocess.check_call( - ['git', 'clone', 'https://github.com/Bitcoin-ABC/bitcoin-abc.git']) - os.chdir('gitian-builder') - make_image_prog = ['bin/make-base-vm', - '--distro', 'debian', '--suite', 'buster', '--arch', 'amd64'] + ["git", "clone", "https://github.com/Bitcoin-ABC/bitcoin-abc.git"] + ) + os.chdir("gitian-builder") + make_image_prog = [ + "bin/make-base-vm", + "--distro", + "debian", + "--suite", + "buster", + "--arch", + "amd64", + ] if args.docker: - make_image_prog += ['--docker'] + make_image_prog += ["--docker"] elif not args.kvm: - make_image_prog += ['--lxc'] + make_image_prog += ["--lxc"] subprocess.check_call(make_image_prog) os.chdir(workdir) if args.is_bionic and not args.kvm and not args.docker: subprocess.check_call( - ['sudo', 'sed', '-i', 's/lxcbr0/br0/', '/etc/default/lxc-net']) - print('Reboot is required') + ["sudo", "sed", "-i", "s/lxcbr0/br0/", "/etc/default/lxc-net"] + ) + print("Reboot is required") exit(0) def build(): global args, workdir - base_output_dir = 'bitcoin-binaries/' + args.version - os.makedirs(base_output_dir + '/src', exist_ok=True) - print('\nBuilding Dependencies\n') - os.chdir('gitian-builder') - os.makedirs('inputs', exist_ok=True) - - subprocess.check_call(['make', '-C', '../bitcoin-abc/depends', - 'download', 'SOURCES_PATH=' + os.getcwd() + '/cache/common']) - - output_dir_src = '../' + base_output_dir + '/src' + base_output_dir = "bitcoin-binaries/" + args.version + os.makedirs(base_output_dir + "/src", exist_ok=True) + print("\nBuilding Dependencies\n") + os.chdir("gitian-builder") + os.makedirs("inputs", exist_ok=True) + + subprocess.check_call( + [ + "make", + "-C", + "../bitcoin-abc/depends", + "download", + "SOURCES_PATH=" + os.getcwd() + "/cache/common", + ] + ) + + output_dir_src = "../" + base_output_dir 
+ "/src" if args.linux: - print('\nCompiling ' + args.version + ' Linux') - subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin=' + args.commit, - '--url', 'bitcoin=' + args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml']) - subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + - '-linux', '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml']) - output_dir_linux = '../' + base_output_dir + '/linux' + print("\nCompiling " + args.version + " Linux") + subprocess.check_call( + [ + "bin/gbuild", + "-j", + args.jobs, + "-m", + args.memory, + "--commit", + "bitcoin=" + args.commit, + "--url", + "bitcoin=" + args.url, + "../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml", + ] + ) + subprocess.check_call( + [ + "bin/gsign", + "-p", + args.sign_prog, + "--signer", + args.signer, + "--release", + args.version + "-linux", + "--destination", + "../gitian.sigs/", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml", + ] + ) + output_dir_linux = "../" + base_output_dir + "/linux" os.makedirs(output_dir_linux, exist_ok=True) subprocess.check_call( - 'mv build/out/bitcoin-*.tar.gz ' + output_dir_linux, shell=True) + "mv build/out/bitcoin-*.tar.gz " + output_dir_linux, shell=True + ) subprocess.check_call( - 'mv build/out/src/bitcoin-*.tar.gz ' + output_dir_src, shell=True) + "mv build/out/src/bitcoin-*.tar.gz " + output_dir_src, shell=True + ) subprocess.check_call( - 'mv result/bitcoin-*-linux-res.yml ' + output_dir_linux, shell=True) + "mv result/bitcoin-*-linux-res.yml " + output_dir_linux, shell=True + ) if args.windows: - print('\nCompiling ' + args.version + ' Windows') - subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin=' + args.commit, - '--url', 'bitcoin=' + args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml']) - subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + - '-win-unsigned', '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml']) - output_dir_win = '../' + base_output_dir + '/win' + print("\nCompiling " + args.version + " Windows") + subprocess.check_call( + [ + "bin/gbuild", + "-j", + args.jobs, + "-m", + args.memory, + "--commit", + "bitcoin=" + args.commit, + "--url", + "bitcoin=" + args.url, + "../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml", + ] + ) + subprocess.check_call( + [ + "bin/gsign", + "-p", + args.sign_prog, + "--signer", + args.signer, + "--release", + args.version + "-win-unsigned", + "--destination", + "../gitian.sigs/", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml", + ] + ) + output_dir_win = "../" + base_output_dir + "/win" os.makedirs(output_dir_win, exist_ok=True) subprocess.check_call( - 'mv build/out/bitcoin-*-win-unsigned.tar.gz inputs/', shell=True) + "mv build/out/bitcoin-*-win-unsigned.tar.gz inputs/", shell=True + ) subprocess.check_call( - 'mv build/out/bitcoin-*.zip build/out/bitcoin-*.exe ' + output_dir_win, shell=True) + "mv build/out/bitcoin-*.zip build/out/bitcoin-*.exe " + output_dir_win, + shell=True, + ) subprocess.check_call( - 'mv build/out/src/bitcoin-*.tar.gz ' + output_dir_src, shell=True) + "mv build/out/src/bitcoin-*.tar.gz " + output_dir_src, shell=True + ) subprocess.check_call( - 'mv result/bitcoin-*-win-res.yml ' + output_dir_win, shell=True) + "mv result/bitcoin-*-win-res.yml " + 
output_dir_win, shell=True + ) if args.macos: - print('\nCompiling ' + args.version + ' MacOS') - subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin=' + args.commit, - '--url', 'bitcoin=' + args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml']) - subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + - '-osx-unsigned', '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml']) - output_dir_osx = '../' + base_output_dir + '/osx' + print("\nCompiling " + args.version + " MacOS") + subprocess.check_call( + [ + "bin/gbuild", + "-j", + args.jobs, + "-m", + args.memory, + "--commit", + "bitcoin=" + args.commit, + "--url", + "bitcoin=" + args.url, + "../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml", + ] + ) + subprocess.check_call( + [ + "bin/gsign", + "-p", + args.sign_prog, + "--signer", + args.signer, + "--release", + args.version + "-osx-unsigned", + "--destination", + "../gitian.sigs/", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml", + ] + ) + output_dir_osx = "../" + base_output_dir + "/osx" os.makedirs(output_dir_osx, exist_ok=True) subprocess.check_call( - 'mv build/out/bitcoin-*-osx-unsigned.tar.gz inputs/', shell=True) + "mv build/out/bitcoin-*-osx-unsigned.tar.gz inputs/", shell=True + ) subprocess.check_call( - 'mv build/out/bitcoin-*.tar.gz build/out/bitcoin-*.dmg ' + output_dir_osx, shell=True) + "mv build/out/bitcoin-*.tar.gz build/out/bitcoin-*.dmg " + output_dir_osx, + shell=True, + ) subprocess.check_call( - 'mv build/out/src/bitcoin-*.tar.gz ' + output_dir_src, shell=True) + "mv build/out/src/bitcoin-*.tar.gz " + output_dir_src, shell=True + ) subprocess.check_call( - 'mv result/bitcoin-*-osx-res.yml ' + output_dir_osx, shell=True) + "mv result/bitcoin-*-osx-res.yml " + output_dir_osx, shell=True + ) os.chdir(workdir) if args.commit_files: - print('\nCommitting ' + args.version + ' Unsigned Sigs\n') - os.chdir('gitian.sigs') + print("\nCommitting " + args.version + " Unsigned Sigs\n") + os.chdir("gitian.sigs") + subprocess.check_call(["git", "add", args.version + "-linux/" + args.signer]) subprocess.check_call( - ['git', 'add', args.version + '-linux/' + args.signer]) + ["git", "add", args.version + "-win-unsigned/" + args.signer] + ) subprocess.check_call( - ['git', 'add', args.version + '-win-unsigned/' + args.signer]) + ["git", "add", args.version + "-osx-unsigned/" + args.signer] + ) subprocess.check_call( - ['git', 'add', args.version + '-osx-unsigned/' + args.signer]) - subprocess.check_call( - ['git', 'commit', '-m', 'Add ' + args.version + ' unsigned sigs for ' + args.signer]) + [ + "git", + "commit", + "-m", + "Add " + args.version + " unsigned sigs for " + args.signer, + ] + ) os.chdir(workdir) def sign(): global args, workdir - os.chdir('gitian-builder') + os.chdir("gitian-builder") if args.windows: - print('\nSigning ' + args.version + ' Windows') - subprocess.check_call('cp inputs/bitcoin-' + args.version + - '-win-unsigned.tar.gz inputs/bitcoin-win-unsigned.tar.gz', shell=True) - subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature=' + args.commit, - '../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml']) - subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + '-win-signed', - '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml']) + print("\nSigning " + 
args.version + " Windows") + subprocess.check_call( + "cp inputs/bitcoin-" + + args.version + + "-win-unsigned.tar.gz inputs/bitcoin-win-unsigned.tar.gz", + shell=True, + ) + subprocess.check_call( + [ + "bin/gbuild", + "-i", + "--commit", + "signature=" + args.commit, + "../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml", + ] + ) + subprocess.check_call( + [ + "bin/gsign", + "-p", + args.sign_prog, + "--signer", + args.signer, + "--release", + args.version + "-win-signed", + "--destination", + "../gitian.sigs/", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml", + ] + ) subprocess.check_call( - 'mv build/out/bitcoin-*win64-setup.exe ../bitcoin-binaries/' + args.version, shell=True) + "mv build/out/bitcoin-*win64-setup.exe ../bitcoin-binaries/" + args.version, + shell=True, + ) if args.macos: - print('\nSigning ' + args.version + ' MacOS') - subprocess.check_call('cp inputs/bitcoin-' + args.version + - '-osx-unsigned.tar.gz inputs/bitcoin-osx-unsigned.tar.gz', shell=True) - subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature=' + args.commit, - '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml']) - subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + '-osx-signed', - '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml']) - subprocess.check_call('mv build/out/bitcoin-osx-signed.dmg ../bitcoin-binaries/' + - args.version + '/bitcoin-' + args.version + '-osx.dmg', shell=True) + print("\nSigning " + args.version + " MacOS") + subprocess.check_call( + "cp inputs/bitcoin-" + + args.version + + "-osx-unsigned.tar.gz inputs/bitcoin-osx-unsigned.tar.gz", + shell=True, + ) + subprocess.check_call( + [ + "bin/gbuild", + "-i", + "--commit", + "signature=" + args.commit, + "../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml", + ] + ) + subprocess.check_call( + [ + "bin/gsign", + "-p", + args.sign_prog, + "--signer", + args.signer, + "--release", + args.version + "-osx-signed", + "--destination", + "../gitian.sigs/", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml", + ] + ) + subprocess.check_call( + "mv build/out/bitcoin-osx-signed.dmg ../bitcoin-binaries/" + + args.version + + "/bitcoin-" + + args.version + + "-osx.dmg", + shell=True, + ) os.chdir(workdir) if args.commit_files: - print('\nCommitting ' + args.version + ' Signed Sigs\n') - os.chdir('gitian.sigs') + print("\nCommitting " + args.version + " Signed Sigs\n") + os.chdir("gitian.sigs") subprocess.check_call( - ['git', 'add', args.version + '-win-signed/' + args.signer]) + ["git", "add", args.version + "-win-signed/" + args.signer] + ) subprocess.check_call( - ['git', 'add', args.version + '-osx-signed/' + args.signer]) - subprocess.check_call(['git', 'commit', '-a', '-m', 'Add ' + - args.version + ' signed binary sigs for ' + args.signer]) + ["git", "add", args.version + "-osx-signed/" + args.signer] + ) + subprocess.check_call( + [ + "git", + "commit", + "-a", + "-m", + "Add " + args.version + " signed binary sigs for " + args.signer, + ] + ) os.chdir(workdir) def verify(): global args, workdir - os.chdir('gitian-builder') - - print('\nVerifying v' + args.version + ' Linux\n') - subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + - '-linux', '../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml']) - print('\nVerifying v' + args.version + ' Windows\n') - subprocess.check_call(['bin/gverify', '-v', '-d', 
'../gitian.sigs/', '-r', args.version + - '-win-unsigned', '../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml']) - print('\nVerifying v' + args.version + ' MacOS\n') - subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + - '-osx-unsigned', '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml']) - print('\nVerifying v' + args.version + ' Signed Windows\n') - subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + - '-win-signed', '../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml']) - print('\nVerifying v' + args.version + ' Signed MacOS\n') - subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + - '-osx-signed', '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml']) + os.chdir("gitian-builder") + + print("\nVerifying v" + args.version + " Linux\n") + subprocess.check_call( + [ + "bin/gverify", + "-v", + "-d", + "../gitian.sigs/", + "-r", + args.version + "-linux", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml", + ] + ) + print("\nVerifying v" + args.version + " Windows\n") + subprocess.check_call( + [ + "bin/gverify", + "-v", + "-d", + "../gitian.sigs/", + "-r", + args.version + "-win-unsigned", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml", + ] + ) + print("\nVerifying v" + args.version + " MacOS\n") + subprocess.check_call( + [ + "bin/gverify", + "-v", + "-d", + "../gitian.sigs/", + "-r", + args.version + "-osx-unsigned", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml", + ] + ) + print("\nVerifying v" + args.version + " Signed Windows\n") + subprocess.check_call( + [ + "bin/gverify", + "-v", + "-d", + "../gitian.sigs/", + "-r", + args.version + "-win-signed", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml", + ] + ) + print("\nVerifying v" + args.version + " Signed MacOS\n") + subprocess.check_call( + [ + "bin/gverify", + "-v", + "-d", + "../gitian.sigs/", + "-r", + args.version + "-osx-signed", + "../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml", + ] + ) os.chdir(workdir) @@ -195,110 +411,195 @@ global args, workdir num_cpus = multiprocessing.cpu_count() - parser = argparse.ArgumentParser(usage='%(prog)s [options] signer version') - parser.add_argument('-c', '--commit', action='store_true', dest='commit', - help='Indicate that the version argument is for a commit or branch') - parser.add_argument('-p', '--pull', action='store_true', dest='pull', - help='Indicate that the version argument is the number of a github repository pull request') - parser.add_argument('-u', '--url', dest='url', default='https://github.com/Bitcoin-ABC/bitcoin-abc.git', - help='Specify the URL of the repository. Default is %(default)s') - parser.add_argument('-v', '--verify', action='store_true', - dest='verify', help='Verify the Gitian build') - parser.add_argument('-b', '--build', action='store_true', - dest='build', help='Do a Gitian build') - parser.add_argument('-s', '--sign', action='store_true', dest='sign', - help='Make signed binaries for Windows and MacOS') - parser.add_argument('-B', '--buildsign', action='store_true', - dest='buildsign', help='Build both signed and unsigned binaries') - parser.add_argument('-o', '--os', dest='os', default='lwm', - help='Specify which Operating Systems the build is for. Default is %(default)s. l for Linux, w for Windows, m for MacOS') - parser.add_argument('-j', '--jobs', dest='jobs', default=str(num_cpus), - help='Number of processes to use. 
Default %(default)s') - parser.add_argument('-m', '--memory', dest='memory', default='3500', - help='Memory to allocate in MiB. Default %(default)s') - parser.add_argument('-k', '--kvm', action='store_true', - dest='kvm', help='Use KVM instead of LXC') - parser.add_argument('-d', '--docker', action='store_true', - dest='docker', help='Use Docker instead of LXC') - parser.add_argument('-S', '--setup', action='store_true', dest='setup', - help='Set up the Gitian building environment. Uses LXC. If you want to use KVM, use the --kvm option. Only works on Debian-based systems (Ubuntu, Debian)') - parser.add_argument('-D', '--detach-sign', action='store_true', dest='detach_sign', - help='Create the assert file for detached signing. Will not commit anything.') - parser.add_argument('-n', '--no-commit', action='store_false', - dest='commit_files', help='Do not commit anything to git') + parser = argparse.ArgumentParser(usage="%(prog)s [options] signer version") + parser.add_argument( + "-c", + "--commit", + action="store_true", + dest="commit", + help="Indicate that the version argument is for a commit or branch", + ) + parser.add_argument( + "-p", + "--pull", + action="store_true", + dest="pull", + help=( + "Indicate that the version argument is the number of a github repository" + " pull request" + ), + ) + parser.add_argument( + "-u", + "--url", + dest="url", + default="https://github.com/Bitcoin-ABC/bitcoin-abc.git", + help="Specify the URL of the repository. Default is %(default)s", + ) + parser.add_argument( + "-v", + "--verify", + action="store_true", + dest="verify", + help="Verify the Gitian build", + ) + parser.add_argument( + "-b", "--build", action="store_true", dest="build", help="Do a Gitian build" + ) + parser.add_argument( + "-s", + "--sign", + action="store_true", + dest="sign", + help="Make signed binaries for Windows and MacOS", + ) + parser.add_argument( + "-B", + "--buildsign", + action="store_true", + dest="buildsign", + help="Build both signed and unsigned binaries", + ) + parser.add_argument( + "-o", + "--os", + dest="os", + default="lwm", + help=( + "Specify which Operating Systems the build is for. Default is %(default)s." + " l for Linux, w for Windows, m for MacOS" + ), + ) + parser.add_argument( + "-j", + "--jobs", + dest="jobs", + default=str(num_cpus), + help="Number of processes to use. Default %(default)s", + ) + parser.add_argument( + "-m", + "--memory", + dest="memory", + default="3500", + help="Memory to allocate in MiB. Default %(default)s", + ) + parser.add_argument( + "-k", "--kvm", action="store_true", dest="kvm", help="Use KVM instead of LXC" + ) + parser.add_argument( + "-d", + "--docker", + action="store_true", + dest="docker", + help="Use Docker instead of LXC", + ) + parser.add_argument( + "-S", + "--setup", + action="store_true", + dest="setup", + help=( + "Set up the Gitian building environment. Uses LXC. If you want to use KVM," + " use the --kvm option. Only works on Debian-based systems (Ubuntu, Debian)" + ), + ) + parser.add_argument( + "-D", + "--detach-sign", + action="store_true", + dest="detach_sign", + help="Create the assert file for detached signing. Will not commit anything.", + ) parser.add_argument( - 'signer', help='GPG signer to sign each build assert file') + "-n", + "--no-commit", + action="store_false", + dest="commit_files", + help="Do not commit anything to git", + ) + parser.add_argument("signer", help="GPG signer to sign each build assert file") parser.add_argument( - 'version', help='Version number, commit, or branch to build. 
If building a commit or branch, the -c option must be specified') + "version", + help=( + "Version number, commit, or branch to build. If building a commit or" + " branch, the -c option must be specified" + ), + ) args = parser.parse_args() workdir = os.getcwd() - args.linux = 'l' in args.os - args.windows = 'w' in args.os - args.macos = 'm' in args.os + args.linux = "l" in args.os + args.windows = "w" in args.os + args.macos = "m" in args.os - args.is_bionic = b'bionic' in subprocess.check_output( - ['lsb_release', '-cs']) + args.is_bionic = b"bionic" in subprocess.check_output(["lsb_release", "-cs"]) if args.buildsign: args.build = True args.sign = True if args.kvm and args.docker: - raise Exception('Error: cannot have both kvm and docker') + raise Exception("Error: cannot have both kvm and docker") - args.sign_prog = 'true' if args.detach_sign else 'gpg --detach-sign' + args.sign_prog = "true" if args.detach_sign else "gpg --detach-sign" # Set environment variable USE_LXC or USE_DOCKER, let gitian-builder know # that we use lxc or docker if args.docker: - os.environ['USE_DOCKER'] = '1' + os.environ["USE_DOCKER"] = "1" elif not args.kvm: - os.environ['USE_LXC'] = '1' - if 'GITIAN_HOST_IP' not in os.environ.keys(): - os.environ['GITIAN_HOST_IP'] = '10.0.3.1' - if 'LXC_GUEST_IP' not in os.environ.keys(): - os.environ['LXC_GUEST_IP'] = '10.0.3.5' + os.environ["USE_LXC"] = "1" + if "GITIAN_HOST_IP" not in os.environ.keys(): + os.environ["GITIAN_HOST_IP"] = "10.0.3.1" + if "LXC_GUEST_IP" not in os.environ.keys(): + os.environ["LXC_GUEST_IP"] = "10.0.3.5" # Disable for MacOS if no SDK found if args.macos and not os.path.isfile( - 'gitian-builder/inputs/Xcode-12.1-12A7403-extracted-SDK-with-libcxx-headers.tar.gz'): - print('Cannot build for MacOS, SDK does not exist. Will build for other OSes') + "gitian-builder/inputs/Xcode-12.1-12A7403-extracted-SDK-with-libcxx-headers.tar.gz" + ): + print("Cannot build for MacOS, SDK does not exist. 
Will build for other OSes") args.macos = False script_name = os.path.basename(sys.argv[0]) # Signer and version shouldn't be empty - if args.signer == '': - print(script_name + ': Missing signer.') - print('Try ' + script_name + ' --help for more information') + if args.signer == "": + print(script_name + ": Missing signer.") + print("Try " + script_name + " --help for more information") exit(1) - if args.version == '': - print(script_name + ': Missing version.') - print('Try ' + script_name + ' --help for more information') + if args.version == "": + print(script_name + ": Missing version.") + print("Try " + script_name + " --help for more information") exit(1) # Add leading 'v' for tags if args.commit and args.pull: - raise Exception('Cannot have both commit and pull') - args.commit = ('' if args.commit else 'v') + args.version + raise Exception("Cannot have both commit and pull") + args.commit = ("" if args.commit else "v") + args.version if args.setup: setup() - os.chdir('bitcoin-abc') + os.chdir("bitcoin-abc") if args.pull: subprocess.check_call( - ['git', 'fetch', args.url, 'refs/pull/' + args.version + '/merge']) - os.chdir('../gitian-builder/inputs/bitcoin') + ["git", "fetch", args.url, "refs/pull/" + args.version + "/merge"] + ) + os.chdir("../gitian-builder/inputs/bitcoin") subprocess.check_call( - ['git', 'fetch', args.url, 'refs/pull/' + args.version + '/merge']) + ["git", "fetch", args.url, "refs/pull/" + args.version + "/merge"] + ) args.commit = subprocess.check_output( - ['git', 'show', '-s', '--format=%H', 'FETCH_HEAD'], universal_newlines=True, encoding='utf8').strip() - args.version = 'pull-' + args.version + ["git", "show", "-s", "--format=%H", "FETCH_HEAD"], + universal_newlines=True, + encoding="utf8", + ).strip() + args.version = "pull-" + args.version print(args.commit) - subprocess.check_call(['git', 'fetch']) - subprocess.check_call(['git', 'checkout', args.commit]) + subprocess.check_call(["git", "fetch"]) + subprocess.check_call(["git", "checkout", args.commit]) os.chdir(workdir) if args.build: @@ -311,5 +612,5 @@ verify() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/contrib/linearize/linearize-data.py b/contrib/linearize/linearize-data.py --- a/contrib/linearize/linearize-data.py +++ b/contrib/linearize/linearize-data.py @@ -25,34 +25,40 @@ def hex_switchEndian(s): - """ Switches the endianness of a hex string (in pairs of hex chars) """ - pairList = [s[i:i + 2].encode() for i in range(0, len(s), 2)] - return b''.join(pairList[::-1]).decode() + """Switches the endianness of a hex string (in pairs of hex chars)""" + pairList = [s[i : i + 2].encode() for i in range(0, len(s), 2)] + return b"".join(pairList[::-1]).decode() def uint32(x): - return x & 0xffffffff + return x & 0xFFFFFFFF def bytereverse(x): - return uint32((((x) << 24) | (((x) << 8) & 0x00ff0000) | - (((x) >> 8) & 0x0000ff00) | ((x) >> 24))) + return uint32( + ( + ((x) << 24) + | (((x) << 8) & 0x00FF0000) + | (((x) >> 8) & 0x0000FF00) + | ((x) >> 24) + ) + ) def bufreverse(in_buf): out_words = [] for i in range(0, len(in_buf), 4): - word = struct.unpack('@I', in_buf[i:i + 4])[0] - out_words.append(struct.pack('@I', bytereverse(word))) - return b''.join(out_words) + word = struct.unpack("@I", in_buf[i : i + 4])[0] + out_words.append(struct.pack("@I", bytereverse(word))) + return b"".join(out_words) def wordreverse(in_buf): out_words = [] for i in range(0, len(in_buf), 4): - out_words.append(in_buf[i:i + 4]) + out_words.append(in_buf[i : i + 4]) out_words.reverse() - 
return b''.join(out_words) + return b"".join(out_words) def calc_hdr_hash(blk_hdr): @@ -76,21 +82,22 @@ def get_blk_dt(blk_hdr): - members = struct.unpack(" self.maxOutSz): + if not self.fileOutput and ((self.outsz + blockSizeOnDisk) > self.maxOutSz): self.outF.close() if self.setFileTime: os.utime(self.outFname, (int(time.time()), self.highTS)) @@ -159,8 +165,7 @@ (blkDate, blkTS) = get_blk_dt(blk_hdr) if self.timestampSplit and (blkDate > self.lastDate): - print("New month " + blkDate.strftime("%Y-%m") + - " @ " + self.hash_str) + print("New month " + blkDate.strftime("%Y-%m") + " @ " + self.hash_str) self.lastDate = blkDate if self.outF: self.outF.close() @@ -173,10 +178,11 @@ if not self.outF: if self.fileOutput: - self.outFname = self.settings['output_file'] + self.outFname = self.settings["output_file"] else: self.outFname = os.path.join( - self.settings['output'], f"blk{self.outFn:05d}.dat") + self.settings["output"], f"blk{self.outFn:05d}.dat" + ) print("Output file " + self.outFname) self.outF = open(self.outFname, "wb") @@ -190,20 +196,26 @@ self.highTS = blkTS if (self.blkCountOut % 1000) == 0: - print('{} blocks scanned, {} blocks written (of {}, {:.1f}% complete)'.format( - self.blkCountIn, self.blkCountOut, len(self.blkindex), 100.0 * self.blkCountOut / len(self.blkindex))) + print( + "{} blocks scanned, {} blocks written (of {}, {:.1f}% complete)".format( + self.blkCountIn, + self.blkCountOut, + len(self.blkindex), + 100.0 * self.blkCountOut / len(self.blkindex), + ) + ) def inFileName(self, fn): - return os.path.join(self.settings['input'], f"blk{fn:05d}.dat") + return os.path.join(self.settings["input"], f"blk{fn:05d}.dat") def fetchBlock(self, extent): - '''Fetch block contents from disk given extents''' + """Fetch block contents from disk given extents""" with open(self.inFileName(extent.fn), "rb") as f: f.seek(extent.offset) return f.read(extent.size) def copyOneBlock(self): - '''Find the next block to be written in the input, and copy it to the output.''' + """Find the next block to be written in the input, and copy it to the output.""" extent = self.blockExtents.pop(self.blkCountOut) if self.blkCountOut in self.outOfOrderData: # If the data is cached, use it from memory and remove from the @@ -227,29 +239,28 @@ return inhdr = self.inF.read(8) - if (not inhdr or (inhdr[0] == "\0")): + if not inhdr or (inhdr[0] == "\0"): self.inF.close() self.inF = None self.inFn = self.inFn + 1 continue inMagic = inhdr[:4] - if (inMagic != self.settings['netmagic']): + if inMagic != self.settings["netmagic"]: print("Invalid magic: " + inMagic.hex()) return inLenLE = inhdr[4:] su = struct.unpack(" Optional['FrameworkInfo']: + def fromOtoolLibraryLine(cls, line: str) -> Optional["FrameworkInfo"]: # Note: line must be trimmed if line == "": return None # Don't deploy system libraries (exception for libQtuitools and # libQtlucene). 
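For context on the filter being re-wrapped here: `otool -L` prints one dependency path per line, and only non-system Qt libraries should be copied into the bundle. A minimal, self-contained sketch of that check (not part of the patch; the sample paths are invented for illustration):

    # Illustrative otool -L dependency lines (already trimmed); paths are made up.
    samples = [
        "/System/Library/Frameworks/Cocoa.framework/Versions/A/Cocoa",  # system: skip
        "/usr/lib/libc++.1.dylib",                # /usr/lib without "libQt": skip
        "@executable_path/../Frameworks/QtCore",  # already relocated: skip
        "/usr/local/Qt/lib/QtCore.framework/Versions/5/QtCore",         # deploy
    ]

    def is_deployable(line: str) -> bool:
        # Mirrors the early-return filter in FrameworkInfo.fromOtoolLibraryLine.
        if line == "":
            return False
        return not (
            line.startswith("/System/Library/")
            or line.startswith("@executable_path")
            or (line.startswith("/usr/lib/") and "libQt" not in line)
        )

    assert [is_deployable(s) for s in samples] == [False, False, False, True]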
- if line.startswith("/System/Library/") or line.startswith( - "@executable_path") or (line.startswith("/usr/lib/") and "libQt" not in line): + if ( + line.startswith("/System/Library/") + or line.startswith("@executable_path") + or (line.startswith("/usr/lib/") and "libQt" not in line) + ): return None m = cls.reOLine.match(line) @@ -118,7 +122,9 @@ info.version = "-" info.installName = path - info.deployedInstallName = "@executable_path/../Frameworks/" + info.binaryName + info.deployedInstallName = ( + "@executable_path/../Frameworks/" + info.binaryName + ) info.sourceFilePath = path info.destinationDirectory = cls.bundleFrameworkDirectory else: @@ -131,38 +137,44 @@ i += 1 if i == len(parts): raise RuntimeError( - "Could not find .framework or .dylib in otool line: " + line) + "Could not find .framework or .dylib in otool line: " + line + ) info.frameworkName = parts[i] info.frameworkDirectory = "/".join(parts[:i]) info.frameworkPath = os.path.join( - info.frameworkDirectory, info.frameworkName) + info.frameworkDirectory, info.frameworkName + ) info.binaryName = parts[i + 3] - info.binaryDirectory = "/".join(parts[i + 1:i + 3]) - info.binaryPath = os.path.join( - info.binaryDirectory, info.binaryName) + info.binaryDirectory = "/".join(parts[i + 1 : i + 3]) + info.binaryPath = os.path.join(info.binaryDirectory, info.binaryName) info.version = parts[i + 2] - info.deployedInstallName = "@executable_path/../Frameworks/" + \ - os.path.join(info.frameworkName, info.binaryPath) + info.deployedInstallName = "@executable_path/../Frameworks/" + os.path.join( + info.frameworkName, info.binaryPath + ) info.destinationDirectory = os.path.join( - cls.bundleFrameworkDirectory, info.frameworkName, info.binaryDirectory) + cls.bundleFrameworkDirectory, info.frameworkName, info.binaryDirectory + ) info.sourceResourcesDirectory = os.path.join( - info.frameworkPath, "Resources") - info.sourceContentsDirectory = os.path.join( - info.frameworkPath, "Contents") + info.frameworkPath, "Resources" + ) + info.sourceContentsDirectory = os.path.join(info.frameworkPath, "Contents") info.sourceVersionContentsDirectory = os.path.join( - info.frameworkPath, "Versions", info.version, "Contents") + info.frameworkPath, "Versions", info.version, "Contents" + ) info.destinationResourcesDirectory = os.path.join( - cls.bundleFrameworkDirectory, info.frameworkName, "Resources") + cls.bundleFrameworkDirectory, info.frameworkName, "Resources" + ) info.destinationVersionContentsDirectory = os.path.join( cls.bundleFrameworkDirectory, info.frameworkName, "Versions", info.version, - "Contents") + "Contents", + ) return info @@ -214,19 +226,18 @@ if verbose >= 3: print("Inspecting with otool: " + binaryPath) otoolbin = os.getenv("OTOOL", "otool") - otool = subprocess.Popen([otoolbin, - "-L", - binaryPath], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True) + otool = subprocess.Popen( + [otoolbin, "-L", binaryPath], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + ) o_stdout, o_stderr = otool.communicate() if otool.returncode != 0: if verbose >= 1: sys.stderr.write(o_stderr) sys.stderr.flush() - raise RuntimeError( - f"otool failed with return code {otool.returncode}") + raise RuntimeError(f"otool failed with return code {otool.returncode}") otoolLines = o_stdout.split("\n") otoolLines.pop(0) # First line is the inspected binary @@ -252,8 +263,7 @@ subprocess.check_call([installnametoolbin, "-" + action] + list(args)) -def changeInstallName(oldName: str, newName: str, - 
binaryPath: str, verbose: int): +def changeInstallName(oldName: str, newName: str, binaryPath: str, verbose: int): if verbose >= 3: print("Using install_name_tool:") print(" in", binaryPath) @@ -278,8 +288,7 @@ subprocess.check_call([stripbin, "-x", binaryPath]) -def copyFramework(framework: FrameworkInfo, path: str, - verbose: int) -> Optional[str]: +def copyFramework(framework: FrameworkInfo, path: str, verbose: int) -> Optional[str]: if framework.sourceFilePath.startswith("Qt"): # standard place for Nokia Qt installer's frameworks fromPath = "/Library/Frameworks/" + framework.sourceFilePath @@ -307,14 +316,14 @@ os.chmod(toPath, permissions.st_mode | stat.S_IWRITE) if not framework.isDylib(): # Copy resources for real frameworks - linkfrom = os.path.join( path, "Contents", "Frameworks", framework.frameworkName, "Versions", - "Current") + "Current", + ) linkto = framework.version if not os.path.exists(linkfrom): os.symlink(linkto, linkfrom) @@ -322,8 +331,7 @@ print("Linked:", linkfrom, "->", linkto) fromResourcesDir = framework.sourceResourcesDirectory if os.path.exists(fromResourcesDir): - toResourcesDir = os.path.join( - path, framework.destinationResourcesDirectory) + toResourcesDir = os.path.join(path, framework.destinationResourcesDirectory) shutil.copytree(fromResourcesDir, toResourcesDir, symlinks=True) if verbose >= 3: print("Copied resources:", fromResourcesDir) @@ -333,7 +341,8 @@ fromContentsDir = framework.sourceContentsDirectory if os.path.exists(fromContentsDir): toContentsDir = os.path.join( - path, framework.destinationVersionContentsDirectory) + path, framework.destinationVersionContentsDirectory + ) shutil.copytree(fromContentsDir, toContentsDir, symlinks=True) if verbose >= 3: print("Copied Contents:", fromContentsDir) @@ -341,15 +350,17 @@ # Copy qt_menu.nib (applies to non-framework layout) elif framework.frameworkName.startswith("libQtGui"): qtMenuNibSourcePath = os.path.join( - framework.frameworkDirectory, "Resources", "qt_menu.nib") + framework.frameworkDirectory, "Resources", "qt_menu.nib" + ) qtMenuNibDestinationPath = os.path.join( - path, "Contents", "Resources", "qt_menu.nib") + path, "Contents", "Resources", "qt_menu.nib" + ) if os.path.exists(qtMenuNibSourcePath) and not os.path.exists( - qtMenuNibDestinationPath): + qtMenuNibDestinationPath + ): shutil.copytree( - qtMenuNibSourcePath, - qtMenuNibDestinationPath, - symlinks=True) + qtMenuNibSourcePath, qtMenuNibDestinationPath, symlinks=True + ) if verbose >= 3: print("Copied for libQtGui:", qtMenuNibSourcePath) print(" to:", qtMenuNibDestinationPath) @@ -357,8 +368,14 @@ return toPath -def deployFrameworks(frameworks: List[FrameworkInfo], bundlePath: str, binaryPath: str, strip: bool, - verbose: int, deploymentInfo: Optional[DeploymentInfo] = None) -> DeploymentInfo: +def deployFrameworks( + frameworks: List[FrameworkInfo], + bundlePath: str, + binaryPath: str, + strip: bool, + verbose: int, + deploymentInfo: Optional[DeploymentInfo] = None, +) -> DeploymentInfo: if deploymentInfo is None: deploymentInfo = DeploymentInfo() @@ -374,17 +391,16 @@ deploymentInfo.detectQtPath(framework.frameworkDirectory) if framework.installName.startswith( - "@executable_path") or framework.installName.startswith(bundlePath): + "@executable_path" + ) or framework.installName.startswith(bundlePath): if verbose >= 2: print(framework.frameworkName, "already deployed, skipping.") continue # install_name_tool the new id into the binary changeInstallName( - framework.installName, - framework.deployedInstallName, - binaryPath, - 
verbose) + framework.installName, framework.deployedInstallName, binaryPath, verbose + ) # Copy framework to app bundle. deployedBinaryPath = copyFramework(framework, bundlePath, verbose) @@ -396,10 +412,7 @@ runStrip(deployedBinaryPath, verbose) # install_name_tool it a new id. - changeIdentification( - framework.deployedInstallName, - deployedBinaryPath, - verbose) + changeIdentification(framework.deployedInstallName, deployedBinaryPath, verbose) # Check for framework dependencies dependencies = getFrameworks(deployedBinaryPath, verbose) @@ -408,30 +421,46 @@ dependency.installName, dependency.deployedInstallName, deployedBinaryPath, - verbose) + verbose, + ) # Deploy framework if necessary. - if dependency.frameworkName not in deploymentInfo.deployedFrameworks and dependency not in frameworks: + if ( + dependency.frameworkName not in deploymentInfo.deployedFrameworks + and dependency not in frameworks + ): frameworks.append(dependency) return deploymentInfo def deployFrameworksForAppBundle( - applicationBundle: ApplicationBundleInfo, strip: bool, verbose: int) -> DeploymentInfo: + applicationBundle: ApplicationBundleInfo, strip: bool, verbose: int +) -> DeploymentInfo: frameworks = getFrameworks(applicationBundle.binaryPath, verbose) if len(frameworks) == 0 and verbose >= 1: print( "Warning: Could not find any external frameworks to deploy in {}.".format( - applicationBundle.path)) + applicationBundle.path + ) + ) return DeploymentInfo() else: return deployFrameworks( - frameworks, applicationBundle.path, applicationBundle.binaryPath, strip, verbose) - - -def deployPlugins(appBundleInfo: ApplicationBundleInfo, - deploymentInfo: DeploymentInfo, strip: bool, verbose: int): + frameworks, + applicationBundle.path, + applicationBundle.binaryPath, + strip, + verbose, + ) + + +def deployPlugins( + appBundleInfo: ApplicationBundleInfo, + deploymentInfo: DeploymentInfo, + strip: bool, + verbose: int, +): # Lookup available plugins, exclude unneeded plugins = [] if deploymentInfo.pluginPath is None: @@ -523,7 +552,10 @@ if pluginName.endswith("_debug.dylib"): # Skip debug plugins continue - elif pluginPath == "imageformats/libqsvg.dylib" or pluginPath == "iconengines/libqsvgicon.dylib": + elif ( + pluginPath == "imageformats/libqsvg.dylib" + or pluginPath == "iconengines/libqsvgicon.dylib" + ): # Deploy the svg plugins only if QtSvg is in use if not deploymentInfo.usesFramework("QtSvg"): continue @@ -552,19 +584,12 @@ for pluginDirectory, pluginName in plugins: if verbose >= 2: - print( - "Processing plugin", - os.path.join( - pluginDirectory, - pluginName), - "...") + print("Processing plugin", os.path.join(pluginDirectory, pluginName), "...") sourcePath = os.path.join( - deploymentInfo.pluginPath, - pluginDirectory, - pluginName) - destinationDirectory = os.path.join( - appBundleInfo.pluginPath, pluginDirectory) + deploymentInfo.pluginPath, pluginDirectory, pluginName + ) + destinationDirectory = os.path.join(appBundleInfo.pluginPath, pluginDirectory) if not os.path.exists(destinationDirectory): os.makedirs(destinationDirectory) @@ -584,7 +609,8 @@ dependency.installName, dependency.deployedInstallName, destinationPath, - verbose) + verbose, + ) # Deploy framework if necessary. if dependency.frameworkName not in deploymentInfo.deployedFrameworks: @@ -594,7 +620,8 @@ destinationPath, strip, verbose, - deploymentInfo) + deploymentInfo, + ) qt_conf = """[Paths] @@ -602,7 +629,8 @@ Plugins=PlugIns """ -ap = ArgumentParser(description="""Improved version of macdeployqt. 
+ap = ArgumentParser( + description="""Improved version of macdeployqt. Outputs a ready-to-deploy app in a folder "dist" and optionally wraps it in a .dmg file. Note, that the "dist" folder will be deleted before deploying on each run. @@ -611,71 +639,98 @@ Also optionally signs the .app bundle; set the CODESIGNARGS environment variable to pass arguments to the codesign tool. -E.g. CODESIGNARGS='--sign "Developer ID Application: ..." --keychain /encrypted/foo.keychain'""") +E.g. CODESIGNARGS='--sign "Developer ID Application: ..." --keychain /encrypted/foo.keychain'""" +) -ap.add_argument("app_bundle", nargs=1, metavar="app-bundle", - help="application bundle to be deployed") +ap.add_argument( + "app_bundle", + nargs=1, + metavar="app-bundle", + help="application bundle to be deployed", +) ap.add_argument( "-verbose", type=int, nargs=1, default=[1], metavar="<0-3>", - help="0 = no output, 1 = error/warning (default), 2 = normal, 3 = debug") + help="0 = no output, 1 = error/warning (default), 2 = normal, 3 = debug", +) ap.add_argument( "-no-plugins", dest="plugins", action="store_false", default=True, - help="skip plugin deployment") + help="skip plugin deployment", +) ap.add_argument( "-no-strip", dest="strip", action="store_false", default=True, - help="don't run 'strip' on the binaries") + help="don't run 'strip' on the binaries", +) ap.add_argument( "-sign", dest="sign", action="store_true", default=False, - help="sign .app bundle with codesign tool") + help="sign .app bundle with codesign tool", +) ap.add_argument( "-dmg", nargs="?", const="", metavar="basename", - help="create a .dmg disk image; if basename is not specified, a camel-cased version of the app name is used") + help=( + "create a .dmg disk image; if basename is not specified, a camel-cased version" + " of the app name is used" + ), +) ap.add_argument( "-fancy", nargs=1, metavar="plist", default=[], - help="make a fancy looking disk image using the given plist file with instructions; requires -dmg to work") + help=( + "make a fancy looking disk image using the given plist file with instructions;" + " requires -dmg to work" + ), +) ap.add_argument( "-add-qt-tr", nargs=1, metavar="languages", default=[], - help="add Qt translation files to the bundle's resources; the language list must be separated with commas, not with whitespace") + help=( + "add Qt translation files to the bundle's resources; the language list must be" + " separated with commas, not with whitespace" + ), +) ap.add_argument( "-translations-dir", nargs=1, metavar="path", default=None, - help="Path to Qt's translation files") + help="Path to Qt's translation files", +) ap.add_argument( "-add-resources", nargs="+", metavar="path", default=[], - help="list of additional files or folders to be copied into the bundle's resources; must be the last argument") + help=( + "list of additional files or folders to be copied into the bundle's resources;" + " must be the last argument" + ), +) ap.add_argument( "-volname", nargs=1, metavar="volname", default=[], - help="custom volume name for dmg") + help="custom volume name for dmg", +) config = ap.parse_args() @@ -687,8 +742,7 @@ if not os.path.exists(app_bundle): if verbose >= 1: - sys.stderr.write( - f"Error: Could not find app bundle \"{app_bundle}\"\n") + sys.stderr.write(f'Error: Could not find app bundle "{app_bundle}"\n') sys.exit(1) app_bundle_name = os.path.splitext(os.path.basename(app_bundle))[0] @@ -701,17 +755,17 @@ else: if verbose >= 1: sys.stderr.write( - f"Error: Could not find translation dir 
\"{translations_dir}\"\n") + f'Error: Could not find translation dir "{translations_dir}"\n' + ) sys.exit(1) # ------------------------------------------------ for p in config.add_resources: if verbose >= 3: - print(f"Checking for \"{p}\"...") + print(f'Checking for "{p}"...') if not os.path.exists(p): if verbose >= 1: - sys.stderr.write( - f"Error: Could not find additional resource file \"{p}\"\n") + sys.stderr.write(f'Error: Could not find additional resource file "{p}"\n') sys.exit(1) # ------------------------------------------------ @@ -724,16 +778,17 @@ except ImportError: if verbose >= 1: sys.stderr.write( - "Error: Could not import plistlib which is required for fancy disk images.\n") + "Error: Could not import plistlib which is required for fancy disk" + " images.\n" + ) sys.exit(1) p = config.fancy[0] if verbose >= 3: - print(f"Fancy: Loading \"{p}\"...") + print(f'Fancy: Loading "{p}"...') if not os.path.exists(p): if verbose >= 1: - sys.stderr.write( - f"Error: Could not find fancy disk image plist at \"{p}\"\n") + sys.stderr.write(f'Error: Could not find fancy disk image plist at "{p}"\n') sys.exit(1) try: @@ -741,47 +796,48 @@ except BaseException: if verbose >= 1: sys.stderr.write( - f"Error: Could not parse fancy disk image plist at \"{p}\"\n") + f'Error: Could not parse fancy disk image plist at "{p}"\n' + ) sys.exit(1) try: assert "window_bounds" not in fancy or ( - isinstance( - fancy["window_bounds"], - list) and len( - fancy["window_bounds"]) == 4) + isinstance(fancy["window_bounds"], list) + and len(fancy["window_bounds"]) == 4 + ) assert "background_picture" not in fancy or isinstance( - fancy["background_picture"], str) + fancy["background_picture"], str + ) assert "icon_size" not in fancy or isinstance(fancy["icon_size"], int) assert "applications_symlink" not in fancy or isinstance( - fancy["applications_symlink"], bool) + fancy["applications_symlink"], bool + ) if "items_position" in fancy: assert isinstance(fancy["items_position"], dict) for key, value in fancy["items_position"].items(): - assert isinstance( - value, - list) and len(value) == 2 and isinstance( - value[0], - int) and isinstance( - value[1], - int) + assert ( + isinstance(value, list) + and len(value) == 2 + and isinstance(value[0], int) + and isinstance(value[1], int) + ) except BaseException: if verbose >= 1: - sys.stderr.write( - f"Error: Bad format of fancy disk image plist at \"{p}\"\n") + sys.stderr.write(f'Error: Bad format of fancy disk image plist at "{p}"\n') sys.exit(1) if "background_picture" in fancy: bp = fancy["background_picture"] if verbose >= 3: - print(f"Fancy: Resolving background picture \"{bp}\"...") + print(f'Fancy: Resolving background picture "{bp}"...') if not os.path.exists(bp): bp = os.path.join(os.path.dirname(p), bp) if not os.path.exists(bp): if verbose >= 1: sys.stderr.write( - "Error: Could not find background picture at \"{}\" or \"{}\"\n".format( - fancy["background_picture"], bp)) + 'Error: Could not find background picture at "{}" or "{}"\n' + .format(fancy["background_picture"], bp) + ) sys.exit(1) else: fancy["background_picture"] = bp @@ -824,13 +880,15 @@ try: deploymentInfo = deployFrameworksForAppBundle( - applicationBundle, config.strip, verbose) + applicationBundle, config.strip, verbose + ) if deploymentInfo.qtPath is None: deploymentInfo.qtPath = os.getenv("QTDIR", None) if deploymentInfo.qtPath is None: if verbose >= 1: sys.stderr.write( - "Warning: Could not detect Qt's path, skipping plugin deployment!\n") + "Warning: Could not detect Qt's path, 
skipping plugin deployment!\n" + ) config.plugins = False except RuntimeError as e: if verbose >= 1: @@ -863,16 +921,16 @@ else: sys.stderr.write("Error: Could not find Qt translation path\n") sys.exit(1) - add_qt_tr = [f"qt_{lng}.qm" - for lng in config.add_qt_tr[0].split(",")] + add_qt_tr = [f"qt_{lng}.qm" for lng in config.add_qt_tr[0].split(",")] for lng_file in add_qt_tr: p = os.path.join(qt_tr_dir, lng_file) if verbose >= 3: - print(f"Checking for \"{p}\"...") + print(f'Checking for "{p}"...') if not os.path.exists(p): if verbose >= 1: sys.stderr.write( - f"Error: Could not find Qt translation file \"{lng_file}\"\n") + f'Error: Could not find Qt translation file "{lng_file}"\n' + ) sys.exit(1) # ------------------------------------------------ @@ -891,17 +949,14 @@ for lng_file in add_qt_tr: if verbose >= 3: print( - os.path.join( - qt_tr_dir, - lng_file), + os.path.join(qt_tr_dir, lng_file), "->", - os.path.join( - applicationBundle.resourcesPath, - lng_file)) + os.path.join(applicationBundle.resourcesPath, lng_file), + ) shutil.copy2( - os.path.join( - qt_tr_dir, lng_file), os.path.join( - applicationBundle.resourcesPath, lng_file)) + os.path.join(qt_tr_dir, lng_file), + os.path.join(applicationBundle.resourcesPath, lng_file), + ) # ------------------------------------------------ @@ -919,13 +974,14 @@ # ------------------------------------------------ -if config.sign and 'CODESIGNARGS' not in os.environ: +if config.sign and "CODESIGNARGS" not in os.environ: print("You must set the CODESIGNARGS environment variable. Skipping signing.") elif config.sign: if verbose >= 1: print(f"Code-signing app bundle {target}") subprocess.check_call( - f"codesign --force {os.environ['CODESIGNARGS']} {target}", shell=True) + f"codesign --force {os.environ['CODESIGNARGS']} {target}", shell=True + ) # ------------------------------------------------ @@ -970,12 +1026,13 @@ srcfolder="dist", format="UDBZ", volname=volname, - ov=True) + ov=True, + ) except subprocess.CalledProcessError as e: sys.exit(e.returncode) else: if verbose >= 3: - print("Determining size of \"dist\"...") + print('Determining size of "dist"...') size = 0 for path, dirs, files in os.walk("dist"): for file in files: @@ -992,7 +1049,8 @@ format="UDRW", size=size, volname=volname, - ov=True) + ov=True, + ) except subprocess.CalledProcessError as e: sys.exit(e.returncode) @@ -1005,7 +1063,8 @@ readwrite=True, noverify=True, noautoopen=True, - capture_stdout=True) + capture_stdout=True, + ) except subprocess.CalledProcessError as e: sys.exit(e.returncode) @@ -1018,8 +1077,8 @@ if "background_picture" in fancy: bg_path = os.path.join( - disk_root, ".background", os.path.basename( - fancy["background_picture"])) + disk_root, ".background", os.path.basename(fancy["background_picture"]) + ) os.mkdir(os.path.dirname(bg_path)) if verbose >= 3: print(fancy["background_picture"], "->", bg_path) @@ -1028,11 +1087,7 @@ bg_path = None if fancy.get("applications_symlink", False): - os.symlink( - "/Applications", - os.path.join( - disk_root, - "Applications")) + os.symlink("/Applications", os.path.join(disk_root, "Applications")) # The Python appscript package broke with OSX 10.8 and isn't being fixed. 
# So we now build up an AppleScript string and use the osascript command @@ -1062,12 +1117,15 @@ """) itemscript = Template( - 'set position of item "${item}" of container window to {${position}}') + 'set position of item "${item}" of container window to {${position}}' + ) items_positions = [] if "items_position" in fancy: for name, position in fancy["items_position"].items(): - params = {"item": name, "position": ",".join( - [str(p) for p in position])} + params = { + "item": name, + "position": ",".join([str(p) for p in position]), + } items_positions.append(itemscript.substitute(params)) params = { @@ -1075,28 +1133,30 @@ "window_bounds": "300,300,800,620", "icon_size": "96", "background_commands": "", - "items_positions": "\n ".join(items_positions) + "items_positions": "\n ".join(items_positions), } if "window_bounds" in fancy: - params["window_bounds"] = ",".join( - [str(p) for p in fancy["window_bounds"]]) + params["window_bounds"] = ",".join([str(p) for p in fancy["window_bounds"]]) if "icon_size" in fancy: params["icon_size"] = str(fancy["icon_size"]) if bg_path is not None: # Set background file, then call SetFile to make it invisible. # (note: making it invisible first makes set background picture fail) - bgscript = Template("""set background picture of theViewOptions to file ".background:$bgpic" - do shell script "SetFile -a V /Volumes/$disk/.background/$bgpic" """) + bgscript = Template( + """set background picture of theViewOptions to file ".background:$bgpic" + do shell script "SetFile -a V /Volumes/$disk/.background/$bgpic" """ + ) params["background_commands"] = bgscript.substitute( - {"bgpic": os.path.basename(bg_path), "disk": params["disk"]}) + {"bgpic": os.path.basename(bg_path), "disk": params["disk"]} + ) s = appscript.substitute(params) if verbose >= 2: print("Running AppleScript:") print(s) - p = subprocess.Popen(['osascript', '-'], stdin=subprocess.PIPE) - p.communicate(input=s.encode('utf-8')) + p = subprocess.Popen(["osascript", "-"], stdin=subprocess.PIPE) + p.communicate(input=s.encode("utf-8")) if p.returncode: print("Error running osascript.") @@ -1110,7 +1170,8 @@ dmg_name + ".temp", format="UDBZ", o=dmg_name + ".dmg", - ov=True) + ov=True, + ) except subprocess.CalledProcessError as e: sys.exit(e.returncode) diff --git a/contrib/message-capture/message-capture-parser.py b/contrib/message-capture/message-capture-parser.py --- a/contrib/message-capture/message-capture-parser.py +++ b/contrib/message-capture/message-capture-parser.py @@ -15,10 +15,7 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Union -sys.path.append( - os.path.join( - os.path.dirname(__file__), - '../../test/functional')) +sys.path.append(os.path.join(os.path.dirname(__file__), "../../test/functional")) from test_framework.messages import ser_uint256 # noqa: E402 from test_framework.p2p import MESSAGEMAP # noqa: E402 @@ -61,7 +58,7 @@ class ProgressBar: def __init__(self, total: float): self.total = total - self.running = 0. 
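As a concrete reading of the bar drawn by set_progress just below, a worked example (illustrative, not part of the patch):

    # With a 20-column terminal, 9 columns go to the brackets and percentage,
    # leaving max_blocks = 20 - 9 = 11 columns for the bar itself.
    cols = 20
    max_blocks = cols - 9
    progress = 0.5
    num_blocks = int(max_blocks * progress)  # int(11 * 0.5) == 5
    line = "\r[ {}{} ] {:3.0f}%".format(
        "#" * num_blocks, " " * (max_blocks - num_blocks), progress * 100
    )
    # Five '#' blocks, six spaces of padding, then the right-aligned percentage.
    assert line == "\r[ " + "#" * 5 + " " * 6 + " ]  50%"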
+ self.running = 0.0 def set_progress(self, progress: float): cols = shutil.get_terminal_size()[0] @@ -69,11 +66,12 @@ return max_blocks = cols - 9 num_blocks = int(max_blocks * progress) - print('\r[ {}{} ] {:3.0f}%' - .format('#' * num_blocks, - ' ' * (max_blocks - num_blocks), - progress * 100), - end='') + print( + "\r[ {}{} ] {:3.0f}%".format( + "#" * num_blocks, " " * (max_blocks - num_blocks), progress * 100 + ), + end="", + ) def update(self, more: float): self.running += more @@ -89,7 +87,11 @@ val = getattr(obj, slot, None) if slot in HASH_INTS and isinstance(val, int): ret[slot] = ser_uint256(val).hex() - elif slot in HASH_INT_VECTORS and isinstance(val, list) and isinstance(val[0], int): + elif ( + slot in HASH_INT_VECTORS + and isinstance(val, list) + and isinstance(val[0], int) + ): ret[slot] = [ser_uint256(a).hex() for a in val] else: ret[slot] = to_jsonable(val) @@ -102,9 +104,10 @@ return obj -def process_file(path: str, messages: List[Any], recv: bool, - progress_bar: Optional[ProgressBar]) -> None: - with open(path, 'rb') as f_in: +def process_file( + path: str, messages: List[Any], recv: bool, progress_bar: Optional[ProgressBar] +) -> None: + with open(path, "rb") as f_in: if progress_bar: bytes_read = 0 @@ -121,7 +124,7 @@ break tmp_header = BytesIO(tmp_header_raw) time = int.from_bytes(tmp_header.read(TIME_SIZE), "little") - msgtype: bytes = tmp_header.read(MSGTYPE_SIZE).split(b'\x00', 1)[0] + msgtype: bytes = tmp_header.read(MSGTYPE_SIZE).split(b"\x00", 1)[0] length = int.from_bytes(tmp_header.read(LENGTH_SIZE), "little") # Start converting the message to a dictionary @@ -148,7 +151,8 @@ messages.append(msg_dict) print( f"WARNING - Unrecognized message type {msgtype!r} in {path}", - file=sys.stderr) + file=sys.stderr, + ) continue # Deserialize the message @@ -167,7 +171,8 @@ messages.append(msg_dict) print( f"WARNING - Unable to deserialize message in {path}", - file=sys.stderr) + file=sys.stderr, + ) continue # Convert body of message into a jsonable object @@ -187,23 +192,26 @@ def main(): parser = argparse.ArgumentParser( description=__doc__, - epilog="EXAMPLE \n\t{0} -o out.json /message_capture/**/*.dat".format( - sys.argv[0]), - formatter_class=argparse.RawTextHelpFormatter) + epilog=( + f"EXAMPLE \n\t{sys.argv[0]} -o out.json /message_capture/**/*.dat" + ), + formatter_class=argparse.RawTextHelpFormatter, + ) parser.add_argument( - "capturepaths", - nargs='+', - help="binary message capture files to parse.") + "capturepaths", nargs="+", help="binary message capture files to parse." + ) + parser.add_argument("-o", "--output", help="output file. If unset print to stdout") parser.add_argument( - "-o", "--output", - help="output file. If unset print to stdout") - parser.add_argument( - "-n", "--no-progress-bar", - action='store_true', - help="disable the progress bar. Automatically set if the output is not a terminal") + "-n", + "--no-progress-bar", + action="store_true", + help=( + "disable the progress bar. 
Automatically set if the output is not a" + " terminal" + ), + ) args = parser.parse_args() - capturepaths = [Path.cwd() / Path(capturepath) - for capturepath in args.capturepaths] + capturepaths = [Path.cwd() / Path(capturepath) for capturepath in args.capturepaths] output = Path.cwd() / Path(args.output) if args.output else False use_progress_bar = (not args.no_progress_bar) and sys.stdout.isatty() @@ -215,19 +223,15 @@ progress_bar = None for capture in capturepaths: - process_file( - str(capture), - messages, - "recv" in capture.stem, - progress_bar) + process_file(str(capture), messages, "recv" in capture.stem, progress_bar) - messages.sort(key=lambda msg: msg['time']) + messages.sort(key=lambda msg: msg["time"]) if use_progress_bar: progress_bar.set_progress(1) jsonrep = json.dumps(messages) if output: - with open(str(output), 'w+', encoding="utf8") as f_out: + with open(str(output), "w+", encoding="utf8") as f_out: f_out.write(jsonrep) else: print(jsonrep) diff --git a/contrib/seeds/makeseeds.py b/contrib/seeds/makeseeds.py --- a/contrib/seeds/makeseeds.py +++ b/contrib/seeds/makeseeds.py @@ -16,17 +16,15 @@ NSEEDS = 512 MAX_SEEDS_PER_ASN = { - 'ipv4': 6, - 'ipv6': 10, + "ipv4": 6, + "ipv6": 10, } MIN_BLOCKS = 760000 -PATTERN_IPV4 = re.compile( - r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$") +PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$") PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$") -PATTERN_ONION = re.compile( - r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$") +PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$") # Used to only select nodes with a user agent string compatible with the # eCash network. @@ -34,7 +32,7 @@ def parseline(line: str) -> Union[dict, None]: - """ Parses a line from `seeds_main.txt` into a dictionary of details for that line. + """Parses a line from `seeds_main.txt` into a dictionary of details for that line. or `None`, if the line could not be parsed. """ sline = line.split() @@ -45,7 +43,7 @@ # The user agent is at the end of the line. It may contain space, so we # concatenate. for i in range(12, len(sline)): - sline[11] += ' ' + sline[i] + sline[11] += " " + sline[i] # Remove leftovers del sline[12:] @@ -60,13 +58,13 @@ if m is None: return None else: - net = 'onion' + net = "onion" ipstr = sortkey = m.group(1) port = int(m.group(2)) else: - net = 'ipv6' + net = "ipv6" # Not interested in localhost - if m.group(1) in ['::']: + if m.group(1) in ["::"]: return None ipstr = m.group(1) # XXX parse IPv6 into number, could use name_to_ipv6 from @@ -82,7 +80,7 @@ ip = ip + (int(m.group(i + 2)) << (8 * (3 - i))) if ip == 0: return None - net = 'ipv4' + net = "ipv4" sortkey = ip ipstr = m.group(1) port = int(m.group(6)) @@ -103,80 +101,88 @@ blocks = int(sline[8]) # Construct result. return { - 'net': net, - 'ip': ipstr, - 'port': port, - 'ipnum': ip, - 'uptime': uptime30, - 'lastsuccess': lastsuccess, - 'version': version, - 'agent': agent, - 'service': service, - 'blocks': blocks, - 'sortkey': sortkey, + "net": net, + "ip": ipstr, + "port": port, + "ipnum": ip, + "uptime": uptime30, + "lastsuccess": lastsuccess, + "version": version, + "agent": agent, + "service": service, + "blocks": blocks, + "sortkey": sortkey, } def dedup(ips: List[Dict]) -> List[Dict]: - """ Remove duplicates from `ips` where multiple ips share address and port. 
""" + """Remove duplicates from `ips` where multiple ips share address and port.""" d = {} for ip in ips: - d[ip['ip'], ip['port']] = ip + d[ip["ip"], ip["port"]] = ip return list(d.values()) def filtermultiport(ips: List[Dict]) -> List[Dict]: - """ Filter out hosts with more nodes per IP""" + """Filter out hosts with more nodes per IP""" hist = collections.defaultdict(list) for ip in ips: - hist[ip['sortkey']].append(ip) + hist[ip["sortkey"]].append(ip) return [value[0] for (key, value) in list(hist.items()) if len(value) == 1] def lookup_asn(net: str, ip: str) -> Union[int, None]: - """ Look up the asn for an `ip` address by querying cymru.com + """Look up the asn for an `ip` address by querying cymru.com on network `net` (e.g. ipv4 or ipv6). Returns in integer ASN or None if it could not be found. """ try: - if net == 'ipv4': + if net == "ipv4": ipaddr = ip - prefix = '.origin' + prefix = ".origin" else: # http://www.team-cymru.com/IP-ASN-mapping.html # 2001:4860:b002:23::68 res = str() # pick the first 4 nibbles - for nb in ip.split(':')[:4]: + for nb in ip.split(":")[:4]: # right padded with '0' for c in nb.zfill(4): # 2001 4860 b002 0023 - res += c + '.' + res += c + "." # 2.0.0.1.4.8.6.0.b.0.0.2.0.0.2.3 - ipaddr = res.rstrip('.') - prefix = '.origin6' - - asn = int([x.to_text() for x in dns.resolver.query('.'.join( - reversed(ipaddr.split('.'))) + prefix + '.asn.cymru.com', - 'TXT').response.answer][0].split('\"')[1].split(' ')[0]) + ipaddr = res.rstrip(".") + prefix = ".origin6" + + asn = int( + [ + x.to_text() + for x in dns.resolver.query( + ".".join(reversed(ipaddr.split("."))) + prefix + ".asn.cymru.com", + "TXT", + ).response.answer + ][0] + .split('"')[1] + .split(" ")[0] + ) return asn except Exception: sys.stderr.write('ERR: Could not resolve ASN for "' + ip + '"\n') return None + # Based on Greg Maxwell's seed_filter.py -def filterbyasn(ips: List[Dict], max_per_asn: Dict, - max_per_net: int) -> List[Dict]: - """ Prunes `ips` by +def filterbyasn(ips: List[Dict], max_per_asn: Dict, max_per_net: int) -> List[Dict]: + """Prunes `ips` by (a) trimming ips to have at most `max_per_net` ips from each net (e.g. ipv4, ipv6); and (b) trimming ips to have at most `max_per_asn` ips from each asn in each net. """ # Sift out ips by type - ips_ipv46 = [ip for ip in ips if ip['net'] in ['ipv4', 'ipv6']] - ips_onion = [ip for ip in ips if ip['net'] == 'onion'] + ips_ipv46 = [ip for ip in ips if ip["net"] in ["ipv4", "ipv6"]] + ips_onion = [ip for ip in ips if ip["net"] == "onion"] # Filter IPv46 by ASN, and limit to max_per_net per network result = [] @@ -189,20 +195,21 @@ print( f"{i:6d}/{len(ips_ipv46)} [{100*i/len(ips_ipv46):04.1f}%]\r", file=sys.stderr, - end='', - flush=True) + end="", + flush=True, + ) - if net_count[ip['net']] == max_per_net: + if net_count[ip["net"]] == max_per_net: # do not add this ip as we already too many # ips from this network continue - asn = lookup_asn(ip['net'], ip['ip']) - if asn is None or asn_count[asn] == max_per_asn[ip['net']]: + asn = lookup_asn(ip["net"], ip["ip"]) + if asn is None or asn_count[asn] == max_per_asn[ip["net"]]: # do not add this ip as we already have too many # ips from this ASN on this network continue asn_count[asn] += 1 - net_count[ip['net']] += 1 + net_count[ip["net"]] += 1 result.append(ip) # Add back Onions (up to max_per_net) @@ -211,11 +218,11 @@ def ip_stats(ips: List[Dict]) -> str: - """ Format and return pretty string from `ips`. 
""" + """Format and return pretty string from `ips`.""" hist: Dict[str, int] = collections.defaultdict(int) for ip in ips: if ip is not None: - hist[ip['net']] += 1 + hist[ip["net"]] += 1 return f"{hist['ipv4']:6d} {hist['ipv6']:6d} {hist['onion']:6d}" @@ -225,59 +232,58 @@ ips = [parseline(line) for line in lines] print( - '\x1b[7m IPv4 IPv6 Onion Pass \x1b[0m', - file=sys.stderr) - print(f'{ip_stats(ips):s} Initial', file=sys.stderr) + ( + "\x1b[7m IPv4 IPv6 Onion Pass " + " \x1b[0m" + ), + file=sys.stderr, + ) + print(f"{ip_stats(ips):s} Initial", file=sys.stderr) # Skip entries with invalid address. ips = [ip for ip in ips if ip is not None] - print( - f'{ip_stats(ips):s} Skip entries with invalid address', - file=sys.stderr) + print(f"{ip_stats(ips):s} Skip entries with invalid address", file=sys.stderr) # Skip duplicates (in case multiple seeds files were concatenated) ips = dedup(ips) - print(f'{ip_stats(ips):s} After removing duplicates', file=sys.stderr) + print(f"{ip_stats(ips):s} After removing duplicates", file=sys.stderr) # Enforce minimal number of blocks. - ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS] - print( - f'{ip_stats(ips):s} Enforce minimal number of blocks', - file=sys.stderr) + ips = [ip for ip in ips if ip["blocks"] >= MIN_BLOCKS] + print(f"{ip_stats(ips):s} Enforce minimal number of blocks", file=sys.stderr) # Require service bit 1. - ips = [ip for ip in ips if (ip['service'] & 1) == 1] - print(f'{ip_stats(ips):s} Require service bit 1', file=sys.stderr) + ips = [ip for ip in ips if (ip["service"] & 1) == 1] + print(f"{ip_stats(ips):s} Require service bit 1", file=sys.stderr) # Require at least 50% 30-day uptime for clearnet, 10% for onion. req_uptime = { - 'ipv4': 50, - 'ipv6': 50, - 'onion': 10, + "ipv4": 50, + "ipv6": 50, + "onion": 10, } - ips = [ip for ip in ips if ip['uptime'] > req_uptime[ip['net']]] - print(f'{ip_stats(ips):s} Require minimum uptime', file=sys.stderr) + ips = [ip for ip in ips if ip["uptime"] > req_uptime[ip["net"]]] + print(f"{ip_stats(ips):s} Require minimum uptime", file=sys.stderr) # Require a known and recent user agent. - ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])] - print( - f'{ip_stats(ips):s} Require a known and recent user agent', - file=sys.stderr) + ips = [ip for ip in ips if PATTERN_AGENT.match(ip["agent"])] + print(f"{ip_stats(ips):s} Require a known and recent user agent", file=sys.stderr) # Sort by availability (and use last success as tie breaker) - ips.sort(key=lambda x: - (x['uptime'], x['lastsuccess'], x['ip']), reverse=True) + ips.sort(key=lambda x: (x["uptime"], x["lastsuccess"], x["ip"]), reverse=True) # Filter out hosts with multiple bitcoin ports, these are likely abusive ips = filtermultiport(ips) print( - f'{ip_stats(ips):s} Filter out hosts with multiple bitcoin ports', - file=sys.stderr) + f"{ip_stats(ips):s} Filter out hosts with multiple bitcoin ports", + file=sys.stderr, + ) # Look up ASNs and limit results, both per ASN and globally. ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS) print( - f'{ip_stats(ips):s} Look up ASNs and limit results per ASN and per net', - file=sys.stderr) + f"{ip_stats(ips):s} Look up ASNs and limit results per ASN and per net", + file=sys.stderr, + ) # Sort the results by IP address (for deterministic output). 
- ips.sort(key=lambda x: (x['net'], x['sortkey'])) + ips.sort(key=lambda x: (x["net"], x["sortkey"])) for ip in ips: - if ip['net'] == 'ipv6': + if ip["net"] == "ipv6": print(f"[{ip['ip']}]:{ip['port']}") else: print(f"{ip['ip']}:{ip['port']}") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/contrib/teamcity/build-configurations.py b/contrib/teamcity/build-configurations.py --- a/contrib/teamcity/build-configurations.py +++ b/contrib/teamcity/build-configurations.py @@ -42,19 +42,19 @@ self.project_root = PurePath( subprocess.run( - ['git', 'rev-parse', '--show-toplevel'], + ["git", "rev-parse", "--show-toplevel"], capture_output=True, check=True, - encoding='utf-8', + encoding="utf-8", text=True, ).stdout.strip() ) self.project_commit = subprocess.run( - ['git', 'rev-parse', '--short', 'HEAD'], + ["git", "rev-parse", "--short", "HEAD"], capture_output=True, check=True, - encoding='utf-8', + encoding="utf-8", text=True, ).stdout.strip() @@ -77,9 +77,8 @@ # it should not be empty. if not config.get("builds", None): raise AssertionError( - "Invalid configuration file {}: the \"builds\" element is missing or empty".format( - str(self.config_file) - ) + 'Invalid configuration file {}: the "builds" element is missing or' + " empty".format(str(self.config_file)) ) # Check the target build has an entry in the configuration file @@ -103,10 +102,8 @@ # Raise an error if the template does not exist if template_name not in templates: raise AssertionError( - "Build {} configuration inherits from template {}, but the template does not exist.".format( - self.name, - template_name - ) + "Build {} configuration inherits from template {}, but the template" + " does not exist.".format(self.name, template_name) ) always_merger.merge(template_config, templates.get(template_name)) @@ -114,15 +111,15 @@ # Create the build directory as needed self.build_directory = Path( - self.project_root.joinpath( - 'abc-ci-builds', - self.name)) + self.project_root.joinpath("abc-ci-builds", self.name) + ) # Define the junit and logs directories self.junit_reports_dir = self.build_directory.joinpath("test/junit") self.test_logs_dir = self.build_directory.joinpath("test/log") self.functional_test_logs = self.build_directory.joinpath( - "test/tmp/test_runner_*") + "test/tmp/test_runner_*" + ) # We will provide the required environment variables self.environment_variables = { @@ -134,12 +131,14 @@ def create_script_file(self, dest, content): # Write the content to a script file using a template - with open(self.script_root.joinpath("bash_script.sh.in"), encoding='utf-8') as f: + with open( + self.script_root.joinpath("bash_script.sh.in"), encoding="utf-8" + ) as f: script_template_content = f.read() template = Template(script_template_content) - with open(dest, 'w', encoding='utf-8') as f: + with open(dest, "w", encoding="utf-8") as f: f.write( template.safe_substitute( **self.environment_variables, @@ -176,7 +175,8 @@ context = docker_config.get("context", None) if context is None: raise AssertionError( - f"The docker configuration for build {self.name} is missing a context, aborting" + f"The docker configuration for build {self.name} is missing a" + " context, aborting" ) # Make the context path absolute context = self.project_root.joinpath(context) @@ -188,8 +188,11 @@ ) dockerfile = docker_config.get("dockerfile", None) - dockerfile_args = [ - "-f", str(self.project_root.joinpath(dockerfile))] if dockerfile else [] + dockerfile_args = ( + ["-f", str(self.project_root.joinpath(dockerfile))] + if 
dockerfile + else [] + ) tag_name = "-".join([self.name, self.project_commit]) @@ -197,7 +200,9 @@ self.build_steps.append( { "bin": "docker", - "args": ["build"] + dockerfile_args + ["-t", tag_name, str(context)], + "args": ( + ["build"] + dockerfile_args + ["-t", tag_name, str(context)] + ), } ) @@ -211,15 +216,32 @@ self.build_steps.append( { "bin": "docker", - "args": ["run", "--rm", "-d", "--name", tag_name, "--stop-signal", "SIGTERM", "--stop-timeout", "60"] + port_args + [tag_name], + "args": ( + [ + "run", + "--rm", + "-d", + "--name", + tag_name, + "--stop-signal", + "SIGTERM", + "--stop-timeout", + "60", + ] + + port_args + + [tag_name] + ), } ) timeout_minutes = docker_config.get("timeout_minutes", 60) # Write the address to stdout and to the preview_url log file - preview_msg = f"Preview is available at http://{ip_address}:{outer_port} for the next {timeout_minutes} minutes." - with open(preview_url, 'w', encoding='utf-8') as f: + preview_msg = ( + f"Preview is available at http://{ip_address}:{outer_port} for the next" + f" {timeout_minutes} minutes." + ) + with open(preview_url, "w", encoding="utf-8") as f: f.write(preview_msg) self.build_steps.append( { @@ -245,7 +267,11 @@ script_file = self.build_directory.joinpath("docker_timeout.sh") self.create_script_file( script_file, - f'cd "${{HOME}}" && echo "docker stop {tag_name}" | at now +{timeout_minutes} minutes') + ( + f'cd "${{HOME}}" && echo "docker stop {tag_name}" | at now' + f" +{timeout_minutes} minutes" + ), + ) self.build_steps.append( { @@ -263,31 +289,38 @@ targets = self.config.get("targets", None) if not targets: raise AssertionError( - "No build target has been provided for build {} and no script is defined, aborting".format( - self.name - ) + "No build target has been provided for build {} and no script is" + " defined, aborting".format(self.name) ) # Some more flags for the build_cmake.sh script if self.config.get("clang", False): - self.cmake_flags.extend([ - "-DCMAKE_C_COMPILER=clang", - "-DCMAKE_CXX_COMPILER=clang++", - ]) + self.cmake_flags.extend( + [ + "-DCMAKE_C_COMPILER=clang", + "-DCMAKE_CXX_COMPILER=clang++", + ] + ) if self.config.get("gcc", False): - self.cmake_flags.extend([ - "-DCMAKE_C_COMPILER=gcc", - "-DCMAKE_CXX_COMPILER=g++", - ]) + self.cmake_flags.extend( + [ + "-DCMAKE_C_COMPILER=gcc", + "-DCMAKE_CXX_COMPILER=g++", + ] + ) if self.config.get("junit", True): - self.cmake_flags.extend([ - "-DENABLE_JUNIT_REPORT=ON", - ]) + self.cmake_flags.extend( + [ + "-DENABLE_JUNIT_REPORT=ON", + ] + ) if self.config.get("Werror", False): - self.cmake_flags.extend([ - "-DCMAKE_C_FLAGS=-Werror", - "-DCMAKE_CXX_FLAGS=-Werror", - ]) + self.cmake_flags.extend( + [ + "-DCMAKE_C_FLAGS=-Werror", + "-DCMAKE_CXX_FLAGS=-Werror", + ] + ) # Get the generator, default to ninja generator = self.config.get("generator", {}) @@ -295,11 +328,10 @@ generator_command = generator.get("command", "ninja") # If the build runs on diff or has the fail_fast flag, exit on first error. # Otherwise keep running so we can gather more test result. 
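A self-contained sketch of the flag-selection logic rewrapped just below, using made-up config dicts (not part of the patch):

    def generator_flags_for(config: dict) -> list:
        # Builds triggered on a diff, or flagged fail_fast, stop at the first
        # error; otherwise ninja gets -k0 so it keeps going and gathers more
        # test results.
        fail_fast = config.get("fail_fast", False) or config.get("runOnDiff", False)
        return config.get("generator", {}).get("flags", [] if fail_fast else ["-k0"])

    assert generator_flags_for({}) == ["-k0"]
    assert generator_flags_for({"runOnDiff": True}) == []
    assert generator_flags_for({"generator": {"flags": ["-j8"]}}) == ["-j8"]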
-        fail_fast = self.config.get(
-            "fail_fast", False) or self.config.get(
-            "runOnDiff", False)
-        generator_flags = generator.get(
-            "flags", ["-k0"] if not fail_fast else [])
+        fail_fast = self.config.get("fail_fast", False) or self.config.get(
+            "runOnDiff", False
+        )
+        generator_flags = generator.get("flags", ["-k0"] if not fail_fast else [])
 
         # Max out the jobs by default when the generator uses make
         if generator_command == "make":
@@ -315,14 +347,18 @@
         # Both static_depends and toolchain are mandatory for cross builds
         if not static_depends:
             raise AssertionError(
-                "`static_depends` configuration is required for cross builds")
+                "`static_depends` configuration is required for cross builds"
+            )
         if not toolchain:
             raise AssertionError(
-                "`toolchain` configuration is required for cross builds")
+                "`toolchain` configuration is required for cross builds"
+            )
 
         self.build_steps.append(
             {
-                "bin": str(self.project_root.joinpath("contrib/devtools/build_depends.sh")),
+                "bin": str(
+                    self.project_root.joinpath("contrib/devtools/build_depends.sh")
+                ),
                 "args": [static_depends],
             }
         )
@@ -330,9 +366,7 @@
         toolchain_file = self.project_root.joinpath(
             f"cmake/platforms/{toolchain}.cmake"
         )
-        self.cmake_flags.append(
-            f"-DCMAKE_TOOLCHAIN_FILE={str(toolchain_file)}"
-        )
+        self.cmake_flags.append(f"-DCMAKE_TOOLCHAIN_FILE={str(toolchain_file)}")
 
         if emulator:
             self.cmake_flags.append(
@@ -343,7 +377,11 @@
         self.build_steps.append(
             {
                 "bin": "cmake",
-                "args": ["-G", generator_name, str(self.project_root)] + self.cmake_flags,
+                "args": [
+                    "-G",
+                    generator_name,
+                    str(self.project_root),
+                ] + self.cmake_flags,
             }
         )
 
@@ -372,7 +410,7 @@
         return self.config.get(key, default)
 
 
-class UserBuild():
+class UserBuild:
     def __init__(self, configuration):
         self.configuration = configuration
 
@@ -385,8 +423,7 @@
         # - the clean log will contain the same filtered content as what is
         #   printed to stdout. This filter is done in print_line_to_logs().
         self.logs = {}
-        self.logs["clean_log"] = build_directory.joinpath(
-            "build.clean.log")
+        self.logs["clean_log"] = build_directory.joinpath("build.clean.log")
         self.logs["full_log"] = build_directory.joinpath("build.full.log")
 
         # Clean the build directory before any build step is run.
@@ -395,7 +432,7 @@
         self.configuration.build_directory.mkdir(exist_ok=True, parents=True)
 
         self.preview_url = build_directory.joinpath("preview_url.log")
-        self.ip_address = '127.0.0.1'
+        self.ip_address = "127.0.0.1"
 
     def copy_artifacts(self, artifacts):
         # Make sure the artifact directory always exists. It is created before
@@ -412,8 +449,11 @@
         # from it needs to be excluded from the glob matches to prevent infinite
         # recursion.
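That comment is the key invariant of the hunk that follows. As a standalone sketch under assumed names (`build_dir`, `artifact_dir`, and the `*.log` pattern are hypothetical), the exclusion relies on `pathlib.Path.parents` holding every ancestor directory of a match:

    from pathlib import Path

    build_dir = Path("abc-ci-builds/example")  # assumed build directory
    artifact_dir = build_dir / "artifacts"     # assumed copy destination

    # Keep a match only if it is neither the artifact directory itself nor
    # located anywhere below it, so copying never consumes its own output.
    matches = [
        m
        for m in sorted(build_dir.glob("**/*.log"))
        if artifact_dir not in m.parents and artifact_dir != m
    ]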
for pattern, dest in artifacts.items(): - matches = [m for m in sorted(self.configuration.build_directory.glob( - pattern)) if self.artifact_dir not in m.parents and self.artifact_dir != m] + matches = [ + m + for m in sorted(self.configuration.build_directory.glob(pattern)) + if self.artifact_dir not in m.parents and self.artifact_dir != m + ] dest = self.artifact_dir.joinpath(dest) # Pattern did not match @@ -443,12 +483,12 @@ def print_line_to_logs(self, line): # Always print to the full log - with open(self.logs["full_log"], 'a', encoding='utf-8') as log: + with open(self.logs["full_log"], "a", encoding="utf-8") as log: log.write(line) # Discard the set -x bash output for stdout and the clean log if not line.startswith("+"): - with open(self.logs["clean_log"], 'a', encoding='utf-8') as log: + with open(self.logs["clean_log"], "a", encoding="utf-8") as log: log.write(line) print(line.rstrip()) @@ -456,7 +496,7 @@ while True: try: line = await stdout.readline() - line = line.decode('utf-8') + line = line.decode("utf-8") if not line: break @@ -501,7 +541,9 @@ await asyncio.wait_for(logging_task, timeout=5) except asyncio.TimeoutError: self.print_line_to_logs( - "Warning: Timed out while waiting for logging to flush. Some log lines may be missing.") + "Warning: Timed out while waiting for logging to flush. Some log lines" + " may be missing." + ) return result @@ -510,11 +552,12 @@ message = f"Build {self.configuration.name} completed successfully" try: for step in self.configuration.build_steps: - return_code = await asyncio.wait_for(self.run_build(step["bin"], step["args"]), timeout) + return_code = await asyncio.wait_for( + self.run_build(step["bin"], step["args"]), timeout + ) if return_code != 0: message = "Build {} failed with exit code {}".format( - self.configuration.name, - return_code + self.configuration.name, return_code ) return @@ -534,9 +577,13 @@ **self.configuration.get("artifacts", {}), str(self.logs["full_log"].relative_to(build_directory)): "", str(self.logs["clean_log"].relative_to(build_directory)): "", - str(self.configuration.junit_reports_dir.relative_to(build_directory)): "", + str( + self.configuration.junit_reports_dir.relative_to(build_directory) + ): "", str(self.configuration.test_logs_dir.relative_to(build_directory)): "", - str(self.configuration.functional_test_logs.relative_to(build_directory)): "functional", + str( + self.configuration.functional_test_logs.relative_to(build_directory) + ): "functional", str(self.preview_url.relative_to(build_directory)): "", } @@ -551,12 +598,11 @@ self.artifact_dir.mkdir(exist_ok=True) self.configuration.create_build_steps( - self.artifact_dir, self.preview_url, self.ip_address) + self.artifact_dir, self.preview_url, self.ip_address + ) return_code, message = asyncio.run( - self.wait_for_build( - self.configuration.get( - "timeout", DEFAULT_TIMEOUT)) + self.wait_for_build(self.configuration.get("timeout", DEFAULT_TIMEOUT)) ) return (return_code, message) @@ -577,6 +623,7 @@ # Only gather the public IP if we are running on a TC build agent from whatismyip import whatismyip + self.ip_address = whatismyip() def copy_artifacts(self, artifacts): @@ -598,8 +645,7 @@ # Let the user know what build is being run. # This makes it easier to retrieve the info from the logs. 
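For context on the call being reflowed next: it goes through the teamcity-messages package the script already imports. A rough sketch of the same announcement, with a made-up build name:

    from teamcity.messages import TeamcityServiceMessages

    messages = TeamcityServiceMessages()
    # Writes a ##teamcity[...] service line to stdout; the TeamCity agent
    # parses it and attaches the text to the running build's log.
    messages.customMessage("Starting build build-diff", status="NORMAL")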
self.teamcity_messages.customMessage( - f"Starting build {self.configuration.name}", - status="NORMAL" + f"Starting build {self.configuration.name}", status="NORMAL" ) return_code, message = super().run() @@ -612,20 +658,20 @@ self.teamcity_messages.buildProblem( message, # Let Teamcity calculate an ID from our message - None + None, ) # Change the final build message self.teamcity_messages.buildStatus( # Don't change the status, let Teamcity set it to failure None, - message + message, ) else: # Change the final build message but keep the original one as well self.teamcity_messages.buildStatus( # Don't change the status, let Teamcity set it to success None, - f"{message} ({{build.status.text}})" + f"{message} ({{build.status.text}})", ) return (return_code, message) @@ -636,29 +682,23 @@ # By default search for a configuration file in the same directory as this # script. - default_config_path = Path( - script_dir.joinpath("build-configurations.yml") - ) + default_config_path = Path(script_dir.joinpath("build-configurations.yml")) parser = argparse.ArgumentParser(description="Run a CI build") - parser.add_argument( - "build", - help="The name of the build to run" - ) + parser.add_argument("build", help="The name of the build to run") parser.add_argument( "--config", "-c", help="Path to the builds configuration file (default to {})".format( str(default_config_path) - ) + ), ) args, unknown_args = parser.parse_known_args() # Check the configuration file exists config_path = Path(args.config) if args.config else default_config_path - build_configuration = BuildConfiguration( - script_dir, config_path, args.build) + build_configuration = BuildConfiguration(script_dir, config_path, args.build) if is_running_under_teamcity(): build = TeamcityBuild(build_configuration) @@ -668,5 +708,5 @@ sys.exit(build.run(unknown_args)[0]) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/contrib/teamcity/nanobench_json_to_teamcity_messages.py b/contrib/teamcity/nanobench_json_to_teamcity_messages.py --- a/contrib/teamcity/nanobench_json_to_teamcity_messages.py +++ b/contrib/teamcity/nanobench_json_to_teamcity_messages.py @@ -10,8 +10,7 @@ from teamcity.messages import TeamcityServiceMessages if len(sys.argv) != 3: - print( - f""" + print(f""" Usage: {sys.argv[0]} @@ -24,62 +23,56 @@ sys.exit(1) suite_name = sys.argv[1] -with open(sys.argv[2], encoding='utf-8') as f: +with open(sys.argv[2], encoding="utf-8") as f: json_results = json.load(f) teamcity_messages = TeamcityServiceMessages() -teamcity_messages.testSuiteStarted( - suite_name -) +teamcity_messages.testSuiteStarted(suite_name) def testMetadata_number_message(test_name, param_name, param_value): teamcity_messages.message( - 'testMetadata', - type='number', + "testMetadata", + type="number", testName=test_name, name=param_name, - value=f'{param_value:.2f}', + value=f"{param_value:.2f}", ) -for result in json_results.get('results', []): - test_name = result['name'] +for result in json_results.get("results", []): + test_name = result["name"] - teamcity_messages.testStarted( - test_name - ) + teamcity_messages.testStarted(test_name) testMetadata_number_message( test_name, f"ns/{result['unit']}", - 1e9 * result['median(elapsed)'] / result['batch'], + 1e9 * result["median(elapsed)"] / result["batch"], ) testMetadata_number_message( test_name, f"{result['unit']}/s", - result['batch'] / result['median(elapsed)'], + result["batch"] / result["median(elapsed)"], ) testMetadata_number_message( test_name, - 'err%', - 100 * 
result['medianAbsolutePercentError(elapsed)'], + "err%", + 100 * result["medianAbsolutePercentError(elapsed)"], ) testMetadata_number_message( test_name, f"ins/{result['unit']}", - result['median(instructions)'] / result['batch'], + result["median(instructions)"] / result["batch"], ) teamcity_messages.testFinished( test_name, - testDuration=timedelta(seconds=result['totalTime']), + testDuration=timedelta(seconds=result["totalTime"]), ) -teamcity_messages.testSuiteFinished( - suite_name -) +teamcity_messages.testSuiteFinished(suite_name) diff --git a/contrib/testgen/base58.py b/contrib/testgen/base58.py --- a/contrib/testgen/base58.py +++ b/contrib/testgen/base58.py @@ -2,11 +2,11 @@ # Copyright (c) 2012-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -''' +""" Bitcoin base58 encoding and decoding. Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain) -''' +""" import hashlib # for compatibility with following code... @@ -24,21 +24,21 @@ def chr(n): # noqa: A001 return bytes((n,)) -__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' + +__b58chars = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" __b58base = len(__b58chars) b58chars = __b58chars def b58encode(v): - """ encode v, which is a string of bytes, to base58. - """ + """encode v, which is a string of bytes, to base58.""" long_value = 0 - for (i, c) in enumerate(v[::-1]): + for i, c in enumerate(v[::-1]): if isinstance(c, str): c = ord(c) long_value += (256**i) * c - result = '' + result = "" while long_value >= __b58base: div, mod = divmod(long_value, __b58base) result = __b58chars[mod] + result @@ -58,8 +58,7 @@ def b58decode(v, length=None): - """ decode v into a string of len bytes - """ + """decode v into a string of len bytes""" long_value = 0 for i, c in enumerate(v[::-1]): pos = __b58chars.find(c) @@ -109,7 +108,7 @@ def get_bcaddress_version(strAddress): - """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """ + """Returns None if strAddress is invalid. Otherwise returns integer version of address.""" addr = b58decode_chk(strAddress) if addr is None or len(addr) != 21: return None @@ -117,11 +116,11 @@ return ord(version) -if __name__ == '__main__': +if __name__ == "__main__": # Test case (from http://gitorious.org/bitcoin/python-base58.git) - assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0 - _ohai = 'o hai'.encode('ascii') + assert get_bcaddress_version("15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC") == 0 + _ohai = "o hai".encode("ascii") _tmp = b58encode(_ohai) - assert _tmp == 'DYB3oMS' + assert _tmp == "DYB3oMS" assert b58decode(_tmp, 5) == _ohai print("Tests passed") diff --git a/contrib/testgen/gen_base58_test_vectors.py b/contrib/testgen/gen_base58_test_vectors.py --- a/contrib/testgen/gen_base58_test_vectors.py +++ b/contrib/testgen/gen_base58_test_vectors.py @@ -2,13 +2,13 @@ # Copyright (c) 2012-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -''' +""" Generate valid and invalid base58 address and private key test vectors. Usage: gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json -''' +""" # 2012 Wladimir J. 
van der Laan # Released under MIT License import os @@ -26,24 +26,24 @@ PRIVKEY = 128 PRIVKEY_TEST = 239 -metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed'] +metadata_keys = ["isPrivkey", "isTestnet", "addrType", "isCompressed"] # templates for valid sequences templates = [ # prefix, payload_size, suffix, metadata # None = N/A - ((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)), - ((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)), - ((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)), - ((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)), + ((PUBKEY_ADDRESS,), 20, (), (False, False, "pubkey", None)), + ((SCRIPT_ADDRESS,), 20, (), (False, False, "script", None)), + ((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, "pubkey", None)), + ((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, "script", None)), ((PRIVKEY,), 32, (), (True, False, None, False)), ((PRIVKEY,), 32, (1,), (True, False, None, True)), ((PRIVKEY_TEST,), 32, (), (True, True, None, False)), - ((PRIVKEY_TEST,), 32, (1,), (True, True, None, True)) + ((PRIVKEY_TEST,), 32, (1,), (True, True, None, True)), ] def is_valid(v): - '''Check vector v for validity''' + """Check vector v for validity""" result = b58decode_chk(v) if result is None: return False @@ -57,7 +57,7 @@ def gen_valid_vectors(): - '''Generate valid test vectors''' + """Generate valid test vectors""" while True: for template in templates: prefix = bytearray(template[0]) @@ -65,17 +65,19 @@ suffix = bytearray(template[2]) rv = b58encode_chk(prefix + payload + suffix) assert is_valid(rv) - metadata = {x: y for x, y in zip( - metadata_keys, template[3]) if y is not None} + metadata = { + x: y for x, y in zip(metadata_keys, template[3]) if y is not None + } hexrepr = b2a_hex(payload) if isinstance(hexrepr, bytes): - hexrepr = hexrepr.decode('utf8') + hexrepr = hexrepr.decode("utf8") yield (rv, hexrepr, metadata) -def gen_invalid_vector(template, corrupt_prefix, - randomize_payload_size, corrupt_suffix): - '''Generate possibly invalid vector''' +def gen_invalid_vector( + template, corrupt_prefix, randomize_payload_size, corrupt_suffix +): + """Generate possibly invalid vector""" if corrupt_prefix: prefix = os.urandom(1) else: @@ -95,12 +97,12 @@ def randbool(p=0.5): - '''Return True with P(p)''' + """Return True with P(p)""" return random.random() < p def gen_invalid_vectors(): - '''Generate invalid test vectors''' + """Generate invalid test vectors""" # start with some manual edge-cases yield "", yield "x", @@ -111,22 +113,24 @@ # invalid (randomized) suffix (add random data) # corrupt checksum for template in templates: - val = gen_invalid_vector(template, randbool( - 0.2), randbool(0.2), randbool(0.2)) + val = gen_invalid_vector( + template, randbool(0.2), randbool(0.2), randbool(0.2) + ) if random.randint(0, 10) < 1: # line corruption if randbool(): # add random character to end val += random.choice(b58chars) else: # replace random character in the middle n = random.randint(0, len(val)) - val = val[0:n] + random.choice(b58chars) + val[n + 1:] + val = val[0:n] + random.choice(b58chars) + val[n + 1 :] if not is_valid(val): yield val, -if __name__ == '__main__': +if __name__ == "__main__": import json import sys - iters = {'valid': gen_valid_vectors, 'invalid': gen_invalid_vectors} + + iters = {"valid": gen_valid_vectors, "invalid": gen_invalid_vectors} try: uiter = iters[sys.argv[1]] except IndexError: @@ -138,4 +142,4 @@ data = list(islice(uiter(), count)) json.dump(data, sys.stdout, sort_keys=True, indent=4) - 
sys.stdout.write('\n') + sys.stdout.write("\n") diff --git a/contrib/tracing/log_raw_p2p_msgs.py b/contrib/tracing/log_raw_p2p_msgs.py --- a/contrib/tracing/log_raw_p2p_msgs.py +++ b/contrib/tracing/log_raw_p2p_msgs.py @@ -118,17 +118,20 @@ def print_message(event, inbound): print( "{} {} msg '{}' from peer {} ({}, {}) with {} bytes: {}".format( - "Warning: incomplete message (only {} out of {} bytes)!".format( - len(event.msg), - event.msg_size - ) if len(event.msg) < event.msg_size else "", + ( + "Warning: incomplete message (only {} out of {} bytes)!".format( + len(event.msg), event.msg_size + ) + if len(event.msg) < event.msg_size + else "" + ), "inbound" if inbound else "outbound", event.msg_type.decode("utf-8"), event.peer_id, event.peer_conn_type.decode("utf-8"), event.peer_addr.decode("utf-8"), event.msg_size, - bytes(event.msg[:event.msg_size]).hex(), + bytes(event.msg[: event.msg_size]).hex(), ) ) @@ -139,14 +142,16 @@ # attaching the trace functions defined in the BPF program to the # tracepoints bitcoind_with_usdts.enable_probe( - probe="inbound_message", fn_name="trace_inbound_message") + probe="inbound_message", fn_name="trace_inbound_message" + ) bitcoind_with_usdts.enable_probe( - probe="outbound_message", fn_name="trace_outbound_message") + probe="outbound_message", fn_name="trace_outbound_message" + ) bpf = BPF(text=program, usdt_contexts=[bitcoind_with_usdts]) # BCC: perf buffer handle function for inbound_messages def handle_inbound(_, data, size): - """ Inbound message handler. + """Inbound message handler. Called each time a message is submitted to the inbound_messages BPF table.""" @@ -156,7 +161,7 @@ # BCC: perf buffer handle function for outbound_messages def handle_outbound(_, data, size): - """ Outbound message handler. + """Outbound message handler. Called each time a message is submitted to the outbound_messages BPF table.""" diff --git a/contrib/tracing/log_utxocache_flush.py b/contrib/tracing/log_utxocache_flush.py --- a/contrib/tracing/log_utxocache_flush.py +++ b/contrib/tracing/log_utxocache_flush.py @@ -39,12 +39,7 @@ } """ -FLUSH_MODES = [ - 'NONE', - 'IF_NEEDED', - 'PERIODIC', - 'ALWAYS' -] +FLUSH_MODES = ["NONE", "IF_NEEDED", "PERIODIC", "ALWAYS"] class Data(ctypes.Structure): @@ -54,18 +49,20 @@ ("mode", ctypes.c_uint32), ("coins_count", ctypes.c_uint64), ("coins_mem_usage", ctypes.c_uint64), - ("is_flush_for_prune", ctypes.c_bool) + ("is_flush_for_prune", ctypes.c_bool), ] def print_event(event): - print("{:15d} {:10s} {:15d} {:15s} {:8s}".format( - event.duration, - FLUSH_MODES[event.mode], - event.coins_count, - "{:.2f} kB".format(event.coins_mem_usage / 1000), - str(event.is_flush_for_prune), - )) + print( + "{:15d} {:10s} {:15d} {:15s} {:8s}".format( + event.duration, + FLUSH_MODES[event.mode], + event.coins_count, + "{:.2f} kB".format(event.coins_mem_usage / 1000), + str(event.is_flush_for_prune), + ) + ) def main(bitcoind_path): @@ -73,13 +70,12 @@ # attaching the trace functions defined in the BPF program # to the tracepoints - bitcoind_with_usdts.enable_probe( - probe="flush", fn_name="trace_flush") + bitcoind_with_usdts.enable_probe(probe="flush", fn_name="trace_flush") b = BPF(text=program, usdt_contexts=[bitcoind_with_usdts]) def handle_flush(_, data, size): - """ Coins Flush handler. - Called each time coin caches and indexes are flushed.""" + """Coins Flush handler. 
+ Called each time coin caches and indexes are flushed.""" event = ctypes.cast(data, ctypes.POINTER(Data)).contents print_event(event) @@ -87,11 +83,9 @@ print("Logging utxocache flushes. Ctrl-C to end...") print( "{:15s} {:10s} {:15s} {:15s} {:8s}".format( - "Duration (µs)", - "Mode", - "Coins Count", - "Memory Usage", - "Flush for Prune")) + "Duration (µs)", "Mode", "Coins Count", "Memory Usage", "Flush for Prune" + ) + ) while True: try: diff --git a/contrib/tracing/p2p_monitor.py b/contrib/tracing/p2p_monitor.py --- a/contrib/tracing/p2p_monitor.py +++ b/contrib/tracing/p2p_monitor.py @@ -72,7 +72,8 @@ class Message: - """ A P2P network message. """ + """A P2P network message.""" + msg_type = "" size = 0 data = bytes() @@ -85,7 +86,8 @@ class Peer: - """ A P2P network peer. """ + """A P2P network peer.""" + id = 0 address = "" connection_type = "" @@ -122,36 +124,46 @@ # attaching the trace functions defined in the BPF program to the # tracepoints bitcoind_with_usdts.enable_probe( - probe="inbound_message", fn_name="trace_inbound_message") + probe="inbound_message", fn_name="trace_inbound_message" + ) bitcoind_with_usdts.enable_probe( - probe="outbound_message", fn_name="trace_outbound_message") + probe="outbound_message", fn_name="trace_outbound_message" + ) bpf = BPF(text=program, usdt_contexts=[bitcoind_with_usdts]) # BCC: perf buffer handle function for inbound_messages def handle_inbound(_, data, size): - """ Inbound message handler. + """Inbound message handler. Called each time a message is submitted to the inbound_messages BPF table.""" event = bpf["inbound_messages"].event(data) if event.peer_id not in peers: - peer = Peer(event.peer_id, event.peer_addr.decode( - "utf-8"), event.peer_conn_type.decode("utf-8")) + peer = Peer( + event.peer_id, + event.peer_addr.decode("utf-8"), + event.peer_conn_type.decode("utf-8"), + ) peers[peer.id] = peer peers[event.peer_id].add_message( - Message(event.msg_type.decode("utf-8"), event.msg_size, True)) + Message(event.msg_type.decode("utf-8"), event.msg_size, True) + ) # BCC: perf buffer handle function for outbound_messages def handle_outbound(_, data, size): - """ Outbound message handler. + """Outbound message handler. 
Called each time a message is submitted to the outbound_messages BPF table.""" event = bpf["outbound_messages"].event(data) if event.peer_id not in peers: - peer = Peer(event.peer_id, event.peer_addr.decode( - "utf-8"), event.peer_conn_type.decode("utf-8")) + peer = Peer( + event.peer_id, + event.peer_addr.decode("utf-8"), + event.peer_conn_type.decode("utf-8"), + ) peers[peer.id] = peer peers[event.peer_id].add_message( - Message(event.msg_type.decode("utf-8"), event.msg_size, False)) + Message(event.msg_type.decode("utf-8"), event.msg_size, False) + ) # BCC: add handlers to the inbound and outbound perf buffers bpf["inbound_messages"].open_perf_buffer(handle_inbound) @@ -165,8 +177,9 @@ cur_list_pos = 0 win = curses.newwin(30, 70, 2, 7) win.erase() - win.border(ord("|"), ord("|"), ord("-"), ord("-"), - ord("-"), ord("-"), ord("-"), ord("-")) + win.border( + ord("|"), ord("|"), ord("-"), ord("-"), ord("-"), ord("-"), ord("-"), ord("-") + ) info_panel = panel.new_panel(win) info_panel.hide() @@ -179,76 +192,117 @@ bpf.perf_buffer_poll(timeout=50) ch = screen.getch() - if (ch == curses.KEY_DOWN or ch == ord("j")) and cur_list_pos < len( - peers.keys()) - 1 and info_panel.hidden(): + if ( + (ch == curses.KEY_DOWN or ch == ord("j")) + and cur_list_pos < len(peers.keys()) - 1 + and info_panel.hidden() + ): cur_list_pos += 1 if cur_list_pos >= ROWS_AVALIABLE_FOR_LIST: scroll += 1 - if ((ch == curses.KEY_UP or ch == ord("k")) - and cur_list_pos > 0 and info_panel.hidden()): + if ( + (ch == curses.KEY_UP or ch == ord("k")) + and cur_list_pos > 0 + and info_panel.hidden() + ): cur_list_pos -= 1 if scroll > 0: scroll -= 1 - if ch == ord('\n') or ch == ord(' '): + if ch == ord("\n") or ch == ord(" "): if info_panel.hidden(): info_panel.show() else: info_panel.hide() screen.erase() render( - screen, - peers, - cur_list_pos, - scroll, - ROWS_AVALIABLE_FOR_LIST, - info_panel) + screen, peers, cur_list_pos, scroll, ROWS_AVALIABLE_FOR_LIST, info_panel + ) curses.panel.update_panels() screen.refresh() except KeyboardInterrupt: exit() -def render(screen, peers, cur_list_pos, scroll, - ROWS_AVALIABLE_FOR_LIST, info_panel): - """ renders the list of peers and details panel +def render(screen, peers, cur_list_pos, scroll, ROWS_AVALIABLE_FOR_LIST, info_panel): + """renders the list of peers and details panel This code is unrelated to USDT, BCC and BPF. 
""" header_format = "%6s %-20s %-20s %-22s %-67s" row_format = "%6s %-5d %9d byte %-5d %9d byte %-22s %-67s" - screen.addstr(0, 1, (" P2P Message Monitor "), curses.A_REVERSE) + screen.addstr(0, 1, " P2P Message Monitor ", curses.A_REVERSE) + screen.addstr( + 1, + 0, + ( + " Navigate with UP/DOWN or J/K and select a peer with ENTER or SPACE to see" + " individual P2P messages" + ), + curses.A_NORMAL, + ) screen.addstr( - 1, 0, (" Navigate with UP/DOWN or J/K and select a peer with ENTER or SPACE to see individual P2P messages"), curses.A_NORMAL) - screen.addstr(3, 0, - header_format % ("PEER", "OUTBOUND", "INBOUND", "TYPE", "ADDR"), curses.A_BOLD | curses.A_UNDERLINE) - peer_list = sorted(peers.keys())[scroll:ROWS_AVALIABLE_FOR_LIST + scroll] + 3, + 0, + header_format % ("PEER", "OUTBOUND", "INBOUND", "TYPE", "ADDR"), + curses.A_BOLD | curses.A_UNDERLINE, + ) + peer_list = sorted(peers.keys())[scroll : ROWS_AVALIABLE_FOR_LIST + scroll] for i, peer_id in enumerate(peer_list): peer = peers[peer_id] - screen.addstr(i + 4, 0, - row_format % (peer.id, peer.total_outbound_msgs, peer.total_outbound_bytes, - peer.total_inbound_msgs, peer.total_inbound_bytes, - peer.connection_type, peer.address), - curses.A_REVERSE if i + scroll == cur_list_pos else curses.A_NORMAL) + screen.addstr( + i + 4, + 0, + row_format + % ( + peer.id, + peer.total_outbound_msgs, + peer.total_outbound_bytes, + peer.total_inbound_msgs, + peer.total_inbound_bytes, + peer.connection_type, + peer.address, + ), + curses.A_REVERSE if i + scroll == cur_list_pos else curses.A_NORMAL, + ) if i + scroll == cur_list_pos: info_window = info_panel.window() info_window.erase() info_window.border( - ord("|"), ord("|"), ord("-"), ord("-"), - ord("-"), ord("-"), ord("-"), ord("-")) + ord("|"), + ord("|"), + ord("-"), + ord("-"), + ord("-"), + ord("-"), + ord("-"), + ord("-"), + ) info_window.addstr( - 1, 1, f"PEER {peer.id} ({peer.address})".center(68), curses.A_REVERSE | curses.A_BOLD) + 1, + 1, + f"PEER {peer.id} ({peer.address})".center(68), + curses.A_REVERSE | curses.A_BOLD, + ) info_window.addstr( - 2, 1, f" OUR NODE{peer.connection_type:^54}PEER ", - curses.A_BOLD) + 2, 1, f" OUR NODE{peer.connection_type:^54}PEER ", curses.A_BOLD + ) for i, msg in enumerate(peer.last_messages): if msg.inbound: info_window.addstr( - i + 3, 1, f"{f'<--- {msg.msg_type} ({msg.size} bytes) ':68s}", curses.A_NORMAL) + i + 3, + 1, + f"{f'<--- {msg.msg_type} ({msg.size} bytes) ':68s}", + curses.A_NORMAL, + ) else: info_window.addstr( - i + 3, 1, f" {msg.msg_type} ({msg.size} byte) --->", curses.A_NORMAL) + i + 3, + 1, + f" {msg.msg_type} ({msg.size} byte) --->", + curses.A_NORMAL, + ) if __name__ == "__main__": diff --git a/contrib/zmq/zmq_sub.py b/contrib/zmq/zmq_sub.py --- a/contrib/zmq/zmq_sub.py +++ b/contrib/zmq/zmq_sub.py @@ -41,7 +41,7 @@ ip = "127.0.0.1" -class ZMQHandler(): +class ZMQHandler: def __init__(self): self.loop = asyncio.get_event_loop() self.zmqContext = zmq.asyncio.Context() @@ -59,25 +59,28 @@ topic, body, seq = await self.zmqSubSocket.recv_multipart() sequence = "Unknown" if len(seq) == 4: - sequence = str(struct.unpack('