diff --git a/contrib/devtools/optimize-pngs.py b/contrib/devtools/optimize-pngs.py index f2d8815f95..39ce02389a 100755 --- a/contrib/devtools/optimize-pngs.py +++ b/contrib/devtools/optimize-pngs.py @@ -1,87 +1,86 @@ -#!/usr/bin/env python -# Copyright (c) 2014-2016 The Bitcoin Core developers +#!/usr/bin/env python3 +# Copyright (c) 2014-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Run this script every time you change one of the png files. Using pngcrush, it will optimize the png files, remove various color profiles, remove ancillary chunks (alla) and text chunks (text). #pngcrush -brute -ow -rem gAMA -rem cHRM -rem iCCP -rem sRGB -rem alla -rem text ''' import os import sys import subprocess import hashlib +# pip3 install Pillow from PIL import Image def file_hash(filename): '''Return hash of raw file contents''' with open(filename, 'rb') as f: return hashlib.sha256(f.read()).hexdigest() def content_hash(filename): '''Return hash of RGBA contents of image''' i = Image.open(filename) i = i.convert('RGBA') data = i.tobytes() return hashlib.sha256(data).hexdigest() pngcrush = 'pngcrush' git = 'git' folders = ["src/qt/res/movies", "src/qt/res/icons", "share/pixmaps"] basePath = subprocess.check_output( - [git, 'rev-parse', '--show-toplevel']).rstrip('\n') + [git, 'rev-parse', '--show-toplevel'], universal_newlines=True).rstrip('\n') totalSaveBytes = 0 noHashChange = True outputArray = [] for folder in folders: absFolder = os.path.join(basePath, folder) for file in os.listdir(absFolder): extension = os.path.splitext(file)[1] if extension.lower() == '.png': - print("optimizing " + file + "..."), + print("optimizing {}...".format(file), end=' ') file_path = os.path.join(absFolder, file) fileMetaMap = {'file': file, 'osize': os.path.getsize( file_path), 'sha256Old': file_hash(file_path)} fileMetaMap['contentHashPre'] = content_hash(file_path) - pngCrushOutput = "" try: - pngCrushOutput = subprocess.check_output( - [pngcrush, "-brute", "-ow", "-rem", "gAMA", "-rem", "cHRM", "-rem", - "iCCP", "-rem", "sRGB", "-rem", "alla", "-rem", "text", file_path], - stderr=subprocess.STDOUT).rstrip('\n') + subprocess.call([pngcrush, "-brute", "-ow", "-rem", "gAMA", "-rem", "cHRM", "-rem", "iCCP", "-rem", "sRGB", + "-rem", "alla", "-rem", "text", file_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except: - print "pngcrush is not installed, aborting..." 
+ print("pngcrush is not installed, aborting...") sys.exit(0) # verify - if "Not a PNG file" in subprocess.check_output([pngcrush, "-n", "-v", file_path], stderr=subprocess.STDOUT): - print "PNG file " + file + " is corrupted after crushing, check out pngcursh version" + if "Not a PNG file" in subprocess.check_output([pngcrush, "-n", "-v", file_path], stderr=subprocess.STDOUT, universal_newlines=True): + print("PNG file " + file + + " is corrupted after crushing, check out pngcursh version") sys.exit(1) fileMetaMap['sha256New'] = file_hash(file_path) fileMetaMap['contentHashPost'] = content_hash(file_path) if fileMetaMap['contentHashPre'] != fileMetaMap['contentHashPost']: - print "Image contents of PNG file " + file + \ - " before and after crushing don't match" + print("Image contents of PNG file " + file + + " before and after crushing don't match") sys.exit(1) fileMetaMap['psize'] = os.path.getsize(file_path) outputArray.append(fileMetaMap) - print("done\n"), + print("done") -print "summary:\n+++++++++++++++++" +print("summary:\n+++++++++++++++++") for fileDict in outputArray: oldHash = fileDict['sha256Old'] newHash = fileDict['sha256New'] totalSaveBytes += fileDict['osize'] - fileDict['psize'] noHashChange = noHashChange and (oldHash == newHash) - print fileDict['file'] + "\n size diff from: " + str(fileDict['osize']) + " to: " + str( - fileDict['psize']) + "\n old sha256: " + oldHash + "\n new sha256: " + newHash + "\n" + print(fileDict['file'] + "\n size diff from: " + str(fileDict['osize']) + " to: " + + str(fileDict['psize']) + "\n old sha256: " + oldHash + "\n new sha256: " + newHash + "\n") -print "completed. Checksum stable: " + \ - str(noHashChange) + ". Total reduction: " + str(totalSaveBytes) + " bytes" +print("completed. Checksum stable: " + str(noHashChange) + + ". Total reduction: " + str(totalSaveBytes) + " bytes") diff --git a/contrib/devtools/security-check.py b/contrib/devtools/security-check.py index b25deb45eb..e5d8c1f293 100755 --- a/contrib/devtools/security-check.py +++ b/contrib/devtools/security-check.py @@ -1,235 +1,234 @@ -#!/usr/bin/env python -# Copyright (c) 2015-2016 The Bitcoin Core developers +#!/usr/bin/env python3 +# Copyright (c) 2015-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Perform basic ELF security checks on a series of executables. Exit status will be 0 if successful, and the program will be silent. Otherwise the exit status will be 1 and it will log which executables failed which checks. Needs `readelf` (for ELF) and `objdump` (for PE). ''' -from __future__ import division, print_function, unicode_literals import subprocess import sys import os READELF_CMD = os.getenv('READELF', '/usr/bin/readelf') OBJDUMP_CMD = os.getenv('OBJDUMP', '/usr/bin/objdump') # checks which are non-fatal for now but only generate a warning NONFATAL = {'HIGH_ENTROPY_VA'} def check_ELF_PIE(executable): ''' Check for position independent executable (PIE), allowing for address space randomization. 
''' p = subprocess.Popen([READELF_CMD, '-h', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') ok = False for line in stdout.splitlines(): line = line.split() if len(line) >= 2 and line[0] == 'Type:' and line[1] == 'DYN': ok = True return ok def get_ELF_program_headers(executable): '''Return type and flags for ELF program headers''' p = subprocess.Popen([READELF_CMD, '-l', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') in_headers = False count = 0 headers = [] for line in stdout.splitlines(): if line.startswith('Program Headers:'): in_headers = True if line == '': in_headers = False if in_headers: if count == 1: # header line ofs_typ = line.find('Type') ofs_offset = line.find('Offset') ofs_flags = line.find('Flg') ofs_align = line.find('Align') if ofs_typ == -1 or ofs_offset == -1 or ofs_flags == -1 or ofs_align == -1: raise ValueError('Cannot parse elfread -lW output') elif count > 1: typ = line[ofs_typ:ofs_offset].rstrip() flags = line[ofs_flags:ofs_align].rstrip() headers.append((typ, flags)) count += 1 return headers def check_ELF_NX(executable): ''' Check that no sections are writable and executable (including the stack) ''' have_wx = False have_gnu_stack = False for (typ, flags) in get_ELF_program_headers(executable): if typ == 'GNU_STACK': have_gnu_stack = True if 'W' in flags and 'E' in flags: # section is both writable and executable have_wx = True return have_gnu_stack and not have_wx def check_ELF_RELRO(executable): ''' Check for read-only relocations. GNU_RELRO program header must exist Dynamic section must have BIND_NOW flag ''' have_gnu_relro = False for (typ, flags) in get_ELF_program_headers(executable): # Note: not checking flags == 'R': here as linkers set the permission differently # This does not affect security: the permission flags of the GNU_RELRO program header are ignored, the PT_LOAD header determines the effective permissions. # However, the dynamic linker need to write to this area so these are RW. # Glibc itself takes care of mprotecting this area R after relocations are finished. # See also http://permalink.gmane.org/gmane.comp.gnu.binutils/71347 if typ == 'GNU_RELRO': have_gnu_relro = True have_bindnow = False p = subprocess.Popen([READELF_CMD, '-d', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') for line in stdout.splitlines(): tokens = line.split() if len(tokens) > 1 and tokens[1] == '(BIND_NOW)' or (len(tokens) > 2 and tokens[1] == '(FLAGS)' and 'BIND_NOW' in tokens[2]): have_bindnow = True return have_gnu_relro and have_bindnow def check_ELF_Canary(executable): ''' Check for use of stack canary ''' p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') ok = False for line in stdout.splitlines(): if '__stack_chk_fail' in line: ok = True return ok def get_PE_dll_characteristics(executable): ''' Get PE DllCharacteristics bits. 
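# The ELF NX and RELRO checks above reduce to simple predicates over the
# (type, flags) pairs that get_ELF_program_headers() extracts from
# `readelf -l -W`. A standalone sketch of the NX rule on made-up header data
# (the sample tuples are illustrative, not taken from a real binary):
def nx_ok(program_headers):
    '''True when a GNU_STACK header exists and no segment is both writable and executable.'''
    have_gnu_stack = any(typ == 'GNU_STACK' for typ, _ in program_headers)
    have_wx = any('W' in flags and 'E' in flags for _, flags in program_headers)
    return have_gnu_stack and not have_wx

hardened = [('LOAD', 'R E'), ('LOAD', 'RW'), ('GNU_STACK', 'RW'), ('GNU_RELRO', 'R')]
exec_stack = [('LOAD', 'R E'), ('GNU_STACK', 'RWE')]
assert nx_ok(hardened)
assert not nx_ok(exec_stack)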
Returns a tuple (arch,bits) where arch is 'i386:x86-64' or 'i386' and bits is the DllCharacteristics value. ''' p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') arch = '' bits = 0 for line in stdout.splitlines(): tokens = line.split() if len(tokens) >= 2 and tokens[0] == 'architecture:': arch = tokens[1].rstrip(',') if len(tokens) >= 2 and tokens[0] == 'DllCharacteristics': bits = int(tokens[1], 16) return (arch, bits) IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x0020 IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040 IMAGE_DLL_CHARACTERISTICS_NX_COMPAT = 0x0100 def check_PE_DYNAMIC_BASE(executable): '''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)''' (arch, bits) = get_PE_dll_characteristics(executable) reqbits = IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE return (bits & reqbits) == reqbits # On 64 bit, must support high-entropy 64-bit address space layout randomization in addition to DYNAMIC_BASE # to have secure ASLR. def check_PE_HIGH_ENTROPY_VA(executable): '''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR''' (arch, bits) = get_PE_dll_characteristics(executable) if arch == 'i386:x86-64': reqbits = IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA else: # Unnecessary on 32-bit assert(arch == 'i386') reqbits = 0 return (bits & reqbits) == reqbits def check_PE_NX(executable): '''NX: DllCharacteristics bit 0x100 signifies nxcompat (DEP)''' (arch, bits) = get_PE_dll_characteristics(executable) return (bits & IMAGE_DLL_CHARACTERISTICS_NX_COMPAT) == IMAGE_DLL_CHARACTERISTICS_NX_COMPAT CHECKS = { 'ELF': [ ('PIE', check_ELF_PIE), ('NX', check_ELF_NX), ('RELRO', check_ELF_RELRO), ('Canary', check_ELF_Canary) ], 'PE': [ ('DYNAMIC_BASE', check_PE_DYNAMIC_BASE), ('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA), ('NX', check_PE_NX) ] } def identify_executable(executable): with open(filename, 'rb') as f: magic = f.read(4) if magic.startswith(b'MZ'): return 'PE' elif magic.startswith(b'\x7fELF'): return 'ELF' return None if __name__ == '__main__': retval = 0 for filename in sys.argv[1:]: try: etype = identify_executable(filename) if etype is None: print('{}: unknown format'.format(filename)) retval = 1 continue failed = [] warning = [] for (name, func) in CHECKS[etype]: if not func(filename): if name in NONFATAL: warning.append(name) else: failed.append(name) if failed: print('{}: failed {}'.format(filename, ' '.join(failed))) retval = 1 if warning: print('{}: warning {}'.format(filename, ' '.join(warning))) except IOError: print('{}: cannot open'.format(filename)) retval = 1 sys.exit(retval) diff --git a/contrib/devtools/symbol-check.py b/contrib/devtools/symbol-check.py index 2b7f163cec..54b24c691c 100755 --- a/contrib/devtools/symbol-check.py +++ b/contrib/devtools/symbol-check.py @@ -1,179 +1,178 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright (c) 2014 Wladimir J. van der Laan # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' A script to check that the (Linux) executables produced by gitian only contain allowed gcc, glibc and libstdc++ version symbols. This makes sure they are still compatible with the minimum supported Linux distribution versions. 
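# The PE checks above are plain bit tests against the DllCharacteristics field
# reported by `objdump -x`. A short sketch of the masking logic, reusing the
# script's constants (the sample value is hypothetical):
IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x0020
IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040
IMAGE_DLL_CHARACTERISTICS_NX_COMPAT = 0x0100

def has_bits(dll_characteristics, required):
    # All required bits must be set; the test fails if any single bit is missing.
    return (dll_characteristics & required) == required

sample = 0x0160  # HIGH_ENTROPY_VA | DYNAMIC_BASE | NX_COMPAT
assert has_bits(sample, IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE
                | IMAGE_DLL_CHARACTERISTICS_NX_COMPAT)
assert not has_bits(sample & ~IMAGE_DLL_CHARACTERISTICS_NX_COMPAT,
                    IMAGE_DLL_CHARACTERISTICS_NX_COMPAT)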
Example usage: find ../gitian-builder/build -type f -executable | xargs python contrib/devtools/symbol-check.py ''' -from __future__ import division, print_function, unicode_literals import subprocess import re import sys import os # Debian 8.11 (Jessie) has: # # - g++ version 4.9.2 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=g%2B%2B) # - libc version 2.19.18 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libc6) # - libstdc++ version 4.8.4 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libstdc%2B%2B6) # # Ubuntu 14.04 (Trusty Tahr) has: # # - g++ version 4.8.2 (https://packages.ubuntu.com/search?suite=trusty§ion=all&arch=any&keywords=g%2B%2B&searchon=names) # - libc version 2.19.0 (https://packages.ubuntu.com/search?suite=trusty§ion=all&arch=any&keywords=libc6&searchon=names) # - libstdc++ version 4.8.2 (https://packages.ubuntu.com/search?suite=trusty§ion=all&arch=any&keywords=libstdc%2B%2B&searchon=names) # # Taking the minimum of these as our target. # # According to GNU ABI document (http://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html) this corresponds to: # GCC 4.8.0: GCC_4.8.0 # GCC 4.8.0: GLIBCXX_3.4.18, CXXABI_1.3.7 # (glibc) GLIBC_2_19 # MAX_VERSIONS = { 'GCC': (4, 8, 0), 'CXXABI': (1, 3, 7), 'GLIBCXX': (3, 4, 18), 'GLIBC': (2, 19) } # See here for a description of _IO_stdin_used: # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=634261#109 # Ignore symbols that are exported as part of every executable IGNORE_EXPORTS = { '_edata', '_end', '_init', '__bss_start', '_fini', '_IO_stdin_used', 'stdin', 'stdout', 'stderr', # Figure out why we get these symbols exported on xenial. '_ZNKSt5ctypeIcE8do_widenEc', 'in6addr_any', 'optarg', '_ZNSt16_Sp_counted_baseILN9__gnu_cxx12_Lock_policyE2EE10_M_destroyEv' } READELF_CMD = os.getenv('READELF', '/usr/bin/readelf') CPPFILT_CMD = os.getenv('CPPFILT', '/usr/bin/c++filt') # Allowed NEEDED libraries ALLOWED_LIBRARIES = { # bitcoind and bitcoin-qt 'libgcc_s.so.1', # GCC base support 'libc.so.6', # C library 'libpthread.so.0', # threading 'libanl.so.1', # DNS resolve 'libm.so.6', # math library 'librt.so.1', # real-time (clock) 'ld-linux-x86-64.so.2', # 64-bit dynamic linker 'ld-linux.so.2', # 32-bit dynamic linker # bitcoin-qt only 'libX11-xcb.so.1', # part of X11 'libX11.so.6', # part of X11 'libxcb.so.1', # part of X11 'libfontconfig.so.1', # font support 'libfreetype.so.6', # font parsing 'libdl.so.2' # programming interface to dynamic linker } class CPPFilt(object): ''' Demangle C++ symbol names. Use a pipe to the 'c++filt' command. ''' def __init__(self): self.proc = subprocess.Popen( CPPFILT_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True) def __call__(self, mangled): self.proc.stdin.write(mangled + '\n') self.proc.stdin.flush() return self.proc.stdout.readline().rstrip() def close(self): self.proc.stdin.close() self.proc.stdout.close() self.proc.wait() def read_symbols(executable, imports=True): ''' Parse an ELF executable and return a list of (symbol,version) tuples for dynamic, imported symbols. 
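# The MAX_VERSIONS table above is enforced by comparing the dotted suffix of
# versioned symbols such as GLIBC_2.19 as integer tuples, which is what
# check_version() further down does. A standalone sketch of that comparison:
MAX_VERSIONS = {
    'GCC': (4, 8, 0),
    'CXXABI': (1, 3, 7),
    'GLIBCXX': (3, 4, 18),
    'GLIBC': (2, 19),
}

def version_ok(symbol_version):
    lib, _, ver = symbol_version.rpartition('_')
    ver_tuple = tuple(int(x) for x in ver.split('.'))
    # Tuples compare element-wise, so (2, 19) <= (2, 19) but (2, 28) is rejected.
    return lib in MAX_VERSIONS and ver_tuple <= MAX_VERSIONS[lib]

assert version_ok('GLIBC_2.19')
assert not version_ok('GLIBC_2.28')      # newer than the Trusty/Jessie baseline
assert not version_ok('OPENSSL_1.0.0')   # library not in the allowed set at all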
''' p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Could not read symbols for {}: {}'.format( executable, stderr.strip())) syms = [] for line in stdout.splitlines(): line = line.split() if len(line) > 7 and re.match('[0-9]+:$', line[0]): (sym, _, version) = line[7].partition('@') is_import = line[6] == 'UND' if version.startswith('@'): version = version[1:] if is_import == imports: syms.append((sym, version)) return syms def check_version(max_versions, version): if '_' in version: (lib, _, ver) = version.rpartition('_') else: lib = version ver = '0' ver = tuple([int(x) for x in ver.split('.')]) if not lib in max_versions: return False return ver <= max_versions[lib] def read_libraries(filename): p = subprocess.Popen([READELF_CMD, '-d', '-W', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') libraries = [] for line in stdout.splitlines(): tokens = line.split() if len(tokens) > 2 and tokens[1] == '(NEEDED)': match = re.match( r'^Shared library: \[(.*)\]$', ' '.join(tokens[2:])) if match: libraries.append(match.group(1)) else: raise ValueError('Unparseable (NEEDED) specification') return libraries if __name__ == '__main__': cppfilt = CPPFilt() retval = 0 for filename in sys.argv[1:]: # Check imported symbols for sym, version in read_symbols(filename, True): if version and not check_version(MAX_VERSIONS, version): print('{}: symbol {} from unsupported version {}'.format( filename, cppfilt(sym), version)) retval = 1 # Check exported symbols for sym, version in read_symbols(filename, False): if sym in IGNORE_EXPORTS: continue print('{}: export of symbol {} not allowed'.format( filename, cppfilt(sym))) retval = 1 # Check dependency libraries for library_name in read_libraries(filename): if library_name not in ALLOWED_LIBRARIES: print('{}: NEEDED library {} is not allowed'.format( filename, library_name)) retval = 1 sys.exit(retval) diff --git a/contrib/devtools/update-translations.py b/contrib/devtools/update-translations.py index 887877f5d5..7af68ac2aa 100755 --- a/contrib/devtools/update-translations.py +++ b/contrib/devtools/update-translations.py @@ -1,229 +1,228 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright (c) 2014 Wladimir J. van der Laan # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Run this script from the root of the repository to update all translations from transifex. 
It will do the following automatically: - fetch all translations using the tx tool - post-process them into valid and committable format - remove invalid control characters - remove location tags (makes diffs less noisy) TODO: - auto-add new translations to the build system according to the translation process ''' -from __future__ import division, print_function import subprocess import re import sys import os import io import xml.etree.ElementTree as ET # Name of transifex tool TX = 'tx' # Name of source language file SOURCE_LANG = 'bitcoin_en.ts' # Directory with locale files LOCALE_DIR = 'src/qt/locale' # Minimum number of messages for translation to be considered at all MIN_NUM_MESSAGES = 10 def check_at_repository_root(): if not os.path.exists('.git'): print('No .git directory found') print('Execute this script at the root of the repository', file=sys.stderr) sys.exit(1) def fetch_all_translations(): if subprocess.call([TX, 'pull', '-f', '-a']): print('Error while fetching translations', file=sys.stderr) sys.exit(1) def find_format_specifiers(s): '''Find all format specifiers in a string.''' pos = 0 specifiers = [] while True: percent = s.find('%', pos) if percent < 0: break specifiers.append(s[percent + 1]) pos = percent + 2 return specifiers def split_format_specifiers(specifiers): '''Split format specifiers between numeric (Qt) and others (strprintf)''' numeric = [] other = [] for s in specifiers: if s in {'1', '2', '3', '4', '5', '6', '7', '8', '9'}: numeric.append(s) else: other.append(s) # If both numeric format specifiers and "others" are used, assume we're dealing # with a Qt-formatted message. In the case of Qt formatting (see https://doc.qt.io/qt-5/qstring.html#arg) # only numeric formats are replaced at all. This means "(percentage: %1%)" is valid, without needing # any kind of escaping that would be necessary for strprintf. Without this, this function # would wrongly detect '%)' as a printf format specifier. if numeric: other = [] # numeric (Qt) can be present in any order, others (strprintf) must be in specified order return set(numeric), other def sanitize_string(s): '''Sanitize string for printing''' return s.replace('\n', ' ') def check_format_specifiers(source, translation, errors, numerus): source_f = split_format_specifiers(find_format_specifiers(source)) # assert that no source messages contain both Qt and strprintf format specifiers # if this fails, go change the source as this is hacky and confusing! 
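# A quick illustration of the specifier rules described above, using a
# self-contained copy of the same scanning and splitting logic. For a Qt-style
# source string like "(percentage: %1%)" the naive scan also picks up ')', and
# the numeric-wins rule is what stops ')' from being treated as a strprintf
# specifier:
def scan_specifiers(s):
    # Collect the character following every '%' (same scan as find_format_specifiers above).
    specifiers = []
    pos = 0
    while True:
        percent = s.find('%', pos)
        if percent < 0:
            break
        specifiers.append(s[percent + 1])
        pos = percent + 2
    return specifiers

def split_specifiers(specifiers):
    numeric = [s for s in specifiers if s in '123456789']
    other = [s for s in specifiers if s not in '123456789']
    if numeric:
        other = []   # numeric (Qt) specifiers present: everything else is not a specifier
    return set(numeric), other

assert split_specifiers(scan_specifiers('(percentage: %1%)')) == ({'1'}, [])
assert split_specifiers(scan_specifiers('%d of %d blocks')) == (set(), ['d', 'd'])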
assert(not(source_f[0] and source_f[1])) try: translation_f = split_format_specifiers( find_format_specifiers(translation)) except IndexError: errors.append("Parse error in translation for '{}': '{}'".format( sanitize_string(source), sanitize_string(translation))) return False else: if source_f != translation_f: if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1: # Allow numerus translations to omit %n specifier (usually when it only has one possible value) return True errors.append("Mismatch between '{}' and '{}'".format( sanitize_string(source), sanitize_string(translation))) return False return True def all_ts_files(suffix=''): for filename in os.listdir(LOCALE_DIR): # process only language files, and do not process source language if not filename.endswith('.ts' + suffix) or filename == SOURCE_LANG + suffix: continue if suffix: # remove provided suffix filename = filename[0:-len(suffix)] filepath = os.path.join(LOCALE_DIR, filename) yield(filename, filepath) FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]') def remove_invalid_characters(s): '''Remove invalid characters from translation string''' return FIX_RE.sub(b'', s) # Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for # comparison, disable by default) _orig_escape_cdata = None def escape_cdata(text): text = _orig_escape_cdata(text) text = text.replace("'", ''') text = text.replace('"', '"') return text def postprocess_translations(reduce_diff_hacks=False): print('Checking and postprocessing...') if reduce_diff_hacks: global _orig_escape_cdata _orig_escape_cdata = ET._escape_cdata ET._escape_cdata = escape_cdata for (filename, filepath) in all_ts_files(): os.rename(filepath, filepath + '.orig') have_errors = False for (filename, filepath) in all_ts_files('.orig'): # pre-fixups to cope with transifex output # need to override encoding because 'utf8' is not understood only 'utf-8' parser = ET.XMLParser(encoding='utf-8') with open(filepath + '.orig', 'rb') as f: data = f.read() # remove control characters; this must be done over the entire file otherwise the XML parser will fail data = remove_invalid_characters(data) tree = ET.parse(io.BytesIO(data), parser=parser) # iterate over all messages in file root = tree.getroot() for context in root.findall('context'): for message in context.findall('message'): numerus = message.get('numerus') == 'yes' source = message.find('source').text translation_node = message.find('translation') # pick all numerusforms if numerus: translations = [ i.text for i in translation_node.findall('numerusform')] else: translations = [translation_node.text] for translation in translations: if translation is None: continue errors = [] valid = check_format_specifiers( source, translation, errors, numerus) for error in errors: print('{}: {}'.format(filename, error)) if not valid: # set type to unfinished and clear string if invalid translation_node.clear() translation_node.set('type', 'unfinished') have_errors = True # Remove location tags for location in message.findall('location'): message.remove(location) # Remove entire message if it is an unfinished translation if translation_node.get('type') == 'unfinished': context.remove(message) # check if document is (virtually) empty, and remove it if so num_messages = 0 for context in root.findall('context'): for message in context.findall('message'): num_messages += 1 if num_messages < MIN_NUM_MESSAGES: print('Removing {}, as it contains only {} messages'.format( 
filepath, num_messages)) continue # write fixed-up tree # if diff reduction requested, replace some XML to 'sanitize' to qt formatting if reduce_diff_hacks: out = io.BytesIO() tree.write(out, encoding='utf-8') out = out.getvalue() out = out.replace(b' />', b'/>') with open(filepath, 'wb') as f: f.write(out) else: tree.write(filepath, encoding='utf-8') return have_errors if __name__ == '__main__': check_at_repository_root() fetch_all_translations() postprocess_translations() diff --git a/contrib/macdeploy/custom_dsstore.py b/contrib/macdeploy/custom_dsstore.py index a851b06f80..a13853b56f 100755 --- a/contrib/macdeploy/custom_dsstore.py +++ b/contrib/macdeploy/custom_dsstore.py @@ -1,62 +1,61 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright (c) 2013-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -from __future__ import division, print_function, unicode_literals import biplist from ds_store import DSStore from mac_alias import Alias import sys output_file = sys.argv[1] package_name_ns = sys.argv[2] ds = DSStore.open(output_file, 'w+') ds['.']['bwsp'] = { 'ShowStatusBar': False, 'WindowBounds': '{{300, 280}, {500, 343}}', 'ContainerShowSidebar': False, 'SidebarWidth': 0, 'ShowTabView': False, 'PreviewPaneVisibility': False, 'ShowToolbar': False, 'ShowSidebar': False, 'ShowPathbar': True } icvp = { 'gridOffsetX': 0.0, 'textSize': 12.0, 'viewOptionsVersion': 1, 'backgroundImageAlias': b'\x00\x00\x00\x00\x02\x1e\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd1\x94\\\xb0H+\x00\x05\x00\x00\x00\x98\x0fbackground.tiff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\xd19\xb0\xf8\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\r\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b.background\x00\x00\x10\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x11\x00\x08\x00\x00\xd19\xb0\xf8\x00\x00\x00\x01\x00\x04\x00\x00\x00\x98\x00\x0e\x00 \x00\x0f\x00b\x00a\x00c\x00k\x00g\x00r\x00o\x00u\x00n\x00d\x00.\x00t\x00i\x00f\x00f\x00\x0f\x00\x02\x00\x00\x00\x12\x00\x1c/.background/background.tiff\x00\x14\x01\x06\x00\x00\x00\x00\x01\x06\x00\x02\x00\x00\x0cMacintosh HD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xce\x97\xab\xc3H+\x00\x00\x01\x88[\x88\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02u\xab\x8d\xd1\x94\\\xb0devrddsk\xff\xff\xff\xff\x00\x00\t \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07bitcoin\x00\x00\x10\x00\x08\x00\x00\xce\x97\xab\xc3\x00\x00\x00\x11\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x01\x00\x14\x01\x88[\x88\x00\x16\xa9\t\x00\x08\xfaR\x00\x08\xfaQ\x00\x02d\x8e\x00\x0e\x00\x02\x00\x00\x00\x0f\x00\x1a\x00\x0c\x00M\x00a\x00c\x00i\x00n\x00t\x00o\x00s\x00h\x00 \x00H\x00D\x00\x13\x00\x01/\x00\x00\x15\x00\x02\x00\x14\xff\xff\x00\x00\xff\xff\x00\x00', 'backgroundColorBlue': 1.0, 'iconSize': 96.0, 'backgroundColorGreen': 1.0, 'arrangeBy': 'none', 'showIconPreview': True, 'gridSpacing': 100.0, 'gridOffsetY': 0.0, 'showItemInfo': False, 'labelOnBottom': True, 
'backgroundType': 2, 'backgroundColorRed': 1.0 } alias = Alias.from_bytes(icvp['backgroundImageAlias']) alias.volume.name = package_name_ns alias.volume.posix_path = '/Volumes/' + package_name_ns alias.volume.disk_image_alias.target.filename = package_name_ns + '.temp.dmg' alias.volume.disk_image_alias.target.carbon_path = 'Macintosh HD:Users:\x00bitcoinuser:\x00Documents:\x00bitcoin:\x00bitcoin:\x00' + \ package_name_ns + '.temp.dmg' alias.volume.disk_image_alias.target.posix_path = 'Users/bitcoinuser/Documents/bitcoin/bitcoin/' + \ package_name_ns + '.temp.dmg' alias.target.carbon_path = package_name_ns + ':.background:\x00background.tiff' icvp['backgroundImageAlias'] = biplist.Data(alias.to_bytes()) ds['.']['icvp'] = icvp ds['.']['vSrn'] = ('long', 1) ds['Applications']['Iloc'] = (370, 156) ds['BitcoinABC-Qt.app']['Iloc'] = (128, 156) ds.flush() ds.close() diff --git a/contrib/macdeploy/macdeployqtplus b/contrib/macdeploy/macdeployqtplus index 62ca70983f..6de3508564 100755 --- a/contrib/macdeploy/macdeployqtplus +++ b/contrib/macdeploy/macdeployqtplus @@ -1,891 +1,874 @@ -#!/usr/bin/env python -from __future__ import division, print_function, unicode_literals +#!/usr/bin/env python3 # # Copyright (C) 2011 Patrick "p2k" Schneider # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
# import subprocess, sys, re, os, shutil, stat, os.path, time from string import Template from argparse import ArgumentParser # This is ported from the original macdeployqt with modifications class FrameworkInfo(object): def __init__(self): self.frameworkDirectory = "" self.frameworkName = "" self.frameworkPath = "" self.binaryDirectory = "" self.binaryName = "" self.binaryPath = "" self.version = "" self.installName = "" self.deployedInstallName = "" self.sourceFilePath = "" self.destinationDirectory = "" self.sourceResourcesDirectory = "" self.sourceVersionContentsDirectory = "" self.sourceContentsDirectory = "" self.destinationResourcesDirectory = "" self.destinationVersionContentsDirectory = "" def __eq__(self, other): if self.__class__ == other.__class__: return self.__dict__ == other.__dict__ else: return False def __str__(self): return """ Framework name: %s Framework directory: %s Framework path: %s Binary name: %s Binary directory: %s Binary path: %s Version: %s Install name: %s Deployed install name: %s Source file Path: %s Deployed Directory (relative to bundle): %s """ % (self.frameworkName, self.frameworkDirectory, self.frameworkPath, self.binaryName, self.binaryDirectory, self.binaryPath, self.version, self.installName, self.deployedInstallName, self.sourceFilePath, self.destinationDirectory) def isDylib(self): return self.frameworkName.endswith(".dylib") def isQtFramework(self): if self.isDylib(): return self.frameworkName.startswith("libQt") else: return self.frameworkName.startswith("Qt") reOLine = re.compile(r'^(.+) \(compatibility version [0-9.]+, current version [0-9.]+\)$') bundleFrameworkDirectory = "Contents/Frameworks" bundleBinaryDirectory = "Contents/MacOS" @classmethod def fromOtoolLibraryLine(cls, line): # Note: line must be trimmed if line == "": return None # Don't deploy system libraries (exception for libQtuitools and libQtlucene). 
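# fromOtoolLibraryLine() relies on reOLine matching the standard `otool -L`
# output format "<path> (compatibility version X, current version Y)". A tiny
# standalone check of that regex on an illustrative line (the path and version
# numbers below are made up, not from a real build):
import re

reOLine = re.compile(r'^(.+) \(compatibility version [0-9.]+, current version [0-9.]+\)$')
sample = ('/usr/local/opt/qt/lib/QtCore.framework/Versions/5/QtCore '
          '(compatibility version 5.9.0, current version 5.9.1)')
m = reOLine.match(sample.strip())
assert m is not None
print(m.group(1))   # the library path FrameworkInfo is built from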
if line.startswith("/System/Library/") or line.startswith("@executable_path") or (line.startswith("/usr/lib/") and "libQt" not in line): return None m = cls.reOLine.match(line) if m is None: raise RuntimeError("otool line could not be parsed: " + line) path = m.group(1) info = cls() info.sourceFilePath = path info.installName = path if path.endswith(".dylib"): dirname, filename = os.path.split(path) info.frameworkName = filename info.frameworkDirectory = dirname info.frameworkPath = path info.binaryDirectory = dirname info.binaryName = filename info.binaryPath = path info.version = "-" info.installName = path info.deployedInstallName = "@executable_path/../Frameworks/" + info.binaryName info.sourceFilePath = path info.destinationDirectory = cls.bundleFrameworkDirectory else: parts = path.split("/") i = 0 # Search for the .framework directory for part in parts: if part.endswith(".framework"): break i += 1 if i == len(parts): raise RuntimeError("Could not find .framework or .dylib in otool line: " + line) info.frameworkName = parts[i] info.frameworkDirectory = "/".join(parts[:i]) info.frameworkPath = os.path.join(info.frameworkDirectory, info.frameworkName) info.binaryName = parts[i+3] info.binaryDirectory = "/".join(parts[i+1:i+3]) info.binaryPath = os.path.join(info.binaryDirectory, info.binaryName) info.version = parts[i+2] info.deployedInstallName = "@executable_path/../Frameworks/" + os.path.join(info.frameworkName, info.binaryPath) info.destinationDirectory = os.path.join(cls.bundleFrameworkDirectory, info.frameworkName, info.binaryDirectory) info.sourceResourcesDirectory = os.path.join(info.frameworkPath, "Resources") info.sourceContentsDirectory = os.path.join(info.frameworkPath, "Contents") info.sourceVersionContentsDirectory = os.path.join(info.frameworkPath, "Versions", info.version, "Contents") info.destinationResourcesDirectory = os.path.join(cls.bundleFrameworkDirectory, info.frameworkName, "Resources") info.destinationContentsDirectory = os.path.join(cls.bundleFrameworkDirectory, info.frameworkName, "Contents") info.destinationVersionContentsDirectory = os.path.join(cls.bundleFrameworkDirectory, info.frameworkName, "Versions", info.version, "Contents") return info class ApplicationBundleInfo(object): def __init__(self, path): self.path = path appName = "BitcoinABC-Qt" self.binaryPath = os.path.join(path, "Contents", "MacOS", appName) if not os.path.exists(self.binaryPath): raise RuntimeError("Could not find bundle binary for " + path) self.resourcesPath = os.path.join(path, "Contents", "Resources") self.pluginPath = os.path.join(path, "Contents", "PlugIns") class DeploymentInfo(object): def __init__(self): self.qtPath = None self.pluginPath = None self.deployedFrameworks = [] def detectQtPath(self, frameworkDirectory): parentDir = os.path.dirname(frameworkDirectory) if os.path.exists(os.path.join(parentDir, "translations")): # Classic layout, e.g. "/usr/local/Trolltech/Qt-4.x.x" self.qtPath = parentDir elif os.path.exists(os.path.join(parentDir, "share", "qt4", "translations")): # MacPorts layout, e.g. 
"/opt/local/share/qt4" self.qtPath = os.path.join(parentDir, "share", "qt4") elif os.path.exists(os.path.join(os.path.dirname(parentDir), "share", "qt4", "translations")): # Newer Macports layout self.qtPath = os.path.join(os.path.dirname(parentDir), "share", "qt4") else: self.qtPath = os.getenv("QTDIR", None) if self.qtPath is not None: pluginPath = os.path.join(self.qtPath, "plugins") if os.path.exists(pluginPath): self.pluginPath = pluginPath def usesFramework(self, name): nameDot = "%s." % name libNameDot = "lib%s." % name for framework in self.deployedFrameworks: if framework.endswith(".framework"): if framework.startswith(nameDot): return True elif framework.endswith(".dylib"): if framework.startswith(libNameDot): return True return False def getFrameworks(binaryPath, verbose): if verbose >= 3: print("Inspecting with otool: " + binaryPath) otoolbin=os.getenv("OTOOL", "otool") - otool = subprocess.Popen([otoolbin, "-L", binaryPath], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + otool = subprocess.Popen([otoolbin, "-L", binaryPath], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) o_stdout, o_stderr = otool.communicate() if otool.returncode != 0: if verbose >= 1: sys.stderr.write(o_stderr) sys.stderr.flush() raise RuntimeError("otool failed with return code %d" % otool.returncode) - otoolLines = o_stdout.decode().split("\n") + otoolLines = o_stdout.split("\n") otoolLines.pop(0) # First line is the inspected binary if ".framework" in binaryPath or binaryPath.endswith(".dylib"): otoolLines.pop(0) # Frameworks and dylibs list themselves as a dependency. libraries = [] for line in otoolLines: line = line.replace("@loader_path", os.path.dirname(binaryPath)) info = FrameworkInfo.fromOtoolLibraryLine(line.strip()) if info is not None: if verbose >= 3: print("Found framework:") print(info) libraries.append(info) return libraries def runInstallNameTool(action, *args): installnametoolbin=os.getenv("INSTALLNAMETOOL", "install_name_tool") subprocess.check_call([installnametoolbin, "-"+action] + list(args)) def changeInstallName(oldName, newName, binaryPath, verbose): if verbose >= 3: print("Using install_name_tool:") print(" in", binaryPath) print(" change reference", oldName) print(" to", newName) runInstallNameTool("change", oldName, newName, binaryPath) def changeIdentification(id, binaryPath, verbose): if verbose >= 3: print("Using install_name_tool:") print(" change identification in", binaryPath) print(" to", id) runInstallNameTool("id", id, binaryPath) def runStrip(binaryPath, verbose): stripbin=os.getenv("STRIP", "strip") if verbose >= 3: print("Using strip:") print(" stripped", binaryPath) subprocess.check_call([stripbin, "-x", binaryPath]) def copyFramework(framework, path, verbose): if framework.sourceFilePath.startswith("Qt"): #standard place for Nokia Qt installer's frameworks fromPath = "/Library/Frameworks/" + framework.sourceFilePath else: fromPath = framework.sourceFilePath toDir = os.path.join(path, framework.destinationDirectory) toPath = os.path.join(toDir, framework.binaryName) if not os.path.exists(fromPath): raise RuntimeError("No file at " + fromPath) if os.path.exists(toPath): return None # Already there if not os.path.exists(toDir): os.makedirs(toDir) shutil.copy2(fromPath, toPath) if verbose >= 3: print("Copied:", fromPath) print(" to:", toPath) permissions = os.stat(toPath) if not permissions.st_mode & stat.S_IWRITE: os.chmod(toPath, permissions.st_mode | stat.S_IWRITE) if not framework.isDylib(): # Copy resources for real frameworks linkfrom 
= os.path.join(path, "Contents","Frameworks", framework.frameworkName, "Versions", "Current") linkto = framework.version if not os.path.exists(linkfrom): os.symlink(linkto, linkfrom) if verbose >= 2: print("Linked:", linkfrom, "->", linkto) fromResourcesDir = framework.sourceResourcesDirectory if os.path.exists(fromResourcesDir): toResourcesDir = os.path.join(path, framework.destinationResourcesDirectory) shutil.copytree(fromResourcesDir, toResourcesDir, symlinks=True) if verbose >= 3: print("Copied resources:", fromResourcesDir) print(" to:", toResourcesDir) fromContentsDir = framework.sourceVersionContentsDirectory if not os.path.exists(fromContentsDir): fromContentsDir = framework.sourceContentsDirectory if os.path.exists(fromContentsDir): toContentsDir = os.path.join(path, framework.destinationVersionContentsDirectory) shutil.copytree(fromContentsDir, toContentsDir, symlinks=True) contentslinkfrom = os.path.join(path, framework.destinationContentsDirectory) if verbose >= 3: print("Copied Contents:", fromContentsDir) print(" to:", toContentsDir) elif framework.frameworkName.startswith("libQtGui"): # Copy qt_menu.nib (applies to non-framework layout) qtMenuNibSourcePath = os.path.join(framework.frameworkDirectory, "Resources", "qt_menu.nib") qtMenuNibDestinationPath = os.path.join(path, "Contents", "Resources", "qt_menu.nib") if os.path.exists(qtMenuNibSourcePath) and not os.path.exists(qtMenuNibDestinationPath): shutil.copytree(qtMenuNibSourcePath, qtMenuNibDestinationPath, symlinks=True) if verbose >= 3: print("Copied for libQtGui:", qtMenuNibSourcePath) print(" to:", qtMenuNibDestinationPath) return toPath def deployFrameworks(frameworks, bundlePath, binaryPath, strip, verbose, deploymentInfo=None): if deploymentInfo is None: deploymentInfo = DeploymentInfo() while len(frameworks) > 0: framework = frameworks.pop(0) deploymentInfo.deployedFrameworks.append(framework.frameworkName) if verbose >= 2: print("Processing", framework.frameworkName, "...") # Get the Qt path from one of the Qt frameworks if deploymentInfo.qtPath is None and framework.isQtFramework(): deploymentInfo.detectQtPath(framework.frameworkDirectory) if framework.installName.startswith("@executable_path") or framework.installName.startswith(bundlePath): if verbose >= 2: print(framework.frameworkName, "already deployed, skipping.") continue # install_name_tool the new id into the binary changeInstallName(framework.installName, framework.deployedInstallName, binaryPath, verbose) # Copy framework to app bundle. deployedBinaryPath = copyFramework(framework, bundlePath, verbose) # Skip the rest if already was deployed. if deployedBinaryPath is None: continue if strip: runStrip(deployedBinaryPath, verbose) # install_name_tool it a new id. changeIdentification(framework.deployedInstallName, deployedBinaryPath, verbose) # Check for framework dependencies dependencies = getFrameworks(deployedBinaryPath, verbose) for dependency in dependencies: changeInstallName(dependency.installName, dependency.deployedInstallName, deployedBinaryPath, verbose) # Deploy framework if necessary. if dependency.frameworkName not in deploymentInfo.deployedFrameworks and dependency not in frameworks: frameworks.append(dependency) return deploymentInfo def deployFrameworksForAppBundle(applicationBundle, strip, verbose): frameworks = getFrameworks(applicationBundle.binaryPath, verbose) if len(frameworks) == 0 and verbose >= 1: print("Warning: Could not find any external frameworks to deploy in %s." 
% (applicationBundle.path)) return DeploymentInfo() else: return deployFrameworks(frameworks, applicationBundle.path, applicationBundle.binaryPath, strip, verbose) def deployPlugins(appBundleInfo, deploymentInfo, strip, verbose): # Lookup available plugins, exclude unneeded plugins = [] if deploymentInfo.pluginPath is None: return for dirpath, dirnames, filenames in os.walk(deploymentInfo.pluginPath): pluginDirectory = os.path.relpath(dirpath, deploymentInfo.pluginPath) if pluginDirectory == "designer": # Skip designer plugins continue elif pluginDirectory == "phonon" or pluginDirectory == "phonon_backend": # Deploy the phonon plugins only if phonon is in use if not deploymentInfo.usesFramework("phonon"): continue elif pluginDirectory == "sqldrivers": # Deploy the sql plugins only if QtSql is in use if not deploymentInfo.usesFramework("QtSql"): continue elif pluginDirectory == "script": # Deploy the script plugins only if QtScript is in use if not deploymentInfo.usesFramework("QtScript"): continue elif pluginDirectory == "qmltooling" or pluginDirectory == "qml1tooling": # Deploy the qml plugins only if QtDeclarative is in use if not deploymentInfo.usesFramework("QtDeclarative"): continue elif pluginDirectory == "bearer": # Deploy the bearer plugins only if QtNetwork is in use if not deploymentInfo.usesFramework("QtNetwork"): continue elif pluginDirectory == "position": # Deploy the position plugins only if QtPositioning is in use if not deploymentInfo.usesFramework("QtPositioning"): continue elif pluginDirectory == "sensors" or pluginDirectory == "sensorgestures": # Deploy the sensor plugins only if QtSensors is in use if not deploymentInfo.usesFramework("QtSensors"): continue elif pluginDirectory == "audio" or pluginDirectory == "playlistformats": # Deploy the audio plugins only if QtMultimedia is in use if not deploymentInfo.usesFramework("QtMultimedia"): continue elif pluginDirectory == "mediaservice": # Deploy the mediaservice plugins only if QtMultimediaWidgets is in use if not deploymentInfo.usesFramework("QtMultimediaWidgets"): continue for pluginName in filenames: pluginPath = os.path.join(pluginDirectory, pluginName) if pluginName.endswith("_debug.dylib"): # Skip debug plugins continue elif pluginPath == "imageformats/libqsvg.dylib" or pluginPath == "iconengines/libqsvgicon.dylib": # Deploy the svg plugins only if QtSvg is in use if not deploymentInfo.usesFramework("QtSvg"): continue elif pluginPath == "accessible/libqtaccessiblecompatwidgets.dylib": # Deploy accessibility for Qt3Support only if the Qt3Support is in use if not deploymentInfo.usesFramework("Qt3Support"): continue elif pluginPath == "graphicssystems/libqglgraphicssystem.dylib": # Deploy the opengl graphicssystem plugin only if QtOpenGL is in use if not deploymentInfo.usesFramework("QtOpenGL"): continue elif pluginPath == "accessible/libqtaccessiblequick.dylib": # Deploy the accessible qtquick plugin only if QtQuick is in use if not deploymentInfo.usesFramework("QtQuick"): continue plugins.append((pluginDirectory, pluginName)) for pluginDirectory, pluginName in plugins: if verbose >= 2: print("Processing plugin", os.path.join(pluginDirectory, pluginName), "...") sourcePath = os.path.join(deploymentInfo.pluginPath, pluginDirectory, pluginName) destinationDirectory = os.path.join(appBundleInfo.pluginPath, pluginDirectory) if not os.path.exists(destinationDirectory): os.makedirs(destinationDirectory) destinationPath = os.path.join(destinationDirectory, pluginName) shutil.copy2(sourcePath, destinationPath) if verbose >= 
3: print("Copied:", sourcePath) print(" to:", destinationPath) if strip: runStrip(destinationPath, verbose) dependencies = getFrameworks(destinationPath, verbose) for dependency in dependencies: changeInstallName(dependency.installName, dependency.deployedInstallName, destinationPath, verbose) # Deploy framework if necessary. if dependency.frameworkName not in deploymentInfo.deployedFrameworks: deployFrameworks([dependency], appBundleInfo.path, destinationPath, strip, verbose, deploymentInfo) qt_conf="""[Paths] Translations=Resources Plugins=PlugIns """ ap = ArgumentParser(description="""Improved version of macdeployqt. Outputs a ready-to-deploy app in a folder "dist" and optionally wraps it in a .dmg file. Note, that the "dist" folder will be deleted before deploying on each run. Optionally, Qt translation files (.qm) and additional resources can be added to the bundle. Also optionally signs the .app bundle; set the CODESIGNARGS environment variable to pass arguments to the codesign tool. E.g. CODESIGNARGS='--sign "Developer ID Application: ..." --keychain /encrypted/foo.keychain'""") ap.add_argument("app_bundle", nargs=1, metavar="app-bundle", help="application bundle to be deployed") ap.add_argument("-verbose", type=int, nargs=1, default=[1], metavar="<0-3>", help="0 = no output, 1 = error/warning (default), 2 = normal, 3 = debug") ap.add_argument("-no-plugins", dest="plugins", action="store_false", default=True, help="skip plugin deployment") ap.add_argument("-no-strip", dest="strip", action="store_false", default=True, help="don't run 'strip' on the binaries") ap.add_argument("-sign", dest="sign", action="store_true", default=False, help="sign .app bundle with codesign tool") ap.add_argument("-dmg", nargs="?", const="", metavar="basename", help="create a .dmg disk image; if basename is not specified, a camel-cased version of the app name is used") ap.add_argument("-fancy", nargs=1, metavar="plist", default=[], help="make a fancy looking disk image using the given plist file with instructions; requires -dmg to work") ap.add_argument("-add-qt-tr", nargs=1, metavar="languages", default=[], help="add Qt translation files to the bundle's resources; the language list must be separated with commas, not with whitespace") ap.add_argument("-translations-dir", nargs=1, metavar="path", default=None, help="Path to Qt's translation files") ap.add_argument("-add-resources", nargs="+", metavar="path", default=[], help="list of additional files or folders to be copied into the bundle's resources; must be the last argument") ap.add_argument("-volname", nargs=1, metavar="volname", default=[], help="custom volume name for dmg") config = ap.parse_args() verbose = config.verbose[0] # ------------------------------------------------ app_bundle = config.app_bundle[0] if not os.path.exists(app_bundle): if verbose >= 1: sys.stderr.write("Error: Could not find app bundle \"%s\"\n" % (app_bundle)) sys.exit(1) app_bundle_name = os.path.splitext(os.path.basename(app_bundle))[0] # ------------------------------------------------ translations_dir = None if config.translations_dir and config.translations_dir[0]: if os.path.exists(config.translations_dir[0]): translations_dir = config.translations_dir[0] else: if verbose >= 1: sys.stderr.write("Error: Could not find translation dir \"%s\"\n" % (translations_dir)) sys.exit(1) # ------------------------------------------------ for p in config.add_resources: if verbose >= 3: print("Checking for \"%s\"..." 
% p) if not os.path.exists(p): if verbose >= 1: sys.stderr.write("Error: Could not find additional resource file \"%s\"\n" % (p)) sys.exit(1) # ------------------------------------------------ if len(config.fancy) == 1: if verbose >= 3: print("Fancy: Importing plistlib...") try: import plistlib except ImportError: if verbose >= 1: sys.stderr.write("Error: Could not import plistlib which is required for fancy disk images.\n") sys.exit(1) p = config.fancy[0] if verbose >= 3: print("Fancy: Loading \"%s\"..." % p) if not os.path.exists(p): if verbose >= 1: sys.stderr.write("Error: Could not find fancy disk image plist at \"%s\"\n" % (p)) sys.exit(1) try: fancy = plistlib.readPlist(p) except: if verbose >= 1: sys.stderr.write("Error: Could not parse fancy disk image plist at \"%s\"\n" % (p)) sys.exit(1) try: assert "window_bounds" not in fancy or (isinstance(fancy["window_bounds"], list) and len(fancy["window_bounds"]) == 4) assert "background_picture" not in fancy or isinstance(fancy["background_picture"], str) assert "icon_size" not in fancy or isinstance(fancy["icon_size"], int) assert "applications_symlink" not in fancy or isinstance(fancy["applications_symlink"], bool) if "items_position" in fancy: assert isinstance(fancy["items_position"], dict) for key, value in fancy["items_position"].items(): assert isinstance(value, list) and len(value) == 2 and isinstance(value[0], int) and isinstance(value[1], int) except: if verbose >= 1: sys.stderr.write("Error: Bad format of fancy disk image plist at \"%s\"\n" % (p)) sys.exit(1) if "background_picture" in fancy: bp = fancy["background_picture"] if verbose >= 3: print("Fancy: Resolving background picture \"%s\"..." % bp) if not os.path.exists(bp): bp = os.path.join(os.path.dirname(p), bp) if not os.path.exists(bp): if verbose >= 1: sys.stderr.write("Error: Could not find background picture at \"%s\" or \"%s\"\n" % (fancy["background_picture"], bp)) sys.exit(1) else: fancy["background_picture"] = bp else: fancy = None # ------------------------------------------------ if os.path.exists("dist"): if verbose >= 2: print("+ Removing old dist folder +") shutil.rmtree("dist") # ------------------------------------------------ if len(config.volname) == 1: volname = config.volname[0] else: volname = app_bundle_name # ------------------------------------------------ target = os.path.join("dist", "BitcoinABC-Qt.app") if verbose >= 2: print("+ Copying source bundle +") if verbose >= 3: print(app_bundle, "->", target) os.mkdir("dist") shutil.copytree(app_bundle, target, symlinks=True) applicationBundle = ApplicationBundleInfo(target) # ------------------------------------------------ if verbose >= 2: print("+ Deploying frameworks +") try: deploymentInfo = deployFrameworksForAppBundle(applicationBundle, config.strip, verbose) if deploymentInfo.qtPath is None: deploymentInfo.qtPath = os.getenv("QTDIR", None) if deploymentInfo.qtPath is None: if verbose >= 1: sys.stderr.write("Warning: Could not detect Qt's path, skipping plugin deployment!\n") config.plugins = False except RuntimeError as e: if verbose >= 1: sys.stderr.write("Error: %s\n" % str(e)) sys.exit(1) # ------------------------------------------------ if config.plugins: if verbose >= 2: print("+ Deploying plugins +") try: deployPlugins(applicationBundle, deploymentInfo, config.strip, verbose) except RuntimeError as e: if verbose >= 1: sys.stderr.write("Error: %s\n" % str(e)) sys.exit(1) # ------------------------------------------------ if len(config.add_qt_tr) == 0: add_qt_tr = [] else: if 
translations_dir is not None: qt_tr_dir = translations_dir else: if deploymentInfo.qtPath is not None: qt_tr_dir = os.path.join(deploymentInfo.qtPath, "translations") else: sys.stderr.write("Error: Could not find Qt translation path\n") sys.exit(1) add_qt_tr = ["qt_%s.qm" % lng for lng in config.add_qt_tr[0].split(",")] for lng_file in add_qt_tr: p = os.path.join(qt_tr_dir, lng_file) if verbose >= 3: print("Checking for \"%s\"..." % p) if not os.path.exists(p): if verbose >= 1: sys.stderr.write("Error: Could not find Qt translation file \"%s\"\n" % (lng_file)) sys.exit(1) # ------------------------------------------------ if verbose >= 2: print("+ Installing qt.conf +") f = open(os.path.join(applicationBundle.resourcesPath, "qt.conf"), "wb") f.write(qt_conf.encode()) f.close() # ------------------------------------------------ if len(add_qt_tr) > 0 and verbose >= 2: print("+ Adding Qt translations +") for lng_file in add_qt_tr: if verbose >= 3: print(os.path.join(qt_tr_dir, lng_file), "->", os.path.join(applicationBundle.resourcesPath, lng_file)) shutil.copy2(os.path.join(qt_tr_dir, lng_file), os.path.join(applicationBundle.resourcesPath, lng_file)) # ------------------------------------------------ if len(config.add_resources) > 0 and verbose >= 2: print("+ Adding additional resources +") for p in config.add_resources: t = os.path.join(applicationBundle.resourcesPath, os.path.basename(p)) if verbose >= 3: print(p, "->", t) if os.path.isdir(p): shutil.copytree(p, t, symlinks=True) else: shutil.copy2(p, t) # ------------------------------------------------ if config.sign and 'CODESIGNARGS' not in os.environ: print("You must set the CODESIGNARGS environment variable. Skipping signing.") elif config.sign: if verbose >= 1: print("Code-signing app bundle %s"%(target,)) subprocess.check_call("codesign --force %s %s"%(os.environ['CODESIGNARGS'], target), shell=True) # ------------------------------------------------ if config.dmg is not None: - #Patch in check_output for Python 2.6 - if "check_output" not in dir( subprocess ): - def f(*popenargs, **kwargs): - if 'stdout' in kwargs: - raise ValueError('stdout argument not allowed, it will be overridden.') - process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) - output, unused_err = process.communicate() - retcode = process.poll() - if retcode: - cmd = kwargs.get("args") - if cmd is None: - cmd = popenargs[0] - raise CalledProcessError(retcode, cmd) - return output - subprocess.check_output = f - def runHDIUtil(verb, image_basename, **kwargs): hdiutil_args = ["hdiutil", verb, image_basename + ".dmg"] if "capture_stdout" in kwargs: del kwargs["capture_stdout"] run = subprocess.check_output else: if verbose < 2: hdiutil_args.append("-quiet") elif verbose >= 3: hdiutil_args.append("-verbose") run = subprocess.check_call for key, value in kwargs.items(): hdiutil_args.append("-" + key) if not value is True: hdiutil_args.append(str(value)) - return run(hdiutil_args) + return run(hdiutil_args, universal_newlines=True) if verbose >= 2: if fancy is None: print("+ Creating .dmg disk image +") else: print("+ Preparing .dmg disk image +") if config.dmg != "": dmg_name = config.dmg else: spl = app_bundle_name.split(" ") dmg_name = spl[0] + "".join(p.capitalize() for p in spl[1:]) if fancy is None: try: runHDIUtil("create", dmg_name, srcfolder="dist", format="UDBZ", volname=volname, ov=True) except subprocess.CalledProcessError as e: sys.exit(e.returncode) else: if verbose >= 3: print("Determining size of \"dist\"...") size = 0 for path, 
dirs, files in os.walk("dist"): for file in files: size += os.path.getsize(os.path.join(path, file)) size += int(size * 0.15) if verbose >= 3: print("Creating temp image for modification...") try: runHDIUtil("create", dmg_name + ".temp", srcfolder="dist", format="UDRW", size=size, volname=volname, ov=True) except subprocess.CalledProcessError as e: sys.exit(e.returncode) if verbose >= 3: print("Attaching temp image...") try: output = runHDIUtil("attach", dmg_name + ".temp", readwrite=True, noverify=True, noautoopen=True, capture_stdout=True) except subprocess.CalledProcessError as e: sys.exit(e.returncode) - m = re.search("/Volumes/(.+$)", output.decode()) + m = re.search("/Volumes/(.+$)", output) disk_root = m.group(0) disk_name = m.group(1) if verbose >= 2: print("+ Applying fancy settings +") if "background_picture" in fancy: bg_path = os.path.join(disk_root, ".background", os.path.basename(fancy["background_picture"])) os.mkdir(os.path.dirname(bg_path)) if verbose >= 3: print(fancy["background_picture"], "->", bg_path) shutil.copy2(fancy["background_picture"], bg_path) else: bg_path = None if fancy.get("applications_symlink", False): os.symlink("/Applications", os.path.join(disk_root, "Applications")) # The Python appscript package broke with OSX 10.8 and isn't being fixed. # So we now build up an AppleScript string and use the osascript command # to make the .dmg file pretty: appscript = Template( """ on run argv tell application "Finder" tell disk "$disk" open set current view of container window to icon view set toolbar visible of container window to false set statusbar visible of container window to false set the bounds of container window to {$window_bounds} set theViewOptions to the icon view options of container window set arrangement of theViewOptions to not arranged set icon size of theViewOptions to $icon_size $background_commands $items_positions close -- close/reopen works around a bug... open update without registering applications delay 5 eject end tell end tell end run """) itemscript = Template('set position of item "${item}" of container window to {${position}}') items_positions = [] if "items_position" in fancy: for name, position in fancy["items_position"].items(): params = { "item" : name, "position" : ",".join([str(p) for p in position]) } items_positions.append(itemscript.substitute(params)) params = { "disk" : volname, "window_bounds" : "300,300,800,620", "icon_size" : "96", "background_commands" : "", "items_positions" : "\n ".join(items_positions) } if "window_bounds" in fancy: params["window_bounds"] = ",".join([str(p) for p in fancy["window_bounds"]]) if "icon_size" in fancy: params["icon_size"] = str(fancy["icon_size"]) if bg_path is not None: # Set background file, then call SetFile to make it invisible. 
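# The fancy-DMG step above builds its AppleScript by plain string.Template
# substitution before piping the result to osascript. A minimal sketch of that
# mechanism with the same item-position template (the item name and
# coordinates are illustrative):
from string import Template

itemscript = Template('set position of item "${item}" of container window to {${position}}')
line = itemscript.substitute({'item': 'BitcoinABC-Qt.app', 'position': '128,156'})
print(line)   # set position of item "BitcoinABC-Qt.app" of container window to {128,156}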
# (note: making it invisible first makes set background picture fail) bgscript = Template("""set background picture of theViewOptions to file ".background:$bgpic" do shell script "SetFile -a V /Volumes/$disk/.background/$bgpic" """) params["background_commands"] = bgscript.substitute({"bgpic" : os.path.basename(bg_path), "disk" : params["disk"]}) s = appscript.substitute(params) if verbose >= 2: print("Running AppleScript:") print(s) p = subprocess.Popen(['osascript', '-'], stdin=subprocess.PIPE) p.communicate(input=s.encode('utf-8')) if p.returncode: print("Error running osascript.") if verbose >= 2: print("+ Finalizing .dmg disk image +") time.sleep(5) try: runHDIUtil("convert", dmg_name + ".temp", format="UDBZ", o=dmg_name + ".dmg", ov=True) except subprocess.CalledProcessError as e: sys.exit(e.returncode) os.unlink(dmg_name + ".temp.dmg") # ------------------------------------------------ if verbose >= 2: print("+ Done +") sys.exit(0) diff --git a/contrib/testgen/base58.py b/contrib/testgen/base58.py index f716d301ab..66077ba76a 100644 --- a/contrib/testgen/base58.py +++ b/contrib/testgen/base58.py @@ -1,122 +1,126 @@ -# Copyright (c) 2012-2016 The Bitcoin Core developers +# Copyright (c) 2012-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Bitcoin base58 encoding and decoding. Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain) ''' import hashlib # for compatibility with following code... class SHA256: new = hashlib.sha256 if str != bytes: # Python 3.x def ord(c): return c def chr(n): return bytes((n,)) __b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' __b58base = len(__b58chars) b58chars = __b58chars def b58encode(v): """ encode v, which is a string of bytes, to base58. 
""" long_value = 0 for (i, c) in enumerate(v[::-1]): - long_value += (256**i) * ord(c) + if isinstance(c, str): + c = ord(c) + long_value += (256**i) * c result = '' while long_value >= __b58base: div, mod = divmod(long_value, __b58base) result = __b58chars[mod] + result long_value = div result = __b58chars[long_value] + result # Bitcoin does a little leading-zero-compression: # leading 0-bytes in the input become leading-1s nPad = 0 for c in v: - if c == '\0': + if c == 0: nPad += 1 else: break return (__b58chars[0] * nPad) + result def b58decode(v, length=None): """ decode v into a string of len bytes """ long_value = 0 - for (i, c) in enumerate(v[::-1]): - long_value += __b58chars.find(c) * (__b58base**i) + for i, c in enumerate(v[::-1]): + pos = __b58chars.find(c) + assert pos != -1 + long_value += pos * (__b58base**i) result = bytes() while long_value >= 256: div, mod = divmod(long_value, 256) result = chr(mod) + result long_value = div result = chr(long_value) + result nPad = 0 for c in v: if c == __b58chars[0]: nPad += 1 - else: - break + continue + break - result = chr(0) * nPad + result + result = bytes(nPad) + result if length is not None and len(result) != length: return None return result def checksum(v): """Return 32-bit checksum based on SHA256""" return SHA256.new(SHA256.new(v).digest()).digest()[0:4] def b58encode_chk(v): """b58encode a string, with 32-bit checksum""" return b58encode(v + checksum(v)) def b58decode_chk(v): """decode a base58 string, check and remove checksum""" result = b58decode(v) if result is None: return None if result[-4:] == checksum(result[:-4]): return result[:-4] else: return None def get_bcaddress_version(strAddress): """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """ addr = b58decode_chk(strAddress) if addr is None or len(addr) != 21: return None version = addr[0] return ord(version) if __name__ == '__main__': # Test case (from http://gitorious.org/bitcoin/python-base58.git) assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') is 0 _ohai = 'o hai'.encode('ascii') _tmp = b58encode(_ohai) assert _tmp == 'DYB3oMS' assert b58decode(_tmp, 5) == _ohai print("Tests passed") diff --git a/contrib/testgen/gen_base58_test_vectors.py b/contrib/testgen/gen_base58_test_vectors.py index 31ce80006a..fe8dcb2ae6 100755 --- a/contrib/testgen/gen_base58_test_vectors.py +++ b/contrib/testgen/gen_base58_test_vectors.py @@ -1,136 +1,139 @@ -#!/usr/bin/env python -# Copyright (c) 2012-2016 The Bitcoin Core developers +#!/usr/bin/env python3 +# Copyright (c) 2012-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Generate valid and invalid base58 address and private key test vectors. -Usage: +Usage: gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json ''' # 2012 Wladimir J. 
van der Laan # Released under MIT License import os from itertools import islice from base58 import b58encode_chk, b58decode_chk, b58chars import random from binascii import b2a_hex # key types PUBKEY_ADDRESS = 0 SCRIPT_ADDRESS = 5 PUBKEY_ADDRESS_TEST = 111 SCRIPT_ADDRESS_TEST = 196 PRIVKEY = 128 PRIVKEY_TEST = 239 metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed'] # templates for valid sequences templates = [ # prefix, payload_size, suffix, metadata # None = N/A ((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)), ((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)), ((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)), ((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)), ((PRIVKEY,), 32, (), (True, False, None, False)), ((PRIVKEY,), 32, (1,), (True, False, None, True)), ((PRIVKEY_TEST,), 32, (), (True, True, None, False)), ((PRIVKEY_TEST,), 32, (1,), (True, True, None, True)) ] def is_valid(v): '''Check vector v for validity''' result = b58decode_chk(v) if result is None: return False for template in templates: - prefix = str(bytearray(template[0])) - suffix = str(bytearray(template[2])) + prefix = bytearray(template[0]) + suffix = bytearray(template[2]) if result.startswith(prefix) and result.endswith(suffix): if (len(result) - len(prefix) - len(suffix)) == template[1]: return True return False def gen_valid_vectors(): '''Generate valid test vectors''' while True: for template in templates: - prefix = str(bytearray(template[0])) - payload = os.urandom(template[1]) - suffix = str(bytearray(template[2])) + prefix = bytearray(template[0]) + payload = bytearray(os.urandom(template[1])) + suffix = bytearray(template[2]) rv = b58encode_chk(prefix + payload + suffix) assert is_valid(rv) - metadata = dict([(x, y) for (x, y) in zip( - metadata_keys, template[3]) if y is not None]) - yield (rv, b2a_hex(payload), metadata) + metadata = {x: y for x, y in zip( + metadata_keys, template[3]) if y is not None} + hexrepr = b2a_hex(payload) + if isinstance(hexrepr, bytes): + hexrepr = hexrepr.decode('utf8') + yield (rv, hexrepr, metadata) def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix): '''Generate possibly invalid vector''' if corrupt_prefix: prefix = os.urandom(1) else: - prefix = str(bytearray(template[0])) + prefix = bytearray(template[0]) if randomize_payload_size: payload = os.urandom(max(int(random.expovariate(0.5)), 50)) else: payload = os.urandom(template[1]) if corrupt_suffix: suffix = os.urandom(len(template[2])) else: - suffix = str(bytearray(template[2])) + suffix = bytearray(template[2]) return b58encode_chk(prefix + payload + suffix) def randbool(p=0.5): '''Return True with P(p)''' return random.random() < p def gen_invalid_vectors(): '''Generate invalid test vectors''' # start with some manual edge-cases yield "", yield "x", while True: # kinds of invalid vectors: # invalid prefix # invalid payload length # invalid (randomized) suffix (add random data) # corrupt checksum for template in templates: val = gen_invalid_vector(template, randbool( 0.2), randbool(0.2), randbool(0.2)) if random.randint(0, 10) < 1: # line corruption if randbool(): # add random character to end val += random.choice(b58chars) else: # replace random character in the middle n = random.randint(0, len(val)) val = val[0:n] + random.choice(b58chars) + val[n + 1:] if not is_valid(val): yield val, if __name__ == '__main__': import sys import json iters = {'valid': gen_valid_vectors, 'invalid': gen_invalid_vectors} try: uiter = 
iters[sys.argv[1]] except IndexError: uiter = gen_valid_vectors try: count = int(sys.argv[2]) except IndexError: count = 0 data = list(islice(uiter(), count)) json.dump(data, sys.stdout, sort_keys=True, indent=4) sys.stdout.write('\n') diff --git a/doc/release-notes.md b/doc/release-notes.md index b54111d26d..3c52834eae 100644 --- a/doc/release-notes.md +++ b/doc/release-notes.md @@ -1,14 +1,19 @@ Bitcoin ABC version 0.19.10 is now available from: This release includes the following features and fixes: - Add a `spentby` field to the following RPCs: `getrawmempool`, `getmempooldescendants`, `getmempoolancestors`, `getmempoolentry`. - Default to use CashAddr in most places in the GUI even when `usecashaddr=0` is specified. Renamed script for creating JSON-RPC credentials ----------------------------- The `share/rpcuser/rpcuser.py` script was renamed to `share/rpcauth/rpcauth.py`. This script can be used to create `rpcauth` credentials for a JSON-RPC user. + +Python Support +-------------- + +Support for Python 2 has been discontinued for all test files and tools. diff --git a/share/qt/extract_strings_qt.py b/share/qt/extract_strings_qt.py index 3c6343c521..3d6f6842e2 100755 --- a/share/qt/extract_strings_qt.py +++ b/share/qt/extract_strings_qt.py @@ -1,94 +1,93 @@ -#!/usr/bin/env python -# Copyright (c) 2012-2016 The Bitcoin Core developers +#!/usr/bin/env python3 +# Copyright (c) 2012-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Extract _("...") strings for translation and convert to Qt stringdefs so that they can be picked up by Qt linguist. ''' -from __future__ import division, print_function, unicode_literals from subprocess import Popen, PIPE import operator import os import sys OUT_CPP = "qt/bitcoinstrings.cpp" EMPTY = ['""'] def parse_po(text): """ Parse 'po' format produced by xgettext. Return a list of (msgid,msgstr) tuples.
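The __main__ block of gen_base58_test_vectors.py above slices a fixed number of items out of two infinite generators with itertools.islice. A small usage sketch (not part of the patch; assumes it is run from contrib/testgen so the local base58 module is importable):

    from itertools import islice

    from gen_base58_test_vectors import gen_invalid_vectors, gen_valid_vectors

    # Valid vectors are (base58 string, hex payload, metadata dict) tuples.
    for addr, payload_hex, metadata in islice(gen_valid_vectors(), 3):
        print(addr, payload_hex, metadata)

    # Invalid vectors are yielded as 1-tuples, starting with the manual edge cases.
    for (vector,) in islice(gen_invalid_vectors(), 3):
        print(repr(vector))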
""" messages = [] msgid = [] msgstr = [] in_msgid = False in_msgstr = False for line in text.split('\n'): line = line.rstrip('\r') if line.startswith('msgid '): if in_msgstr: messages.append((msgid, msgstr)) in_msgstr = False # message start in_msgid = True msgid = [line[6:]] elif line.startswith('msgstr '): in_msgid = False in_msgstr = True msgstr = [line[7:]] elif line.startswith('"'): if in_msgid: msgid.append(line) if in_msgstr: msgstr.append(line) if in_msgstr: messages.append((msgid, msgstr)) return messages files = sys.argv[1:] # xgettext -n --keyword=_ $FILES XGETTEXT = os.getenv('XGETTEXT', 'xgettext') if not XGETTEXT: print('Cannot extract strings: xgettext utility is not installed or not configured.', file=sys.stderr) print('Please install package "gettext" and re-run \'./configure\'.', file=sys.stderr) sys.exit(1) child = Popen([XGETTEXT, '--output=-', '-n', '--keyword=_'] + files, stdout=PIPE) (out, err) = child.communicate() messages = parse_po(out.decode('utf-8')) f = open(OUT_CPP, 'w') f.write(""" #include // Automatically generated by extract_strings_qt.py #ifdef __GNUC__ #define UNUSED __attribute__((unused)) #else #define UNUSED #endif """) f.write('static const char UNUSED *bitcoin_strings[] = {\n') f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format((os.getenv('PACKAGE_NAME'),))) f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format((os.getenv('COPYRIGHT_HOLDERS'),))) if os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION') != os.getenv('PACKAGE_NAME'): f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format( (os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION'),))) messages.sort(key=operator.itemgetter(0)) for (msgid, msgstr) in messages: if msgid != EMPTY: f.write('QT_TRANSLATE_NOOP("bitcoin-core", {}),\n'.format('\n'.join(msgid))) f.write('};\n') f.close() diff --git a/share/rpcauth/rpcauth.py b/share/rpcauth/rpcauth.py index 3db05ecc4a..1f0dd0da75 100755 --- a/share/rpcauth/rpcauth.py +++ b/share/rpcauth/rpcauth.py @@ -1,41 +1,41 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. import hashlib import sys import os from random import SystemRandom import base64 import hmac if len(sys.argv) < 2: sys.stderr.write('Please include username as an argument.\n') sys.exit(0) username = sys.argv[1] # This uses os.urandom() underneath cryptogen = SystemRandom() # Create 16 byte hex salt salt_sequence = [cryptogen.randrange(256) for i in range(16)] hexseq = list(map(hex, salt_sequence)) salt = "".join([x[2:] for x in hexseq]) # Create 32 byte b64 password password = base64.urlsafe_b64encode(os.urandom(32)) digestmod = hashlib.sha256 if sys.version_info.major >= 3: password = password.decode('utf-8') digestmod = 'SHA256' m = hmac.new(bytearray(salt, 'utf-8'), bytearray(password, 'utf-8'), digestmod) result = m.hexdigest() print("String to be appended to bitcoin.conf:") print("rpcauth=" + username + ":" + salt + "$" + result) print("Your password:\n" + password) diff --git a/test/lint/check-doc.py b/test/lint/check-doc.py index 520f736732..a4e0107a6b 100755 --- a/test/lint/check-doc.py +++ b/test/lint/check-doc.py @@ -1,88 +1,89 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Copyright (c) 2019 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
''' This checks if all command line args are documented. Return value is 0 to indicate no error. Author: @MarcoFalke ''' from subprocess import check_output from pprint import PrettyPrinter import glob import re TOP_LEVEL = 'git rev-parse --show-toplevel' FOLDER_SRC = '/src/**/' FOLDER_TEST = '/src/**/test/' EXTENSIONS = ["*.c", "*.h", "*.cpp", "*.cc", "*.hpp"] REGEX_ARG = r'(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\(\s*"(-[^"]+)"' REGEX_DOC = r'AddArg\(\s*"(-[^"=]+?)(?:=|")' # list unsupported, deprecated and duplicate args as they need no documentation SET_DOC_OPTIONAL = set(['-benchmark', '-blockminsize', '-dbcrashratio', '-debugnet', '-forcecompactdb', # TODO remove after the Nov 2019 upgrade '-gravitonactivationtime', '-h', '-help', '-parkdeepreorg', '-replayprotectionactivationtime', '-rpcssl', '-socks', '-tor', '-usehd', '-whitelistalwaysrelay']) # list false positive unknows arguments SET_FALSE_POSITIVE_UNKNOWNS = set(['-zmqpubhashblock', '-zmqpubhashtx', '-zmqpubrawblock', '-zmqpubrawtx']) def main(): - top_level = check_output(TOP_LEVEL, shell=True).decode().strip() + top_level = check_output(TOP_LEVEL, shell=True, + universal_newlines=True).strip() source_files = [] test_files = [] for extension in EXTENSIONS: source_files += glob.glob(top_level + FOLDER_SRC + extension, recursive=True) test_files += glob.glob(top_level + FOLDER_TEST + extension, recursive=True) files = set(source_files) - set(test_files) args_used = set() args_docd = set() for file in files: with open(file, 'r') as f: content = f.read() args_used |= set(re.findall(re.compile(REGEX_ARG), content)) args_docd |= set(re.findall(re.compile(REGEX_DOC), content)) args_used |= SET_FALSE_POSITIVE_UNKNOWNS args_need_doc = args_used - args_docd - SET_DOC_OPTIONAL args_unknown = args_docd - args_used pp = PrettyPrinter() print("Args used : {}".format(len(args_used))) print("Args documented : {}".format(len(args_docd))) print("Args undocumented: {} ({} don't need documentation)".format( len(args_need_doc), len(SET_DOC_OPTIONAL))) pp.pprint(args_need_doc) print("Args unknown : {}".format(len(args_unknown))) pp.pprint(args_unknown) if __name__ == "__main__": main()
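To see what the two regular expressions in check-doc.py actually capture, here is a small self-contained illustration; the C++ snippet is invented for the example and is not taken from the source tree:

    import re

    REGEX_ARG = r'(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\(\s*"(-[^"]+)"'
    REGEX_DOC = r'AddArg\(\s*"(-[^"=]+?)(?:=|")'

    cpp = '''
        if (gArgs.GetBoolArg("-daemon", false)) StartDaemon();
        gArgs.AddArg("-daemon", "Run in the background", false, OptionsCategory::OPTIONS);
        gArgs.AddArg("-maxmempool=<n>", "Keep the mempool below <n> MB", false, OptionsCategory::OPTIONS);
    '''

    # Arguments read by the code vs. arguments documented via AddArg().
    print(sorted(set(re.findall(REGEX_ARG, cpp))))  # ['-daemon']
    print(sorted(set(re.findall(REGEX_DOC, cpp))))  # ['-daemon', '-maxmempool']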