diff --git a/.arclint b/.arclint index 7bb98b2b62..074256e6f4 100644 --- a/.arclint +++ b/.arclint @@ -1,185 +1,188 @@ { "linters": { "generated": { "type": "generated" }, "clang-format": { "type": "clang-format", "version": ">=8.0", "bin": ["clang-format-8", "clang-format"], "include": "(^src/.*\\.(h|c|cpp|mm)$)", "exclude": [ "(^src/(secp256k1|univalue|leveldb)/)" ] }, "autopep8": { "type": "autopep8", "version": ">=1.3.4", - "include": "(\\.py$)" + "include": "(\\.py$)", + "flags": [ + "--global-config=.autopep8" + ] }, "flake8": { "type": "flake8", "include": "(\\.py$)", "flags": [ "--select=E112,E113,E115,E116,E125,E131,E133,E223,E224,E242,E266,E271,E272,E273,E274,E275,E304,E306,E401,E402,E502,E701,E702,E703,E714,E721,E741,E742,E743,E901,E902,F401,F402,F403,F404,F405,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F821,F822,F823,F831,F841,W292,W293,W601,W602,W603,W604,W605,W606" ] }, "lint-format-strings": { "type": "lint-format-strings", "include": "(^src/.*\\.(h|c|cpp)$)", "exclude": [ "(^src/(secp256k1|univalue|leveldb)/)" ] }, "check-doc": { "type": "check-doc", "include": "(^src/.*\\.(h|c|cpp)$)" }, "lint-tests": { "type": "lint-tests", "include": "(^src/(seeder/|rpc/|wallet/)?test/.*\\.(cpp)$)" }, "lint-python-format": { "type": "lint-python-format", "include": "(\\.py$)", "exclude": [ "(^test/lint/lint-python-format\\.py$)" ] }, "phpcs": { "type": "phpcs", "include": "(\\.php$)", "exclude": [ "(^arcanist/__phutil_library_.+\\.php$)" ], "phpcs.standard": "arcanist/phpcs.xml" }, "lint-locale-dependence": { "type": "lint-locale-dependence", "include": "(^src/.*\\.(h|cpp)$)", "exclude": [ "(^src/(crypto/ctaes/|leveldb/|secp256k1/|seeder/|tinyformat.h|univalue/))" ] }, "lint-cheader": { "type": "lint-cheader", "include": "(^src/.*\\.(h|cpp)$)", "exclude": [ "(^src/(crypto/ctaes|secp256k1|univalue|leveldb)/)" ] }, "spelling": { "type": "spelling", "exclude": [ "(^build-aux/m4/)", "(^depends/)", "(^doc/release-notes/)", "(^src/(qt/locale|secp256k1|univalue|leveldb)/)", "(^test/lint/dictionary/)" ], "spelling.dictionaries": [ "test/lint/dictionary/english.json" ] }, "lint-assert-with-side-effects": { "type": "lint-assert-with-side-effects", "include": "(^src/.*\\.(h|cpp)$)", "exclude": [ "(^src/(secp256k1|univalue|leveldb)/)" ] }, "lint-include-quotes": { "type": "lint-include-quotes", "include": "(^src/.*\\.(h|cpp)$)", "exclude": [ "(^src/(secp256k1|univalue|leveldb)/)" ] }, "lint-include-guard": { "type": "lint-include-guard", "include": "(^src/.*\\.h$)", "exclude": [ "(^src/(crypto/ctaes|secp256k1|univalue|leveldb)/)", "(^src/tinyformat.h$)" ] }, "lint-include-source": { "type": "lint-include-source", "include": "(^src/.*\\.(h|c|cpp)$)", "exclude": [ "(^src/(secp256k1|univalue|leveldb)/)" ] }, "lint-stdint": { "type": "lint-stdint", "include": "(^src/.*\\.(h|c|cpp)$)", "exclude": [ "(^src/(secp256k1|univalue|leveldb)/)", "(^src/compat/assumptions.h$)" ] }, "lint-source-filename": { "type": "lint-source-filename", "include": "(^src/.*\\.(h|c|cpp)$)", "exclude": [ "(^src/(secp256k1|univalue|leveldb)/)" ] }, "lint-boost-dependencies": { "type": "lint-boost-dependencies", "include": "(^src/.*\\.(h|cpp)$)" }, "check-rpc-mappings": { "type": "check-rpc-mappings", "include": "(^src/(rpc/|wallet/rpc).*\\.cpp$)" }, "lint-python-encoding": { "type": "lint-python-encoding", "include": "(\\.py$)" }, "lint-python-shebang": { "type": "lint-python-shebang", "include": "(\\.py$)", "exclude": [ "(__init__\\.py$)" ] }, "lint-bash-shebang": { "type": 
"lint-bash-shebang", "include": "(\\.sh$)" }, "shellcheck": { "type": "shellcheck", "include": "(\\.sh$)", "exclude": [ "(^src/(secp256k1|univalue)/)" ] }, "lint-shell-locale": { "type": "lint-shell-locale", "include": "(\\.sh$)", "exclude": [ "(^src/(secp256k1|univalue)/)" ] }, "lint-cpp-void-parameters": { "type": "lint-cpp-void-parameters", "include": "(^src/.*\\.(h|cpp)$)", "exclude": [ "(^src/(crypto/ctaes|secp256k1|univalue|leveldb)/)", "(^src/compat/glibc_compat.cpp$)" ] }, "lint-logs": { "type": "lint-logs", "include": "(^src/.*\\.(h|cpp)$)" }, "lint-qt": { "type": "lint-qt", "include": "(^src/qt/.*\\.(h|cpp)$)", "exclude": [ "(^src/qt/(locale|forms|res)/)" ] } } } diff --git a/.autopep8 b/.autopep8 new file mode 100644 index 0000000000..1a9b89f214 --- /dev/null +++ b/.autopep8 @@ -0,0 +1,2 @@ +[pycodestyle] +select = E,W diff --git a/contrib/devtools/security-check.py b/contrib/devtools/security-check.py index e5d8c1f293..96ba09619c 100755 --- a/contrib/devtools/security-check.py +++ b/contrib/devtools/security-check.py @@ -1,234 +1,234 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Perform basic ELF security checks on a series of executables. Exit status will be 0 if successful, and the program will be silent. Otherwise the exit status will be 1 and it will log which executables failed which checks. Needs `readelf` (for ELF) and `objdump` (for PE). ''' import subprocess import sys import os READELF_CMD = os.getenv('READELF', '/usr/bin/readelf') OBJDUMP_CMD = os.getenv('OBJDUMP', '/usr/bin/objdump') # checks which are non-fatal for now but only generate a warning NONFATAL = {'HIGH_ENTROPY_VA'} def check_ELF_PIE(executable): ''' Check for position independent executable (PIE), allowing for address space randomization. 
''' p = subprocess.Popen([READELF_CMD, '-h', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') ok = False for line in stdout.splitlines(): line = line.split() if len(line) >= 2 and line[0] == 'Type:' and line[1] == 'DYN': ok = True return ok def get_ELF_program_headers(executable): '''Return type and flags for ELF program headers''' p = subprocess.Popen([READELF_CMD, '-l', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') in_headers = False count = 0 headers = [] for line in stdout.splitlines(): if line.startswith('Program Headers:'): in_headers = True if line == '': in_headers = False if in_headers: if count == 1: # header line ofs_typ = line.find('Type') ofs_offset = line.find('Offset') ofs_flags = line.find('Flg') ofs_align = line.find('Align') if ofs_typ == -1 or ofs_offset == -1 or ofs_flags == -1 or ofs_align == -1: raise ValueError('Cannot parse readelf -lW output') elif count > 1: typ = line[ofs_typ:ofs_offset].rstrip() flags = line[ofs_flags:ofs_align].rstrip() headers.append((typ, flags)) count += 1 return headers def check_ELF_NX(executable): ''' Check that no sections are writable and executable (including the stack) ''' have_wx = False have_gnu_stack = False for (typ, flags) in get_ELF_program_headers(executable): if typ == 'GNU_STACK': have_gnu_stack = True if 'W' in flags and 'E' in flags: # section is both writable and executable have_wx = True return have_gnu_stack and not have_wx def check_ELF_RELRO(executable): ''' Check for read-only relocations. GNU_RELRO program header must exist Dynamic section must have BIND_NOW flag ''' have_gnu_relro = False for (typ, flags) in get_ELF_program_headers(executable): # Note: not checking flags == 'R': here as linkers set the permission differently # This does not affect security: the permission flags of the GNU_RELRO program header are ignored, the PT_LOAD header determines the effective permissions. # However, the dynamic linker needs to write to this area so these are RW. # Glibc itself takes care of mprotecting this area R after relocations are finished. # See also http://permalink.gmane.org/gmane.comp.gnu.binutils/71347 if typ == 'GNU_RELRO': have_gnu_relro = True have_bindnow = False p = subprocess.Popen([READELF_CMD, '-d', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') for line in stdout.splitlines(): tokens = line.split() if len(tokens) > 1 and tokens[1] == '(BIND_NOW)' or (len(tokens) > 2 and tokens[1] == '(FLAGS)' and 'BIND_NOW' in tokens[2]): have_bindnow = True return have_gnu_relro and have_bindnow def check_ELF_Canary(executable): ''' Check for use of stack canary ''' p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') ok = False for line in stdout.splitlines(): if '__stack_chk_fail' in line: ok = True return ok def get_PE_dll_characteristics(executable): ''' Get PE DllCharacteristics bits.
Returns a tuple (arch,bits) where arch is 'i386:x86-64' or 'i386' and bits is the DllCharacteristics value. ''' - p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, + p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') arch = '' bits = 0 for line in stdout.splitlines(): tokens = line.split() if len(tokens) >= 2 and tokens[0] == 'architecture:': arch = tokens[1].rstrip(',') if len(tokens) >= 2 and tokens[0] == 'DllCharacteristics': bits = int(tokens[1], 16) return (arch, bits) IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x0020 IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040 IMAGE_DLL_CHARACTERISTICS_NX_COMPAT = 0x0100 def check_PE_DYNAMIC_BASE(executable): '''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)''' (arch, bits) = get_PE_dll_characteristics(executable) reqbits = IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE return (bits & reqbits) == reqbits # On 64 bit, must support high-entropy 64-bit address space layout randomization in addition to DYNAMIC_BASE # to have secure ASLR. def check_PE_HIGH_ENTROPY_VA(executable): '''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR''' (arch, bits) = get_PE_dll_characteristics(executable) if arch == 'i386:x86-64': reqbits = IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA else: # Unnecessary on 32-bit assert(arch == 'i386') reqbits = 0 return (bits & reqbits) == reqbits def check_PE_NX(executable): '''NX: DllCharacteristics bit 0x100 signifies nxcompat (DEP)''' (arch, bits) = get_PE_dll_characteristics(executable) return (bits & IMAGE_DLL_CHARACTERISTICS_NX_COMPAT) == IMAGE_DLL_CHARACTERISTICS_NX_COMPAT CHECKS = { 'ELF': [ ('PIE', check_ELF_PIE), ('NX', check_ELF_NX), ('RELRO', check_ELF_RELRO), ('Canary', check_ELF_Canary) ], 'PE': [ ('DYNAMIC_BASE', check_PE_DYNAMIC_BASE), ('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA), ('NX', check_PE_NX) ] } def identify_executable(executable): with open(executable, 'rb') as f: magic = f.read(4) if magic.startswith(b'MZ'): return 'PE' elif magic.startswith(b'\x7fELF'): return 'ELF' return None if __name__ == '__main__': retval = 0 for filename in sys.argv[1:]: try: etype = identify_executable(filename) if etype is None: print('{}: unknown format'.format(filename)) retval = 1 continue failed = [] warning = [] for (name, func) in CHECKS[etype]: if not func(filename): if name in NONFATAL: warning.append(name) else: failed.append(name) if failed: print('{}: failed {}'.format(filename, ' '.join(failed))) retval = 1 if warning: print('{}: warning {}'.format(filename, ' '.join(warning))) except IOError: print('{}: cannot open'.format(filename)) retval = 1 sys.exit(retval) diff --git a/contrib/devtools/symbol-check.py b/contrib/devtools/symbol-check.py index dc961e9740..d5278f6f97 100755 --- a/contrib/devtools/symbol-check.py +++ b/contrib/devtools/symbol-check.py @@ -1,190 +1,190 @@ #!/usr/bin/env python3 # Copyright (c) 2014 Wladimir J. van der Laan # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' A script to check that the (Linux) executables produced by gitian only contain allowed gcc, glibc and libstdc++ version symbols. This makes sure they are still compatible with the minimum supported Linux distribution versions.
Example usage: find ../gitian-builder/build -type f -executable | xargs python3 contrib/devtools/symbol-check.py ''' import subprocess import re import sys import os # Debian 8.11 (Jessie) has: # # - g++ version 4.9.2 (https://packages.debian.org/search?suite=default&section=all&arch=any&searchon=names&keywords=g%2B%2B) # - libc version 2.19.18 (https://packages.debian.org/search?suite=default&section=all&arch=any&searchon=names&keywords=libc6) # - libstdc++ version 4.8.4 (https://packages.debian.org/search?suite=default&section=all&arch=any&searchon=names&keywords=libstdc%2B%2B6) # # Ubuntu 14.04 (Trusty Tahr) has: # # - g++ version 4.8.2 (https://packages.ubuntu.com/search?suite=trusty&section=all&arch=any&keywords=g%2B%2B&searchon=names) # - libc version 2.19.0 (https://packages.ubuntu.com/search?suite=trusty&section=all&arch=any&keywords=libc6&searchon=names) # - libstdc++ version 4.8.2 (https://packages.ubuntu.com/search?suite=trusty&section=all&arch=any&keywords=libstdc%2B%2B&searchon=names) # # Taking the minimum of these as our target. # # According to GNU ABI document (http://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html) this corresponds to: # GCC 4.8.0: GCC_4.8.0 # GCC 4.8.0: GLIBCXX_3.4.18, CXXABI_1.3.7 # (glibc) GLIBC_2_19 # MAX_VERSIONS = { - 'GCC': (4, 8, 0), - 'CXXABI': (1, 3, 7), - 'GLIBCXX': (3, 4, 18), - 'GLIBC': (2, 19), + 'GCC': (4, 8, 0), + 'CXXABI': (1, 3, 7), + 'GLIBCXX': (3, 4, 18), + 'GLIBC': (2, 19), 'LIBATOMIC': (1, 0) } # See here for a description of _IO_stdin_used: # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=634261#109 # Ignore symbols that are exported as part of every executable IGNORE_EXPORTS = { '_edata', '_end', '__end__', '_init', '__bss_start', '__bss_start__', '_bss_end__', '__bss_end__', '_fini', '_IO_stdin_used', 'stdin', 'stdout', 'stderr', # Figure out why we get these symbols exported on xenial. '_ZNKSt5ctypeIcE8do_widenEc', 'in6addr_any', 'optarg', '_ZNSt16_Sp_counted_baseILN9__gnu_cxx12_Lock_policyE2EE10_M_destroyEv' } READELF_CMD = os.getenv('READELF', '/usr/bin/readelf') CPPFILT_CMD = os.getenv('CPPFILT', '/usr/bin/c++filt') # Allowed NEEDED libraries ALLOWED_LIBRARIES = { # bitcoind and bitcoin-qt 'libgcc_s.so.1', # GCC base support 'libc.so.6', # C library 'libpthread.so.0', # threading 'libanl.so.1', # DNS resolve 'libm.so.6', # math library 'librt.so.1', # real-time (clock) 'libatomic.so.1', 'ld-linux-x86-64.so.2', # 64-bit dynamic linker 'ld-linux.so.2', # 32-bit dynamic linker 'ld-linux-aarch64.so.1', # 64-bit ARM dynamic linker 'ld-linux-armhf.so.3', # 32-bit ARM dynamic linker # bitcoin-qt only 'libX11-xcb.so.1', # part of X11 'libX11.so.6', # part of X11 'libxcb.so.1', # part of X11 'libfontconfig.so.1', # font support 'libfreetype.so.6', # font parsing 'libdl.so.2' # programming interface to dynamic linker } ARCH_MIN_GLIBC_VER = { - '80386': (2, 1), + '80386': (2, 1), 'X86-64': (2, 2, 5), - 'ARM': (2, 4), + 'ARM': (2, 4), 'AArch64': (2, 17) } class CPPFilt(object): ''' Demangle C++ symbol names. Use a pipe to the 'c++filt' command. ''' def __init__(self): self.proc = subprocess.Popen( CPPFILT_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True) def __call__(self, mangled): self.proc.stdin.write(mangled + '\n') self.proc.stdin.flush() return self.proc.stdout.readline().rstrip() def close(self): self.proc.stdin.close() self.proc.stdout.close() self.proc.wait() def read_symbols(executable, imports=True): ''' Parse an ELF executable and return a list of (symbol, version, arch) tuples for dynamic, imported symbols.
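For example, an imported glibc symbol might come back as ('memcpy', 'GLIBC_2.14', 'X86-64'); the arch element is read from the 'Machine:' line of the readelf output.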
''' p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', '-h', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Could not read symbols for {}: {}'.format( executable, stderr.strip())) syms = [] for line in stdout.splitlines(): line = line.split() if 'Machine:' in line: arch = line[-1] if len(line) > 7 and re.match('[0-9]+:$', line[0]): (sym, _, version) = line[7].partition('@') is_import = line[6] == 'UND' if version.startswith('@'): version = version[1:] if is_import == imports: syms.append((sym, version, arch)) return syms def check_version(max_versions, version, arch): if '_' in version: (lib, _, ver) = version.rpartition('_') else: lib = version ver = '0' ver = tuple([int(x) for x in ver.split('.')]) if not lib in max_versions: return False return ver <= max_versions[lib] or lib == 'GLIBC' and ver <= ARCH_MIN_GLIBC_VER[arch] def read_libraries(filename): p = subprocess.Popen([READELF_CMD, '-d', '-W', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') libraries = [] for line in stdout.splitlines(): tokens = line.split() if len(tokens) > 2 and tokens[1] == '(NEEDED)': match = re.match( r'^Shared library: \[(.*)\]$', ' '.join(tokens[2:])) if match: libraries.append(match.group(1)) else: raise ValueError('Unparseable (NEEDED) specification') return libraries if __name__ == '__main__': cppfilt = CPPFilt() retval = 0 for filename in sys.argv[1:]: # Check imported symbols for sym, version, arch in read_symbols(filename, True): if version and not check_version(MAX_VERSIONS, version, arch): print('{}: symbol {} from unsupported version {}'.format( filename, cppfilt(sym), version)) retval = 1 # Check exported symbols for sym, version, arch in read_symbols(filename, False): if sym in IGNORE_EXPORTS: continue print('{}: export of symbol {} not allowed'.format( filename, cppfilt(sym))) retval = 1 # Check dependency libraries for library_name in read_libraries(filename): if library_name not in ALLOWED_LIBRARIES: print('{}: NEEDED library {} is not allowed'.format( filename, library_name)) retval = 1 sys.exit(retval) diff --git a/contrib/gitian-build.py b/contrib/gitian-build.py index 276dd47bac..e5d4eecfc4 100755 --- a/contrib/gitian-build.py +++ b/contrib/gitian-build.py @@ -1,312 +1,312 @@ #!/usr/bin/env python3 import argparse import multiprocessing import os import subprocess import sys def setup(): global args, workdir programs = ['ruby', 'git', 'apt-cacher-ng', 'make', 'wget'] if args.kvm: programs += ['python-vm-builder', 'qemu-kvm', 'qemu-utils'] elif args.docker: dockers = ['docker.io', 'docker-ce'] for i in dockers: return_code = subprocess.call( ['sudo', 'apt-get', 'install', '-qq', i]) if return_code == 0: break if return_code != 0: print('Cannot find any way to install docker', file=sys.stderr) exit(1) else: programs += ['lxc', 'debootstrap'] subprocess.check_call(['sudo', 'apt-get', 'install', '-qq'] + programs) if not os.path.isdir('gitian-builder'): subprocess.check_call( ['git', 'clone', 'https://github.com/devrandom/gitian-builder.git']) if not os.path.isdir('bitcoin-abc'): subprocess.check_call( ['git', 'clone', 'https://github.com/Bitcoin-ABC/bitcoin-abc.git']) os.chdir('gitian-builder') make_image_prog = ['bin/make-base-vm', '--suite', 'bionic', '--arch', 'amd64'] if args.docker: make_image_prog += ['--docker'] 
elif not args.kvm: make_image_prog += ['--lxc'] subprocess.check_call(make_image_prog) os.chdir(workdir) if args.is_bionic and not args.kvm and not args.docker: subprocess.check_call( ['sudo', 'sed', '-i', 's/lxcbr0/br0/', '/etc/default/lxc-net']) print('Reboot is required') exit(0) def build(): global args, workdir base_output_dir = 'bitcoin-binaries/' + args.version os.makedirs(base_output_dir + '/src', exist_ok=True) print('\nBuilding Dependencies\n') os.chdir('gitian-builder') os.makedirs('inputs', exist_ok=True) subprocess.check_call(['make', '-C', '../bitcoin-abc/depends', 'download', 'SOURCES_PATH=' + os.getcwd() + '/cache/common']) output_dir_src = '../' + base_output_dir + '/src' if args.linux: print('\nCompiling ' + args.version + ' Linux') - subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, - '--url', 'bitcoin='+args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml']) + subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin=' + args.commit, + '--url', 'bitcoin=' + args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + '-linux', '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml']) output_dir_linux = '../' + base_output_dir + '/linux' os.makedirs(output_dir_linux, exist_ok=True) subprocess.check_call( 'mv build/out/bitcoin-*.tar.gz ' + output_dir_linux, shell=True) subprocess.check_call( 'mv build/out/src/bitcoin-*.tar.gz ' + output_dir_src, shell=True) subprocess.check_call( 'mv result/bitcoin-*-linux-res.yml ' + output_dir_linux, shell=True) if args.windows: print('\nCompiling ' + args.version + ' Windows') - subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, - '--url', 'bitcoin='+args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml']) + subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin=' + args.commit, + '--url', 'bitcoin=' + args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + '-win-unsigned', '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml']) output_dir_win = '../' + base_output_dir + '/win' os.makedirs(output_dir_win, exist_ok=True) subprocess.check_call( 'mv build/out/bitcoin-*-win-unsigned.tar.gz inputs/', shell=True) subprocess.check_call( 'mv build/out/bitcoin-*.zip build/out/bitcoin-*.exe ' + output_dir_win, shell=True) subprocess.check_call( 'mv build/out/src/bitcoin-*.tar.gz ' + output_dir_src, shell=True) subprocess.check_call( 'mv result/bitcoin-*-win-res.yml ' + output_dir_win, shell=True) if args.macos: print('\nCompiling ' + args.version + ' MacOS') - subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, - '--url', 'bitcoin='+args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml']) + subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin=' + args.commit, + '--url', 'bitcoin=' + args.url, '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + '-osx-unsigned', 
'--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml']) output_dir_osx = '../' + base_output_dir + '/osx' os.makedirs(output_dir_osx, exist_ok=True) subprocess.check_call( 'mv build/out/bitcoin-*-osx-unsigned.tar.gz inputs/', shell=True) subprocess.check_call( 'mv build/out/bitcoin-*.tar.gz build/out/bitcoin-*.dmg ' + output_dir_osx, shell=True) subprocess.check_call( 'mv build/out/src/bitcoin-*.tar.gz ' + output_dir_src, shell=True) subprocess.check_call( 'mv result/bitcoin-*-osx-res.yml ' + output_dir_osx, shell=True) os.chdir(workdir) if args.commit_files: - print('\nCommitting '+args.version+' Unsigned Sigs\n') + print('\nCommitting ' + args.version + ' Unsigned Sigs\n') os.chdir('gitian.sigs') subprocess.check_call( - ['git', 'add', args.version+'-linux/'+args.signer]) + ['git', 'add', args.version + '-linux/' + args.signer]) subprocess.check_call( - ['git', 'add', args.version+'-win-unsigned/'+args.signer]) + ['git', 'add', args.version + '-win-unsigned/' + args.signer]) subprocess.check_call( - ['git', 'add', args.version+'-osx-unsigned/'+args.signer]) + ['git', 'add', args.version + '-osx-unsigned/' + args.signer]) subprocess.check_call( - ['git', 'commit', '-m', 'Add '+args.version+' unsigned sigs for '+args.signer]) + ['git', 'commit', '-m', 'Add ' + args.version + ' unsigned sigs for ' + args.signer]) os.chdir(workdir) def sign(): global args, workdir os.chdir('gitian-builder') if args.windows: print('\nSigning ' + args.version + ' Windows') subprocess.check_call('cp inputs/bitcoin-' + args.version + '-win-unsigned.tar.gz inputs/bitcoin-win-unsigned.tar.gz', shell=True) - subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature='+args.commit, + subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature=' + args.commit, '../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml']) - subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-signed', + subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + '-win-signed', '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml']) subprocess.check_call( - 'mv build/out/bitcoin-*win64-setup.exe ../bitcoin-binaries/'+args.version, shell=True) + 'mv build/out/bitcoin-*win64-setup.exe ../bitcoin-binaries/' + args.version, shell=True) subprocess.check_call( - 'mv build/out/bitcoin-*win32-setup.exe ../bitcoin-binaries/'+args.version, shell=True) + 'mv build/out/bitcoin-*win32-setup.exe ../bitcoin-binaries/' + args.version, shell=True) if args.macos: print('\nSigning ' + args.version + ' MacOS') subprocess.check_call('cp inputs/bitcoin-' + args.version + '-osx-unsigned.tar.gz inputs/bitcoin-osx-unsigned.tar.gz', shell=True) - subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature='+args.commit, + subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature=' + args.commit, '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml']) - subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-signed', + subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version + '-osx-signed', '--destination', '../gitian.sigs/', '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml']) subprocess.check_call('mv build/out/bitcoin-osx-signed.dmg ../bitcoin-binaries/' + - 
args.version+'/bitcoin-'+args.version+'-osx.dmg', shell=True) + args.version + '/bitcoin-' + args.version + '-osx.dmg', shell=True) os.chdir(workdir) if args.commit_files: - print('\nCommitting '+args.version+' Signed Sigs\n') + print('\nCommitting ' + args.version + ' Signed Sigs\n') os.chdir('gitian.sigs') subprocess.check_call( - ['git', 'add', args.version+'-win-signed/'+args.signer]) + ['git', 'add', args.version + '-win-signed/' + args.signer]) subprocess.check_call( - ['git', 'add', args.version+'-osx-signed/'+args.signer]) + ['git', 'add', args.version + '-osx-signed/' + args.signer]) subprocess.check_call(['git', 'commit', '-a', '-m', 'Add ' + - args.version+' signed binary sigs for '+args.signer]) + args.version + ' signed binary sigs for ' + args.signer]) os.chdir(workdir) def verify(): global args, workdir os.chdir('gitian-builder') - print('\nVerifying v'+args.version+' Linux\n') + print('\nVerifying v' + args.version + ' Linux\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + '-linux', '../bitcoin-abc/contrib/gitian-descriptors/gitian-linux.yml']) - print('\nVerifying v'+args.version+' Windows\n') + print('\nVerifying v' + args.version + ' Windows\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + '-win-unsigned', '../bitcoin-abc/contrib/gitian-descriptors/gitian-win.yml']) - print('\nVerifying v'+args.version+' MacOS\n') + print('\nVerifying v' + args.version + ' MacOS\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + '-osx-unsigned', '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx.yml']) - print('\nVerifying v'+args.version+' Signed Windows\n') + print('\nVerifying v' + args.version + ' Signed Windows\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + '-win-signed', '../bitcoin-abc/contrib/gitian-descriptors/gitian-win-signer.yml']) - print('\nVerifying v'+args.version+' Signed MacOS\n') + print('\nVerifying v' + args.version + ' Signed MacOS\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version + '-osx-signed', '../bitcoin-abc/contrib/gitian-descriptors/gitian-osx-signer.yml']) os.chdir(workdir) def main(): global args, workdir num_cpus = multiprocessing.cpu_count() parser = argparse.ArgumentParser(usage='%(prog)s [options] signer version') parser.add_argument('-c', '--commit', action='store_true', dest='commit', help='Indicate that the version argument is for a commit or branch') parser.add_argument('-p', '--pull', action='store_true', dest='pull', help='Indicate that the version argument is the number of a github repository pull request') parser.add_argument('-u', '--url', dest='url', default='https://github.com/Bitcoin-ABC/bitcoin-abc.git', help='Specify the URL of the repository. Default is %(default)s') parser.add_argument('-v', '--verify', action='store_true', dest='verify', help='Verify the Gitian build') parser.add_argument('-b', '--build', action='store_true', dest='build', help='Do a Gitian build') parser.add_argument('-s', '--sign', action='store_true', dest='sign', help='Make signed binaries for Windows and MacOS') parser.add_argument('-B', '--buildsign', action='store_true', dest='buildsign', help='Build both signed and unsigned binaries') parser.add_argument('-o', '--os', dest='os', default='lwm', help='Specify which Operating Systems the build is for. Default is %(default)s. 
l for Linux, w for Windows, m for MacOS') parser.add_argument('-j', '--jobs', dest='jobs', default=str(num_cpus), help='Number of processes to use. Default %(default)s') parser.add_argument('-m', '--memory', dest='memory', default='3500', help='Memory to allocate in MiB. Default %(default)s') parser.add_argument('-k', '--kvm', action='store_true', dest='kvm', help='Use KVM instead of LXC') parser.add_argument('-d', '--docker', action='store_true', dest='docker', help='Use Docker instead of LXC') parser.add_argument('-S', '--setup', action='store_true', dest='setup', help='Set up the Gitian building environment. Uses LXC. If you want to use KVM, use the --kvm option. Only works on Debian-based systems (Ubuntu, Debian)') parser.add_argument('-D', '--detach-sign', action='store_true', dest='detach_sign', help='Create the assert file for detached signing. Will not commit anything.') parser.add_argument('-n', '--no-commit', action='store_false', dest='commit_files', help='Do not commit anything to git') parser.add_argument( 'signer', help='GPG signer to sign each build assert file') parser.add_argument( 'version', help='Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified') args = parser.parse_args() workdir = os.getcwd() args.linux = 'l' in args.os args.windows = 'w' in args.os args.macos = 'm' in args.os args.is_bionic = b'bionic' in subprocess.check_output( ['lsb_release', '-cs']) if args.buildsign: args.build = True args.sign = True if args.kvm and args.docker: raise Exception('Error: cannot have both kvm and docker') args.sign_prog = 'true' if args.detach_sign else 'gpg --detach-sign' # Set environment variable USE_LXC or USE_DOCKER, let gitian-builder know that we use lxc or docker if args.docker: os.environ['USE_DOCKER'] = '1' elif not args.kvm: os.environ['USE_LXC'] = '1' if not 'GITIAN_HOST_IP' in os.environ.keys(): os.environ['GITIAN_HOST_IP'] = '10.0.3.1' if not 'LXC_GUEST_IP' in os.environ.keys(): os.environ['LXC_GUEST_IP'] = '10.0.3.5' # Disable for MacOS if no SDK found if args.macos and not os.path.isfile('gitian-builder/inputs/MacOSX10.11.sdk.tar.gz'): print('Cannot build for MacOS, SDK does not exist. 
Will build for other OSes') args.macos = False script_name = os.path.basename(sys.argv[0]) # Signer and version shouldn't be empty if args.signer == '': - print(script_name+': Missing signer.') - print('Try '+script_name+' --help for more information') + print(script_name + ': Missing signer.') + print('Try ' + script_name + ' --help for more information') exit(1) if args.version == '': - print(script_name+': Missing version.') - print('Try '+script_name+' --help for more information') + print(script_name + ': Missing version.') + print('Try ' + script_name + ' --help for more information') exit(1) # Add leading 'v' for tags if args.commit and args.pull: raise Exception('Cannot have both commit and pull') args.commit = ('' if args.commit else 'v') + args.version if args.setup: setup() os.chdir('bitcoin-abc') if args.pull: subprocess.check_call( - ['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge']) + ['git', 'fetch', args.url, 'refs/pull/' + args.version + '/merge']) os.chdir('../gitian-builder/inputs/bitcoin') subprocess.check_call( - ['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge']) + ['git', 'fetch', args.url, 'refs/pull/' + args.version + '/merge']) args.commit = subprocess.check_output( ['git', 'show', '-s', '--format=%H', 'FETCH_HEAD'], universal_newlines=True, encoding='utf8').strip() args.version = 'pull-' + args.version print(args.commit) subprocess.check_call(['git', 'fetch']) subprocess.check_call(['git', 'checkout', args.commit]) os.chdir(workdir) if args.build: build() if args.sign: sign() if args.verify: verify() if __name__ == '__main__': main() diff --git a/contrib/testgen/gen_base58_test_vectors.py b/contrib/testgen/gen_base58_test_vectors.py index fe8dcb2ae6..46f4b6a235 100755 --- a/contrib/testgen/gen_base58_test_vectors.py +++ b/contrib/testgen/gen_base58_test_vectors.py @@ -1,139 +1,139 @@ #!/usr/bin/env python3 # Copyright (c) 2012-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Generate valid and invalid base58 address and private key test vectors. Usage: gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json ''' # 2012 Wladimir J. 
van der Laan # Released under MIT License import os from itertools import islice from base58 import b58encode_chk, b58decode_chk, b58chars import random from binascii import b2a_hex # key types PUBKEY_ADDRESS = 0 SCRIPT_ADDRESS = 5 PUBKEY_ADDRESS_TEST = 111 SCRIPT_ADDRESS_TEST = 196 PRIVKEY = 128 PRIVKEY_TEST = 239 metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed'] # templates for valid sequences templates = [ # prefix, payload_size, suffix, metadata # None = N/A - ((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)), - ((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)), - ((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)), - ((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)), - ((PRIVKEY,), 32, (), (True, False, None, False)), - ((PRIVKEY,), 32, (1,), (True, False, None, True)), - ((PRIVKEY_TEST,), 32, (), (True, True, None, False)), - ((PRIVKEY_TEST,), 32, (1,), (True, True, None, True)) + ((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)), + ((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)), + ((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)), + ((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)), + ((PRIVKEY,), 32, (), (True, False, None, False)), + ((PRIVKEY,), 32, (1,), (True, False, None, True)), + ((PRIVKEY_TEST,), 32, (), (True, True, None, False)), + ((PRIVKEY_TEST,), 32, (1,), (True, True, None, True)) ] def is_valid(v): '''Check vector v for validity''' result = b58decode_chk(v) if result is None: return False for template in templates: prefix = bytearray(template[0]) suffix = bytearray(template[2]) if result.startswith(prefix) and result.endswith(suffix): if (len(result) - len(prefix) - len(suffix)) == template[1]: return True return False def gen_valid_vectors(): '''Generate valid test vectors''' while True: for template in templates: prefix = bytearray(template[0]) payload = bytearray(os.urandom(template[1])) suffix = bytearray(template[2]) rv = b58encode_chk(prefix + payload + suffix) assert is_valid(rv) metadata = {x: y for x, y in zip( metadata_keys, template[3]) if y is not None} hexrepr = b2a_hex(payload) if isinstance(hexrepr, bytes): hexrepr = hexrepr.decode('utf8') yield (rv, hexrepr, metadata) def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix): '''Generate possibly invalid vector''' if corrupt_prefix: prefix = os.urandom(1) else: prefix = bytearray(template[0]) if randomize_payload_size: payload = os.urandom(max(int(random.expovariate(0.5)), 50)) else: payload = os.urandom(template[1]) if corrupt_suffix: suffix = os.urandom(len(template[2])) else: suffix = bytearray(template[2]) return b58encode_chk(prefix + payload + suffix) def randbool(p=0.5): '''Return True with P(p)''' return random.random() < p def gen_invalid_vectors(): '''Generate invalid test vectors''' # start with some manual edge-cases yield "", yield "x", while True: # kinds of invalid vectors: # invalid prefix # invalid payload length # invalid (randomized) suffix (add random data) # corrupt checksum for template in templates: val = gen_invalid_vector(template, randbool( 0.2), randbool(0.2), randbool(0.2)) if random.randint(0, 10) < 1: # line corruption if randbool(): # add random character to end val += random.choice(b58chars) else: # replace random character in the middle n = random.randint(0, len(val)) val = val[0:n] + random.choice(b58chars) + val[n + 1:] if not is_valid(val): yield val, if __name__ == '__main__': import sys import json iters = {'valid': 
gen_valid_vectors, 'invalid': gen_invalid_vectors} try: uiter = iters[sys.argv[1]] except IndexError: uiter = gen_valid_vectors try: count = int(sys.argv[2]) except IndexError: count = 0 data = list(islice(uiter(), count)) json.dump(data, sys.stdout, sort_keys=True, indent=4) sys.stdout.write('\n') diff --git a/test/functional/abc-mempool-coherence-on-activations.py b/test/functional/abc-mempool-coherence-on-activations.py index 0a3db51fc4..e0711162f1 100755 --- a/test/functional/abc-mempool-coherence-on-activations.py +++ b/test/functional/abc-mempool-coherence-on-activations.py @@ -1,373 +1,373 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Copyright (c) 2017 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ This test checks the mempool coherence when changing validation rulesets, which happens on (de)activations of network upgrades (forks). We test the mempool coherence in 3 cases: 1) on activations, pre-fork-only transactions are evicted from the mempool, while always-valid transactions remain. 2) on deactivations, post-fork-only transactions (unconfirmed or once confirmed) are evicted from the mempool, while always-valid transactions are reincluded. 3) on a reorg to a chain that deactivates and reactivates the fork, post-fork-only and always-valid transactions (unconfirmed and/or once confirmed on the shorter chain) are kept or reincluded in the mempool. """ from test_framework.blocktools import ( create_block, create_coinbase, create_transaction, make_conform_to_ctor, ) from test_framework.key import CECKey from test_framework.messages import ( COIN, COutPoint, CTransaction, CTxIn, CTxOut, ToHex, ) from test_framework.mininode import P2PDataStore from test_framework.script import ( CScript, OP_CHECKSIG, OP_TRUE, SIGHASH_ALL, SIGHASH_FORKID, SignatureHashForkId, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error # ---Code specific to the activation used for this test--- # It might change depending on the activation code currently existing in the # client software. We use the replay protection activation for this test. ACTIVATION_TIME = 2000000000 EXTRA_ARG = "-replayprotectionactivationtime={}".format(ACTIVATION_TIME) # simulation starts before activation FIRST_BLOCK_TIME = ACTIVATION_TIME - 86400 # Expected RPC error when trying to send an activation specific spend txn. RPC_EXPECTED_ERROR = "mandatory-script-verify-flag-failed (Signature must be zero for failed CHECK(MULTI)SIG operation) (code 16)" def create_fund_and_activation_specific_spending_tx(spend, pre_fork_only): # Creates 2 transactions: # 1) txfund: create outputs to be used by txspend. Must be valid pre-fork. # 2) txspend: spending transaction that is specific to the activation # being used and can be pre-fork-only or post-fork-only, depending on the # function parameter. # This specific implementation uses the replay protection mechanism to # create transactions that are only valid before or after the fork. 
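# To make the mechanism concrete (using the values from the code below):
# the fork value is packed into the high bits of the sighash type,
#   sighashtype = (forkvalue << 8) | SIGHASH_ALL | SIGHASH_FORKID
# so forkvalue = 0 gives sighashtype 0x41, which only signature-checks
# while replay protection is inactive (pre-fork), whereas forkvalue =
# 0xffdead gives sighashtype 0xffdead41, which only validates after
# activation.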
# Generate a key pair to test private_key = CECKey() private_key.set_secretbytes(b"replayprotection") public_key = private_key.get_pubkey() # Fund transaction script = CScript([public_key, OP_CHECKSIG]) txfund = create_transaction( spend.tx, spend.n, b'', 50 * COIN, script) txfund.rehash() # Activation specific spending tx txspend = CTransaction() txspend.vout.append(CTxOut(50 * COIN - 1000, CScript([OP_TRUE]))) txspend.vin.append(CTxIn(COutPoint(txfund.sha256, 0), b'')) # Sign the transaction # Use forkvalues that create pre-fork-only or post-fork-only # transactions. forkvalue = 0 if pre_fork_only else 0xffdead sighashtype = (forkvalue << 8) | SIGHASH_ALL | SIGHASH_FORKID sighash = SignatureHashForkId( script, txspend, 0, sighashtype, 50 * COIN) sig = private_key.sign(sighash) + \ bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID])) txspend.vin[0].scriptSig = CScript([sig]) txspend.rehash() return txfund, txspend def create_fund_and_pre_fork_only_tx(spend): return create_fund_and_activation_specific_spending_tx(spend, pre_fork_only=True) def create_fund_and_post_fork_only_tx(spend): return create_fund_and_activation_specific_spending_tx(spend, pre_fork_only=False) # ---Mempool coherence on activations test--- class PreviousSpendableOutput(object): def __init__(self, tx=CTransaction(), n=-1): self.tx = tx self.n = n class MempoolCoherenceOnActivationsTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.block_heights = {} self.tip = None self.blocks = {} self.extra_args = [['-whitelist=127.0.0.1', EXTRA_ARG]] def next_block(self, number): if self.tip is None: base_block_hash = self.genesis_hash block_time = FIRST_BLOCK_TIME else: base_block_hash = self.tip.sha256 block_time = self.tip.nTime + 1 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height) coinbase.rehash() block = create_block(base_block_hash, coinbase, block_time) # Do PoW, which is cheap on regtest block.solve() self.tip = block self.block_heights[block.sha256] = height assert number not in self.blocks self.blocks[number] = block return block def run_test(self): node = self.nodes[0] node.add_p2p_connection(P2PDataStore()) node.setmocktime(ACTIVATION_TIME) self.genesis_hash = int(node.getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] # save the current tip so it can be spent by a later block def save_spendable_output(): spendable_outputs.append(self.tip) # get an output that we previously marked as spendable def get_spendable_output(): return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) # move the tip back to a previous block def tip(number): self.tip = self.blocks[number] # adds transactions to the block and updates state def update_block(block_number, new_transactions): block = self.blocks[block_number] block.vtx.extend(new_transactions) old_sha256 = block.sha256 make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Update the internal state just like in next_block self.tip = block if block.sha256 != old_sha256: self.block_heights[ block.sha256] = self.block_heights[old_sha256] del self.block_heights[old_sha256] self.blocks[block_number] = block return block # send a txn to the mempool and check it was accepted def send_transaction_to_mempool(tx): tx_id = node.sendrawtransaction(ToHex(tx)) assert tx_id in node.getrawmempool() # checks the mempool has exactly the same txns as in the provided list def check_mempool_equal(txns):
assert set(node.getrawmempool()) == set(tx.hash for tx in txns) # Create an always-valid chained transaction. It spends a # scriptPubKey=OP_TRUE coin into another. Returns the transaction and its # spendable output for further chaining. def create_always_valid_chained_tx(spend): tx = create_transaction( spend.tx, spend.n, b'', spend.tx.vout[0].nValue - 1000, CScript([OP_TRUE])) tx.rehash() return tx, PreviousSpendableOutput(tx, 0) # shorthand block = self.next_block # Create a new block block(0) save_spendable_output() node.p2p.send_blocks_and_test([self.tip], node) # Now we need that block to mature so we can spend the coinbase. maturity_blocks = [] for i in range(110): block(5000 + i) maturity_blocks.append(self.tip) save_spendable_output() node.p2p.send_blocks_and_test(maturity_blocks, node) # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(100): out.append(get_spendable_output()) # Create 2 pre-fork-only txns (tx_pre0, tx_pre1). Fund txns are valid # pre-fork, so we can mine them right away. txfund0, tx_pre0 = create_fund_and_pre_fork_only_tx(out[0]) txfund1, tx_pre1 = create_fund_and_pre_fork_only_tx(out[1]) # Create 2 post-fork-only txns (tx_post0, tx_post1). Fund txns are # valid pre-fork, so we can mine them right away. txfund2, tx_post0 = create_fund_and_post_fork_only_tx(out[2]) txfund3, tx_post1 = create_fund_and_post_fork_only_tx(out[3]) # Create blocks to activate the fork. Mine all funding transactions. bfork = block(5555) bfork.nTime = ACTIVATION_TIME - 1 update_block(5555, [txfund0, txfund1, txfund2, txfund3]) node.p2p.send_blocks_and_test([self.tip], node) for i in range(5): node.p2p.send_blocks_and_test([block(5200 + i)], node) # Check we are just before the activation time assert_equal(node.getblockheader(node.getbestblockhash())['mediantime'], ACTIVATION_TIME - 1) # We are just before the fork. Pre-fork-only and always-valid chained # txns (tx_chain0, tx_chain1) are valid, post-fork-only txns are # rejected. send_transaction_to_mempool(tx_pre0) send_transaction_to_mempool(tx_pre1) tx_chain0, last_chained_output = create_always_valid_chained_tx(out[4]) tx_chain1, last_chained_output = create_always_valid_chained_tx( last_chained_output) send_transaction_to_mempool(tx_chain0) send_transaction_to_mempool(tx_chain1) assert_raises_rpc_error(-26, RPC_EXPECTED_ERROR, node.sendrawtransaction, ToHex(tx_post0)) assert_raises_rpc_error(-26, RPC_EXPECTED_ERROR, node.sendrawtransaction, ToHex(tx_post1)) check_mempool_equal([tx_chain0, tx_chain1, tx_pre0, tx_pre1]) # Activate the fork. Mine the 1st always-valid chained txn and a # pre-fork-only txn. block(5556) update_block(5556, [tx_chain0, tx_pre0]) node.p2p.send_blocks_and_test([self.tip], node) forkblockid = node.getbestblockhash() # Check we just activated the fork assert_equal(node.getblockheader(forkblockid)['mediantime'], ACTIVATION_TIME) # Check mempool coherence when activating the fork. Pre-fork-only txns # were evicted from the mempool, while always-valid txns remain. # Evicted: tx_pre1 check_mempool_equal([tx_chain1]) # Post-fork-only and always-valid txns are accepted, pre-fork-only txns # are rejected.
send_transaction_to_mempool(tx_post0) send_transaction_to_mempool(tx_post1) tx_chain2, _ = create_always_valid_chained_tx(last_chained_output) send_transaction_to_mempool(tx_chain2) assert_raises_rpc_error(-26, RPC_EXPECTED_ERROR, node.sendrawtransaction, ToHex(tx_pre1)) check_mempool_equal([tx_chain1, tx_chain2, tx_post0, tx_post1]) # Mine the 2nd always-valid chained txn and a post-fork-only txn. block(5557) update_block(5557, [tx_chain1, tx_post0]) node.p2p.send_blocks_and_test([self.tip], node) postforkblockid = node.getbestblockhash() # The mempool contains the 3rd chained txn and a post-fork-only txn. check_mempool_equal([tx_chain2, tx_post1]) # In the following we will be testing block disconnections and reorgs. # - tx_chain2 will always be retained in the mempool since it is always # valid. Its continued presence shows that we are never simply # clearing the entire mempool. # - tx_post1 may be evicted from mempool if we land before the fork. # - tx_post0 is in a block and if 'de-mined', it will either be evicted # or end up in mempool depending on whether we land before/after the fork. # - tx_pre0 is in a block and if 'de-mined', it will either be evicted # or end up in mempool depending on whether we land after/before the fork. # First we do a disconnection of the post-fork block, which is a # normal disconnection that merely returns the block contents into # the mempool -- nothing is lost. node.invalidateblock(postforkblockid) # In old mempool: tx_chain2, tx_post1 # Recovered from blocks: tx_chain1 and tx_post0. # Lost from blocks: NONE # Retained from old mempool: tx_chain2, tx_post1 # Evicted from old mempool: NONE check_mempool_equal([tx_chain1, tx_chain2, tx_post0, tx_post1]) # Now, disconnect the fork block. This is a special disconnection # that requires reprocessing the mempool due to change in rules. node.invalidateblock(forkblockid) # In old mempool: tx_chain1, tx_chain2, tx_post0, tx_post1 # Recovered from blocks: tx_chain0, tx_pre0 # Lost from blocks: NONE # Retained from old mempool: tx_chain1, tx_chain2 # Evicted from old mempool: tx_post0, tx_post1 check_mempool_equal([tx_chain0, tx_chain1, tx_chain2, tx_pre0]) # Restore state node.reconsiderblock(postforkblockid) node.reconsiderblock(forkblockid) send_transaction_to_mempool(tx_post1) check_mempool_equal([tx_chain2, tx_post1]) # Test a reorg that crosses the fork. # If such a reorg happens, most likely it will both start *and end* # after the fork. We will test such a case here and make sure that # post-fork-only transactions are not unnecessarily discarded from # the mempool in such a reorg. Pre-fork-only transactions however can # get lost. # Set up a longer competing chain that doesn't confirm any of our txns. # This starts after 5204, so it contains neither the forkblockid nor # the postforkblockid from above.
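# (For orientation: past block 5204 the active chain consists of only two
# blocks, forkblockid (5556) and postforkblockid (5557), so the three
# competing blocks 5900-5902 built below suffice to win the reorg.)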
tip(5204) reorg_blocks = [] for i in range(3): reorg_blocks.append(block(5900 + i)) # Perform the reorg node.p2p.send_blocks_and_test(reorg_blocks, node) # reorg finishes after the fork assert_equal(node.getblockheader(node.getbestblockhash())['mediantime'], - ACTIVATION_TIME+2) + ACTIVATION_TIME + 2) # In old mempool: tx_chain2, tx_post1 # Recovered from blocks: tx_chain0, tx_chain1, tx_post0 # Lost from blocks: tx_pre0 # Retained from old mempool: tx_chain2, tx_post1 # Evicted from old mempool: NONE check_mempool_equal( [tx_chain0, tx_chain1, tx_chain2, tx_post0, tx_post1]) if __name__ == '__main__': MempoolCoherenceOnActivationsTest().main() diff --git a/test/functional/abc-minimaldata-activation.py b/test/functional/abc-minimaldata-activation.py index 8c4cde4620..7f92885e72 100755 --- a/test/functional/abc-minimaldata-activation.py +++ b/test/functional/abc-minimaldata-activation.py @@ -1,253 +1,253 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ This tests the activation of the MINIMALDATA rule to consensus (from standard). - test rejection in mempool, with error changing before/after activation. - test acceptance in blocks before activation, and rejection after. - check non-banning for peers who send invalid txns that would have been valid on the other side of the upgrade. Derived from abc-schnorr.py """ from test_framework.blocktools import ( create_block, create_coinbase, create_transaction, make_conform_to_ctor, ) from test_framework.messages import ( CBlock, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex, ) from test_framework.mininode import ( P2PDataStore, ) from test_framework.script import ( CScript, OP_ADD, OP_TRUE, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_tx from test_framework.util import assert_equal, assert_raises_rpc_error # the upgrade activation time, which we artificially set far into the future GRAVITON_START_TIME = 2000000000 # If we don't do this, autoreplay protection will activate before graviton and # all our sigs will mysteriously fail. REPLAY_PROTECTION_START_TIME = GRAVITON_START_TIME * 2 # Both before and after the upgrade, minimal push violations in mempool are # rejected with a bannable error. MINIMALPUSH_ERROR = 'mandatory-script-verify-flag-failed (Data push larger than necessary)' # Blocks with invalid scripts give this error: BADINPUTS_ERROR = 'blk-bad-inputs' class SchnorrTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.block_heights = {} self.extra_args = [["-gravitonactivationtime={}".format( GRAVITON_START_TIME), "-replayprotectionactivationtime={}".format( REPLAY_PROTECTION_START_TIME)]] def bootstrap_p2p(self, *, num_connections=1): """Add a P2P connection to the node. Helper to connect and wait for version handshake.""" for _ in range(num_connections): self.nodes[0].add_p2p_connection(P2PDataStore()) def reconnect_p2p(self, **kwargs): """Tear down and bootstrap the P2P connection to the node. The node gets disconnected several times in this test. This helper method reconnects the p2p and restarts the network thread.""" self.nodes[0].disconnect_p2ps() self.bootstrap_p2p(**kwargs) def getbestblock(self, node): """Get the best block.
Register its height so we can use build_block.""" block_height = node.getblockcount() blockhash = node.getblockhash(block_height) block = FromHex(CBlock(), node.getblock(blockhash, 0)) block.calc_sha256() self.block_heights[block.sha256] = block_height return block def build_block(self, parent, transactions=(), nTime=None): """Make a new block with an OP_1 coinbase output. Requires parent to have its height registered.""" parent.calc_sha256() block_height = self.block_heights[parent.sha256] + 1 block_time = (parent.nTime + 1) if nTime is None else nTime block = create_block( parent.sha256, create_coinbase(block_height), block_time) block.vtx.extend(transactions) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.block_heights[block.sha256] = block_height return block def check_for_ban_on_rejected_tx(self, tx, reject_reason=None): """Check we are disconnected when sending a txn that the node rejects. (Can't actually get banned, since bitcoind won't ban local peers.)""" self.nodes[0].p2p.send_txs_and_test( [tx], self.nodes[0], success=False, expect_disconnect=True, reject_reason=reject_reason) self.reconnect_p2p() def check_for_no_ban_on_rejected_tx(self, tx, reject_reason): """Check we are not disconnected when sending a txn that the node rejects.""" self.nodes[0].p2p.send_txs_and_test( [tx], self.nodes[0], success=False, reject_reason=reject_reason) def check_for_ban_on_rejected_block(self, block, reject_reason=None): """Check we are disconnected when sending a block that the node rejects. (Can't actually get banned, since bitcoind won't ban local peers.)""" self.nodes[0].p2p.send_blocks_and_test( [block], self.nodes[0], success=False, reject_reason=reject_reason, expect_disconnect=True) self.reconnect_p2p() def run_test(self): node, = self.nodes self.bootstrap_p2p() tip = self.getbestblock(node) self.log.info("Create some blocks with OP_1 coinbase for spending.") blocks = [] for _ in range(10): tip = self.build_block(tip) blocks.append(tip) node.p2p.send_blocks_and_test(blocks, node, success=True) spendable_outputs = [block.vtx[0] for block in blocks] self.log.info("Mature the blocks and get out of IBD.") node.generate(100) tip = self.getbestblock(node) self.log.info("Setting up spends to test and mining the fundings.") fundings = [] def create_fund_and_spend_tx(): spendfrom = spendable_outputs.pop() script = CScript([OP_ADD]) value = spendfrom.vout[0].nValue # Fund transaction txfund = create_transaction(spendfrom, 0, b'', value, script) txfund.rehash() fundings.append(txfund) # Spend transaction txspend = CTransaction() txspend.vout.append( - CTxOut(value-1000, CScript([OP_TRUE]))) + CTxOut(value - 1000, CScript([OP_TRUE]))) txspend.vin.append( CTxIn(COutPoint(txfund.sha256, 0), b'')) # Sign the transaction txspend.vin[0].scriptSig = CScript( b'\x01\x01\x51') # PUSH1(0x01) OP_1 pad_tx(txspend) txspend.rehash() return txspend # make a few of these, which are nonstandard before upgrade and invalid after. 
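# (The violation itself: scriptSig b'\x01\x01\x51' encodes PUSH1(0x01)
# followed by OP_1. Pushing the single byte 0x01 through an explicit
# length-prefixed push is non-minimal -- OP_1 is the minimal encoding --
# which is precisely what the MINIMALDATA rule rejects.)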
nonminimaltx = create_fund_and_spend_tx() nonminimaltx_2 = create_fund_and_spend_tx() nonminimaltx_3 = create_fund_and_spend_tx() tip = self.build_block(tip, fundings) node.p2p.send_blocks_and_test([tip], node) self.log.info("Start preupgrade tests") self.log.info("Sending rejected transactions via RPC") assert_raises_rpc_error(-26, MINIMALPUSH_ERROR, node.sendrawtransaction, ToHex(nonminimaltx)) assert_raises_rpc_error(-26, MINIMALPUSH_ERROR, node.sendrawtransaction, ToHex(nonminimaltx_2)) assert_raises_rpc_error(-26, MINIMALPUSH_ERROR, node.sendrawtransaction, ToHex(nonminimaltx_3)) self.log.info( "Sending rejected transactions via net (banning)") self.check_for_ban_on_rejected_tx( nonminimaltx, MINIMALPUSH_ERROR) self.check_for_ban_on_rejected_tx( nonminimaltx_2, MINIMALPUSH_ERROR) self.check_for_ban_on_rejected_tx( nonminimaltx_3, MINIMALPUSH_ERROR) assert_equal(node.getrawmempool(), []) self.log.info("Successfully mine nonstandard transaction") tip = self.build_block(tip, [nonminimaltx]) node.p2p.send_blocks_and_test([tip], node) # Activation tests self.log.info("Approach to just before upgrade activation") # Move our clock to the upgrade time so we will accept such future-timestamped blocks. node.setmocktime(GRAVITON_START_TIME) # Mine six blocks with timestamp starting at GRAVITON_START_TIME-1 blocks = [] for i in range(-1, 5): tip = self.build_block(tip, nTime=GRAVITON_START_TIME + i) blocks.append(tip) node.p2p.send_blocks_and_test(blocks, node) assert_equal(node.getblockchaininfo()[ 'mediantime'], GRAVITON_START_TIME - 1) self.log.info( "Mine the activation block itself, including a minimaldata violation at the last possible moment") tip = self.build_block(tip, [nonminimaltx_2]) node.p2p.send_blocks_and_test([tip], node) self.log.info("We have activated!") assert_equal(node.getblockchaininfo()[ 'mediantime'], GRAVITON_START_TIME) self.log.info( "Trying to mine a minimaldata violation, but we are just barely too late") self.check_for_ban_on_rejected_block( self.build_block(tip, [nonminimaltx_3]), BADINPUTS_ERROR) self.log.info( "If we try to submit it by mempool or RPC, we still aren't banned") assert_raises_rpc_error(-26, MINIMALPUSH_ERROR, node.sendrawtransaction, ToHex(nonminimaltx_3)) self.check_for_ban_on_rejected_tx( nonminimaltx_3, MINIMALPUSH_ERROR) self.log.info("Mine a normal block") tip = self.build_block(tip) node.p2p.send_blocks_and_test([tip], node) if __name__ == '__main__': SchnorrTest().main() diff --git a/test/functional/abc-schnorr.py b/test/functional/abc-schnorr.py index 79c848a74e..f4fde2e950 100755 --- a/test/functional/abc-schnorr.py +++ b/test/functional/abc-schnorr.py @@ -1,244 +1,244 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ This tests the treatment of Schnorr transaction signatures: - acceptance both in mempool and blocks. - check banning for peers who send txns with 64 byte ECDSA DER sigs. Derived from a variety of functional tests.
""" from test_framework.blocktools import ( create_block, create_coinbase, create_transaction, make_conform_to_ctor, ) from test_framework.key import CECKey from test_framework.messages import ( CBlock, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex, ) from test_framework.mininode import ( P2PDataStore, ) from test_framework import schnorr from test_framework.script import ( CScript, OP_1, OP_CHECKMULTISIG, OP_CHECKSIG, OP_TRUE, SIGHASH_ALL, SIGHASH_FORKID, SignatureHashForkId, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_raises_rpc_error # A mandatory (bannable) error occurs when people pass Schnorr signatures into OP_CHECKMULTISIG. SCHNORR_MULTISIG_ERROR = 'mandatory-script-verify-flag-failed (Signature cannot be 65 bytes in CHECKMULTISIG)' # A mandatory (bannable) error occurs when people send invalid Schnorr sigs into OP_CHECKSIG. NULLFAIL_ERROR = 'mandatory-script-verify-flag-failed (Signature must be zero for failed CHECK(MULTI)SIG operation)' # Blocks with invalid scripts give this error: BADINPUTS_ERROR = 'blk-bad-inputs' # This 64-byte signature is used to test exclusion & banning according to # the above error messages. # Tests of real 64 byte ECDSA signatures can be found in script_tests. -sig64 = b'\0'*64 +sig64 = b'\0' * 64 class SchnorrTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.block_heights = {} def bootstrap_p2p(self, *, num_connections=1): """Add a P2P connection to the node. Helper to connect and wait for version handshake.""" for _ in range(num_connections): self.nodes[0].add_p2p_connection(P2PDataStore()) def reconnect_p2p(self, **kwargs): """Tear down and bootstrap the P2P connection to the node. The node gets disconnected several times in this test. This helper method reconnects the p2p and restarts the network thread.""" self.nodes[0].disconnect_p2ps() self.bootstrap_p2p(**kwargs) def getbestblock(self, node): """Get the best block. Register its height so we can use build_block.""" block_height = node.getblockcount() blockhash = node.getblockhash(block_height) block = FromHex(CBlock(), node.getblock(blockhash, 0)) block.calc_sha256() self.block_heights[block.sha256] = block_height return block def build_block(self, parent, transactions=(), nTime=None): """Make a new block with an OP_1 coinbase output. Requires parent to have its height registered.""" parent.calc_sha256() block_height = self.block_heights[parent.sha256] + 1 block_time = (parent.nTime + 1) if nTime is None else nTime block = create_block( parent.sha256, create_coinbase(block_height), block_time) block.vtx.extend(transactions) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.block_heights[block.sha256] = block_height return block def check_for_ban_on_rejected_tx(self, tx, reject_reason=None): """Check we are disconnected when sending a txn that the node rejects. (Can't actually get banned, since bitcoind won't ban local peers.)""" self.nodes[0].p2p.send_txs_and_test( [tx], self.nodes[0], success=False, reject_reason=reject_reason, expect_disconnect=True) self.reconnect_p2p() def check_for_ban_on_rejected_block(self, block, reject_reason=None): """Check we are disconnected when sending a block that the node rejects. 
(Can't actually get banned, since bitcoind won't ban local peers.)""" self.nodes[0].p2p.send_blocks_and_test( [block], self.nodes[0], success=False, reject_reason=reject_reason, expect_disconnect=True) self.reconnect_p2p() def run_test(self): node, = self.nodes self.bootstrap_p2p() tip = self.getbestblock(node) self.log.info("Create some blocks with OP_1 coinbase for spending.") blocks = [] for _ in range(10): tip = self.build_block(tip) blocks.append(tip) node.p2p.send_blocks_and_test(blocks, node, success=True) spendable_outputs = [block.vtx[0] for block in blocks] self.log.info("Mature the blocks and get out of IBD.") node.generate(100) tip = self.getbestblock(node) self.log.info("Setting up spends to test and mining the fundings.") fundings = [] # Generate a key pair privkeybytes = b"Schnorr!" * 4 private_key = CECKey() private_key.set_secretbytes(privkeybytes) # get uncompressed public key serialization public_key = private_key.get_pubkey() def create_fund_and_spend_tx(multi=False, sig='schnorr'): spendfrom = spendable_outputs.pop() if multi: script = CScript([OP_1, public_key, OP_1, OP_CHECKMULTISIG]) else: script = CScript([public_key, OP_CHECKSIG]) value = spendfrom.vout[0].nValue # Fund transaction txfund = create_transaction(spendfrom, 0, b'', value, script) txfund.rehash() fundings.append(txfund) # Spend transaction txspend = CTransaction() txspend.vout.append( - CTxOut(value-1000, CScript([OP_TRUE]))) + CTxOut(value - 1000, CScript([OP_TRUE]))) txspend.vin.append( CTxIn(COutPoint(txfund.sha256, 0), b'')) # Sign the transaction sighashtype = SIGHASH_ALL | SIGHASH_FORKID hashbyte = bytes([sighashtype & 0xff]) sighash = SignatureHashForkId( script, txspend, 0, sighashtype, value) if sig == 'schnorr': txsig = schnorr.sign(privkeybytes, sighash) + hashbyte elif sig == 'ecdsa': txsig = private_key.sign(sighash) + hashbyte elif isinstance(sig, bytes): txsig = sig + hashbyte if multi: txspend.vin[0].scriptSig = CScript([b'', txsig]) else: txspend.vin[0].scriptSig = CScript([txsig]) txspend.rehash() return txspend schnorrchecksigtx = create_fund_and_spend_tx() schnorrmultisigtx = create_fund_and_spend_tx(multi=True) ecdsachecksigtx = create_fund_and_spend_tx(sig='ecdsa') sig64checksigtx = create_fund_and_spend_tx(sig=sig64) sig64multisigtx = create_fund_and_spend_tx(multi=True, sig=sig64) tip = self.build_block(tip, fundings) node.p2p.send_blocks_and_test([tip], node) self.log.info("Typical ECDSA and Schnorr CHECKSIG are valid.") node.p2p.send_txs_and_test([schnorrchecksigtx, ecdsachecksigtx], node) # They get mined as usual. node.generate(1) tip = self.getbestblock(node) # Make sure they are in the block, and mempool is now empty. txhashes = set([schnorrchecksigtx.hash, ecdsachecksigtx.hash]) assert txhashes.issubset(tx.rehash() for tx in tip.vtx) assert not node.getrawmempool() self.log.info("Schnorr in multisig is rejected with mandatory error.") assert_raises_rpc_error(-26, SCHNORR_MULTISIG_ERROR, node.sendrawtransaction, ToHex(schnorrmultisigtx)) # And it is banworthy. self.check_for_ban_on_rejected_tx( schnorrmultisigtx, SCHNORR_MULTISIG_ERROR) # And it can't be mined self.check_for_ban_on_rejected_block( self.build_block(tip, [schnorrmultisigtx]), BADINPUTS_ERROR) self.log.info("Bad 64-byte sig is rejected with mandatory error.") # In CHECKSIG it's invalid Schnorr and hence NULLFAIL. assert_raises_rpc_error(-26, NULLFAIL_ERROR, node.sendrawtransaction, ToHex(sig64checksigtx)) # In CHECKMULTISIG it's invalid length and hence BAD_LENGTH. 
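# Illustrative aside on the length rule behind the two errors above: a
# Schnorr signature is always 64 bytes, so with the sighash byte appended the
# scriptSig element is exactly 65 bytes. Typical DER-encoded ECDSA sigs run
# 71-73 bytes including the sighash byte, so the 65-byte size is reserved for
# Schnorr in CHECKSIG and refused outright in legacy CHECKMULTISIG.
example_hashbyte = bytes([SIGHASH_ALL | SIGHASH_FORKID])
example_schnorr_sig = b'\x00' * 64 + example_hashbyte  # same shape as sig64 + hashbyte
assert len(example_schnorr_sig) == 65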
assert_raises_rpc_error(-26, SCHNORR_MULTISIG_ERROR, node.sendrawtransaction, ToHex(sig64multisigtx)) # Sending these transactions is banworthy. self.check_for_ban_on_rejected_tx(sig64checksigtx, NULLFAIL_ERROR) self.check_for_ban_on_rejected_tx( sig64multisigtx, SCHNORR_MULTISIG_ERROR) # And they can't be mined either... self.check_for_ban_on_rejected_block( self.build_block(tip, [sig64checksigtx]), BADINPUTS_ERROR) self.check_for_ban_on_rejected_block( self.build_block(tip, [sig64multisigtx]), BADINPUTS_ERROR) if __name__ == '__main__': SchnorrTest().main() diff --git a/test/functional/abc-schnorrmultisig-activation.py b/test/functional/abc-schnorrmultisig-activation.py index b85d43b8b1..cc21f131e2 100755 --- a/test/functional/abc-schnorrmultisig-activation.py +++ b/test/functional/abc-schnorrmultisig-activation.py @@ -1,398 +1,398 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ This tests the activation of the upgraded CHECKMULTISIG mode that uses Schnorr transaction signatures and repurposes the dummy element to indicate which signatures are being checked. - acceptance both in mempool and blocks. - check non-banning for peers who send invalid txns that would have been valid on the other side of the upgrade. - check banning of peers for some fully-invalid transactions. Derived from abc-schnorr.py """ from test_framework.blocktools import ( create_block, create_coinbase, create_transaction, make_conform_to_ctor, ) from test_framework.key import CECKey from test_framework.messages import ( CBlock, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex, ) from test_framework.mininode import ( P2PDataStore, ) from test_framework import schnorr from test_framework.script import ( CScript, OP_0, OP_1, OP_CHECKMULTISIG, OP_TRUE, SIGHASH_ALL, SIGHASH_FORKID, SignatureHashForkId, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error # the upgrade activation time, which we artificially set far into the future GRAVITON_START_TIME = 2000000000 # If we don't do this, autoreplay protection will activate before graviton and # all our sigs will mysteriously fail. REPLAY_PROTECTION_START_TIME = GRAVITON_START_TIME * 2 # Before the upgrade, Schnorr checkmultisig is rejected. PREUPGRADE_SCHNORR_MULTISIG_ERROR = 'mandatory-script-verify-flag-failed (Signature cannot be 65 bytes in CHECKMULTISIG)' # After the upgrade, ECDSA checkmultisig with non-null dummy are invalid since # the new mode refuses ECDSA. POSTUPGRADE_ECDSA_NULLDUMMY_ERROR = 'mandatory-script-verify-flag-failed (Only Schnorr signatures allowed in this operation)' # A mandatory (bannable) error occurs when people pass Schnorr signatures into # legacy OP_CHECKMULTISIG; this is the case on both sides of the upgrade. SCHNORR_LEGACY_MULTISIG_ERROR = 'mandatory-script-verify-flag-failed (Signature cannot be 65 bytes in CHECKMULTISIG)' # Blocks with invalid scripts give this error: BADINPUTS_ERROR = 'blk-bad-inputs' # This 64-byte signature is used to test exclusion & banning according to # the above error messages. # Tests of real 64 byte ECDSA signatures can be found in script_tests. 
-sig64 = b'\0'*64 +sig64 = b'\0' * 64 class SchnorrTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.block_heights = {} self.extra_args = [["-gravitonactivationtime={}".format( GRAVITON_START_TIME), "-replayprotectionactivationtime={}".format( REPLAY_PROTECTION_START_TIME)]] def bootstrap_p2p(self, *, num_connections=1): """Add a P2P connection to the node. Helper to connect and wait for version handshake.""" for _ in range(num_connections): self.nodes[0].add_p2p_connection(P2PDataStore()) def reconnect_p2p(self, **kwargs): """Tear down and bootstrap the P2P connection to the node. The node gets disconnected several times in this test. This helper method reconnects the p2p and restarts the network thread.""" self.nodes[0].disconnect_p2ps() self.bootstrap_p2p(**kwargs) def getbestblock(self, node): """Get the best block. Register its height so we can use build_block.""" block_height = node.getblockcount() blockhash = node.getblockhash(block_height) block = FromHex(CBlock(), node.getblock(blockhash, 0)) block.calc_sha256() self.block_heights[block.sha256] = block_height return block def build_block(self, parent, transactions=(), nTime=None): """Make a new block with an OP_1 coinbase output. Requires parent to have its height registered.""" parent.calc_sha256() block_height = self.block_heights[parent.sha256] + 1 block_time = (parent.nTime + 1) if nTime is None else nTime block = create_block( parent.sha256, create_coinbase(block_height), block_time) block.vtx.extend(transactions) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.block_heights[block.sha256] = block_height return block def check_for_ban_on_rejected_tx(self, tx, reject_reason=None): """Check we are disconnected when sending a txn that the node rejects. (Can't actually get banned, since bitcoind won't ban local peers.)""" self.nodes[0].p2p.send_txs_and_test( [tx], self.nodes[0], success=False, expect_disconnect=True, reject_reason=reject_reason) self.reconnect_p2p() def check_for_no_ban_on_rejected_tx(self, tx, reject_reason): """Check we are not disconnected when sending a txn that the node rejects.""" self.nodes[0].p2p.send_txs_and_test( [tx], self.nodes[0], success=False, reject_reason=reject_reason) def check_for_ban_on_rejected_block(self, block, reject_reason=None): """Check we are disconnected when sending a block that the node rejects. (Can't actually get banned, since bitcoind won't ban local peers.)""" self.nodes[0].p2p.send_blocks_and_test( [block], self.nodes[0], success=False, reject_reason=reject_reason, expect_disconnect=True) self.reconnect_p2p() def run_test(self): node, = self.nodes self.bootstrap_p2p() tip = self.getbestblock(node) self.log.info("Create some blocks with OP_1 coinbase for spending.") blocks = [] for _ in range(10): tip = self.build_block(tip) blocks.append(tip) node.p2p.send_blocks_and_test(blocks, node, success=True) spendable_outputs = [block.vtx[0] for block in blocks] self.log.info("Mature the blocks and get out of IBD.") node.generate(100) tip = self.getbestblock(node) self.log.info("Setting up spends to test and mining the fundings.") fundings = [] # Generate a key pair privkeybytes = b"Schnorr!" 
* 4 private_key = CECKey() private_key.set_secretbytes(privkeybytes) # get uncompressed public key serialization public_key = private_key.get_pubkey() def create_fund_and_spend_tx(dummy=OP_0, sigtype='ecdsa'): spendfrom = spendable_outputs.pop() script = CScript([OP_1, public_key, OP_1, OP_CHECKMULTISIG]) value = spendfrom.vout[0].nValue # Fund transaction txfund = create_transaction(spendfrom, 0, b'', value, script) txfund.rehash() fundings.append(txfund) # Spend transaction txspend = CTransaction() txspend.vout.append( - CTxOut(value-1000, CScript([OP_TRUE]))) + CTxOut(value - 1000, CScript([OP_TRUE]))) txspend.vin.append( CTxIn(COutPoint(txfund.sha256, 0), b'')) # Sign the transaction sighashtype = SIGHASH_ALL | SIGHASH_FORKID hashbyte = bytes([sighashtype & 0xff]) sighash = SignatureHashForkId( script, txspend, 0, sighashtype, value) if sigtype == 'schnorr': txsig = schnorr.sign(privkeybytes, sighash) + hashbyte elif sigtype == 'ecdsa': txsig = private_key.sign(sighash) + hashbyte txspend.vin[0].scriptSig = CScript([dummy, txsig]) txspend.rehash() return txspend # two of these transactions, which are valid both before and after upgrade. ecdsa0tx = create_fund_and_spend_tx(OP_0, 'ecdsa') ecdsa0tx_2 = create_fund_and_spend_tx(OP_0, 'ecdsa') # two of these, which are nonstandard before upgrade and invalid after. ecdsa1tx = create_fund_and_spend_tx(OP_1, 'ecdsa') ecdsa1tx_2 = create_fund_and_spend_tx(OP_1, 'ecdsa') # this one is always invalid. schnorr0tx = create_fund_and_spend_tx(OP_0, 'schnorr') # this one is only going to be valid after the upgrade. schnorr1tx = create_fund_and_spend_tx(OP_1, 'schnorr') tip = self.build_block(tip, fundings) node.p2p.send_blocks_and_test([tip], node) self.log.info("Start preupgrade tests") self.log.info("Sending rejected transactions via RPC") assert_raises_rpc_error(-26, SCHNORR_LEGACY_MULTISIG_ERROR, node.sendrawtransaction, ToHex(schnorr0tx)) # Since MULTISIG_SCHNORR is in mandatory flags, we are not accepting # non-null-dummy ECDSA transactions before the upgrade. We get a # post-upgrade error since the mempool is using post-upgrade flags. assert_raises_rpc_error(-26, POSTUPGRADE_ECDSA_NULLDUMMY_ERROR, node.sendrawtransaction, ToHex(ecdsa1tx)) # The Schnorr multisig almost gets accepted here but it finally gets # caught in the block flags check. Note that "BUG! PLEASE REPORT # THIS!" will appear in the log, since AcceptToMemoryPoolWorker expects # that scriptVerifyFlags is more strict than nextBlockScriptVerifyFlags. # For strictly subtractive ('soft forking') flags, it is fine if they # are always part of scriptVerifyFlags and only sometimes appear in # nextBlockScriptVerifyFlags, but for additive flags this kind of # strange situation can be created. # In practice, only new nodes will ever be in a pre-upgrade state, # and they will also be in initial block download mode and hence # not request transactions from peers. So, this weird log message # could only be triggered by unsolicited submission of a tx, and # it would be benign since the node is behaving correctly by rejecting # the transaction (as tested here). 
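# Illustrative summary (editorial sketch, not part of the test) of the four
# (dummy, sigtype) combinations constructed above and their block-validity on
# each side of the upgrade, per the comments accompanying each transaction:
dummy_matrix = {
    # (dummy, sigtype): (valid pre-upgrade, valid post-upgrade)
    ('OP_0', 'ecdsa'):   (True, True),    # legacy null-dummy ECDSA mode
    ('OP_1', 'ecdsa'):   (True, False),   # non-null dummy: ECDSA refused later
    ('OP_0', 'schnorr'): (False, False),  # 65-byte sig in legacy mode: never valid
    ('OP_1', 'schnorr'): (False, True),   # new mode: dummy selects Schnorr checking
}
assert dummy_matrix[('OP_1', 'schnorr')] == (False, True)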
assert_raises_rpc_error(-26, PREUPGRADE_SCHNORR_MULTISIG_ERROR, node.sendrawtransaction, ToHex(schnorr1tx)) self.log.info( "Sending rejected transactions via net (bannable)") self.check_for_ban_on_rejected_tx( schnorr0tx, SCHNORR_LEGACY_MULTISIG_ERROR) self.check_for_ban_on_rejected_tx( ecdsa1tx, POSTUPGRADE_ECDSA_NULLDUMMY_ERROR) # If we are sent unsolicited post-upgrade transactions while still before # the upgrade block, the tx is to be rejected. self.check_for_ban_on_rejected_tx( schnorr1tx, PREUPGRADE_SCHNORR_MULTISIG_ERROR) self.log.info( "Sending invalid transactions in blocks (and get banned!)") self.check_for_ban_on_rejected_block( self.build_block(tip, [schnorr0tx]), BADINPUTS_ERROR) self.check_for_ban_on_rejected_block( self.build_block(tip, [schnorr1tx]), BADINPUTS_ERROR) self.log.info("Sending valid transaction via net, then mining it") node.p2p.send_txs_and_test([ecdsa0tx], node) assert_equal(node.getrawmempool(), [ecdsa0tx.hash]) tip = self.build_block(tip, [ecdsa0tx]) node.p2p.send_blocks_and_test([tip], node) assert_equal(node.getrawmempool(), []) # Activation tests self.log.info("Approach to just before upgrade activation") # Move our clock to the upgrade time so we will accept such future-timestamped blocks. node.setmocktime(GRAVITON_START_TIME) # Mine six blocks with timestamp starting at GRAVITON_START_TIME-1 blocks = [] for i in range(-1, 5): tip = self.build_block(tip, nTime=GRAVITON_START_TIME + i) blocks.append(tip) node.p2p.send_blocks_and_test(blocks, node) assert_equal(node.getblockchaininfo()[ 'mediantime'], GRAVITON_START_TIME - 1) self.log.info( "The next block will activate, but the activation block itself must follow old rules") self.check_for_ban_on_rejected_block( self.build_block(tip, [schnorr0tx]), BADINPUTS_ERROR) self.log.info( "Send a legacy ECDSA multisig into mempool; we will check after upgrade to make sure it didn't get cleaned out unnecessarily.") node.p2p.send_txs_and_test([ecdsa0tx_2], node) assert_equal(node.getrawmempool(), [ecdsa0tx_2.hash]) # save this tip for later preupgrade_block = tip self.log.info( "Mine the activation block itself, including a non-null-dummy ECDSA at the last possible moment") tip = self.build_block(tip, [ecdsa1tx]) node.p2p.send_blocks_and_test([tip], node) self.log.info("We have activated!") assert_equal(node.getblockchaininfo()[ 'mediantime'], GRAVITON_START_TIME) assert_equal(node.getrawmempool(), [ecdsa0tx_2.hash]) # save this tip for later upgrade_block = tip self.log.info( "Trying to mine a non-null-dummy ECDSA, but we are just barely too late") self.check_for_ban_on_rejected_block( self.build_block(tip, [ecdsa1tx_2]), BADINPUTS_ERROR) self.log.info( "If we try to submit it by mempool or RPC, it is rejected and we are banned") assert_raises_rpc_error(-26, POSTUPGRADE_ECDSA_NULLDUMMY_ERROR, node.sendrawtransaction, ToHex(ecdsa1tx_2)) self.check_for_ban_on_rejected_tx( ecdsa1tx_2, POSTUPGRADE_ECDSA_NULLDUMMY_ERROR) self.log.info( "Submitting a new Schnorr-multisig via net, and mining it in a block") node.p2p.send_txs_and_test([schnorr1tx], node) assert_equal(set(node.getrawmempool()), { ecdsa0tx_2.hash, schnorr1tx.hash}) tip = self.build_block(tip, [schnorr1tx]) node.p2p.send_blocks_and_test([tip], node) # save this tip for later postupgrade_block = tip self.log.info( "That legacy ECDSA multisig is still in mempool, let's mine it") assert_equal(node.getrawmempool(), [ecdsa0tx_2.hash]) tip = self.build_block(tip, [ecdsa0tx_2]) node.p2p.send_blocks_and_test([tip], node) assert_equal(node.getrawmempool(), [])
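# Worked illustration (hypothetical timestamps) of the mediantime arithmetic
# driving the activation above: MTP is the median of the last 11 block times,
# so after the six blocks stamped START-1 .. START+4 the median is still
# START-1, and only the next block pushes it to START, activating the rules.
START = GRAVITON_START_TIME
older = [START - 100 + i for i in range(5)]  # assumed pre-fork block times
newer = [START - 1 + i for i in range(6)]    # the six blocks mined above
assert sorted((older + newer)[-11:])[5] == START - 1          # not yet active
assert sorted((older + newer + [START + 5])[-11:])[5] == START  # next block: active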
self.log.info( "Trying Schnorr in legacy multisig remains invalid and banworthy as ever") self.check_for_ban_on_rejected_tx( schnorr0tx, SCHNORR_LEGACY_MULTISIG_ERROR) self.check_for_ban_on_rejected_block( self.build_block(tip, [schnorr0tx]), BADINPUTS_ERROR) # Deactivation tests self.log.info( "Invalidating the post-upgrade blocks returns the transactions to mempool") node.invalidateblock(postupgrade_block.hash) assert_equal(set(node.getrawmempool()), { ecdsa0tx_2.hash, schnorr1tx.hash}) self.log.info( "Invalidating the upgrade block evicts the transactions valid only after upgrade") node.invalidateblock(upgrade_block.hash) assert_equal(set(node.getrawmempool()), { ecdsa0tx_2.hash}) self.log.info("Return to our tip") node.reconsiderblock(upgrade_block.hash) node.reconsiderblock(postupgrade_block.hash) assert_equal(node.getbestblockhash(), tip.hash) assert_equal(node.getrawmempool(), []) self.log.info( "Create an empty-block reorg that forks from pre-upgrade") tip = preupgrade_block blocks = [] for _ in range(10): tip = self.build_block(tip) blocks.append(tip) node.p2p.send_blocks_and_test(blocks, node) self.log.info("Transactions from orphaned blocks are sent into mempool ready to be mined again, including upgrade-dependent ones even though the fork deactivated and reactivated the upgrade.") assert_equal(set(node.getrawmempool()), { ecdsa0tx_2.hash, schnorr1tx.hash}) node.generate(1) tip = self.getbestblock(node) assert set(tx.rehash() for tx in tip.vtx).issuperset( {ecdsa0tx_2.hash, schnorr1tx.hash}) if __name__ == '__main__': SchnorrTest().main() diff --git a/test/functional/abc-wallet-standardness.py b/test/functional/abc-wallet-standardness.py index 1a90ed7344..bcb05f0f5e 100755 --- a/test/functional/abc-wallet-standardness.py +++ b/test/functional/abc-wallet-standardness.py @@ -1,180 +1,180 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the response of wallet to a variety of weird / nonstandard coins that it might try to spend.""" from decimal import Decimal from test_framework.script import ( CScript, OP_1, OP_5, OP_CHECKSIG, OP_CHECKMULTISIG, OP_DUP, OP_EQUALVERIFY, OP_HASH160, OP_PUSHDATA1, hash160, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_raises_rpc_error, assert_equal, sync_blocks, ) from test_framework.messages import ( CTransaction, CTxOut, FromHex, ToHex, ) SATOSHI = Decimal('0.00000001') class WalletStandardnessTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [['-acceptnonstdtxn=0'], ['-acceptnonstdtxn=1']] def run_test(self): std_node, nonstd_node = self.nodes address_nonstd = nonstd_node.getnewaddress() # make and mature some coins for the nonstandard node nonstd_node.generate(120) sync_blocks(self.nodes) def fund_and_test_wallet(scriptPubKey, shouldBeStandard, shouldBeInWallet, amount=10000, spendfee=500, nonstd_error="scriptpubkey (code 64)"): """ Get the nonstandard node to fund a transaction, test its standardness by trying to broadcast on the standard node, then mine it and see if it ended up in the standard node's wallet. Finally, it attempts to spend the coin. 
""" self.log.info("Trying script {}".format(scriptPubKey.hex(),)) # get nonstandard node to fund the script tx = CTransaction() tx.vout.append(CTxOut(max(amount, 10000), scriptPubKey)) rawtx = nonstd_node.fundrawtransaction( ToHex(tx), {'lockUnspents': True, 'changePosition': 1})['hex'] # fundrawtransaction doesn't like to fund dust outputs, so we # have to manually override the amount. FromHex(tx, rawtx) tx.vout[0].nValue = min(amount, 10000) rawtx = nonstd_node.signrawtransactionwithwallet(ToHex(tx))['hex'] # ensure signing process did not disturb scriptPubKey signedtx = FromHex(CTransaction(), rawtx) assert_equal(scriptPubKey, signedtx.vout[0].scriptPubKey) txid = signedtx.rehash() balance_initial = std_node.getbalance() # try broadcasting it on the standard node if shouldBeStandard: std_node.sendrawtransaction(rawtx) assert txid in std_node.getrawmempool() else: assert_raises_rpc_error(-26, nonstd_error, std_node.sendrawtransaction, rawtx) assert txid not in std_node.getrawmempool() # make sure it's in nonstandard node's mempool, then mine it nonstd_node.sendrawtransaction(rawtx) assert txid in nonstd_node.getrawmempool() [blockhash] = nonstd_node.generate(1) # make sure it was mined assert txid in nonstd_node.getblock(blockhash)["tx"] sync_blocks(self.nodes) wallet_outpoints = {(entry['txid'], entry['vout']) for entry in std_node.listunspent()} # calculate wallet balance change just as a double check balance_change = std_node.getbalance() - balance_initial # try spending the funds using the wallet. - outamount = (amount-spendfee) * SATOSHI + outamount = (amount - spendfee) * SATOSHI if outamount < 546 * SATOSHI: # If the final amount would be too small, then just donate # to miner fees. outputs = [{"data": b"to miner, with love".hex()}] else: outputs = [{address_nonstd: outamount}] spendtx = std_node.createrawtransaction( [{'txid': txid, 'vout': 0}], outputs) signresult = std_node.signrawtransactionwithwallet(spendtx) if shouldBeInWallet: assert (txid, 0) in wallet_outpoints assert balance_change == amount * SATOSHI assert_equal(signresult['complete'], True) txid = std_node.sendrawtransaction(signresult['hex']) [blockhash] = std_node.generate(1) # make sure it was mined assert txid in std_node.getblock(blockhash)["tx"] sync_blocks(self.nodes) else: assert (txid, 0) not in wallet_outpoints assert balance_change == 0 # signresult['errors'] will vary depending on input script. What # occurs is that in sign.cpp, ProduceSignature gets back # solved=false since SignStep sees a nonstandard input. Then, # an empty SignatureData results. Back in rawtransaction.cpp's # SignTransaction, it will then attempt to execute the # scriptPubKey with an empty scriptSig. A P2PKH script will thus # fail at OP_DUP with stack error, and P2PK/Multisig will fail # once they hit a nonminimal push. The error message is just an # artifact of the script type, basically. 
assert_equal(signresult['complete'], False) # we start with an empty wallet assert_equal(std_node.getbalance(), 0) address = std_node.getnewaddress() pubkey = bytes.fromhex(std_node.getaddressinfo(address)['pubkey']) pubkeyhash = hash160(pubkey) # P2PK fund_and_test_wallet(CScript([pubkey, OP_CHECKSIG]), True, True) fund_and_test_wallet( CScript([OP_PUSHDATA1, pubkey, OP_CHECKSIG]), False, False) # P2PKH fund_and_test_wallet(CScript( [OP_DUP, OP_HASH160, pubkeyhash, OP_EQUALVERIFY, OP_CHECKSIG]), True, True) fund_and_test_wallet(CScript( [OP_DUP, OP_HASH160, OP_PUSHDATA1, pubkeyhash, OP_EQUALVERIFY, OP_CHECKSIG]), False, False) # Bare multisig fund_and_test_wallet( CScript([OP_1, pubkey, OP_1, OP_CHECKMULTISIG]), True, True) fund_and_test_wallet( CScript([OP_1, OP_PUSHDATA1, pubkey, OP_1, OP_CHECKMULTISIG]), False, False) fund_and_test_wallet( CScript([OP_1, pubkey, b'\x01', OP_CHECKMULTISIG]), False, False) fund_and_test_wallet( CScript([b'\x01', pubkey, OP_1, OP_CHECKMULTISIG]), False, False) # Note: 1-of-5 is nonstandard to fund but standard to spend. fund_and_test_wallet( CScript([OP_1, pubkey, pubkey, pubkey, pubkey, pubkey, OP_5, OP_CHECKMULTISIG]), False, True) fund_and_test_wallet( CScript([OP_1, pubkey, pubkey, pubkey, OP_PUSHDATA1, pubkey, pubkey, OP_5, OP_CHECKMULTISIG]), False, False) # Dust also is nonstandard to fund but standard to spend. fund_and_test_wallet( CScript([pubkey, OP_CHECKSIG]), False, True, amount=200, nonstd_error="dust (code 64)") # and we end with an empty wallet assert_equal(std_node.getbalance(), 0) if __name__ == '__main__': WalletStandardnessTest().main() diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py index 491efd8804..96d8a9dde9 100755 --- a/test/functional/feature_cltv.py +++ b/test/functional/feature_cltv.py @@ -1,223 +1,223 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test BIP65 (CHECKLOCKTIMEVERIFY). Test that the CHECKLOCKTIMEVERIFY soft-fork activates at (regtest) block height 1351. """ from test_framework.blocktools import create_block, create_coinbase, make_conform_to_ctor from test_framework.messages import ( CTransaction, FromHex, msg_block, msg_tx, ToHex, ) from test_framework.mininode import ( P2PInterface, ) from test_framework.script import ( CScript, CScriptNum, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP, OP_TRUE, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_tx from test_framework.util import assert_equal CLTV_HEIGHT = 1351 # Reject codes that we might receive in this test REJECT_INVALID = 16 REJECT_OBSOLETE = 17 REJECT_NONSTANDARD = 64 def cltv_lock_to_height(node, tx, to_address, amount, height=-1): '''Modify the scriptPubKey to add an OP_CHECKLOCKTIMEVERIFY, and make a transaction that spends it. This transforms the output script into an anyone-can-spend script (OP_TRUE) once the locktime condition is satisfied. Default height is -1, which causes CLTV to fail. TODO: test more ways that transactions using CLTV could be invalid (e.g. locktime requirements fail, sequence time requirements fail, etc).
''' height_op = OP_1NEGATE if(height > 0): tx.vin[0].nSequence = 0 tx.nLockTime = height height_op = CScriptNum(height) tx.vout[0].scriptPubKey = CScript( [height_op, OP_CHECKLOCKTIMEVERIFY, OP_DROP, OP_TRUE]) pad_tx(tx) fundtx_raw = node.signrawtransactionwithwallet(ToHex(tx))['hex'] fundtx = FromHex(CTransaction(), fundtx_raw) fundtx.rehash() # make spending tx inputs = [{ "txid": fundtx.hash, "vout": 0 }] output = {to_address: amount} spendtx_raw = node.createrawtransaction(inputs, output) spendtx = FromHex(CTransaction(), spendtx_raw) pad_tx(spendtx) return fundtx, spendtx def spend_from_coinbase(node, coinbase, to_address, amount): from_txid = node.getblock(coinbase)['tx'][0] inputs = [{"txid": from_txid, "vout": 0}] outputs = {to_address: amount} rawtx = node.createrawtransaction(inputs, outputs) signresult = node.signrawtransactionwithwallet(rawtx) tx = FromHex(CTransaction(), signresult['hex']) return tx class BIP65Test(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.extra_args = [['-whitelist=127.0.0.1']] self.setup_clean_chain = True def run_test(self): self.nodes[0].add_p2p_connection(P2PInterface()) self.log.info("Mining {} blocks".format(CLTV_HEIGHT - 2)) self.coinbase_blocks = self.nodes[0].generate(CLTV_HEIGHT - 2) self.nodeaddress = self.nodes[0].getnewaddress() self.log.info( "Test that an invalid-according-to-CLTV transaction can still appear in a block") fundtx = spend_from_coinbase(self.nodes[0], self.coinbase_blocks[0], self.nodeaddress, 49.99) fundtx, spendtx = cltv_lock_to_height( self.nodes[0], fundtx, self.nodeaddress, 49.98) tip = self.nodes[0].getbestblockhash() block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1 block = create_block(int(tip, 16), create_coinbase( CLTV_HEIGHT - 1), block_time) block.nVersion = 3 block.vtx.append(fundtx) # include the -1 CLTV in block block.vtx.append(spendtx) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.nodes[0].p2p.send_and_ping(msg_block(block)) # This block is valid assert_equal(self.nodes[0].getbestblockhash(), block.hash) self.log.info("Test that blocks must now be at least version 4") tip = block.sha256 block_time += 1 block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time) block.nVersion = 3 block.solve() with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]): self.nodes[0].p2p.send_and_ping(msg_block(block)) assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip) self.nodes[0].p2p.sync_with_ping() self.log.info( "Test that invalid-according-to-cltv transactions cannot appear in a block") block.nVersion = 4 fundtx = spend_from_coinbase(self.nodes[0], self.coinbase_blocks[1], self.nodeaddress, 49.99) fundtx, spendtx = cltv_lock_to_height( self.nodes[0], fundtx, self.nodeaddress, 49.98) # The funding tx only has unexecuted bad CLTV, in scriptpubkey; this is valid. self.nodes[0].p2p.send_and_ping(msg_tx(fundtx)) assert fundtx.hash in self.nodes[0].getrawmempool() # Mine a block containing the funding transaction block.vtx.append(fundtx) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.nodes[0].p2p.send_and_ping(msg_block(block)) # This block is valid assert_equal(self.nodes[0].getbestblockhash(), block.hash) # We show that this tx is invalid due to CLTV by getting it # rejected from the mempool for exactly that reason. 
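# Editorial sketch of the CHECKLOCKTIMEVERIFY comparison at work here
# (hypothetical predicate; the real check also requires the stack argument
# and nLockTime to be of the same height/time kind):
def _cltv_passes(stack_arg, tx_locktime, nsequence):
    return (stack_arg >= 0 and tx_locktime >= stack_arg
            and nsequence != 0xffffffff)
assert not _cltv_passes(-1, 0, 0)            # the OP_1NEGATE default: fails
assert _cltv_passes(1351, 1351, 0xfffffffe)  # the CLTV_HEIGHT case: passes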
assert_equal( [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Negative locktime)'}], self.nodes[0].testmempoolaccept( rawtxs=[spendtx.serialize().hex()], allowhighfees=True) ) rejectedtx_signed = self.nodes[0].signrawtransactionwithwallet( ToHex(spendtx)) # Couldn't complete signature due to CLTV assert rejectedtx_signed['errors'][0]['error'] == 'Negative locktime' tip = block.hash block_time += 1 block = create_block( - block.sha256, create_coinbase(CLTV_HEIGHT+1), block_time) + block.sha256, create_coinbase(CLTV_HEIGHT + 1), block_time) block.nVersion = 4 block.vtx.append(spendtx) block.hashMerkleRoot = block.calc_merkle_root() block.solve() with self.nodes[0].assert_debug_log(expected_msgs=['ConnectBlock {} failed (blk-bad-inputs'.format(block.hash)]): self.nodes[0].p2p.send_and_ping(msg_block(block)) assert_equal(self.nodes[0].getbestblockhash(), tip) self.nodes[0].p2p.sync_with_ping() self.log.info( "Test that a version 4 block with a valid-according-to-CLTV transaction is accepted") fundtx = spend_from_coinbase(self.nodes[0], self.coinbase_blocks[2], self.nodeaddress, 49.99) fundtx, spendtx = cltv_lock_to_height( self.nodes[0], fundtx, self.nodeaddress, 49.98, CLTV_HEIGHT) # make sure sequence is nonfinal and locktime is good spendtx.vin[0].nSequence = 0xfffffffe spendtx.nLockTime = CLTV_HEIGHT # both transactions are fully valid self.nodes[0].sendrawtransaction(ToHex(fundtx)) self.nodes[0].sendrawtransaction(ToHex(spendtx)) # Modify the transactions in the block to be valid against CLTV block.vtx.pop(1) block.vtx.append(fundtx) block.vtx.append(spendtx) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.nodes[0].p2p.send_and_ping(msg_block(block)) # This block is now valid assert_equal(self.nodes[0].getbestblockhash(), block.hash) if __name__ == '__main__': BIP65Test().main() diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py index e968d3c069..35f88e4bbf 100755 --- a/test/functional/feature_config_args.py +++ b/test/functional/feature_config_args.py @@ -1,59 +1,59 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test various command line arguments and configuration file parameters.""" import os from test_framework.test_framework import BitcoinTestFramework class ConfArgsTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): self.stop_node(0) # Remove the -datadir argument so it doesn't override the config file self.nodes[0].remove_default_args(["-datadir"]) default_data_dir = self.nodes[0].datadir new_data_dir = os.path.join(default_data_dir, 'newdatadir') new_data_dir_2 = os.path.join(default_data_dir, 'newdatadir2') # Check that using -datadir argument on non-existent directory fails self.nodes[0].datadir = new_data_dir self.nodes[0].assert_start_raises_init_error( ['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.') # Check that using non-existent datadir in conf file fails conf_file = os.path.join(default_data_dir, "bitcoin.conf") # datadir needs to be set before [regtest] section conf_file_contents = open(conf_file, encoding='utf8').read() with open(conf_file, 'w', encoding='utf8') as f: f.write("datadir=" + new_data_dir + "\n") f.write(conf_file_contents) self.nodes[0].assert_start_raises_init_error( ['-conf=' + conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.') # Create the directory and ensure the config file now works os.mkdir(new_data_dir) - self.start_node(0, ['-conf='+conf_file, '-wallet=w1']) + self.start_node(0, ['-conf=' + conf_file, '-wallet=w1']) self.stop_node(0) assert os.path.exists(os.path.join( new_data_dir, 'regtest', 'wallets', 'w1')) # Ensure command line argument overrides datadir in conf os.mkdir(new_data_dir_2) self.nodes[0].datadir = new_data_dir_2 - self.start_node(0, ['-datadir='+new_data_dir_2, - '-conf='+conf_file, '-wallet=w2']) + self.start_node(0, ['-datadir=' + new_data_dir_2, + '-conf=' + conf_file, '-wallet=w2']) assert os.path.exists(os.path.join( new_data_dir_2, 'regtest', 'wallets', 'w2')) if __name__ == '__main__': ConfArgsTest().main() diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py index 76d46c3497..dfe05b918e 100755 --- a/test/functional/feature_minchainwork.py +++ b/test/functional/feature_minchainwork.py @@ -1,99 +1,99 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test logic for setting nMinimumChainWork on command line. Nodes don't consider themselves out of "initial block download" until their active chain has more work than nMinimumChainWork. Nodes don't download blocks from a peer unless the peer's best known block has more work than nMinimumChainWork. While in initial block download, nodes won't relay blocks to their peers, so test that this parameter functions as intended by verifying that block relay only succeeds past a given node once its nMinimumChainWork has been exceeded. 
""" import time from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, connect_nodes # 2 hashes required per regtest block (with no difficulty adjustment) REGTEST_WORK_PER_BLOCK = 2 class MinimumChainWorkTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 3 self.extra_args = [[], ["-minimumchainwork=0x65"], ["-minimumchainwork=0x65"]] self.node_min_work = [0, 101, 101] def setup_network(self): # This test relies on the chain setup being: # node0 <- node1 <- node2 # Before leaving IBD, nodes prefer to download blocks from outbound # peers, so ensure that we're mining on an outbound peer and testing # block relay to inbound peers. self.setup_nodes() - for i in range(self.num_nodes-1): - connect_nodes(self.nodes[i+1], self.nodes[i]) + for i in range(self.num_nodes - 1): + connect_nodes(self.nodes[i + 1], self.nodes[i]) def run_test(self): # Start building a chain on node0. node2 shouldn't be able to sync until node1's # minchainwork is exceeded starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work self.log.info( "Testing relay across node {} (minChainWork = {})".format( 1, self.node_min_work[1])) starting_blockcount = self.nodes[2].getblockcount() num_blocks_to_generate = int( (self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK) self.log.info("Generating {} blocks on node0".format( num_blocks_to_generate)) hashes = self.nodes[0].generate(num_blocks_to_generate) self.log.info("Node0 current chain work: {}".format( self.nodes[0].getblockheader(hashes[-1])['chainwork'])) # Sleep a few seconds and verify that node2 didn't get any new blocks # or headers. We sleep, rather than sync_blocks(node0, node1) because # it's reasonable either way for node1 to get the blocks, or not get # them (since they're below node1's minchainwork). time.sleep(3) self.log.info("Verifying node 2 has no more blocks than before") self.log.info("Blockcounts: {}".format( [n.getblockcount() for n in self.nodes])) # Node2 shouldn't have any new headers yet, because node1 should not # have relayed anything. assert_equal(len(self.nodes[2].getchaintips()), 1) assert_equal(self.nodes[2].getchaintips()[0]['height'], 0) assert self.nodes[1].getbestblockhash( ) != self.nodes[0].getbestblockhash() assert_equal(self.nodes[2].getblockcount(), starting_blockcount) self.log.info("Generating one more block") self.nodes[0].generate(1) self.log.info("Verifying nodes are all synced") # Because nodes in regtest are all manual connections (eg using # addnode), node1 should not have disconnected node0. If not for that, # we'd expect node1 to have disconnected node0 for serving an # insufficient work chain, in which case we'd need to reconnect them to # continue the test. self.sync_all() self.log.info("Blockcounts: {}".format( [n.getblockcount() for n in self.nodes])) if __name__ == '__main__': MinimumChainWorkTest().main() diff --git a/test/functional/interface_rest.py b/test/functional/interface_rest.py index 0d47e58921..481b14f509 100755 --- a/test/functional/interface_rest.py +++ b/test/functional/interface_rest.py @@ -1,333 +1,333 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test the REST API.""" import binascii from decimal import Decimal from enum import Enum import http.client from io import BytesIO import json from struct import pack, unpack import urllib.parse from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than, assert_greater_than_or_equal, hex_str_to_bytes, ) class ReqType(Enum): JSON = 1 BIN = 2 HEX = 3 class RetType(Enum): OBJ = 1 BYTES = 2 JSON = 3 def filter_output_indices_by_value(vouts, value): for vout in vouts: if vout['value'] == value: yield vout['n'] class RESTTest (BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [["-rest"], []] def test_rest_request(self, uri, http_method='GET', req_type=ReqType.JSON, body='', status=200, ret_type=RetType.JSON): rest_uri = '/rest' + uri if req_type == ReqType.JSON: rest_uri += '.json' elif req_type == ReqType.BIN: rest_uri += '.bin' elif req_type == ReqType.HEX: rest_uri += '.hex' conn = http.client.HTTPConnection(self.url.hostname, self.url.port) self.log.debug('{} {} {}'.format(http_method, rest_uri, body)) if http_method == 'GET': conn.request('GET', rest_uri) elif http_method == 'POST': conn.request('POST', rest_uri, body) resp = conn.getresponse() assert_equal(resp.status, status) if ret_type == RetType.OBJ: return resp elif ret_type == RetType.BYTES: return resp.read() elif ret_type == RetType.JSON: return json.loads(resp.read().decode('utf-8'), parse_float=Decimal) def run_test(self): self.url = urllib.parse.urlparse(self.nodes[0].url) self.log.info("Mine blocks and send Bitcoin Cash to node 1") # Random address so node1's balance doesn't increase not_related_address = "2MxqoHEdNQTyYeX1mHcbrrpzgojbosTpCvJ" self.nodes[0].generate(1) self.sync_all() self.nodes[1].generatetoaddress(100, not_related_address) self.sync_all() assert_equal(self.nodes[0].getbalance(), 50) txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) self.sync_all() self.nodes[1].generatetoaddress(1, not_related_address) self.sync_all() bb_hash = self.nodes[0].getbestblockhash() assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) self.log.info("Load the transaction using the /tx URI") json_obj = self.test_rest_request("/tx/{}".format(txid)) # Get the vin to later check for utxo (should be spent by then) spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # Get n of 0.1 outpoint n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) spending = (txid, n) self.log.info("Query an unspent TXO using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) # Check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is one utxo assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['utxos'][0]['value'], Decimal('0.1')) self.log.info("Query a spent TXO using the /getutxos URI") json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) # Check chainTip response assert_equal(json_obj['chaintipHash'], bb_hash) # Make sure there is no utxo in the response because this outpoint has # been spent assert_equal(len(json_obj['utxos']), 0) # Check bitmap assert_equal(json_obj['bitmap'], "0") self.log.info("Query two TXOs using the /getutxos URI") json_obj = self.test_rest_request( "/getutxos/{}-{}/{}-{}".format(*(spending + spent))) assert_equal(len(json_obj['utxos']), 1) assert_equal(json_obj['bitmap'], "10") self.log.info( "Query the TXOs using the /getutxos URI with a 
binary response") bin_request = b'\x01\x02' for txid, n in [spending, spent]: bin_request += hex_str_to_bytes(txid) bin_request += pack("i", n) bin_response = self.test_rest_request( "/getutxos", http_method='POST', req_type=ReqType.BIN, body=bin_request, ret_type=RetType.BYTES) output = BytesIO(bin_response) chain_height, = unpack("i", output.read(4)) response_hash = output.read(32)[::-1].hex() # Check if getutxo's chaintip during calculation was fine assert_equal(bb_hash, response_hash) # Chain height must be 102 assert_equal(chain_height, 102) self.log.info("Test the /getutxos URI with and without /checkmempool") # Create a transaction, check that it's found with /checkmempool, but # not found without. Then confirm the transaction and check that it's # found with or without /checkmempool. # Do a tx and don't sync txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) json_obj = self.test_rest_request("/tx/{}".format(txid)) # Get the spent output to later check for utxo (should be spent by then) spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # Get n of 0.1 outpoint n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) spending = (txid, n) json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 0) json_obj = self.test_rest_request( "/getutxos/checkmempool/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request( "/getutxos/checkmempool/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 0) self.nodes[0].generate(1) self.sync_all() json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) json_obj = self.test_rest_request( "/getutxos/checkmempool/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) # Do some invalid requests self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.JSON, body='{"checkmempool', status=400, ret_type=RetType.OBJ) self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body='{"checkmempool', status=400, ret_type=RetType.OBJ) self.test_rest_request("/getutxos/checkmempool", http_method='POST', req_type=ReqType.JSON, status=400, ret_type=RetType.OBJ) # Test limits long_uri = '/'.join(["{}-{}".format(txid, n_) for n_ in range(20)]) self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=400, ret_type=RetType.OBJ) long_uri = '/'.join(['{}-{}'.format(txid, n_) for n_ in range(15)]) self.test_rest_request( "/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200) # Generate block to not affect upcoming tests self.nodes[0].generate( 1) self.sync_all() self.log.info("Test the /block and /headers URIs") bb_hash = self.nodes[0].getbestblockhash() # Check binary format response = self.test_rest_request( "/block/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) assert_greater_than(int(response.getheader('content-length')), 80) response_bytes = response.read() # Compare with block header response_header = self.test_rest_request( "/headers/1/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) assert_equal(int(response_header.getheader('content-length')), 80) response_header_bytes = response_header.read() assert_equal(response_bytes[0:80], response_header_bytes) # Check block hex format response_hex = self.test_rest_request( 
"/block/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than(int(response_hex.getheader('content-length')), 160) response_hex_bytes = response_hex.read().strip(b'\n') assert_equal(binascii.hexlify(response_bytes), response_hex_bytes) # Compare with hex block header response_header_hex = self.test_rest_request( "/headers/1/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than( int(response_header_hex.getheader('content-length')), 160) response_header_hex_bytes = response_header_hex.read(160) assert_equal(binascii.hexlify( response_bytes[:80]), response_header_hex_bytes) # Check json format block_json_obj = self.test_rest_request("/block/{}".format(bb_hash)) assert_equal(block_json_obj['hash'], bb_hash) # Compare with json block header json_obj = self.test_rest_request("/headers/1/{}".format(bb_hash)) # Ensure that there is one header in the json response assert_equal(len(json_obj), 1) # Request/response hash should be the same assert_equal(json_obj[0]['hash'], bb_hash) # Compare with normal RPC block response rpc_block_json = self.nodes[0].getblock(bb_hash) for key in ['hash', 'confirmations', 'height', 'version', 'merkleroot', 'time', 'nonce', 'bits', 'difficulty', 'chainwork', 'previousblockhash']: assert_equal(json_obj[0][key], rpc_block_json[key]) # See if we can get 5 headers in one response self.nodes[1].generate(5) self.sync_all() json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash)) # Now we should have 5 header objects assert_equal(len(json_obj), 5) self.log.info("Test the /tx URI") tx_hash = block_json_obj['tx'][0]['txid'] json_obj = self.test_rest_request("/tx/{}".format(tx_hash)) assert_equal(json_obj['txid'], tx_hash) # Check hex format response hex_response = self.test_rest_request( "/tx/{}".format(tx_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) assert_greater_than_or_equal( - int(hex_response.getheader('content-length')), json_obj['size']*2) + int(hex_response.getheader('content-length')), json_obj['size'] * 2) self.log.info("Test tx inclusion in the /mempool and /block URIs") # Make 3 tx and mine them on node 1 txs = [] txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) self.sync_all() # Check that there are exactly 3 transactions in the TX memory pool # before generating the block json_obj = self.test_rest_request("/mempool/info") assert_equal(json_obj['size'], 3) # The size of the memory pool should be greater than 3x ~100 bytes assert_greater_than(json_obj['bytes'], 300) # Check that there are our submitted transactions in the TX memory pool json_obj = self.test_rest_request("/mempool/contents") for i, tx in enumerate(txs): assert tx in json_obj assert_equal(json_obj[tx]['spentby'], txs[i + 1:i + 2]) assert_equal(json_obj[tx]['depends'], txs[i - 1:i]) # Now mine the transactions newblockhash = self.nodes[1].generate(1) self.sync_all() # Check if the 3 tx show up in the new block json_obj = self.test_rest_request("/block/{}".format(newblockhash[0])) non_coinbase_txs = {tx['txid'] for tx in json_obj['tx'] if 'coinbase' not in tx['vin'][0]} assert_equal(non_coinbase_txs, set(txs)) # Check the same but without tx details json_obj = self.test_rest_request( "/block/notxdetails/{}".format(newblockhash[0])) for tx in txs: assert tx in json_obj['tx'] self.log.info("Test the /chaininfo URI") bb_hash = self.nodes[0].getbestblockhash() json_obj = 
self.test_rest_request("/chaininfo") assert_equal(json_obj['bestblockhash'], bb_hash) if __name__ == '__main__': RESTTest().main() diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py index 279f7f215e..901f463c15 100755 --- a/test/functional/mining_prioritisetransaction.py +++ b/test/functional/mining_prioritisetransaction.py @@ -1,166 +1,166 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the prioritisetransaction mining RPC.""" import time from test_framework.blocktools import ( create_confirmed_utxos, send_big_transactions, ) # FIXME: review how this test needs to be adapted w.r.t _LEGACY_MAX_BLOCK_SIZE from test_framework.cdefs import LEGACY_MAX_BLOCK_SIZE from test_framework.messages import COIN from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error class PrioritiseTransactionTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [["-printpriority=1"], ["-printpriority=1"]] def run_test(self): self.relayfee = self.nodes[0].getnetworkinfo()['relayfee'] utxo_count = 90 utxos = create_confirmed_utxos(self.nodes[0], utxo_count) txids = [] # Create 3 batches of transactions at 3 different fee rate levels range_size = utxo_count // 3 for i in range(3): txids.append([]) start_range = i * range_size end_range = start_range + range_size txids[i] = send_big_transactions(self.nodes[0], utxos[start_range:end_range], end_range - start_range, 10 * (i + 1)) # Make sure that the size of each group of transactions exceeds # LEGACY_MAX_BLOCK_SIZE -- otherwise the test needs to be revised to create # more transactions. mempool = self.nodes[0].getrawmempool(True) sizes = [0, 0, 0] for i in range(3): for j in txids[i]: assert j in mempool sizes[i] += mempool[j]['size'] # Fail => raise utxo_count assert sizes[i] > LEGACY_MAX_BLOCK_SIZE # add a fee delta to something in the cheapest bucket and make sure it gets mined # also check that a different entry in the cheapest bucket is NOT mined (lower # the priority to ensure its not mined due to priority) self.nodes[0].prioritisetransaction( txids[0][0], 0, 100 * self.nodes[0].calculate_fee_from_txid(txids[0][0])) self.nodes[0].prioritisetransaction(txids[0][1], -1e15, 0) self.nodes[0].generate(1) mempool = self.nodes[0].getrawmempool() self.log.info("Assert that prioritised transaction was mined") assert txids[0][0] not in mempool assert txids[0][1] in mempool confirmed_transactions = self.nodes[0].getblock( self.nodes[0].getbestblockhash())['tx'] # Pull the highest fee-rate transaction from a block high_fee_tx = confirmed_transactions[1] # Something high-fee should have been mined! assert high_fee_tx != None # Add a prioritisation before a tx is in the mempool (de-prioritising a # high-fee transaction so that it's now low fee). # # NOTE WELL: gettransaction returns the fee as a negative number and # as fractional coins. However, the prioritisetransaction expects a # number of satoshi to add or subtract from the actual fee. # Thus the conversation here is simply int(tx_fee*COIN) to remove all fees, and then # we add the minimum fee back. 
tx_fee = self.nodes[0].gettransaction(high_fee_tx)['fee'] self.nodes[0].prioritisetransaction( - high_fee_tx, -1e15, int(tx_fee*COIN) + self.nodes[0].calculate_fee_from_txid(high_fee_tx)) + high_fee_tx, -1e15, int(tx_fee * COIN) + self.nodes[0].calculate_fee_from_txid(high_fee_tx)) # Add everything back to mempool self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) # Check to make sure our high fee rate tx is back in the mempool mempool = self.nodes[0].getrawmempool() assert high_fee_tx in mempool # Now verify the modified-high feerate transaction isn't mined before # the other high fee transactions. Keep mining until our mempool has # decreased by all the high fee size that we calculated above. while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]): self.nodes[0].generate(1) # High fee transaction should not have been mined, but other high fee rate # transactions should have been. mempool = self.nodes[0].getrawmempool() self.log.info( "Assert that de-prioritised transaction is still in mempool") assert high_fee_tx in mempool for x in txids[2]: if (x != high_fee_tx): assert x not in mempool # Create a free, low priority transaction. Should be rejected. utxo_list = self.nodes[0].listunspent() assert len(utxo_list) > 0 utxo = utxo_list[0] inputs = [] outputs = {} inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]}) outputs[self.nodes[0].getnewaddress()] = utxo["amount"] - self.relayfee raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) tx_hex = self.nodes[0].signrawtransactionwithwallet(raw_tx)["hex"] txid = self.nodes[0].sendrawtransaction(tx_hex) # A tx that spends an in-mempool tx has 0 priority, so we can use it to # test the effect of using prioritise transaction for mempool # acceptance inputs = [] inputs.append({"txid": txid, "vout": 0}) outputs = {} outputs[self.nodes[0].getnewaddress()] = utxo["amount"] - self.relayfee raw_tx2 = self.nodes[0].createrawtransaction(inputs, outputs) tx2_hex = self.nodes[0].signrawtransactionwithwallet(raw_tx2)["hex"] tx2_id = self.nodes[0].decoderawtransaction(tx2_hex)["txid"] # This will raise an exception due to min relay fee not being met assert_raises_rpc_error(-26, "min relay fee not met (code 66)", self.nodes[0].sendrawtransaction, tx2_hex) assert tx2_id not in self.nodes[0].getrawmempool() # This is a less than 1000-byte transaction, so just set the fee # to be the minimum for a 1000-byte transaction and check that it is # accepted. self.nodes[0].prioritisetransaction( tx2_id, 0, int(self.relayfee * COIN)) self.log.info( "Assert that prioritised free transaction is accepted to mempool") assert_equal(self.nodes[0].sendrawtransaction(tx2_hex), tx2_id) assert tx2_id in self.nodes[0].getrawmempool() # Test that calling prioritisetransaction is sufficient to trigger # getblocktemplate to (eventually) return a new block. 
mock_time = int(time.time()) self.nodes[0].setmocktime(mock_time) template = self.nodes[0].getblocktemplate() self.nodes[0].prioritisetransaction( tx2_id, 0, -int(self.relayfee * COIN)) self.nodes[0].setmocktime(mock_time + 10) new_template = self.nodes[0].getblocktemplate() assert template != new_template if __name__ == '__main__': PrioritiseTransactionTest().main() diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py index fcdf8a5735..56f107d9dd 100755 --- a/test/functional/p2p_unrequested_blocks.py +++ b/test/functional/p2p_unrequested_blocks.py @@ -1,351 +1,351 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test processing of unrequested blocks. Setup: two nodes, node0+node1, not connected to each other. Node1 will have nMinimumChainWork set to 0x10, so it won't process low-work unrequested blocks. We have one P2PInterface connection to node0 called test_node, and one to node1 called min_work_node. The test: 1. Generate one block on each node, to leave IBD. 2. Mine a new block on each tip, and deliver to each node from the node's peer. The tip should advance for node0, but node1 should skip processing due to nMinimumChainWork. Node1 is unused in tests 3-7: 3. Mine a block that forks from the genesis block, and deliver to test_node. Node0 should not process this block (just accept the header), because it is unrequested and doesn't have more or equal work to the tip. 4a,b. Send another two blocks that build on the forking block. Node0 should process the second block but be stuck on the shorter chain, because it's missing an intermediate block. 4c. Send 288 more blocks on the longer chain (the number of blocks ahead we currently store). Node0 should process all but the last block (too far ahead in height). 5. Send a duplicate of the block in #3 to Node0. Node0 should not process the block because it is unrequested, and stay on the shorter chain. 6. Send Node0 an inv for the height 3 block produced in #4 above. Node0 should figure out that it is missing the height 2 block and send a getdata. 7. Send Node0 the missing block again. Node0 should process it and the tip should advance. 8. Create a fork which is invalid at a height longer than the current chain (i.e. to which the node will try to reorg) but which has headers built on top of the invalid block. Check that we get disconnected if we send more headers on the chain the node now knows to be invalid. 9. Test Node1 is able to sync when connected to node0 (which should have sufficient work on its chain). """ import time from test_framework.blocktools import ( create_block, create_coinbase, create_transaction, ) from test_framework.messages import ( CBlockHeader, CInv, msg_block, msg_headers, msg_inv, ) from test_framework.mininode import ( mininode_lock, P2PInterface, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, connect_nodes, sync_blocks, ) class AcceptBlockTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [["-noparkdeepreorg"], ["-minimumchainwork=0x10"]] def setup_network(self): # Node0 will be used to test behavior of processing unrequested blocks # from its peers, while Node1 (started with -minimumchainwork) is used # to test the interaction with nMinimumChainWork.
self.setup_nodes() def run_test(self): # Setup the p2p connections # test_node connects to node0 test_node = self.nodes[0].add_p2p_connection(P2PInterface()) # min_work_node connects to node1 (the -minimumchainwork node) min_work_node = self.nodes[1].add_p2p_connection(P2PInterface()) # 1. Have nodes mine a block (leave IBD) [n.generate(1) for n in self.nodes] tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes] # 2. Send one block that builds on each tip. # This should be accepted by node0 blocks_h2 = [] # the height 2 blocks on each node's chain block_time = int(time.time()) + 1 for i in range(2): blocks_h2.append(create_block( tips[i], create_coinbase(2), block_time)) blocks_h2[i].solve() block_time += 1 test_node.send_message(msg_block(blocks_h2[0])) min_work_node.send_message(msg_block(blocks_h2[1])) for x in [test_node, min_work_node]: x.sync_with_ping() assert_equal(self.nodes[0].getblockcount(), 2) assert_equal(self.nodes[1].getblockcount(), 1) self.log.info( "First height 2 block accepted by node0; correctly rejected by node1") # 3. Send another block that builds on genesis. block_h1f = create_block( int("0x" + self.nodes[0].getblockhash(0), 0), create_coinbase(1), block_time) block_time += 1 block_h1f.solve() test_node.send_message(msg_block(block_h1f)) test_node.sync_with_ping() tip_entry_found = False for x in self.nodes[0].getchaintips(): if x['hash'] == block_h1f.hash: assert_equal(x['status'], "headers-only") tip_entry_found = True assert tip_entry_found assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_h1f.hash) # 4. Send another two blocks that build on the fork. block_h2f = create_block( block_h1f.sha256, create_coinbase(2), block_time) block_time += 1 block_h2f.solve() test_node.send_message(msg_block(block_h2f)) test_node.sync_with_ping() # Since the earlier block was not processed by node, the new block # can't be fully validated. tip_entry_found = False for x in self.nodes[0].getchaintips(): if x['hash'] == block_h2f.hash: assert_equal(x['status'], "headers-only") tip_entry_found = True assert tip_entry_found # But this block should be accepted by node since it has equal work. self.nodes[0].getblock(block_h2f.hash) self.log.info("Second height 2 block accepted, but not reorg'ed to") # 4b. Now send another block that builds on the forking chain. block_h3 = create_block( - block_h2f.sha256, create_coinbase(3), block_h2f.nTime+1) + block_h2f.sha256, create_coinbase(3), block_h2f.nTime + 1) block_h3.solve() test_node.send_message(msg_block(block_h3)) test_node.sync_with_ping() # Since the earlier block was not processed by node, the new block # can't be fully validated. tip_entry_found = False for x in self.nodes[0].getchaintips(): if x['hash'] == block_h3.hash: assert_equal(x['status'], "headers-only") tip_entry_found = True assert tip_entry_found self.nodes[0].getblock(block_h3.hash) # But this block should be accepted by node since it has more work. self.nodes[0].getblock(block_h3.hash) self.log.info("Unrequested more-work block accepted") # 4c. 
Now mine 288 more blocks and deliver; all should be processed but # the last (height-too-high) on node (as long as it is not missing any headers) tip = block_h3 all_blocks = [] for i in range(288): next_block = create_block( - tip.sha256, create_coinbase(i + 4), tip.nTime+1) + tip.sha256, create_coinbase(i + 4), tip.nTime + 1) next_block.solve() all_blocks.append(next_block) tip = next_block # Now send the block at height 5 and check that it wasn't accepted (missing header) test_node.send_message(msg_block(all_blocks[1])) test_node.sync_with_ping() assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock, all_blocks[1].hash) assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockheader, all_blocks[1].hash) # The block at height 5 should be accepted if we provide the missing header, though headers_message = msg_headers() headers_message.headers.append(CBlockHeader(all_blocks[0])) test_node.send_message(headers_message) test_node.send_message(msg_block(all_blocks[1])) test_node.sync_with_ping() self.nodes[0].getblock(all_blocks[1].hash) # Now send the blocks in all_blocks for i in range(288): test_node.send_message(msg_block(all_blocks[i])) test_node.sync_with_ping() # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead for x in all_blocks[:-1]: self.nodes[0].getblock(x.hash) assert_raises_rpc_error( -1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash) # 5. Test handling of unrequested block on the node that didn't process # Should still not be processed (even though it has a child that has more # work). # The node should have requested the blocks at some point, so # disconnect/reconnect first self.nodes[0].disconnect_p2ps() self.nodes[1].disconnect_p2ps() test_node = self.nodes[0].add_p2p_connection(P2PInterface()) test_node.send_message(msg_block(block_h1f)) test_node.sync_with_ping() assert_equal(self.nodes[0].getblockcount(), 2) self.log.info( "Unrequested block that would complete more-work chain was ignored") # 6. Try to get node to request the missing block. # Poke the node with an inv for block at height 3 and see if that # triggers a getdata on block 2 (it should if block 2 is missing). with mininode_lock: # Clear state so we can check the getdata request test_node.last_message.pop("getdata", None) test_node.send_message(msg_inv([CInv(2, block_h3.sha256)])) test_node.sync_with_ping() with mininode_lock: getdata = test_node.last_message["getdata"] # Check that the getdata includes the right block assert_equal(getdata.inv[0].hash, block_h1f.sha256) self.log.info("Inv at tip triggered getdata for unprocessed block") # 7. Send the missing block for the third time (now it is requested) test_node.send_message(msg_block(block_h1f)) test_node.sync_with_ping() assert_equal(self.nodes[0].getblockcount(), 290) self.nodes[0].getblock(all_blocks[286].hash) assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash) assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[287].hash) self.log.info( "Successfully reorged to longer chain from non-whitelisted peer") # 8. 
Create a chain which is invalid at a height longer than the # current chain, but which has more blocks on top of that block_289f = create_block( - all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime+1) + all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime + 1) block_289f.solve() block_290f = create_block( - block_289f.sha256, create_coinbase(290), block_289f.nTime+1) + block_289f.sha256, create_coinbase(290), block_289f.nTime + 1) block_290f.solve() block_291 = create_block( - block_290f.sha256, create_coinbase(291), block_290f.nTime+1) + block_290f.sha256, create_coinbase(291), block_290f.nTime + 1) # block_291 spends a coinbase below maturity! block_291.vtx.append(create_transaction( block_290f.vtx[0], 0, b"42", 1)) block_291.hashMerkleRoot = block_291.calc_merkle_root() block_291.solve() block_292 = create_block( - block_291.sha256, create_coinbase(292), block_291.nTime+1) + block_291.sha256, create_coinbase(292), block_291.nTime + 1) block_292.solve() # Now send all the headers on the chain and enough blocks to trigger reorg headers_message = msg_headers() headers_message.headers.append(CBlockHeader(block_289f)) headers_message.headers.append(CBlockHeader(block_290f)) headers_message.headers.append(CBlockHeader(block_291)) headers_message.headers.append(CBlockHeader(block_292)) test_node.send_message(headers_message) test_node.sync_with_ping() tip_entry_found = False for x in self.nodes[0].getchaintips(): if x['hash'] == block_292.hash: assert_equal(x['status'], "headers-only") tip_entry_found = True assert tip_entry_found assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_292.hash) test_node.send_message(msg_block(block_289f)) test_node.send_message(msg_block(block_290f)) test_node.sync_with_ping() self.nodes[0].getblock(block_289f.hash) self.nodes[0].getblock(block_290f.hash) test_node.send_message(msg_block(block_291)) # At this point we've sent an obviously-bogus block, wait for full processing # without assuming whether we will be disconnected or not try: # Only wait a short while so the test doesn't take forever if we do get # disconnected test_node.sync_with_ping(timeout=1) except AssertionError: test_node.wait_for_disconnect() self.nodes[0].disconnect_p2ps() test_node = self.nodes[0].add_p2p_connection(P2PInterface()) # We should have failed reorg and switched back to 290 (but have block 291) assert_equal(self.nodes[0].getblockcount(), 290) assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash) assert_equal(self.nodes[0].getblock( block_291.hash)["confirmations"], -1) # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected block_293 = create_block( - block_292.sha256, create_coinbase(293), block_292.nTime+1) + block_292.sha256, create_coinbase(293), block_292.nTime + 1) block_293.solve() headers_message = msg_headers() headers_message.headers.append(CBlockHeader(block_293)) test_node.send_message(headers_message) test_node.wait_for_disconnect() # 9. 
Connect node1 to node0 and ensure it is able to sync connect_nodes(self.nodes[0], self.nodes[1]) sync_blocks([self.nodes[0], self.nodes[1]]) self.log.info("Successfully synced nodes 1 and 0") if __name__ == '__main__': AcceptBlockTest().main() diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py index 739989c167..194f60671e 100755 --- a/test/functional/rpc_createmultisig.py +++ b/test/functional/rpc_createmultisig.py @@ -1,103 +1,103 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the createmultisig and addmultisigaddress RPCs.""" from test_framework.test_framework import BitcoinTestFramework import decimal class RpcCreateMultiSigTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 3 def get_keys(self): node0, node1, node2 = self.nodes self.add = [node1.getnewaddress() for _ in range(self.nkeys)] self.pub = [node1.getaddressinfo(a)["pubkey"] for a in self.add] self.priv = [node1.dumpprivkey(a) for a in self.add] self.final = node2.getnewaddress() def run_test(self): node0, node1, node2 = self.nodes # 50 BCH each, rest will be 25 BCH each node0.generate(149) self.sync_all() self.moved = 0 for self.nkeys in [3, 5]: for self.nsigs in [2, 3]: self.get_keys() self.do_multisig() self.checkbalances() def checkbalances(self): node0, node1, node2 = self.nodes node0.generate(100) self.sync_all() bal0 = node0.getbalance() bal1 = node1.getbalance() bal2 = node2.getbalance() height = node0.getblockchaininfo()["blocks"] assert 150 < height < 350 total = 149 * 50 + (height - 149 - 100) * 25 assert bal1 == 0 assert bal2 == self.moved assert bal0 + bal1 + bal2 == total def do_multisig(self): node0, node1, node2 = self.nodes msig = node2.createmultisig(self.nsigs, self.pub) madd = msig["address"] mredeem = msig["redeemScript"] # compare against addmultisigaddress msigw = node1.addmultisigaddress(self.nsigs, self.pub, None) maddw = msigw["address"] mredeemw = msigw["redeemScript"] # addmultisigaddress and createmultisig work the same assert maddw == madd assert mredeemw == mredeem txid = node0.sendtoaddress(madd, 40) tx = node0.getrawtransaction(txid, True) vout = [v["n"] for v in tx["vout"] if madd in v["scriptPubKey"].get("addresses", [])] assert len(vout) == 1 vout = vout[0] scriptPubKey = tx["vout"][vout]["scriptPubKey"]["hex"] value = tx["vout"][vout]["value"] prevtxs = [{"txid": txid, "vout": vout, "scriptPubKey": scriptPubKey, "redeemScript": mredeem, "amount": value}] node0.generate(1) outval = value - decimal.Decimal("0.00001000") rawtx = node2.createrawtransaction( [{"txid": txid, "vout": vout}], [{self.final: outval}]) rawtx2 = node2.signrawtransactionwithkey( - rawtx, self.priv[0:self.nsigs-1], prevtxs) + rawtx, self.priv[0:self.nsigs - 1], prevtxs) rawtx3 = node2.signrawtransactionwithkey( rawtx2["hex"], [self.priv[-1]], prevtxs) self.moved += outval tx = node0.sendrawtransaction(rawtx3["hex"], True) blk = node0.generate(1)[0] assert tx in node0.getblock(blk)["tx"] txinfo = node0.getrawtransaction(tx, True, blk) self.log.info("n/m={}/{} size={}".format(self.nsigs, self.nkeys, txinfo["size"])) if __name__ == '__main__': RpcCreateMultiSigTest().main() diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py index 9b9d09471d..57a6a7a8c0 100755 --- a/test/functional/rpc_psbt.py +++ b/test/functional/rpc_psbt.py @@ 
-1,199 +1,199 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the Partially Signed Transaction RPCs. """ import json import os from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, find_output, ) # Create one-input, one-output, no-fee transaction: class PSBTTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = False self.num_nodes = 3 def run_test(self): # Create and fund a raw tx for sending 10 BTC psbtx1 = self.nodes[0].walletcreatefundedpsbt( [], {self.nodes[2].getnewaddress(): 10})['psbt'] # Node 1 should not be able to add anything to it but still return the psbtx same as before psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt'] assert_equal(psbtx1, psbtx) # Sign the transaction and send signed_tx = self.nodes[0].walletprocesspsbt(psbtx)['psbt'] final_tx = self.nodes[0].finalizepsbt(signed_tx)['hex'] self.nodes[0].sendrawtransaction(final_tx) # Create p2sh, p2pkh addresses pubkey0 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress())['pubkey'] pubkey1 = self.nodes[1].getaddressinfo( self.nodes[1].getnewaddress())['pubkey'] pubkey2 = self.nodes[2].getaddressinfo( self.nodes[2].getnewaddress())['pubkey'] p2sh = self.nodes[1].addmultisigaddress( 2, [pubkey0, pubkey1, pubkey2], "")['address'] p2pkh = self.nodes[1].getnewaddress("") # fund those addresses rawtx = self.nodes[0].createrawtransaction([], {p2sh: 10, p2pkh: 10}) rawtx = self.nodes[0].fundrawtransaction(rawtx, {"changePosition": 0}) signed_tx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])[ 'hex'] txid = self.nodes[0].sendrawtransaction(signed_tx) self.nodes[0].generate(6) self.sync_all() # Find the output pos p2sh_pos = -1 p2pkh_pos = -1 decoded = self.nodes[0].decoderawtransaction(signed_tx) for out in decoded['vout']: if out['scriptPubKey']['addresses'][0] == p2sh: p2sh_pos = out['n'] elif out['scriptPubKey']['addresses'][0] == p2pkh: p2pkh_pos = out['n'] # spend single key from node 1 rawtx = self.nodes[1].walletcreatefundedpsbt([{"txid": txid, "vout": p2pkh_pos}], { self.nodes[1].getnewaddress(): 9.99})['psbt'] walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(rawtx) assert_equal(walletprocesspsbt_out['complete'], True) self.nodes[1].sendrawtransaction( self.nodes[1].finalizepsbt(walletprocesspsbt_out['psbt'])['hex']) # partially sign multisig things with node 1 psbtx = self.nodes[1].walletcreatefundedpsbt([{"txid": txid, "vout": p2sh_pos}], { self.nodes[1].getnewaddress(): 9.99})['psbt'] walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(psbtx) psbtx = walletprocesspsbt_out['psbt'] assert_equal(walletprocesspsbt_out['complete'], False) # partially sign with node 2. 
This should be complete and sendable walletprocesspsbt_out = self.nodes[2].walletprocesspsbt(psbtx) assert_equal(walletprocesspsbt_out['complete'], True) self.nodes[2].sendrawtransaction( self.nodes[2].finalizepsbt(walletprocesspsbt_out['psbt'])['hex']) # check that walletprocesspsbt fails to decode a non-psbt rawtx = self.nodes[1].createrawtransaction([{"txid": txid, "vout": p2pkh_pos}], { self.nodes[1].getnewaddress(): 9.99}) assert_raises_rpc_error(-22, "TX decode failed", self.nodes[1].walletprocesspsbt, rawtx) # Convert a non-psbt to psbt and make sure we can decode it rawtx = self.nodes[0].createrawtransaction( [], {self.nodes[1].getnewaddress(): 10}) rawtx = self.nodes[0].fundrawtransaction(rawtx) new_psbt = self.nodes[0].converttopsbt(rawtx['hex']) self.nodes[0].decodepsbt(new_psbt) # Explicitly allow converting non-empty txs new_psbt = self.nodes[0].converttopsbt(rawtx['hex']) self.nodes[0].decodepsbt(new_psbt) # Create outputs to nodes 1 and 2 node1_addr = self.nodes[1].getnewaddress() node2_addr = self.nodes[2].getnewaddress() txid1 = self.nodes[0].sendtoaddress(node1_addr, 13) txid2 = self.nodes[0].sendtoaddress(node2_addr, 13) self.nodes[0].generate(6) self.sync_all() vout1 = find_output(self.nodes[1], txid1, 13) vout2 = find_output(self.nodes[2], txid2, 13) # Create a psbt spending outputs from nodes 1 and 2 - psbt_orig = self.nodes[0].createpsbt([{"txid": txid1, "vout": vout1}, { + psbt_orig = self.nodes[0].createpsbt([{"txid": txid1, "vout": vout1}, { "txid": txid2, "vout": vout2}], {self.nodes[0].getnewaddress(): 25.999}) # Update psbts, should only have data for one input and not the other psbt1 = self.nodes[1].walletprocesspsbt(psbt_orig)['psbt'] psbt1_decoded = self.nodes[0].decodepsbt(psbt1) assert psbt1_decoded['inputs'][0] and not psbt1_decoded['inputs'][1] psbt2 = self.nodes[2].walletprocesspsbt(psbt_orig)['psbt'] psbt2_decoded = self.nodes[0].decodepsbt(psbt2) assert not psbt2_decoded['inputs'][0] and psbt2_decoded['inputs'][1] # Combine, finalize, and send the psbts combined = self.nodes[0].combinepsbt([psbt1, psbt2]) finalized = self.nodes[0].finalizepsbt(combined)['hex'] self.nodes[0].sendrawtransaction(finalized) self.nodes[0].generate(6) self.sync_all() # BIP 174 Test Vectors # Check that unknown values are just passed through unknown_psbt = "cHNidP8BAD8CAAAAAf//////////////////////////////////////////AAAAAAD/////AQAAAAAAAAAAA2oBAAAAAAAACg8BAgMEBQYHCAkPAQIDBAUGBwgJCgsMDQ4PAAA=" unknown_out = self.nodes[0].walletprocesspsbt(unknown_psbt)['psbt'] assert_equal(unknown_psbt, unknown_out) # Open the data file with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_psbt.json'), encoding='utf-8') as f: d = json.load(f) invalids = d['invalid'] valids = d['valid'] creators = d['creator'] signers = d['signer'] combiners = d['combiner'] finalizers = d['finalizer'] extractors = d['extractor'] # Invalid PSBTs for invalid in invalids: assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decodepsbt, invalid) # Valid PSBTs for valid in valids: self.nodes[0].decodepsbt(valid) # Creator Tests for creator in creators: created_tx = self.nodes[0].createpsbt( creator['inputs'], creator['outputs']) assert_equal(created_tx, creator['result']) # Signer tests for i, signer in enumerate(signers): self.nodes[2].createwallet("wallet{}".format(i)) wrpc = self.nodes[2].get_wallet_rpc("wallet{}".format(i)) for key in signer['privkeys']: wrpc.importprivkey(key) signed_tx = wrpc.walletprocesspsbt(signer['psbt'])['psbt'] assert_equal(signed_tx, signer['result']) 
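# (The vector groups above and below mirror the BIP 174 role pipeline. A
# minimal end-to-end sketch with the same RPCs, where psbt_a and psbt_b
# are hypothetical partially signed copies of one transaction:
#   psbt = node.createpsbt(inputs, outputs)            # Creator
#   psbt_a = wallet_a.walletprocesspsbt(psbt)['psbt']  # Updater + Signer
#   psbt_b = wallet_b.walletprocesspsbt(psbt)['psbt']
#   combined = node.combinepsbt([psbt_a, psbt_b])      # Combiner
#   hex_tx = node.finalizepsbt(combined, True)['hex']  # Finalizer + Extractor
# )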
# Combiner test for combiner in combiners: combined = self.nodes[2].combinepsbt(combiner['combine']) assert_equal(combined, combiner['result']) # Finalizer test for finalizer in finalizers: finalized = self.nodes[2].finalizepsbt( finalizer['finalize'], False)['psbt'] assert_equal(finalized, finalizer['result']) # Extractor test for extractor in extractors: extracted = self.nodes[2].finalizepsbt( extractor['extract'], True)['hex'] assert_equal(extracted, extractor['result']) if __name__ == '__main__': PSBTTest().main() diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py index ce2760c6b5..4d3797dc23 100755 --- a/test/functional/rpc_rawtransaction.py +++ b/test/functional/rpc_rawtransaction.py @@ -1,526 +1,527 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the rawtransaction RPCs. Test the following RPCs: - createrawtransaction - signrawtransactionwithwallet - sendrawtransaction - decoderawtransaction - getrawtransaction """ from decimal import Decimal from collections import OrderedDict from io import BytesIO from test_framework.messages import ( COutPoint, CTransaction, CTxIn, CTxOut, ToHex, ) from test_framework.script import CScript from test_framework.test_framework import BitcoinTestFramework from test_framework.txtools import pad_raw_tx from test_framework.util import ( assert_equal, assert_greater_than, assert_raises_rpc_error, connect_nodes_bi, hex_str_to_bytes, ) class multidict(dict): """Dictionary that allows duplicate keys. Constructed with a list of (key, value) tuples. When dumped by the json module, will output invalid json with repeated keys, e.g.: >>> json.dumps(multidict([(1,2),(1,2)])) '{"1": 2, "1": 2}' Used to test calls to rpc methods with repeated keys in the json object.""" def __init__(self, x): dict.__init__(self, x) self.x = x def items(self): return self.x # Create one-input, one-output, no-fee transaction: class RawTransactionsTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 3 def setup_network(self): super().setup_network() connect_nodes_bi(self.nodes[0], self.nodes[2]) def run_test(self): self.log.info( 'prepare some coins for multiple *rawtransaction commands') self.nodes[2].generate(1) self.sync_all() self.nodes[0].generate(101) self.sync_all() self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0) self.sync_all() self.nodes[0].generate(5) self.sync_all() self.log.info( 'Test getrawtransaction on genesis block coinbase returns an error') block = self.nodes[0].getblock(self.nodes[0].getblockhash(0)) assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot']) self.log.info( 'Check parameter types and required parameters of createrawtransaction') # Test `createrawtransaction` required parameters assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction) assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, []) # Test `createrawtransaction` invalid extra parameters assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, 'foo') # Test `createrawtransaction` invalid 
`inputs` txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000' assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {}) assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {}) assert_raises_rpc_error(-8, "txid must be hexadecimal string", self.nodes[0].createrawtransaction, [{}], {}) assert_raises_rpc_error(-8, "txid must be hexadecimal string", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {}) assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {}) assert_raises_rpc_error(-8, "Invalid parameter, vout must be a number", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {}) assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {}) assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {}) # Test `createrawtransaction` invalid `outputs` address = self.nodes[0].getnewaddress() address2 = self.nodes[0].getnewaddress() assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo') # Should not throw for backwards compatibility self.nodes[0].createrawtransaction(inputs=[], outputs={}) self.nodes[0].createrawtransaction(inputs=[], outputs=[]) assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'}) assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0}) assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'}) assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1}) assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: {}".format( address), self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)])) assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: {}".format( address), self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}]) assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}]) assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']]) # Test `createrawtransaction` invalid `locktime` assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo') assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1) assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296) self.log.info( 'Check that createrawtransaction accepts an array and object as outputs') tx = CTransaction() # One output tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction( inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99})))) assert_equal(len(tx.vout), 1) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction( inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]), ) # Two outputs tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[ {'txid': txid, 'vout': 9}], 
outputs=OrderedDict([(address, 99), (address2, 99)]))))) assert_equal(len(tx.vout), 2) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[ {address: 99}, {address2: 99}]), ) # Two data outputs tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[ {'txid': txid, 'vout': 9}], outputs=multidict([('data', '99'), ('data', '99')]))))) assert_equal(len(tx.vout), 2) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[ {'data': '99'}, {'data': '99'}]), ) # Multiple mixed outputs tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[ {'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), ('data', '99'), ('data', '99')]))))) assert_equal(len(tx.vout), 3) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[ {address: 99}, {'data': '99'}, {'data': '99'}]), ) self.log.info('sendrawtransaction with missing input') # won't exist inputs = [ {'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout': 1}] outputs = {self.nodes[0].getnewaddress(): 4.998} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) rawtx = pad_raw_tx(rawtx) rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx) # This will raise an exception since there are missing inputs assert_raises_rpc_error( -25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex']) ##################################### # getrawtransaction with block hash # ##################################### # make a tx by sending, then generate 2 blocks; block1 has the tx in it tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1) block1, block2 = self.nodes[2].generate(2) self.sync_all() # We should be able to get the raw transaction by providing the correct block gottx = self.nodes[0].getrawtransaction(tx, True, block1) assert_equal(gottx['txid'], tx) assert_equal(gottx['in_active_chain'], True) # We should not have the 'in_active_chain' flag when we don't provide a block gottx = self.nodes[0].getrawtransaction(tx, True) assert_equal(gottx['txid'], tx) assert 'in_active_chain' not in gottx # We should not get the tx if we provide an unrelated block assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2) # An invalid block hash should raise the correct errors assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal", self.nodes[0].getrawtransaction, tx, True, True) assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal", self.nodes[0].getrawtransaction, tx, True, "foobar") assert_raises_rpc_error(-8, "parameter 3 must be of length 64", self.nodes[0].getrawtransaction, tx, True, "abcd1234") assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000") # Undo the blocks and check in_active_chain self.nodes[0].invalidateblock(block1) gottx = self.nodes[0].getrawtransaction( txid=tx, verbose=True, blockhash=block1) assert_equal(gottx['in_active_chain'], False) self.nodes[0].reconsiderblock(block1) assert_equal(self.nodes[0].getbestblockhash(), block2) # # RAW TX MULTISIG TESTS # # # 2of2 test addr1 = self.nodes[2].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[2].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) # Tests for createmultisig and 
addmultisigaddress assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"]) # createmultisig can only take public keys self.nodes[0].createmultisig( 2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here. assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) mSigObj = self.nodes[2].addmultisigaddress( 2, [addr1Obj['pubkey'], addr1])['address'] # use balance deltas instead of absolute values bal = self.nodes[2].getbalance() # send 1.2 BCH to msig addr txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) self.sync_all() self.nodes[0].generate(1) self.sync_all() # node2 has both keys of the 2of2 ms addr., tx should affect the # balance assert_equal(self.nodes[2].getbalance(), bal + Decimal('1.20000000')) # 2of3 test from different nodes bal = self.nodes[2].getbalance() addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr3 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) addr3Obj = self.nodes[2].getaddressinfo(addr3) mSigObj = self.nodes[2].addmultisigaddress( 2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address'] txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) decTx = self.nodes[0].gettransaction(txId) rawTx = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() # THIS IS AN INCOMPLETE FEATURE # NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND # COUNT AT BALANCE CALCULATION # for now, assume the funds of a 2of3 multisig tx are not marked as # spendable assert_equal(self.nodes[2].getbalance(), bal) txDetails = self.nodes[0].gettransaction(txId, True) rawTx = self.nodes[0].decoderawtransaction(txDetails['hex']) vout = False for outpoint in rawTx['vout']: if outpoint['value'] == Decimal('2.20000000'): vout = outpoint break bal = self.nodes[0].getbalance() inputs = [{ "txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "amount": vout['value'], }] outputs = {self.nodes[0].getnewaddress(): 2.19} rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet( rawTx, inputs) # node1 only has one key, can't comp. 
sign the tx assert_equal(rawTxPartialSigned['complete'], False) rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs) # node2 can sign the tx compl., own two of three keys assert_equal(rawTxSigned['complete'], True) self.nodes[2].sendrawtransaction(rawTxSigned['hex']) rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[0].getbalance(), bal + Decimal( '50.00000000') + Decimal('2.19000000')) # block reward + tx rawTxBlock = self.nodes[0].getblock(self.nodes[0].getbestblockhash()) # 2of2 test for combining transactions bal = self.nodes[2].getbalance() addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) self.nodes[1].addmultisigaddress( 2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] mSigObj = self.nodes[2].addmultisigaddress( 2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] mSigObjValid = self.nodes[2].getaddressinfo(mSigObj) txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) decTx = self.nodes[0].gettransaction(txId) rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() # the funds of a 2of2 multisig tx should not be marked as spendable assert_equal(self.nodes[2].getbalance(), bal) txDetails = self.nodes[0].gettransaction(txId, True) rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex']) vout = False for outpoint in rawTx2['vout']: if outpoint['value'] == Decimal('2.20000000'): vout = outpoint break bal = self.nodes[0].getbalance() inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey'] ['hex'], "redeemScript": mSigObjValid['hex'], "amount": vout['value']}] outputs = {self.nodes[0].getnewaddress(): 2.19} rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs) rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet( rawTx2, inputs) self.log.debug(rawTxPartialSigned1) # node1 only has one key, can't comp. sign the tx assert_equal(rawTxPartialSigned1['complete'], False) rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet( rawTx2, inputs) self.log.debug(rawTxPartialSigned2) # node2 only has one key, can't comp. sign the tx assert_equal(rawTxPartialSigned2['complete'], False) rawTxComb = self.nodes[2].combinerawtransaction( [rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']]) self.log.debug(rawTxComb) self.nodes[2].sendrawtransaction(rawTxComb) rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[0].getbalance( - ), bal+Decimal('50.00000000')+Decimal('2.19000000')) # block reward + tx + ), bal + Decimal('50.00000000') + Decimal('2.19000000')) # block reward + tx # getrawtransaction tests # 1. valid parameters - only supply txid txHash = rawTx["hash"] assert_equal( self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex']) # 2. valid parameters - supply txid and 0 for non-verbose assert_equal( self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex']) # 3. valid parameters - supply txid and False for non-verbose assert_equal(self.nodes[0].getrawtransaction( txHash, False), rawTxSigned['hex']) # 4. valid parameters - supply txid and 1 for verbose. # We only check the "hex" field of the output so we don't need to # update this test every time the output format changes. 
assert_equal(self.nodes[0].getrawtransaction( txHash, 1)["hex"], rawTxSigned['hex']) # 5. valid parameters - supply txid and True for verbose assert_equal(self.nodes[0].getrawtransaction( txHash, True)["hex"], rawTxSigned['hex']) # 6. invalid parameters - supply txid and string "False" assert_raises_rpc_error( -1, "not a boolean", self.nodes[0].getrawtransaction, txHash, "False") # 7. invalid parameters - supply txid and empty array assert_raises_rpc_error( -1, "not a boolean", self.nodes[0].getrawtransaction, txHash, []) # 8. invalid parameters - supply txid and empty dict assert_raises_rpc_error( -1, "not a boolean", self.nodes[0].getrawtransaction, txHash, {}) # Sanity checks on verbose getrawtransaction output rawTxOutput = self.nodes[0].getrawtransaction(txHash, True) assert_equal(rawTxOutput["hex"], rawTxSigned["hex"]) assert_equal(rawTxOutput["txid"], txHash) assert_equal(rawTxOutput["hash"], txHash) assert_greater_than(rawTxOutput["size"], 300) assert_equal(rawTxOutput["version"], 0x02) assert_equal(rawTxOutput["locktime"], 0) assert_equal(len(rawTxOutput["vin"]), 1) assert_equal(len(rawTxOutput["vout"]), 1) assert_equal(rawTxOutput["blockhash"], rawTxBlock["hash"]) assert_equal(rawTxOutput["confirmations"], 3) assert_equal(rawTxOutput["time"], rawTxBlock["time"]) assert_equal(rawTxOutput["blocktime"], rawTxBlock["time"]) inputs = [ {'txid': "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'sequence': 1000}] outputs = {self.nodes[0].getnewaddress(): 1} assert_raises_rpc_error( -8, 'Invalid parameter, missing vout key', self.nodes[0].createrawtransaction, inputs, outputs) inputs[0]['vout'] = "1" assert_raises_rpc_error( -8, 'Invalid parameter, vout must be a number', self.nodes[0].createrawtransaction, inputs, outputs) inputs[0]['vout'] = -1 assert_raises_rpc_error( -8, 'Invalid parameter, vout must be positive', self.nodes[0].createrawtransaction, inputs, outputs) inputs[0]['vout'] = 1 rawtx = self.nodes[0].createrawtransaction(inputs, outputs) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['vin'][0]['sequence'], 1000) # 9. invalid parameters - sequence number out of range inputs[0]['sequence'] = -1 assert_raises_rpc_error( -8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) # 10. invalid parameters - sequence number out of range inputs[0]['sequence'] = 4294967296 assert_raises_rpc_error( -8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) inputs[0]['sequence'] = 4294967294 rawtx = self.nodes[0].createrawtransaction(inputs, outputs) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['vin'][0]['sequence'], 4294967294) #################################### # TRANSACTION VERSION NUMBER TESTS # #################################### # Test the minimum transaction version number that fits in a signed 32-bit integer. tx = CTransaction() tx.nVersion = -0x80000000 rawtx = ToHex(tx) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['version'], -0x80000000) # Test the maximum transaction version number that fits in a signed 32-bit integer. 
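# (For reference: -0x80000000 == -2**31 and 0x7fffffff == 2**31 - 1, i.e.
# these two tests cover the exact endpoints of the signed 32-bit nVersion
# field.)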
tx = CTransaction() tx.nVersion = 0x7fffffff rawtx = ToHex(tx) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['version'], 0x7fffffff) ########################################## # Decoding weird scripts in transactions # ########################################## self.log.info('Decode correctly-formatted but weird transactions') tx = CTransaction() # empty self.nodes[0].decoderawtransaction(ToHex(tx)) # truncated push tx.vin.append(CTxIn(COutPoint(42, 0), b'\x4e\x00\x00')) tx.vin.append(CTxIn(COutPoint(42, 0), b'\x4c\x10TRUNC')) tx.vout.append(CTxOut(0, b'\x4e\x00\x00')) tx.vout.append(CTxOut(0, b'\x4c\x10TRUNC')) self.nodes[0].decoderawtransaction(ToHex(tx)) # giant pushes and long scripts - tx.vin.append(CTxIn(COutPoint(42, 0), CScript([b'giant push'*10000]))) - tx.vout.append(CTxOut(0, CScript([b'giant push'*10000]))) + tx.vin.append( + CTxIn(COutPoint(42, 0), CScript([b'giant push' * 10000]))) + tx.vout.append(CTxOut(0, CScript([b'giant push' * 10000]))) self.nodes[0].decoderawtransaction(ToHex(tx)) self.log.info('Refuse garbage after transaction') assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, ToHex(tx) + '00') if __name__ == '__main__': RawTransactionsTest().main() diff --git a/test/functional/rpc_txoutproof.py b/test/functional/rpc_txoutproof.py index 37b15c3419..ea7f47745e 100755 --- a/test/functional/rpc_txoutproof.py +++ b/test/functional/rpc_txoutproof.py @@ -1,129 +1,129 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test gettxoutproof and verifytxoutproof RPCs.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, connect_nodes, ) from test_framework.messages import CMerkleBlock, FromHex, ToHex class MerkleBlockTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 4 self.setup_clean_chain = True # Nodes 0/1 are "wallet" nodes, Nodes 2/3 are used for testing self.extra_args = [[], [], [], ["-txindex"]] def setup_network(self): self.setup_nodes() connect_nodes(self.nodes[0], self.nodes[1]) connect_nodes(self.nodes[0], self.nodes[2]) connect_nodes(self.nodes[0], self.nodes[3]) self.sync_all() def run_test(self): self.log.info("Mining blocks...") self.nodes[0].generate(105) self.sync_all() chain_height = self.nodes[1].getblockcount() assert_equal(chain_height, 105) assert_equal(self.nodes[1].getbalance(), 0) assert_equal(self.nodes[2].getbalance(), 0) node0utxos = self.nodes[0].listunspent(1) tx1 = self.nodes[0].createrawtransaction( [node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99}) txid1 = self.nodes[0].sendrawtransaction( self.nodes[0].signrawtransactionwithwallet(tx1)["hex"]) tx2 = self.nodes[0].createrawtransaction( [node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99}) txid2 = self.nodes[0].sendrawtransaction( self.nodes[0].signrawtransactionwithwallet(tx2)["hex"]) # This will raise an exception because the transaction is not yet in a block assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1]) self.nodes[0].generate(1) blockhash = self.nodes[0].getblockhash(chain_height + 1) self.sync_all() txlist = [] blocktxn = self.nodes[0].getblock(blockhash, True)["tx"] txlist.append(blocktxn[1]) txlist.append(blocktxn[2]) assert_equal(self.nodes[2].verifytxoutproof( 
self.nodes[2].gettxoutproof([txid1])), [txid1]) assert_equal(self.nodes[2].verifytxoutproof( self.nodes[2].gettxoutproof([txid1, txid2])), txlist) assert_equal(self.nodes[2].verifytxoutproof( self.nodes[2].gettxoutproof([txid1, txid2], blockhash)), txlist) txin_spent = self.nodes[1].listunspent(1).pop() tx3 = self.nodes[1].createrawtransaction( [txin_spent], {self.nodes[0].getnewaddress(): 49.98}) txid3 = self.nodes[0].sendrawtransaction( self.nodes[1].signrawtransactionwithwallet(tx3)["hex"]) self.nodes[0].generate(1) self.sync_all() txid_spent = txin_spent["txid"] txid_unspent = txid1 if txin_spent["txid"] != txid1 else txid2 # We can't find the block from a fully-spent tx assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[2].gettxoutproof, [txid_spent]) # We can get the proof if we specify the block assert_equal(self.nodes[2].verifytxoutproof( self.nodes[2].gettxoutproof([txid_spent], blockhash)), [txid_spent]) # We can't get the proof if we specify a non-existent block assert_raises_rpc_error(-5, "Block not found", self.nodes[2].gettxoutproof, [ txid_spent], "00000000000000000000000000000000") # We can get the proof if the transaction is unspent assert_equal(self.nodes[2].verifytxoutproof( self.nodes[2].gettxoutproof([txid_unspent])), [txid_unspent]) # We can get the proof if we provide a list of transactions and one of them is unspent. The ordering of the list should not matter. assert_equal(sorted(self.nodes[2].verifytxoutproof( self.nodes[2].gettxoutproof([txid1, txid2]))), sorted(txlist)) assert_equal(sorted(self.nodes[2].verifytxoutproof( self.nodes[2].gettxoutproof([txid2, txid1]))), sorted(txlist)) # We can always get a proof if we have a -txindex assert_equal(self.nodes[2].verifytxoutproof( self.nodes[3].gettxoutproof([txid_spent])), [txid_spent]) # We can't get a proof if we specify transactions from different blocks assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[2].gettxoutproof, [txid1, txid3]) # Now we'll try tweaking a proof. proof = self.nodes[3].gettxoutproof([txid1, txid2]) assert txid1 in self.nodes[0].verifytxoutproof(proof) assert txid2 in self.nodes[1].verifytxoutproof(proof) tweaked_proof = FromHex(CMerkleBlock(), proof) # Make sure that our serialization/deserialization is working assert txid1 in self.nodes[2].verifytxoutproof(ToHex(tweaked_proof)) # Check to see if we can go up the merkle tree and pass this off as a # single-transaction block tweaked_proof.txn.nTransactions = 1 tweaked_proof.txn.vHash = [tweaked_proof.header.hashMerkleRoot] - tweaked_proof.txn.vBits = [True] + [False]*7 + tweaked_proof.txn.vBits = [True] + [False] * 7 for n in self.nodes: assert not n.verifytxoutproof(ToHex(tweaked_proof)) # TODO: try more variants, eg transactions at different depths, and # verify that the proofs are invalid if __name__ == '__main__': MerkleBlockTest().main() diff --git a/test/functional/rpc_users.py b/test/functional/rpc_users.py index 56fdc2c749..3e8d175a32 100755 --- a/test/functional/rpc_users.py +++ b/test/functional/rpc_users.py @@ -1,207 +1,207 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test multiple RPC users.""" import http.client import os import urllib.parse import subprocess from random import SystemRandom import string import configparser from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, get_datadir_path, str_to_b64str, ) class HTTPBasicsTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 def setup_chain(self): super().setup_chain() # Append rpcauth to bitcoin.conf before initialization rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144" rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e" rpcuser = "rpcuser=rpcuser💻" rpcpassword = "rpcpassword=rpcpassword🔑" self.user = ''.join(SystemRandom().choice( string.ascii_letters + string.digits) for _ in range(10)) config = configparser.ConfigParser() config.read_file(open(self.options.configfile, encoding='utf-8')) gen_rpcauth = config['environment']['RPCAUTH'] p = subprocess.Popen([gen_rpcauth, self.user], stdout=subprocess.PIPE, universal_newlines=True) lines = p.stdout.read().splitlines() rpcauth3 = lines[1] self.password = lines[3] with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f: f.write(rpcauth + "\n") f.write(rpcauth2 + "\n") - f.write(rpcauth3+"\n") + f.write(rpcauth3 + "\n") with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "bitcoin.conf"), 'a', encoding='utf8') as f: f.write(rpcuser + "\n") f.write(rpcpassword + "\n") def run_test(self): # # Check correctness of the rpcauth config option # # url = urllib.parse.urlparse(self.nodes[0].url) # Old authpair authpair = url.username + ':' + url.password # New authpair generated via share/rpcuser tool password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM=" # Second authpair with different username password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI=" authpairnew = "rt:" + password self.log.info('Correct...') headers = {"Authorization": "Basic " + str_to_b64str(authpair)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 200) conn.close() # Use new authpair to confirm both work self.log.info('Correct...') headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 200) conn.close() # Wrong login name with rt's password self.log.info('Wrong...') authpairnew = "rtwrong:" + password headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 401) conn.close() # Wrong password for rt self.log.info('Wrong...') authpairnew = "rt:" + password + "wrong" headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 401) conn.close() # Correct for rt2 self.log.info('Correct...') authpairnew = "rt2:" + password2 headers = {"Authorization": "Basic " + 
str_to_b64str(authpairnew)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 200) conn.close() # Wrong password for rt2 self.log.info('Wrong...') authpairnew = "rt2:" + password2 + "wrong" headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 401) conn.close() # Correct for randomly generated user self.log.info('Correct...') authpairnew = self.user + ":" + self.password headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 200) conn.close() # Wrong password for randomly generated user self.log.info('Wrong...') authpairnew = self.user + ":" + self.password + "Wrong" headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 401) conn.close() ############################################################### # Check correctness of the rpcuser/rpcpassword config options # ############################################################### url = urllib.parse.urlparse(self.nodes[1].url) # rpcuser and rpcpassword authpair self.log.info('Correct...') rpcuserauthpair = "rpcuser💻:rpcpassword🔑" headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 200) conn.close() # Wrong login name with rpcuser's password rpcuserauthpair = "rpcuserwrong:rpcpassword" headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 401) conn.close() # Wrong password for rpcuser self.log.info('Wrong...') rpcuserauthpair = "rpcuser:rpcpasswordwrong" headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) resp = conn.getresponse() assert_equal(resp.status, 401) conn.close() if __name__ == '__main__': HTTPBasicsTest().main() diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py index 991339b9fe..88247772a8 100644 --- a/test/functional/test_framework/blocktools.py +++ b/test/functional/test_framework/blocktools.py @@ -1,200 +1,200 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Utilities for manipulating blocks and transactions.""" from .script import ( CScript, OP_CHECKSIG, OP_DUP, OP_EQUALVERIFY, OP_HASH160, OP_RETURN, OP_TRUE, ) from .messages import ( CBlock, COIN, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex, ser_string, ) from .txtools import pad_tx from .util import satoshi_round # Create a block (with regtest difficulty) def create_block(hashprev, coinbase, nTime=None): block = CBlock() if nTime is None: import time block.nTime = int(time.time() + 600) else: block.nTime = nTime block.hashPrevBlock = hashprev block.nBits = 0x207fffff # Will break after a difficulty adjustment... block.vtx.append(coinbase) block.hashMerkleRoot = block.calc_merkle_root() block.calc_sha256() return block def make_conform_to_ctor(block): for tx in block.vtx: tx.rehash() block.vtx = [block.vtx[0]] + \ sorted(block.vtx[1:], key=lambda tx: tx.get_id()) def serialize_script_num(value): r = bytearray(0) if value == 0: return r neg = value < 0 absvalue = -value if neg else value while (absvalue): r.append(int(absvalue & 0xff)) absvalue >>= 8 if r[-1] & 0x80: r.append(0x80 if neg else 0) elif neg: r[-1] |= 0x80 return r # Create a coinbase transaction, assuming no miner fees. # If pubkey is passed in, the coinbase output will be a P2PK output; # otherwise an anyone-can-spend output. def create_coinbase(height, pubkey=None): coinbase = CTransaction() coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), ser_string(serialize_script_num(height)), 0xffffffff)) coinbaseoutput = CTxOut() coinbaseoutput.nValue = 50 * COIN halvings = int(height / 150) # regtest coinbaseoutput.nValue >>= halvings if (pubkey != None): coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG]) else: coinbaseoutput.scriptPubKey = CScript([OP_TRUE]) coinbase.vout = [coinbaseoutput] # Make sure the coinbase is at least 100 bytes pad_tx(coinbase) coinbase.calc_sha256() return coinbase # Create a transaction. # If the scriptPubKey is not specified, make it anyone-can-spend. 
def create_transaction(prevtx, n, sig, value, scriptPubKey=CScript()): tx = CTransaction() assert n < len(prevtx.vout) tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff)) tx.vout.append(CTxOut(value, scriptPubKey)) pad_tx(tx) tx.calc_sha256() return tx def get_legacy_sigopcount_block(block, fAccurate=True): count = 0 for tx in block.vtx: count += get_legacy_sigopcount_tx(tx, fAccurate) return count def get_legacy_sigopcount_tx(tx, fAccurate=True): count = 0 for i in tx.vout: count += i.scriptPubKey.GetSigOpCount(fAccurate) for j in tx.vin: # scriptSig might be of type bytes, so convert to CScript for the moment count += CScript(j.scriptSig).GetSigOpCount(fAccurate) return count def create_confirmed_utxos(node, count, age=101): """ Helper to create at least "count" utxos """ to_generate = int(0.5 * count) + age while to_generate > 0: node.generate(min(25, to_generate)) to_generate -= 25 utxos = node.listunspent() iterations = count - len(utxos) addr1 = node.getnewaddress() addr2 = node.getnewaddress() if iterations <= 0: return utxos for i in range(iterations): t = utxos.pop() inputs = [] inputs.append({"txid": t["txid"], "vout": t["vout"]}) outputs = {} outputs[addr1] = satoshi_round(t['amount'] / 2) outputs[addr2] = satoshi_round(t['amount'] / 2) raw_tx = node.createrawtransaction(inputs, outputs) ctx = FromHex(CTransaction(), raw_tx) fee = node.calculate_fee(ctx) // 2 ctx.vout[0].nValue -= fee # Due to possible truncation, we go ahead and take another satoshi in # fees to ensure the transaction gets through ctx.vout[1].nValue -= fee + 1 signed_tx = node.signrawtransactionwithwallet(ToHex(ctx))["hex"] node.sendrawtransaction(signed_tx) while (node.getmempoolinfo()['size'] > 0): node.generate(1) utxos = node.listunspent() assert len(utxos) >= count return utxos def mine_big_block(node, utxos=None): # each big transaction is about 66kB, # so 14 of them come close to the 1MB block limit num = 14 utxos = utxos if utxos is not None else [] if len(utxos) < num: utxos.clear() utxos.extend(node.listunspent()) send_big_transactions(node, utxos, num, 100) node.generate(1) def send_big_transactions(node, utxos, num, fee_multiplier): from .cashaddr import decode txids = [] - padding = "1"*512 + padding = "1" * 512 addrHash = decode(node.getnewaddress())[2] for _ in range(num): ctx = CTransaction() utxo = utxos.pop() txid = int(utxo['txid'], 16) ctx.vin.append(CTxIn(COutPoint(txid, int(utxo["vout"])), b"")) ctx.vout.append( - CTxOut(int(satoshi_round(utxo['amount']*COIN)), + CTxOut(int(satoshi_round(utxo['amount'] * COIN)), CScript([OP_DUP, OP_HASH160, addrHash, OP_EQUALVERIFY, OP_CHECKSIG]))) for i in range(0, 127): ctx.vout.append(CTxOut(0, CScript( [OP_RETURN, bytes(padding, 'utf-8')]))) # Create a proper fee for the transaction to be mined ctx.vout[0].nValue -= int(fee_multiplier * node.calculate_fee(ctx)) signresult = node.signrawtransactionwithwallet( ToHex(ctx), None, "NONE|FORKID") txid = node.sendrawtransaction(signresult["hex"], True) txids.append(txid) return txids diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py index 73dd79cb8f..77decae979 100755 --- a/test/functional/test_framework/mininode.py +++ b/test/functional/test_framework/mininode.py @@ -1,639 +1,640 @@ #!/usr/bin/env python3 # Copyright (c) 2010 ArtForz -- public domain half-a-node # Copyright (c) 2012 Jeff Garzik # Copyright (c) 2010-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or
http://www.opensource.org/licenses/mit-license.php. """Bitcoin P2P network half-a-node. This python code was modified from ArtForz' public domain half-a-node, as found in the mini-node branch of http://github.com/jgarzik/pynode. P2PConnection: A low-level connection object to a node's P2P interface P2PInterface: A high-level interface object for communicating to a node over P2P P2PDataStore: A p2p interface class that keeps a store of transactions and blocks and can respond correctly to getdata and getheaders messages""" import asyncio from collections import defaultdict from io import BytesIO import logging import struct import sys import threading from test_framework.messages import ( CBlockHeader, MIN_VERSION_SUPPORTED, msg_addr, msg_block, MSG_BLOCK, msg_blocktxn, msg_cmpctblock, msg_feefilter, msg_getaddr, msg_getblocks, msg_getblocktxn, msg_getdata, msg_getheaders, msg_headers, msg_inv, msg_mempool, msg_notfound, msg_ping, msg_pong, msg_reject, msg_sendcmpct, msg_sendheaders, msg_tx, MSG_TX, MSG_TYPE_MASK, msg_verack, msg_version, NODE_NETWORK, sha256, ) from test_framework.util import wait_until logger = logging.getLogger("TestFramework.mininode") MESSAGEMAP = { b"addr": msg_addr, b"block": msg_block, b"blocktxn": msg_blocktxn, b"cmpctblock": msg_cmpctblock, b"feefilter": msg_feefilter, b"getaddr": msg_getaddr, b"getblocks": msg_getblocks, b"getblocktxn": msg_getblocktxn, b"getdata": msg_getdata, b"getheaders": msg_getheaders, b"headers": msg_headers, b"inv": msg_inv, b"mempool": msg_mempool, b"notfound": msg_notfound, b"ping": msg_ping, b"pong": msg_pong, b"reject": msg_reject, b"sendcmpct": msg_sendcmpct, b"sendheaders": msg_sendheaders, b"tx": msg_tx, b"verack": msg_verack, b"version": msg_version, } MAGIC_BYTES = { "mainnet": b"\xe3\xe1\xf3\xe8", "testnet3": b"\xf4\xe5\xf3\xf4", "regtest": b"\xda\xb5\xbf\xfa", } class P2PConnection(asyncio.Protocol): """A low-level connection object to a node's P2P interface. This class is responsible for: - opening and closing the TCP connection to the node - reading bytes from and writing bytes to the socket - deserializing and serializing the P2P message header - logging messages as they are sent and received This class contains no logic for handling the P2P message payloads. It must be sub-classed and the on_message() callback overridden.""" def __init__(self): # The underlying transport of the connection. # Should only call methods on this from the NetworkThread, c.f. call_soon_threadsafe self._transport = None @property def is_connected(self): return self._transport is not None def peer_connect(self, dstaddr, dstport, net="regtest"): assert not self.is_connected self.dstaddr = dstaddr self.dstport = dstport # The initial message to send after the connection was made: self.on_connection_send_msg = None self.on_connection_send_msg_is_raw = False self.recvbuf = b"" self.network = net logger.debug('Connecting to Bitcoin Node: {}:{}'.format( self.dstaddr, self.dstport)) loop = NetworkThread.network_event_loop conn_gen_unsafe = loop.create_connection( lambda: self, host=self.dstaddr, port=self.dstport) def conn_gen(): return loop.call_soon_threadsafe( loop.create_task, conn_gen_unsafe) return conn_gen def peer_disconnect(self): # Connection could have already been closed by other end.
NetworkThread.network_event_loop.call_soon_threadsafe( lambda: self._transport and self._transport.abort()) # Connection and disconnection methods def connection_made(self, transport): """asyncio callback when a connection is opened.""" assert not self._transport logger.debug("Connected & Listening: {}:{}".format( self.dstaddr, self.dstport)) self._transport = transport if self.on_connection_send_msg: if self.on_connection_send_msg_is_raw: self.send_raw_message(self.on_connection_send_msg) else: self.send_message(self.on_connection_send_msg) # Never used again self.on_connection_send_msg = None self.on_open() def connection_lost(self, exc): """asyncio callback when a connection is closed.""" if exc: logger.warning("Connection lost to {}:{} due to {}".format( self.dstaddr, self.dstport, exc)) else: logger.debug("Closed connection to: {}:{}".format( self.dstaddr, self.dstport)) self._transport = None self.recvbuf = b"" self.on_close() # Socket read methods def data_received(self, t): """asyncio callback when data is read from the socket.""" with mininode_lock: if len(t) > 0: self.recvbuf += t while True: msg = self._on_data() if msg == None: break self.on_message(msg) def _on_data(self): """Try to read P2P messages from the recv buffer. This method reads data from the buffer in a loop. It deserializes, parses and verifies the P2P header, then passes the P2P payload to the on_message callback for processing.""" try: with mininode_lock: if len(self.recvbuf) < 4: return None if self.recvbuf[:4] != MAGIC_BYTES[self.network]: raise ValueError( "got garbage {}".format(repr(self.recvbuf))) if len(self.recvbuf) < 4 + 12 + 4 + 4: return None - command = self.recvbuf[4:4+12].split(b"\x00", 1)[0] - msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0] - checksum = self.recvbuf[4+12+4:4+12+4+4] + command = self.recvbuf[4:4 + 12].split(b"\x00", 1)[0] + msglen = struct.unpack("<i", self.recvbuf[4 + 12:4 + 12 + 4])[0] + checksum = self.recvbuf[4 + 12 + 4:4 + 12 + 4 + 4] if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen: return None - msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen] + msg = self.recvbuf[4 + 12 + 4 + 4:4 + 12 + 4 + 4 + msglen] th = sha256(msg) h = sha256(th) if checksum != h[:4]: raise ValueError("got bad checksum " + repr(self.recvbuf)) - self.recvbuf = self.recvbuf[4+12+4+4+msglen:] + self.recvbuf = self.recvbuf[4 + 12 + 4 + 4 + msglen:] if command not in MESSAGEMAP: raise ValueError("Received unknown command from {}:{}: '{}' {}".format( self.dstaddr, self.dstport, command, repr(msg))) f = BytesIO(msg) t = MESSAGEMAP[command]() t.deserialize(f) return t except Exception as e: logger.exception('Error reading message:', repr(e)) raise def on_message(self, message): """Callback for processing a P2P payload. Must be overridden by derived class.""" raise NotImplementedError # Socket write methods def send_message(self, message): """Send a P2P message over the socket. This method takes a P2P payload, builds the P2P header and adds the message to the send buffer to be sent over the socket.""" tmsg = self.build_message(message) self._log_message("send", message) return self.send_raw_message(tmsg) def send_raw_message(self, raw_message_bytes): if not self.is_connected: raise IOError('Not connected') def maybe_write(): if not self._transport: return if self._transport.is_closing(): return self._transport.write(raw_message_bytes) NetworkThread.network_event_loop.call_soon_threadsafe(maybe_write) # Class utility methods def build_message(self, message): """Build a serialized P2P message""" command = message.command data = message.serialize() tmsg = MAGIC_BYTES[self.network] tmsg += command tmsg += b"\x00" * (12 - len(command)) tmsg += struct.pack("<I", len(data)) th = sha256(data) h = sha256(th) tmsg += h[:4] tmsg += data return tmsg def _log_message(self, direction, msg): """Logs a message being sent or received over the connection.""" if direction == "send": log_message = "Send message to {}:{}: {}".format( self.dstaddr, self.dstport, repr(msg)) elif direction == "receive": log_message = "Received message from {}:{}: {}".format( self.dstaddr, self.dstport, repr(msg)) if len(log_message) > 500: log_message += "... (msg truncated)" logger.debug(log_message) class P2PInterface(P2PConnection): """A high-level P2P interface class for communicating with a Bitcoin Cash node. This class provides high-level callbacks for processing P2P message payloads, as well as convenience methods for interacting with the node over P2P. Individual testcases should subclass this and override the on_* methods if they want to alter message handling behaviour.""" def __init__(self): super().__init__() # Track number of messages of each type received and the most recent # message of each type self.message_count = defaultdict(int) self.last_message = {} # A count of the number of ping messages we've sent to the node self.ping_counter = 1 # The network services received from the peer self.nServices = 0 def peer_connect(self, *args, services=NODE_NETWORK, send_version=True, **kwargs): create_conn = super().peer_connect(*args, **kwargs) if send_version: # Send a version msg vt = msg_version() vt.nServices = services vt.addrTo.ip = self.dstaddr vt.addrTo.port = self.dstport vt.addrFrom.ip = "0.0.0.0" vt.addrFrom.port = 0 # Will be sent soon after connection_made self.on_connection_send_msg = vt return create_conn # Message receiving methods def on_message(self, message): """Receive message and dispatch message to appropriate callback. We keep a count of how many of each message type has been received and the most recent message of each type.""" with mininode_lock: try: command = message.command.decode('ascii') self.message_count[command] += 1 self.last_message[command] = message getattr(self, 'on_' + command)(message) except: print("ERROR delivering {} ({})".format( repr(message), sys.exc_info()[0])) raise # Callback methods.
Can be overridden by subclasses in individual test # cases to provide custom message handling behaviour. def on_open(self): pass def on_close(self): pass def on_addr(self, message): pass def on_block(self, message): pass def on_blocktxn(self, message): pass def on_cmpctblock(self, message): pass def on_feefilter(self, message): pass def on_getaddr(self, message): pass def on_getblocks(self, message): pass def on_getblocktxn(self, message): pass def on_getdata(self, message): pass def on_getheaders(self, message): pass def on_headers(self, message): pass def on_mempool(self, message): pass def on_notfound(self, message): pass def on_pong(self, message): pass def on_reject(self, message): pass def on_sendcmpct(self, message): pass def on_sendheaders(self, message): pass def on_tx(self, message): pass def on_inv(self, message): want = msg_getdata() for i in message.inv: if i.type != 0: want.inv.append(i) if len(want.inv): self.send_message(want) def on_ping(self, message): self.send_message(msg_pong(message.nonce)) def on_verack(self, message): self.verack_received = True def on_version(self, message): assert message.nVersion >= MIN_VERSION_SUPPORTED, "Version {} received. Test framework only supports versions greater than {}".format( message.nVersion, MIN_VERSION_SUPPORTED) self.send_message(msg_verack()) self.nServices = message.nServices # Connection helper methods def wait_for_disconnect(self, timeout=60): def test_function(): return not self.is_connected wait_until(test_function, timeout=timeout, lock=mininode_lock) # Message receiving helper methods def wait_for_block(self, blockhash, timeout=60): def test_function(): return self.last_message.get( "block") and self.last_message["block"].block.rehash() == blockhash wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_header(self, blockhash, timeout=60): def test_function(): last_headers = self.last_message.get('headers') if not last_headers: return False return last_headers.headers[0].rehash() == blockhash wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_getdata(self, timeout=60): """Waits for a getdata message. Receiving any getdata message will satisfy the predicate. the last_message["getdata"] value must be explicitly cleared before calling this method, or this will return immediately with success. TODO: change this method to take a hash value and only return true if the correct block/tx has been requested.""" def test_function(): return self.last_message.get("getdata") wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_getheaders(self, timeout=60): """Waits for a getheaders message. Receiving any getheaders message will satisfy the predicate. the last_message["getheaders"] value must be explicitly cleared before calling this method, or this will return immediately with success. 
TODO: change this method to take a hash value and only return true if the correct block header has been requested.""" def test_function(): return self.last_message.get("getheaders") wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_inv(self, expected_inv, timeout=60): """Waits for an INV message and checks that the first inv object in the message was as expected.""" if len(expected_inv) > 1: raise NotImplementedError( "wait_for_inv() will only verify the first inv object") def test_function(): return self.last_message.get("inv") and \ self.last_message["inv"].inv[0].type == expected_inv[0].type and \ self.last_message["inv"].inv[0].hash == expected_inv[0].hash wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_verack(self, timeout=60): def test_function(): return self.message_count["verack"] wait_until(test_function, timeout=timeout, lock=mininode_lock) # Message sending helper functions def send_and_ping(self, message): self.send_message(message) self.sync_with_ping() # Sync up with the node def sync_with_ping(self, timeout=60): self.send_message(msg_ping(nonce=self.ping_counter)) def test_function(): if not self.last_message.get("pong"): return False return self.last_message["pong"].nonce == self.ping_counter wait_until(test_function, timeout=timeout, lock=mininode_lock) self.ping_counter += 1 # One lock for synchronizing all data access between the networking thread (see # NetworkThread below) and the thread running the test logic. For simplicity, # P2PConnection acquires this lock whenever delivering a message to a P2PInterface. # This lock should be acquired in the thread running the test logic to synchronize # access to any data shared with the P2PInterface or P2PConnection. mininode_lock = threading.RLock() class NetworkThread(threading.Thread): network_event_loop = None def __init__(self): super().__init__(name="NetworkThread") # There is only one event loop and no more than one thread must be created assert not self.network_event_loop NetworkThread.network_event_loop = asyncio.new_event_loop() def run(self): """Start the network thread.""" self.network_event_loop.run_forever() def close(self, timeout=10): """Close the connections and network event loop.""" self.network_event_loop.call_soon_threadsafe( self.network_event_loop.stop) wait_until(lambda: not self.network_event_loop.is_running(), timeout=timeout) self.network_event_loop.close() self.join(timeout) class P2PDataStore(P2PInterface): """A P2P data store class. Keeps a block and transaction store and responds correctly to getdata and getheaders requests.""" def __init__(self): super().__init__() # store of blocks. key is block hash, value is a CBlock object self.block_store = {} self.last_block_hash = '' # store of txs. 
key is txid, value is a CTransaction object self.tx_store = {} self.getdata_requests = [] def on_getdata(self, message): """Check for the tx/block in our stores and if found, reply with an inv message.""" for inv in message.inv: self.getdata_requests.append(inv.hash) if (inv.type & MSG_TYPE_MASK) == MSG_TX and inv.hash in self.tx_store.keys(): self.send_message(msg_tx(self.tx_store[inv.hash])) elif (inv.type & MSG_TYPE_MASK) == MSG_BLOCK and inv.hash in self.block_store.keys(): self.send_message(msg_block(self.block_store[inv.hash])) else: logger.debug( 'getdata message type {} received.'.format(hex(inv.type))) def on_getheaders(self, message): """Search back through our block store for the locator, and reply with a headers message if found.""" locator, hash_stop = message.locator, message.hashstop # Assume that the most recent block added is the tip if not self.block_store: return headers_list = [self.block_store[self.last_block_hash]] maxheaders = 2000 while headers_list[-1].sha256 not in locator.vHave: # Walk back through the block store, adding headers to headers_list # as we go. prev_block_hash = headers_list[-1].hashPrevBlock if prev_block_hash in self.block_store: prev_block_header = CBlockHeader( self.block_store[prev_block_hash]) headers_list.append(prev_block_header) if prev_block_header.sha256 == hash_stop: # if this is the hashstop header, stop here break else: logger.debug('block hash {} not found in block store'.format( hex(prev_block_hash))) break # Truncate the list if there are too many headers headers_list = headers_list[:-maxheaders - 1:-1] response = msg_headers(headers_list) if response is not None: self.send_message(response) def send_blocks_and_test(self, blocks, node, *, success=True, request_block=True, reject_reason=None, expect_disconnect=False, timeout=60): """Send blocks to test node and test whether the tip advances. - add all blocks to our block_store - send a headers message for the final block - the on_getheaders handler will ensure that any getheaders are responded to - if request_block is True: wait for getdata for each of the blocks. The on_getdata handler will ensure that any getdata messages are responded to - if success is True: assert that the node's tip advances to the most recent block - if success is False: assert that the node's tip doesn't advance - if reject_reason is set: assert that the correct reject message is logged""" with mininode_lock: for block in blocks: self.block_store[block.sha256] = block self.last_block_hash = block.sha256 reject_reason = [reject_reason] if reject_reason else [] with node.assert_debug_log(expected_msgs=reject_reason): self.send_message(msg_headers([CBlockHeader(blocks[-1])])) if request_block: wait_until( lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout, lock=mininode_lock) if expect_disconnect: self.wait_for_disconnect() else: self.sync_with_ping() if success: wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout) else: assert node.getbestblockhash() != blocks[-1].hash def send_txs_and_test(self, txs, node, *, success=True, expect_disconnect=False, reject_reason=None): """Send txs to test node and test whether they're accepted to the mempool. 
- add all txs to our tx_store - send tx messages for all txs - if success is True/False: assert that the txs are/are not accepted to the mempool - if expect_disconnect is True: Skip the sync with ping - if reject_reason is set: assert that the correct reject message is logged.""" with mininode_lock: for tx in txs: self.tx_store[tx.sha256] = tx reject_reason = [reject_reason] if reject_reason else [] with node.assert_debug_log(expected_msgs=reject_reason): for tx in txs: self.send_message(msg_tx(tx)) if expect_disconnect: self.wait_for_disconnect() else: self.sync_with_ping() raw_mempool = node.getrawmempool() if success: # Check that all txs are now in the mempool for tx in txs: assert tx.hash in raw_mempool, "{} not found in mempool".format( tx.hash) else: # Check that none of the txs are now in the mempool for tx in txs: assert tx.hash not in raw_mempool, "{} tx found in mempool".format( tx.hash) diff --git a/test/functional/test_framework/schnorr.py b/test/functional/test_framework/schnorr.py index 8018fc7146..76c5f52838 100755 --- a/test/functional/test_framework/schnorr.py +++ b/test/functional/test_framework/schnorr.py @@ -1,255 +1,255 @@ #!/usr/bin/env python3 # Copyright 2019 The Bitcoin Developers """Schnorr secp256k1 using OpenSSL WARNING: This module does not mlock() secrets; your private keys may end up on disk in swap! Also, operations are not constant time. Use with caution! Inspired by key.py from python-bitcoinlib. """ import ctypes import ctypes.util import hashlib import hmac import threading ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library('ssl') or 'libeay32') ssl.BN_new.restype = ctypes.c_void_p ssl.BN_new.argtypes = [] ssl.BN_free.restype = None ssl.BN_free.argtypes = [ctypes.c_void_p] ssl.BN_bin2bn.restype = ctypes.c_void_p ssl.BN_bin2bn.argtypes = [ctypes.c_char_p, ctypes.c_int, ctypes.c_void_p] ssl.BN_CTX_new.restype = ctypes.c_void_p ssl.BN_CTX_new.argtypes = [] ssl.BN_CTX_free.restype = None ssl.BN_CTX_free.argtypes = [ctypes.c_void_p] ssl.EC_GROUP_new_by_curve_name.restype = ctypes.c_void_p ssl.EC_GROUP_new_by_curve_name.argtypes = [ctypes.c_int] ssl.EC_POINT_new.restype = ctypes.c_void_p ssl.EC_POINT_new.argtypes = [ctypes.c_void_p] ssl.EC_POINT_free.restype = None ssl.EC_POINT_free.argtypes = [ctypes.c_void_p] ssl.EC_POINT_mul.restype = ctypes.c_int ssl.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] ssl.EC_POINT_is_at_infinity.restype = ctypes.c_int ssl.EC_POINT_is_at_infinity.argtypes = [ctypes.c_void_p, ctypes.c_void_p] ssl.EC_POINT_point2oct.restype = ctypes.c_size_t ssl.EC_POINT_point2oct.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_size_t, ctypes.c_void_p] # point encodings for EC_POINT_point2oct POINT_CONVERSION_COMPRESSED = 2 POINT_CONVERSION_UNCOMPRESSED = 4 SECP256K1_FIELDSIZE = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f SECP256K1_ORDER = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 SECP256K1_ORDER_HALF = SECP256K1_ORDER // 2 # this specifies the curve used NID_secp256k1 = 714 # from openssl/obj_mac.h group = ssl.EC_GROUP_new_by_curve_name(NID_secp256k1) if not group: raise RuntimeError("Cannot get secp256k1 group!") class CTX: """Wrapper for a bignum context""" def __init__(self): self.ptr = ssl.BN_CTX_new() assert self.ptr def __del__(self): ssl.BN_CTX_free(self.ptr) _threadlocal = threading.local() @classmethod def ptr_for_this_thread(cls): """grab a pointer to per-thread ctx""" 
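# An aside on the caching in the body below (a sketch, not part of the
# module): each thread lazily creates and stores its own CTX wrapper in
# threading.local() storage, so OpenSSL bignum contexts are never shared
# between threads. The same idiom with an invented resource factory:
#
#     _local = threading.local()
#
#     def per_thread(factory):
#         try:
#             return _local.cached
#         except AttributeError:
#             _local.cached = factory()
#             return _local.cached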
try: self = cls._threadlocal.ctxwrapper except AttributeError: self = cls() cls._threadlocal.ctxwrapper = self return self.ptr def jacobi(a, n): """Jacobi symbol""" # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149. # This function has been tested by comparison with a small # table printed in HAC, and by extensive use in calculating # modular square roots. # Borrowed from python ecdsa package (function originally from Peter Pearson) # ... modified to use bitwise arithmetic when possible, for speed. assert n >= 3 assert n & 1 == 1 a = a % n if a == 0: return 0 if a == 1: return 1 a1, e = a, 0 while a1 & 1 == 0: - a1, e = a1 >> 1, e+1 + a1, e = a1 >> 1, e + 1 if e & 1 == 0 or n & 7 == 1 or n & 7 == 7: s = 1 else: s = -1 if a1 == 1: return s if n & 3 == 3 and a1 & 3 == 3: s = -s return s * jacobi(n % a1, a1) def nonce_function_rfc6979(privkeybytes, msg32, algo16=b'', ndata=b''): # RFC6979 deterministic nonce generation, done in libsecp256k1 style. # see nonce_function_rfc6979() in secp256k1.c; and details in hash_impl.h assert len(privkeybytes) == 32 assert len(msg32) == 32 assert len(algo16) in (0, 16) assert len(ndata) in (0, 32) - V = b'\x01'*32 - K = b'\x00'*32 + V = b'\x01' * 32 + K = b'\x00' * 32 blob = bytes(privkeybytes) + msg32 + ndata + algo16 # initialize - K = hmac.HMAC(K, V+b'\x00'+blob, 'sha256').digest() + K = hmac.HMAC(K, V + b'\x00' + blob, 'sha256').digest() V = hmac.HMAC(K, V, 'sha256').digest() - K = hmac.HMAC(K, V+b'\x01'+blob, 'sha256').digest() + K = hmac.HMAC(K, V + b'\x01' + blob, 'sha256').digest() V = hmac.HMAC(K, V, 'sha256').digest() # loop forever until an in-range k is found k = 0 while True: # see RFC6979 3.2.h.2 : we take a shortcut and don't build T in # multiple steps since the first step is always the right size for # our purpose. V = hmac.HMAC(K, V, 'sha256').digest() T = V assert len(T) >= 32 k = int.from_bytes(T, 'big') if k > 0 and k < SECP256K1_ORDER: break - K = hmac.HMAC(K, V+b'\x00', 'sha256').digest() + K = hmac.HMAC(K, V + b'\x00', 'sha256').digest() V = hmac.HMAC(K, V, 'sha256').digest() return k def sign(privkeybytes, msg32): """Create Schnorr signature (BIP-Schnorr convention).""" assert len(privkeybytes) == 32 assert len(msg32) == 32 k = nonce_function_rfc6979( privkeybytes, msg32, algo16=b"Schnorr+SHA256 ") ctx = CTX.ptr_for_this_thread() # calculate R point and pubkey point, and get them in # uncompressed/compressed formats respectively. 
R = ssl.EC_POINT_new(group) assert R pubkey = ssl.EC_POINT_new(group) assert pubkey kbn = ssl.BN_bin2bn(k.to_bytes(32, 'big'), 32, None) assert kbn privbn = ssl.BN_bin2bn(privkeybytes, 32, None) assert privbn assert ssl.EC_POINT_mul(group, R, kbn, None, None, ctx) assert ssl.EC_POINT_mul(group, pubkey, privbn, None, None, ctx) # buffer for uncompressed R coord Rbuf = ctypes.create_string_buffer(65) assert 65 == ssl.EC_POINT_point2oct( group, R, POINT_CONVERSION_UNCOMPRESSED, Rbuf, 65, ctx) # buffer for compressed pubkey pubkeybuf = ctypes.create_string_buffer(33) assert 33 == ssl.EC_POINT_point2oct( group, pubkey, POINT_CONVERSION_COMPRESSED, pubkeybuf, 33, ctx) ssl.BN_free(kbn) ssl.BN_free(privbn) ssl.EC_POINT_free(R) ssl.EC_POINT_free(pubkey) Ry = int.from_bytes(Rbuf[33:65], 'big') # y coord if jacobi(Ry, SECP256K1_FIELDSIZE) == -1: k = SECP256K1_ORDER - k rbytes = Rbuf[1:33] # x coord big-endian e = int.from_bytes(hashlib.sha256( rbytes + pubkeybuf + msg32).digest(), 'big') privkey = int.from_bytes(privkeybytes, 'big') - s = (k + e*privkey) % SECP256K1_ORDER + s = (k + e * privkey) % SECP256K1_ORDER return rbytes + s.to_bytes(32, 'big') def getpubkey(privkeybytes, compressed=True): assert len(privkeybytes) == 32 encoding = POINT_CONVERSION_COMPRESSED if compressed else POINT_CONVERSION_UNCOMPRESSED ctx = CTX.ptr_for_this_thread() pubkey = ssl.EC_POINT_new(group) assert pubkey privbn = ssl.BN_bin2bn(privkeybytes, 32, None) assert privbn assert ssl.EC_POINT_mul(group, pubkey, privbn, None, None, ctx) assert not ssl.EC_POINT_is_at_infinity(group, pubkey) # first call (with nullptr for buffer) gets us the size size = ssl.EC_POINT_point2oct(group, pubkey, encoding, None, 0, ctx) pubkeybuf = ctypes.create_string_buffer(size) ret = ssl.EC_POINT_point2oct(group, pubkey, encoding, pubkeybuf, size, ctx) assert ret == size ssl.BN_free(privbn) ssl.EC_POINT_free(pubkey) return bytes(pubkeybuf) if __name__ == '__main__': # Test Schnorr implementation. # duplicate the deterministic sig test from src/test/key_tests.cpp private_key = bytes.fromhex( "12b004fff7f4b69ef8650e767f18f11ede158148b425660723b9f9a66e61f747") pubkey = getpubkey(private_key, compressed=True) assert pubkey == bytes.fromhex( "030b4c866585dd868a9d62348a9cd008d6a312937048fff31670e7e920cfc7a744") def sha(b): return hashlib.sha256(b).digest() msg = b"Very deterministic message" msghash = sha(sha(msg)) assert msghash == bytes.fromhex( "5255683da567900bfd3e786ed8836a4e7763c221bf1ac20ece2a5171b9199e8a") sig = sign(private_key, msghash) assert sig == bytes.fromhex( "2c56731ac2f7a7e7f11518fc7722a166b02438924ca9d8b4d1113" "47b81d0717571846de67ad3d913a8fdf9d8f3f73161a4c48ae81c" "b183b214765feb86e255ce") print("ok") diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py index 31a74038b1..90ca5b9446 100755 --- a/test/functional/test_runner.py +++ b/test/functional/test_runner.py @@ -1,762 +1,762 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Copyright (c) 2017 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Run regression test suite. This module calls down into individual test cases via subprocess. It will forward all unrecognized arguments onto the individual test scripts. Functional tests are disabled on Windows by default. Use --force to run them anyway. 
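(The argument forwarding described above works by splitting argv on the "--" prefix. A minimal sketch of that split; the flag --tracerpc and the test name are illustrative only, the real logic lives in main() below:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--jobs', type=int, default=1)
args, unknown = parser.parse_known_args(
    ['--jobs=2', 'wallet_basic.py', '--tracerpc'])
tests = [a for a in unknown if a[:2] != '--']        # ['wallet_basic.py']
passon_args = [a for a in unknown if a[:2] == '--']  # ['--tracerpc']

Arguments the runner does not recognize are kept for itself only if they lack the "--" prefix, i.e. they name tests; everything else is passed through to each test script.)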
For a description of arguments recognized by test scripts, see `test/functional/test_framework/test_framework.py:BitcoinTestFramework.main`. """ import argparse from collections import deque import configparser import datetime import os import time import shutil import sys import subprocess import tempfile import re import logging import xml.etree.ElementTree as ET import json import threading import multiprocessing from queue import Queue, Empty # Formatting. Default colors to empty strings. BOLD, BLUE, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "") try: # Make sure python thinks it can write unicode to its stdout "\u2713".encode("utf_8").decode(sys.stdout.encoding) TICK = "✓ " CROSS = "✖ " CIRCLE = "○ " except UnicodeDecodeError: TICK = "P " CROSS = "x " CIRCLE = "o " if os.name == 'posix': # primitive formatting on supported # terminal via ANSI escape sequences: BOLD = ('\033[0m', '\033[1m') BLUE = ('\033[0m', '\033[0;34m') RED = ('\033[0m', '\033[0;31m') GREY = ('\033[0m', '\033[1;30m') TEST_EXIT_PASSED = 0 TEST_EXIT_SKIPPED = 77 NON_SCRIPTS = [ # These are python files that live in the functional tests directory, but are not test scripts. "combine_logs.py", "create_cache.py", "test_runner.py", ] TEST_PARAMS = { # Some tests can be run with additional parameters. # When a test is listed here, it will be run without parameters # as well as with additional parameters listed here. # This: # example "testName" : [["--param1", "--param2"] , ["--param3"]] # will run the test 3 times: # testName # testName --param1 --param2 # testName --param3 "wallet_txn_doublespend.py": [["--mineblock"]], "wallet_txn_clone.py": [["--mineblock"]], "wallet_multiwallet.py": [["--usecli"]], "wallet_disableprivatekeys.py": [["--usecli"]], } # Used to limit the number of tests, when list of tests is not provided on command line # When --extended is specified, we run all tests, otherwise # we only run a test if its execution time in seconds does not exceed EXTENDED_CUTOFF DEFAULT_EXTENDED_CUTOFF = 40 DEFAULT_JOBS = (multiprocessing.cpu_count() // 3) + 1 class TestCase(): """ Data structure to hold and run information necessary to launch a test case.
""" def __init__(self, test_num, test_case, tests_dir, tmpdir, flags=None): self.tests_dir = tests_dir self.tmpdir = tmpdir self.test_case = test_case self.test_num = test_num self.flags = flags def run(self, portseed_offset): t = self.test_case portseed = self.test_num + portseed_offset portseed_arg = ["--portseed={}".format(portseed)] log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16) log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16) test_argv = t.split() testdir = os.path.join("{}", "{}_{}").format( self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed) tmpdir_arg = ["--tmpdir={}".format(testdir)] name = t time0 = time.time() process = subprocess.Popen([sys.executable, os.path.join(self.tests_dir, test_argv[0])] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg, universal_newlines=True, stdout=log_stdout, stderr=log_stderr) process.wait() log_stdout.seek(0), log_stderr.seek(0) [stdout, stderr] = [l.read().decode('utf-8') for l in (log_stdout, log_stderr)] log_stdout.close(), log_stderr.close() if process.returncode == TEST_EXIT_PASSED and stderr == "": status = "Passed" elif process.returncode == TEST_EXIT_SKIPPED: status = "Skipped" else: status = "Failed" return TestResult(self.test_num, name, testdir, status, int(time.time() - time0), stdout, stderr) def on_ci(): return os.getenv('TRAVIS') == 'true' or os.getenv('TEAMCITY_VERSION') != None def main(): # Read config generated by configure. config = configparser.ConfigParser() configfile = os.path.join(os.path.abspath( os.path.dirname(__file__)), "..", "config.ini") config.read_file(open(configfile, encoding="utf8")) src_dir = config["environment"]["SRCDIR"] build_dir = config["environment"]["BUILDDIR"] tests_dir = os.path.join(src_dir, 'test', 'functional') # Parse arguments and pass through unrecognised args parser = argparse.ArgumentParser(add_help=False, usage='%(prog)s [test_runner.py options] [script options] [scripts]', description=__doc__, epilog=''' Help text and arguments for individual test script:''', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--combinedlogslen', '-c', type=int, default=0, help='print a combined log (of length n lines) from all test nodes and test framework to the console on failure.') parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface') parser.add_argument( '--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.') parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests') parser.add_argument('--cutoff', type=int, default=DEFAULT_EXTENDED_CUTOFF, help='set the cutoff runtime for what tests get run') parser.add_argument('--force', '-f', action='store_true', help='run tests even on platforms where they are disabled by default (e.g. windows).') parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit') parser.add_argument('--jobs', '-j', type=int, default=DEFAULT_JOBS, help='how many test scripts to run in parallel.') parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. 
--keepcache retains the cache from the previous testrun.') parser.add_argument('--quiet', '-q', action='store_true', help='only print results summary and failure logs') parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs") parser.add_argument('--junitoutput', '-J', default=os.path.join(build_dir, 'junit_results.xml'), help="file that will store JUnit formatted test results.") args, unknown_args = parser.parse_known_args() # args to be passed on always start with two dashes; tests are the # remaining unknown args tests = [arg for arg in unknown_args if arg[:2] != "--"] passon_args = [arg for arg in unknown_args if arg[:2] == "--"] passon_args.append("--configfile={}".format(configfile)) # Set up logging logging_level = logging.INFO if args.quiet else logging.DEBUG logging.basicConfig(format='%(message)s', level=logging_level) # Create base test directory tmpdir = os.path.join("{}", "bitcoin_test_runner_{:%Y%m%d_%H%M%S}").format( args.tmpdirprefix, datetime.datetime.now()) os.makedirs(tmpdir) logging.debug("Temporary test directory at {}".format(tmpdir)) enable_wallet = config["components"].getboolean("ENABLE_WALLET") enable_utils = config["components"].getboolean("ENABLE_UTILS") enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND") if config["environment"]["EXEEXT"] == ".exe" and not args.force: # https://github.com/bitcoin/bitcoin/commit/d52802551752140cf41f0d9a225a43e84404d3e9 # https://github.com/bitcoin/bitcoin/pull/5677#issuecomment-136646964 print( "Tests currently disabled on Windows by default. Use --force option to enable") sys.exit(0) if not (enable_wallet and enable_utils and enable_bitcoind): print( "No functional tests to run. Wallet, utils, and bitcoind must all be enabled") print( "Rerun `configure` with -enable-wallet, -with-utils and -with-daemon and rerun make") sys.exit(0) # Build list of tests all_scripts = get_all_scripts_from_disk(tests_dir, NON_SCRIPTS) # Check all tests with parameters actually exist for test in TEST_PARAMS: if not test in all_scripts: print("ERROR: Test with parameter {} does not exist, check it has " "not been renamed or deleted".format(test)) sys.exit(1) if tests: # Individual tests have been specified. Run specified tests that exist # in the all_scripts list. Accept the name with or without .py # extension. individual_tests = [ re.sub(r"\.py$", "", t) + ".py" for t in tests if not t.endswith('*')] test_list = [] for t in individual_tests: if t in all_scripts: test_list.append(t) else: print("{}WARNING!{} Test '{}' not found in full test list.".format( BOLD[1], BOLD[0], t)) # Allow for wildcard at the end of the name, so a single input can # match multiple tests for test in tests: if test.endswith('*'): test_list.extend( [t for t in all_scripts if t.startswith(test[:-1])]) # do not cut off explicitly specified tests cutoff = sys.maxsize else: # No individual tests have been specified. # Run all tests that do not exceed test_list = all_scripts cutoff = args.cutoff if args.extended: cutoff = sys.maxsize # Remove the test cases that the user has explicitly asked to exclude. if args.exclude: tests_excl = [re.sub(r"\.py$", "", t) + ".py" for t in args.exclude.split(',')] for exclude_test in tests_excl: if exclude_test in test_list: test_list.remove(exclude_test) else: print("{}WARNING!{} Test '{}' not found in current test list.".format( BOLD[1], BOLD[0], exclude_test)) # Update timings from build_dir only if separate build directory is used. 
# We do not want to pollute source directory. build_timings = None if (src_dir != build_dir): build_timings = Timings(os.path.join(build_dir, 'timing.json')) # Always use timings from src_dir if present src_timings = Timings(os.path.join( src_dir, "test", "functional", 'timing.json')) # Add test parameters and remove long running tests if needed test_list = get_tests_to_run( test_list, TEST_PARAMS, cutoff, src_timings) if not test_list: print("No valid test scripts specified. Check that your test is in one " "of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests") sys.exit(0) if args.help: # Print help for test_runner.py, then print help of the first script # and exit. parser.print_help() subprocess.check_call( [sys.executable, os.path.join(tests_dir, test_list[0]), '-h']) sys.exit(0) if not args.keepcache: shutil.rmtree(os.path.join(build_dir, "test", "cache"), ignore_errors=True) run_tests(test_list, build_dir, tests_dir, args.junitoutput, tmpdir, args.jobs, args.coverage, passon_args, args.combinedlogslen, build_timings) def run_tests(test_list, build_dir, tests_dir, junitoutput, tmpdir, num_jobs, enable_coverage=False, args=[], combined_logs_len=0, build_timings=None): # Warn if bitcoind is already running (unix only) try: pidofOutput = subprocess.check_output(["pidof", "bitcoind"]) if pidofOutput is not None and pidofOutput != b'': print("{}WARNING!{} There is already a bitcoind process running on this system. Tests may fail unexpectedly due to resource contention!".format( BOLD[1], BOLD[0])) except (OSError, subprocess.SubprocessError): pass # Warn if there is a cache directory cache_dir = os.path.join(build_dir, "test", "cache") if os.path.isdir(cache_dir): print("{}WARNING!{} There is a cache directory here: {}. If tests fail unexpectedly, try deleting the cache directory.".format( BOLD[1], BOLD[0], cache_dir)) flags = ['--cachedir={}'.format(cache_dir)] + args if enable_coverage: coverage = RPCCoverage() flags.append(coverage.flag) logging.debug( "Initializing coverage directory at {}".format(coverage.dir)) else: coverage = None if len(test_list) > 1 and num_jobs > 1: # Populate cache try: subprocess.check_output([sys.executable, os.path.join( tests_dir, 'create_cache.py')] + flags + [os.path.join("--tmpdir={}", "cache") .format(tmpdir)]) except subprocess.CalledProcessError as e: sys.stdout.buffer.write(e.output) raise # Run Tests time0 = time.time() test_results = execute_test_processes( num_jobs, test_list, tests_dir, tmpdir, flags) runtime = int(time.time() - time0) max_len_name = len(max(test_list, key=len)) print_results(test_results, tests_dir, max_len_name, runtime, combined_logs_len) save_results_as_junit(test_results, junitoutput, runtime) if (build_timings is not None): build_timings.save_timings(test_results) if coverage: coverage.report_rpc_coverage() logging.debug("Cleaning up coverage data") coverage.cleanup() # Clear up the temp directory if all subdirectories are gone if not os.listdir(tmpdir): os.rmdir(tmpdir) all_passed = all( map(lambda test_result: test_result.was_successful, test_results)) sys.exit(not all_passed) def execute_test_processes(num_jobs, test_list, tests_dir, tmpdir, flags): update_queue = Queue() job_queue = Queue() test_results = [] poll_timeout = 10 # seconds # In case there is a graveyard of zombie bitcoinds, we can apply a # pseudorandom offset to hopefully jump over them.
# (625 is PORT_RANGE/MAX_NODES) portseed_offset = int(time.time() * 1000) % 625 ## # Define some helper functions we will need for threading. ## def handle_message(message, running_jobs): """ handle_message handles a single message from handle_test_cases """ if isinstance(message, TestCase): running_jobs.append((message.test_num, message.test_case)) print("{}{}{} started".format(BOLD[1], message.test_case, BOLD[0])) return if isinstance(message, TestResult): test_result = message running_jobs.remove((test_result.num, test_result.name)) test_results.append(test_result) if test_result.status == "Passed": print("{}{}{} passed, Duration: {} s".format( BOLD[1], test_result.name, BOLD[0], test_result.time)) elif test_result.status == "Skipped": print("{}{}{} skipped".format( BOLD[1], test_result.name, BOLD[0])) else: print("{}{}{} failed, Duration: {} s\n".format( BOLD[1], test_result.name, BOLD[0], test_result.time)) print(BOLD[1] + 'stdout:' + BOLD[0]) print(test_result.stdout) print(BOLD[1] + 'stderr:' + BOLD[0]) print(test_result.stderr) return assert False, "we should not be here" def handle_update_messages(): """ handle_update_messages waits for messages to be sent from handle_test_cases via the update_queue. It serializes the results so we can print nice status update messages. """ printed_status = False running_jobs = [] while True: message = None try: message = update_queue.get(True, poll_timeout) if message is None: break # We printed a status message, need to kick to the next line # before printing more. if printed_status: print() printed_status = False handle_message(message, running_jobs) update_queue.task_done() except Empty: if not on_ci(): print("Running jobs: {}".format(", ".join([j[1] for j in running_jobs])), end="\r") sys.stdout.flush() printed_status = True def handle_test_cases(): """ job_runner represents a single thread that is part of a worker pool. It waits for a test, then executes that test. It also reports start and result messages to handle_update_messages """ while True: test = job_queue.get() if test is None: break # Signal that the test is starting to inform the poor waiting # programmer update_queue.put(test) result = test.run(portseed_offset) update_queue.put(result) job_queue.task_done() ## # Setup our threads, and start sending tasks ## # Start our result collection thread. t = threading.Thread(target=handle_update_messages) t.setDaemon(True) t.start() # Start some worker threads for j in range(num_jobs): t = threading.Thread(target=handle_test_cases) t.setDaemon(True) t.start() # Push all our test cases into the job queue. 
for i, t in enumerate(test_list): job_queue.put(TestCase(i, t, tests_dir, tmpdir, flags)) # Wait for all the jobs to be completed job_queue.join() # Wait for all the results to be compiled update_queue.join() # Flush our queues so the threads exit update_queue.put(None) for j in range(num_jobs): job_queue.put(None) return test_results def print_results(test_results, tests_dir, max_len_name, runtime, combined_logs_len): results = "\n" + BOLD[1] + "{} | {} | {}\n\n".format( "TEST".ljust(max_len_name), "STATUS ", "DURATION") + BOLD[0] test_results.sort(key=TestResult.sort_key) all_passed = True time_sum = 0 for test_result in test_results: all_passed = all_passed and test_result.was_successful time_sum += test_result.time test_result.padding = max_len_name results += str(test_result) testdir = test_result.testdir if combined_logs_len and os.path.isdir(testdir): # Print the final `combinedlogslen` lines of the combined logs print('{}Combine the logs and print the last {} lines ...{}'.format( BOLD[1], combined_logs_len, BOLD[0])) print('\n============') print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0])) print('============\n') combined_logs, _ = subprocess.Popen([sys.executable, os.path.join( tests_dir, 'combine_logs.py'), '-c', testdir], universal_newlines=True, stdout=subprocess.PIPE).communicate() print("\n".join(deque(combined_logs.splitlines(), combined_logs_len))) status = TICK + "Passed" if all_passed else CROSS + "Failed" if not all_passed: results += RED[1] results += BOLD[1] + "\n{} | {} | {} s (accumulated) \n".format( "ALL".ljust(max_len_name), status.ljust(9), time_sum) + BOLD[0] if not all_passed: results += RED[0] results += "Runtime: {} s\n".format(runtime) print(results) class TestResult(): """ Simple data structure to store test result values and print them properly """ def __init__(self, num, name, testdir, status, time, stdout, stderr): self.num = num self.name = name self.testdir = testdir self.status = status self.time = time self.padding = 0 self.stdout = stdout self.stderr = stderr def sort_key(self): if self.status == "Passed": return 0, self.name.lower() elif self.status == "Failed": return 2, self.name.lower() elif self.status == "Skipped": return 1, self.name.lower() def __repr__(self): if self.status == "Passed": color = BLUE glyph = TICK elif self.status == "Failed": color = RED glyph = CROSS elif self.status == "Skipped": color = GREY glyph = CIRCLE return color[1] + "{} | {}{} | {} s\n".format( self.name.ljust(self.padding), glyph, self.status.ljust(7), self.time) + color[0] @property def was_successful(self): return self.status != "Failed" def get_all_scripts_from_disk(test_dir, non_scripts): """ Return all available test scripts from the script directory (excluding NON_SCRIPTS) """ python_files = set([t for t in os.listdir(test_dir) if t[-3:] == ".py"]) return list(python_files - set(non_scripts)) def get_tests_to_run(test_list, test_params, cutoff, src_timings): """ Returns only tests that will not run longer than the cutoff. Long running tests are returned first to favor running tests in parallel. Timings from the build directory override those from the src directory """ def get_test_time(test): # Return 0 if test is unknown to always run it return next( (x['time'] for x in src_timings.existing_timings if x['name'] == test), 0) # Some tests must also be run with additional parameters. Add them to the list.
tests_with_params = [] for test_name in test_list: # always execute a test without parameters tests_with_params.append(test_name) params = test_params.get(test_name) if params is not None: tests_with_params.extend( [test_name + " " + " ".join(p) for p in params]) result = [t for t in tests_with_params if get_test_time(t) <= cutoff] - result.sort(key=lambda x: (-get_test_time(x), x)) + result.sort(key=lambda x: (-get_test_time(x), x)) return result class RPCCoverage(): """ Coverage reporting utilities for test_runner. Coverage calculation works by having each test script subprocess write coverage files into a particular directory. These files contain the RPC commands invoked during testing, as well as a complete listing of RPC commands per `bitcoin-cli help` (`rpc_interface.txt`). After all tests complete, the commands run are combined and diff'd against the complete list to calculate uncovered RPC commands. See also: test/functional/test_framework/coverage.py """ def __init__(self): self.dir = tempfile.mkdtemp(prefix="coverage") self.flag = '--coveragedir={}'.format(self.dir) def report_rpc_coverage(self): """ Print out RPC commands that were unexercised by tests. """ uncovered = self._get_uncovered_rpc_commands() if uncovered: print("Uncovered RPC commands:") print("".join((" - {}\n".format(i)) for i in sorted(uncovered))) else: print("All RPC commands covered.") def cleanup(self): return shutil.rmtree(self.dir) def _get_uncovered_rpc_commands(self): """ Return a set of currently untested RPC commands. """ # This is shared from `test/functional/test-framework/coverage.py` reference_filename = 'rpc_interface.txt' coverage_file_prefix = 'coverage.' coverage_ref_filename = os.path.join(self.dir, reference_filename) coverage_filenames = set() all_cmds = set() covered_cmds = set() if not os.path.isfile(coverage_ref_filename): raise RuntimeError("No coverage reference found") with open(coverage_ref_filename, 'r', encoding="utf8") as f: all_cmds.update([i.strip() for i in f.readlines()]) for root, dirs, files in os.walk(self.dir): for filename in files: if filename.startswith(coverage_file_prefix): coverage_filenames.add(os.path.join(root, filename)) for filename in coverage_filenames: with open(filename, 'r', encoding="utf8") as f: covered_cmds.update([i.strip() for i in f.readlines()]) return all_cmds - covered_cmds def save_results_as_junit(test_results, file_name, time): """ Save tests results to file in JUnit format See http://llg.cubic.org/docs/junit/ for specification of format """ e_test_suite = ET.Element("testsuite", {"name": "bitcoin_abc_tests", "tests": str(len(test_results)), # "errors": "failures": str(len([t for t in test_results if t.status == "Failed"])), "id": "0", "skipped": str(len([t for t in test_results if t.status == "Skipped"])), "time": str(time), "timestamp": datetime.datetime.now().isoformat('T') }) for test_result in test_results: e_test_case = ET.SubElement(e_test_suite, "testcase", {"name": test_result.name, "classname": test_result.name, "time": str(test_result.time) } ) if test_result.status == "Skipped": ET.SubElement(e_test_case, "skipped") elif test_result.status == "Failed": ET.SubElement(e_test_case, "failure") # no special element for passed tests ET.SubElement(e_test_case, "system-out").text = test_result.stdout ET.SubElement(e_test_case, "system-err").text = test_result.stderr ET.ElementTree(e_test_suite).write( file_name, "UTF-8", xml_declaration=True) class Timings(): """ Takes care of loading, merging and saving tests execution times. 
""" def __init__(self, timing_file): self.timing_file = timing_file self.existing_timings = self.load_timings() def load_timings(self): if os.path.isfile(self.timing_file): with open(self.timing_file, encoding="utf8") as f: return json.load(f) else: return [] def get_merged_timings(self, new_timings): """ Return new list containing existing timings updated with new timings Tests that do not exists are not removed """ key = 'name' merged = {} for item in self.existing_timings + new_timings: if item[key] in merged: merged[item[key]].update(item) else: merged[item[key]] = item # Sort the result to preserve test ordering in file merged = list(merged.values()) merged.sort(key=lambda t, key=key: t[key]) return merged def save_timings(self, test_results): # we only save test that have passed - timings for failed test might be # wrong (timeouts or early fails) passed_results = [t for t in test_results if t.status == 'Passed'] - new_timings = list(map(lambda t: {'name': t.name, 'time': t.time}, + new_timings = list(map(lambda t: {'name': t.name, 'time': t.time}, passed_results)) merged_timings = self.get_merged_timings(new_timings) with open(self.timing_file, 'w', encoding="utf8") as f: json.dump(merged_timings, f, indent=True) if __name__ == '__main__': main() diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py index 02acf088a1..8d9cf147f4 100755 --- a/test/functional/wallet_groups.py +++ b/test/functional/wallet_groups.py @@ -1,101 +1,101 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test wallet group functionality.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.messages import CTransaction, FromHex, ToHex from test_framework.util import ( assert_equal, ) def assert_approx(v, vexp, vspan=0.00001): if v < vexp - vspan: raise AssertionError("{} < [{}..{}]".format( str(v), str(vexp - vspan), str(vexp + vspan))) if v > vexp + vspan: raise AssertionError("{} > [{}..{}]".format( str(v), str(vexp - vspan), str(vexp + vspan))) class WalletGroupTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 3 self.extra_args = [[], [], ['-avoidpartialspends']] self.rpc_timeout = 120 def run_test(self): # Mine some coins self.nodes[0].generate(110) # Get some addresses from the two nodes addr1 = [self.nodes[1].getnewaddress() for i in range(3)] addr2 = [self.nodes[2].getnewaddress() for i in range(3)] addrs = addr1 + addr2 # Send 1 + 0.5 coin to each address [self.nodes[0].sendtoaddress(addr, 1.0) for addr in addrs] [self.nodes[0].sendtoaddress(addr, 0.5) for addr in addrs] self.nodes[0].generate(1) self.sync_all() # For each node, send 0.2 coins back to 0; # - node[1] should pick one 0.5 UTXO and leave the rest # - node[2] should pick one (1.0 + 0.5) UTXO group corresponding to a # given address, and leave the rest txid1 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 0.2) tx1 = self.nodes[1].getrawtransaction(txid1, True) # txid1 should have 1 input and 2 outputs assert_equal(1, len(tx1["vin"])) assert_equal(2, len(tx1["vout"])) # one output should be 0.2, the other should be ~0.3 v = [vout["value"] for vout in tx1["vout"]] v.sort() assert_approx(v[0], 0.2) assert_approx(v[1], 0.3, 0.0001) txid2 = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 0.2) tx2 = self.nodes[2].getrawtransaction(txid2, True) # txid2 
should have 2 inputs and 2 outputs assert_equal(2, len(tx2["vin"])) assert_equal(2, len(tx2["vout"])) # one output should be 0.2, the other should be ~1.3 v = [vout["value"] for vout in tx2["vout"]] v.sort() assert_approx(v[0], 0.2) assert_approx(v[1], 1.3, 0.0001) # Empty out node2's wallet self.nodes[2].sendtoaddress(address=self.nodes[0].getnewaddress( ), amount=self.nodes[2].getbalance(), subtractfeefromamount=True) self.sync_all() self.nodes[0].generate(1) # Fill node2's wallet with 10000 outputs corresponding to the same # scriptPubKey for i in range(5): raw_tx = self.nodes[0].createrawtransaction( - [{"txid": "0"*64, "vout": 0}], [{addr2[0]: 0.05}]) + [{"txid": "0" * 64, "vout": 0}], [{addr2[0]: 0.05}]) tx = FromHex(CTransaction(), raw_tx) tx.vin = [] tx.vout = [tx.vout[0]] * 2000 funded_tx = self.nodes[0].fundrawtransaction(ToHex(tx)) signed_tx = self.nodes[0].signrawtransactionwithwallet( funded_tx['hex']) self.nodes[0].sendrawtransaction(signed_tx['hex']) self.nodes[0].generate(1) self.sync_all() # Check that we can create a transaction that only requires ~100 of our # utxos, without pulling in all outputs and creating a transaction that # is way too big. assert self.nodes[2].sendtoaddress(address=addr2[0], amount=5) if __name__ == '__main__': WalletGroupTest().main()
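(A closing note on the -avoidpartialspends behaviour exercised above: the wallet groups its UTXOs by destination script and spends whole groups at a time, so that all coins paid to one address leave together. A minimal sketch of the grouping idea, not the wallet's actual coin-selection code; entries are shaped like listunspent output:

from collections import defaultdict

def group_by_script(utxos):
    # e.g. utxo: {"txid": ..., "vout": 0, "scriptPubKey": "...", "amount": 1.0}
    groups = defaultdict(list)
    for utxo in utxos:
        groups[utxo["scriptPubKey"]].append(utxo)
    return list(groups.values())

With the 1.0 + 0.5 coins sent to each address in the test, every address forms one two-UTXO group, which is why node 2's 0.2 spend consumes two inputs while node 1's consumes only one.)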