Changeset View
Changeset View
Standalone View
Standalone View
test/functional/test_runner.py
Show First 20 Lines • Show All 97 Lines • ▼ Show 20 Lines | class TestCase(): | ||||
def __init__(self, test_num, test_case, tests_dir, tmpdir, flags=None): | def __init__(self, test_num, test_case, tests_dir, tmpdir, flags=None): | ||||
self.tests_dir = tests_dir | self.tests_dir = tests_dir | ||||
self.tmpdir = tmpdir | self.tmpdir = tmpdir | ||||
self.test_case = test_case | self.test_case = test_case | ||||
self.test_num = test_num | self.test_num = test_num | ||||
self.flags = flags | self.flags = flags | ||||
def run(self, portseed_offset, run_extended): | def run(self, portseed_offset, run_tags): | ||||
t = self.test_case | t = self.test_case | ||||
portseed = self.test_num * 10 + portseed_offset | portseed = self.test_num * 1000 + portseed_offset | ||||
portseed_arg = ["--portseed={}".format(portseed)] | portseed_arg = ["--portseed={}".format(portseed)] | ||||
test_argv = t.split() | test_argv = t.split() | ||||
tmpdir = [os.path.join("--tmpdir=%s", "%s_%s") % | tmpdir = [os.path.join("--tmpdir=%s", "%s_%s") % | ||||
(self.tmpdir, re.sub(".py$", "", t), portseed)] | (self.tmpdir, re.sub(".py$", "", t), portseed)] | ||||
name = t | name = t | ||||
time0 = time.time() | time0 = time.time() | ||||
test_modulepath = None | test_modulepath = None | ||||
Show All 17 Lines | def run(self, portseed_offset, run_tags): | ||||
sys.stdout = test_stdout | sys.stdout = test_stdout | ||||
sys.stderr = test_stderr | sys.stderr = test_stderr | ||||
exit_code = None | exit_code = None | ||||
for prop in dir(test_module): | for prop in dir(test_module): | ||||
obj = getattr(test_module, prop) | obj = getattr(test_module, prop) | ||||
if inspect.isclass(obj) and issubclass(obj, BitcoinTestFramework) and obj is not BitcoinTestFramework and obj is not ComparisonTestFramework: | if inspect.isclass(obj) and issubclass(obj, BitcoinTestFramework) and obj is not BitcoinTestFramework and obj is not ComparisonTestFramework: | ||||
test_instance = obj() | test_instance = obj() | ||||
is_extended = hasattr(test_instance, 'extended') | |||||
if (is_extended and run_extended) or not is_extended: | # Give every test the fast tag by default unless otherwise specified | ||||
tags = ["fast"] | |||||
if hasattr(test_instance, 'test_tags'): | |||||
tags = test_instance.test_tags | |||||
if compare_tags(tags, run_tags): | |||||
exit_code = obj().main( | exit_code = obj().main( | ||||
test_argv[1:] + self.flags + portseed_arg + tmpdir) | test_argv[1:] + self.flags + portseed_arg + tmpdir) | ||||
else: | else: | ||||
exit_code = TestStatus.SKIPPED | exit_code = TestStatus.SKIPPED | ||||
except Exception as e: | except Exception as e: | ||||
print(e) | print(e) | ||||
finally: | finally: | ||||
sys.stdout = original_stdout | sys.stdout = original_stdout | ||||
▲ Show 20 Lines • Show All 57 Lines • ▼ Show 20 Lines | def main(): | ||||
parser.add_argument('--keepcache', '-k', action='store_true', | parser.add_argument('--keepcache', '-k', action='store_true', | ||||
help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.') | help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.') | ||||
parser.add_argument('--quiet', '-q', action='store_true', | parser.add_argument('--quiet', '-q', action='store_true', | ||||
help='only print results summary and failure logs') | help='only print results summary and failure logs') | ||||
parser.add_argument('--tmpdirprefix', '-t', | parser.add_argument('--tmpdirprefix', '-t', | ||||
default=tempfile.gettempdir(), help="Root directory for datadirs") | default=tempfile.gettempdir(), help="Root directory for datadirs") | ||||
parser.add_argument('--junitouput', '-ju', | parser.add_argument('--junitouput', '-ju', | ||||
default=os.path.join(build_dir, 'junit_results.xml'), help="file that will store JUnit formated test results.") | default=os.path.join(build_dir, 'junit_results.xml'), help="file that will store JUnit formated test results.") | ||||
parser.add_argument( | |||||
'--tags', nargs='+', default=[".*", "!slow"], help="List of tags to be run. Use '!' to negate") | |||||
args, unknown_args = parser.parse_known_args() | args, unknown_args = parser.parse_known_args() | ||||
if args.extended: | |||||
args.tags.append('slow') | |||||
# Create a set to store arguments and create the passon string | # Create a set to store arguments and create the passon string | ||||
tests = set(arg for arg in unknown_args if arg[:2] != "--") | tests = set(arg for arg in unknown_args if arg[:2] != "--") | ||||
passon_args = [arg for arg in unknown_args if arg[:2] == "--"] | passon_args = [arg for arg in unknown_args if arg[:2] == "--"] | ||||
passon_args.append("--configfile=%s" % configfile) | passon_args.append("--configfile=%s" % configfile) | ||||
# Set up logging | # Set up logging | ||||
logging_level = logging.INFO if args.quiet else logging.DEBUG | logging_level = logging.INFO if args.quiet else logging.DEBUG | ||||
logging.basicConfig(format='%(message)s', level=logging_level) | logging.basicConfig(format='%(message)s', level=logging_level) | ||||
Show All 27 Lines | def main(): | ||||
all_scripts = get_all_scripts_from_disk(tests_dir, NON_SCRIPTS) | all_scripts = get_all_scripts_from_disk(tests_dir, NON_SCRIPTS) | ||||
if tests: | if tests: | ||||
# Individual tests have been specified. Run specified tests that exist | # Individual tests have been specified. Run specified tests that exist | ||||
# in the all_scripts list. Accept the name with or without .py | # in the all_scripts list. Accept the name with or without .py | ||||
# extension. | # extension. | ||||
test_list = [t for t in all_scripts if | test_list = [t for t in all_scripts if | ||||
(t in tests or re.sub(".py$", "", t) in tests)] | (t in tests or re.sub(".py$", "", t) in tests)] | ||||
cutoff = sys.maxsize # do not cut off explicitly specified tests | |||||
else: | else: | ||||
# No individual tests have been specified. | # No individual tests have been specified. | ||||
# Run all tests that do not exceed | # Run all tests that do not exceed | ||||
test_list = all_scripts | test_list = all_scripts | ||||
cutoff = args.cutoff | |||||
if args.extended: | |||||
cutoff = sys.maxsize | |||||
# Remove the test cases that the user has explicitly asked to exclude. | # Remove the test cases that the user has explicitly asked to exclude. | ||||
if args.exclude: | if args.exclude: | ||||
for exclude_test in args.exclude.split(','): | for exclude_test in args.exclude.split(','): | ||||
if exclude_test + ".py" in test_list: | if exclude_test + ".py" in test_list: | ||||
test_list.remove(exclude_test + ".py") | test_list.remove(exclude_test + ".py") | ||||
# Add test parameters and remove long running tests if needed | # Add test parameters and remove long running tests if needed | ||||
Show All 12 Lines | if args.help: | ||||
[os.path.join(tests_dir, test_list[0]), '-h']) | [os.path.join(tests_dir, test_list[0]), '-h']) | ||||
sys.exit(0) | sys.exit(0) | ||||
if not args.keepcache: | if not args.keepcache: | ||||
shutil.rmtree(os.path.join(build_dir, "test", | shutil.rmtree(os.path.join(build_dir, "test", | ||||
"cache"), ignore_errors=True) | "cache"), ignore_errors=True) | ||||
run_tests(test_list, build_dir, tests_dir, args.junitouput, | run_tests(test_list, build_dir, tests_dir, args.junitouput, | ||||
config["environment"]["EXEEXT"], tmpdir, args.jobs, args.coverage, passon_args, args.extended) | config["environment"]["EXEEXT"], tmpdir, args.jobs, args.coverage, passon_args, args.tags) | ||||
def run_tests(test_list, build_dir, tests_dir, junitouput, exeext, tmpdir, num_jobs, enable_coverage=False, args=[], run_extended=False): | def run_tests(test_list, build_dir, tests_dir, junitouput, exeext, tmpdir, num_jobs, enable_coverage=False, args=[], tags=[]): | ||||
# Warn if bitcoind is already running (unix only) | # Warn if bitcoind is already running (unix only) | ||||
try: | try: | ||||
pidofOutput = subprocess.check_output(["pidof", "bitcoind"]) | pidofOutput = subprocess.check_output(["pidof", "bitcoind"]) | ||||
if pidofOutput is not None and pidofOutput != b'': | if pidofOutput is not None and pidofOutput != b'': | ||||
print("%sWARNING!%s There is already a bitcoind process running on this system. Tests may fail unexpectedly due to resource contention!" % ( | print("%sWARNING!%s There is already a bitcoind process running on this system. Tests may fail unexpectedly due to resource contention!" % ( | ||||
BOLD[1], BOLD[0])) | BOLD[1], BOLD[0])) | ||||
except (OSError, subprocess.SubprocessError): | except (OSError, subprocess.SubprocessError): | ||||
pass | pass | ||||
Show All 24 Lines | def run_tests(test_list, build_dir, tests_dir, junitouput, exeext, tmpdir, num_jobs, enable_coverage=False, args=[], tags=[]): | ||||
if len(test_list) > 1 and num_jobs > 1: | if len(test_list) > 1 and num_jobs > 1: | ||||
# Populate cache | # Populate cache | ||||
subprocess.check_output( | subprocess.check_output( | ||||
[os.path.join(tests_dir, 'create_cache.py')] + flags + [os.path.join("--tmpdir=%s", "cache") % tmpdir]) | [os.path.join(tests_dir, 'create_cache.py')] + flags + [os.path.join("--tmpdir=%s", "cache") % tmpdir]) | ||||
# Run Tests | # Run Tests | ||||
time0 = time.time() | time0 = time.time() | ||||
test_results = execute_test_processes( | test_results = execute_test_processes( | ||||
num_jobs, test_list, tests_dir, tmpdir, flags, run_extended) | num_jobs, test_list, tests_dir, tmpdir, flags, tags) | ||||
runtime = int(time.time() - time0) | runtime = int(time.time() - time0) | ||||
max_len_name = len(max(test_list, key=len)) | max_len_name = len(max(test_list, key=len)) | ||||
print_results(test_results, max_len_name, runtime) | print_results(test_results, max_len_name, runtime) | ||||
save_results_as_junit(test_results, junitouput, runtime) | save_results_as_junit(test_results, junitouput, runtime) | ||||
if coverage: | if coverage: | ||||
coverage.report_rpc_coverage() | coverage.report_rpc_coverage() | ||||
▲ Show 20 Lines • Show All 69 Lines • ▼ Show 20 Lines | while True: | ||||
except Empty as e: | except Empty as e: | ||||
if not on_ci(): | if not on_ci(): | ||||
print("Running jobs: {}".format( | print("Running jobs: {}".format( | ||||
", ".join(running_jobs)), end="\r") | ", ".join(running_jobs)), end="\r") | ||||
sys.stdout.flush() | sys.stdout.flush() | ||||
printed_status = True | printed_status = True | ||||
def handle_test_cases(job_queue, update_queue, run_extended): | def handle_test_cases(job_queue, update_queue, tags): | ||||
""" | """ | ||||
job_runner represents a single thread that is part of a worker pool. | job_runner represents a single thread that is part of a worker pool. | ||||
It waits for a test, then executes that test. It also reports start | It waits for a test, then executes that test. It also reports start | ||||
and result messages to handle_update_messages. | and result messages to handle_update_messages. | ||||
""" | """ | ||||
# In case there is a graveyard of zombie bitcoinds, we can apply a | # In case there is a graveyard of zombie bitcoinds, we can apply a | ||||
# pseudorandom offset to hopefully jump over them. | # pseudorandom offset to hopefully jump over them. | ||||
# (625 is PORT_RANGE/MAX_NODES) | # (625 is PORT_RANGE/MAX_NODES) | ||||
portseed_offset = int(time.time() * 1000) % 625 | portseed_offset = int(time.time() * 1000) % 625 | ||||
while True: | while True: | ||||
test = job_queue.get() | test = job_queue.get() | ||||
if test is None: | if test is None: | ||||
break | break | ||||
# Signal that the test is starting to inform the poor waiting | # Signal that the test is starting to inform the poor waiting | ||||
# programmer | # programmer | ||||
update_queue.put(test) | update_queue.put(test) | ||||
result = test.run(portseed_offset, run_extended) | result = test.run(portseed_offset, tags) | ||||
update_queue.put(result) | update_queue.put(result) | ||||
job_queue.task_done() | job_queue.task_done() | ||||
def execute_test_processes(num_jobs, test_list, tests_dir, tmpdir, flags, run_extended): | def compare_tags(test_tags, run_tags): | ||||
""" | |||||
Compare two sets of tags. Tags are evaluated in order, so if an | |||||
include is specified after an exclusion, then we will still run the test. | |||||
""" | |||||
run_test = False | |||||
for tag in run_tags: | |||||
run = True | |||||
if tag.startswith('!'): | |||||
run = False | |||||
tag = tag[1:] | |||||
for test_tag in test_tags: | |||||
if re.match(tag, test_tag): | |||||
run_test = run | |||||
return run_test | |||||
def execute_test_processes(num_jobs, test_list, tests_dir, tmpdir, flags, tags): | |||||
ctx = mp.get_context('spawn') | ctx = mp.get_context('spawn') | ||||
update_queue = ctx.JoinableQueue() | update_queue = ctx.JoinableQueue() | ||||
job_queue = ctx.JoinableQueue() | job_queue = ctx.JoinableQueue() | ||||
results_queue = ctx.Queue() | results_queue = ctx.Queue() | ||||
## | ## | ||||
# Setup our threads, and start sending tasks | # Setup our threads, and start sending tasks | ||||
## | ## | ||||
# Start our result collection thread. | # Start our result collection thread. | ||||
t = ctx.Process(target=handle_update_messages, | t = ctx.Process(target=handle_update_messages, | ||||
args=(update_queue, results_queue,)) | args=(update_queue, results_queue,)) | ||||
t.start() | t.start() | ||||
# Start some worker threads | # Start some worker threads | ||||
for j in range(num_jobs): | for j in range(num_jobs): | ||||
t = ctx.Process(target=handle_test_cases, | t = ctx.Process(target=handle_test_cases, | ||||
args=(job_queue, update_queue, run_extended,)) | args=(job_queue, update_queue, tags,)) | ||||
t.start() | t.start() | ||||
# Push all our test cases into the job queue. | # Push all our test cases into the job queue. | ||||
for i, t in enumerate(test_list): | for i, t in enumerate(test_list): | ||||
job_queue.put(TestCase(i, t, tests_dir, tmpdir, flags)) | job_queue.put(TestCase(i, t, tests_dir, tmpdir, flags)) | ||||
# Wait for all the jobs to be completed | # Wait for all the jobs to be completed | ||||
job_queue.join() | job_queue.join() | ||||
▲ Show 20 Lines • Show All 198 Lines • Show Last 20 Lines |