diff --git a/cmake/modules/TestSuite.cmake b/cmake/modules/TestSuite.cmake index 00bb6e5de..23dc89bbe 100644 --- a/cmake/modules/TestSuite.cmake +++ b/cmake/modules/TestSuite.cmake @@ -1,214 +1,217 @@ # Allow to easily build test suites option(ENABLE_JUNIT_REPORT "Enable Junit report generation for targets that support it" OFF) set(JUNIT_REPORT_DIRECTORY "${CMAKE_BINARY_DIR}/test/junit") +set(TEST_LOG_DIRECTORY "${CMAKE_BINARY_DIR}/test/log") set_property( DIRECTORY "${CMAKE_SOURCE_DIR}" APPEND PROPERTY ADDITIONAL_CLEAN_FILES - "${JUNIT_REPORT_DIRECTORY}" "${CMAKE_BINARY_DIR}/test/tmp" + "${JUNIT_REPORT_DIRECTORY}" + "${TEST_LOG_DIRECTORY}" ) macro(add_test_environment VARIABLE VALUE) set_property(GLOBAL APPEND PROPERTY TEST_ENVIRONMENT "${VARIABLE}=${VALUE}") endmacro() function(add_test_custom_target TARGET) cmake_parse_arguments(ARG "" "" "CUSTOM_TARGET_ARGS;TEST_COMMAND" ${ARGN}) get_property(TEST_ENVIRONMENT GLOBAL PROPERTY TEST_ENVIRONMENT) add_custom_target(${TARGET} ${ARG_CUSTOM_TARGET_ARGS} COMMAND ${CMAKE_COMMAND} -E make_directory "${JUNIT_REPORT_DIRECTORY}" + COMMAND ${CMAKE_COMMAND} -E make_directory "${TEST_LOG_DIRECTORY}" COMMAND ${CMAKE_COMMAND} -E env ${TEST_ENVIRONMENT} ${ARG_TEST_COMMAND} ) endfunction() # Define a new target property to hold the list of tests associated with a test # suite. This property is named UNIT_TESTS to avoid confusion with the directory # level property TESTS. 
define_property(TARGET PROPERTY UNIT_TESTS BRIEF_DOCS "List of tests" FULL_DOCS "A list of the tests associated with a test suite" ) macro(get_target_from_suite SUITE TARGET) set(${TARGET} "check-${SUITE}") endmacro() macro(get_pool_from_suite SUITE POOL) set(${POOL} "${SUITE}-pool") endmacro() include(Coverage) function(create_test_suite_with_parent_targets NAME) get_target_from_suite(${NAME} TARGET) add_custom_target(${TARGET} COMMENT "Running ${NAME} test suite" COMMAND "${CMAKE_COMMAND}" -E echo "PASSED: ${NAME} test suite" ) foreach(PARENT_TARGET ${ARGN}) if(TARGET ${PARENT_TARGET}) add_dependencies(${PARENT_TARGET} ${TARGET}) endif() endforeach() add_custom_target_coverage(${TARGET}) endfunction() macro(create_test_suite NAME) create_test_suite_with_parent_targets(${NAME} check-all check-extended) endmacro() # After this call, all the tests added to the suite will also be added to the # pool. Works only with the Ninja generators. function(test_suite_create_pool SUITE JOBS) # Create a pool for the test suite get_pool_from_suite(${SUITE} POOL) set_property(GLOBAL APPEND PROPERTY JOB_POOLS ${POOL}=${JOBS}) endfunction() include(InstallationHelper) function(install_test SUITE NAME) # Allow for installing all tests ... if(NOT TARGET install-tests) add_custom_target(install-tests) endif() # ... a complete test suite ... if(NOT TARGET install-test-suite-${SUITE}) add_custom_target(install-test-suite-${SUITE}) endif() if(NOT TARGET install-${SUITE}-${NAME}) install_target(${NAME} COMPONENT ${SUITE}-${NAME} EXCLUDE_FROM_ALL) # ... 
or a single test add_custom_target(install-${SUITE}-${NAME} COMMENT "Installing ${NAME} from test suite ${SUITE}" COMMAND "${CMAKE_COMMAND}" -DCOMPONENT="${SUITE}-${NAME}" -DCMAKE_INSTALL_PREFIX="${CMAKE_INSTALL_PREFIX}" -P cmake_install.cmake DEPENDS ${NAME} ) add_dependencies(install-test-suite-${SUITE} install-${SUITE}-${NAME}) add_dependencies(install-tests install-${SUITE}-${NAME}) endif() endfunction() set(TEST_RUNNER_TEMPLATE "${CMAKE_CURRENT_LIST_DIR}/../templates/TestRunner.cmake.in") function(add_test_runner SUITE NAME EXECUTABLE) cmake_parse_arguments(ARG "JUNIT" "" "" ${ARGN}) get_target_from_suite(${SUITE} SUITE_TARGET) set(TARGET "${SUITE_TARGET}-${NAME}") # If there is a pool associated to the test suite, then add the test to the # pool. get_property(JOB_POOLS GLOBAL PROPERTY JOB_POOLS) get_pool_from_suite(${SUITE} POOL) if(JOB_POOLS MATCHES ${POOL}) set(JOB_POOL_ARG JOB_POOL ${POOL}) endif() add_test_custom_target(${TARGET} TEST_COMMAND "${CMAKE_SOURCE_DIR}/cmake/utils/test_wrapper.sh" - "${SUITE}-${NAME}.log" + "${TEST_LOG_DIRECTORY}/${SUITE}-${NAME}.log" ${CMAKE_CROSSCOMPILING_EMULATOR} "$<TARGET_FILE:${EXECUTABLE}>" ${ARG_UNPARSED_ARGUMENTS} CUSTOM_TARGET_ARGS COMMENT "${SUITE}: testing ${NAME}" DEPENDS ${EXECUTABLE} VERBATIM ${JOB_POOL_ARG} ) add_dependencies(${SUITE_TARGET} ${TARGET}) if(ENABLE_JUNIT_REPORT AND ARG_JUNIT) add_custom_command(TARGET ${TARGET} POST_BUILD COMMENT "Processing junit report for test ${NAME} from suite ${SUITE}" COMMAND_EXPAND_LISTS COMMAND "${Python_EXECUTABLE}" "${CMAKE_SOURCE_DIR}/cmake/utils/junit-reports-merge.py" "${JUNIT_REPORT_DIRECTORY}" "${CMAKE_BINARY_DIR}/test/tmp" "${SUITE}" "${NAME}" ) endif() install_test(${SUITE} ${EXECUTABLE}) endfunction() function(add_test_to_suite SUITE NAME) add_executable(${NAME} EXCLUDE_FROM_ALL ${ARGN}) add_test_runner(${SUITE} ${NAME} ${NAME}) get_target_from_suite(${SUITE} TARGET) set_property( TARGET ${TARGET} APPEND PROPERTY UNIT_TESTS ${NAME} ) endfunction(add_test_to_suite) 
function(add_boost_unit_tests_to_suite SUITE NAME) cmake_parse_arguments(ARG "" "" "TESTS" ${ARGN} ) get_target_from_suite(${SUITE} SUITE_TARGET) add_executable(${NAME} EXCLUDE_FROM_ALL ${ARG_UNPARSED_ARGUMENTS}) add_dependencies("${SUITE_TARGET}" ${NAME}) set(HRF_LOGGER "HRF,test_suite") foreach(_test_source ${ARG_TESTS}) target_sources(${NAME} PRIVATE "${_test_source}") get_filename_component(_test_name "${_test_source}" NAME_WE) if(ENABLE_JUNIT_REPORT) set(JUNIT_LOGGER ":JUNIT,message,${SUITE}-${_test_name}.xml") endif() add_test_runner( ${SUITE} ${_test_name} ${NAME} JUNIT "--run_test=${_test_name}" "--logger=${HRF_LOGGER}${JUNIT_LOGGER}" ) set_property( TARGET ${SUITE_TARGET} APPEND PROPERTY UNIT_TESTS ${_test_name} ) endforeach() find_package(Boost 1.59 REQUIRED unit_test_framework) target_link_libraries(${NAME} Boost::unit_test_framework) # We need to detect if the BOOST_TEST_DYN_LINK flag is required include(CheckCXXSourceCompiles) set(CMAKE_REQUIRED_LIBRARIES Boost::unit_test_framework) check_cxx_source_compiles(" #define BOOST_TEST_DYN_LINK #define BOOST_TEST_MAIN #include <boost/test/unit_test.hpp> " BOOST_REQUIRES_TEST_DYN_LINK) if(BOOST_REQUIRES_TEST_DYN_LINK) target_compile_definitions(${NAME} PRIVATE BOOST_TEST_DYN_LINK) endif() endfunction(add_boost_unit_tests_to_suite) diff --git a/contrib/teamcity/build-configurations.json b/contrib/teamcity/build-configurations.json index 38029d340..38b8b7df6 100644 --- a/contrib/teamcity/build-configurations.json +++ b/contrib/teamcity/build-configurations.json @@ -1,280 +1,278 @@ { "templates": { "common_unix_artifacts": { "artifacts": { "CMakeCache.txt": "CMakeCache.txt", "src/bitcoind": "bin/bitcoind", "src/bitcoin-*": "bin", "src/qt/bitcoin-qt": "bin/bitcoin-qt", "src/bench/bitcoin-bench": "bin/bitcoin-bench", "src/seeder/bitcoin-seeder": "bin/bitcoin-seeder", "src/libbitcoinconsensus.*": "lib", "src/test/test_bitcoin": "bin/test_bitcoin", "src/qt/test/test_bitcoin-qt": "bin/test_bitcoin-qt", "src/seeder/test/test-seeder": 
"bin/test-seeder", - "src/qt/test/test_bitcoin-qt.log": "log/qt/test_bitcoin-qt.log", - "src/seeder/test/*.log": "log/seeder", "test/tmp/test_runner_*": "functional" } }, "gitian_builds": { "script": "gitian.sh", "timeout": 7200, "artifacts": { "gitian-results": "" } } }, "builds": { "build-asan": { "script": "builds/build-asan.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1800, "env": { "ASAN_OPTIONS": "log_path=stdout", "LSAN_OPTIONS": "log_path=stdout" } }, "build-autotools": { "script": "builds/build-autotools.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1200, "artifacts": { "src/bench/bench_bitcoin": "bin/bench_bitcoin", "config.log": "log/config.log", "src/test-suite.log": "log/test-suite.log", "src/test/*.log": "log", "src/seeder/test/test-seeder.log": "log/test-seeder.log" } }, "build-bench": { "script": "builds/build-bench.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1200 }, "build-clang-10": { "runOnDiff": true, "script": "builds/build-clang-10.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1200 }, "build-clang-tidy": { "runOnDiff": true, "script": "builds/build-clang-tidy.sh", "templates": [ "common_unix_artifacts" ], "timeout": 600, "artifacts": { "clang-tidy-warnings.txt": "clang-tidy-warnings.txt" } }, "build-coverage": { "script": "builds/build-coverage.sh", "templates": [ "common_unix_artifacts" ], "timeout": 4800, "artifacts": { "coverage.tar.gz": "coverage.tar.gz" } }, "build-diff": { "runOnDiff": true, "script": "builds/build-diff.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1200 }, "build-docs": { "script": "builds/build-docs.sh", "templates": [ "common_unix_artifacts" ], "timeout": 600, "artifacts": { "doc/*": "doc" } }, "build-ibd": { "script": "builds/build-ibd.sh", "templates": [ "common_unix_artifacts" ], "timeout": 14400, "artifacts": { "ibd/debug.log": "log/debug.log" } }, "build-ibd-no-assumevalid-checkpoint": { "script": "builds/build-ibd-no-assumevalid-checkpoint.sh", 
"templates": [ "common_unix_artifacts" ], "timeout": 21600, "artifacts": { "ibd/debug.log": "log/debug.log" } }, "build-linux64": { "script": "builds/build-linux64.sh", "templates": [ "common_unix_artifacts" ], "timeout": 3600 }, "build-linux-aarch64": { "script": "builds/build-linux-aarch64.sh", "templates": [ "common_unix_artifacts" ], "timeout": 3600, "env": { "QEMU_LD_PREFIX": "/usr/aarch64-linux-gnu" } }, "build-linux-arm": { "script": "builds/build-linux-arm.sh", "templates": [ "common_unix_artifacts" ], "timeout": 3600, "env": { "QEMU_LD_PREFIX": "/usr/arm-linux-gnueabihf" } }, "build-make-generator": { "script": "builds/build-make-generator.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1200 }, "build-master": { "script": "builds/build-master.sh", "templates": [ "common_unix_artifacts" ], "timeout": 4800 }, "build-osx": { "script": "builds/build-osx.sh", "templates": [ "common_unix_artifacts" ], "timeout": 3600, "artifacts": { "src/qt/BitcoinABC-Qt.app": "bin", "Bitcoin-ABC.dmg": "Bitcoin-ABC.dmg" } }, "build-secp256k1": { "script": "builds/build-secp256k1.sh", "templates": [ "common_unix_artifacts" ], "timeout": 900, "artifacts": { "src/secp256k1/libsecp256k1*": "lib" } }, "build-tsan": { "script": "builds/build-tsan.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1800, "env": { "TSAN_OPTIONS": "log_path=stdout" } }, "build-ubsan": { "script": "builds/build-ubsan.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1800, "env": { "UBSAN_OPTIONS": "log_path=stdout" } }, "build-win64": { "script": "builds/build-win64.sh", "timeout": 3600, "artifacts": { "CMakeCache.txt": "CMakeCache.txt", "src/bitcoind.exe": "bin/bitcoind.exe", "src/bitcoin-*.exe": "bin", "src/qt/bitcoin-qt.exe": "bin/bitcoin-qt.exe", "src/bench/bitcoin-bench.exe": "bin/bitcoin-bench.exe", "src/libbitcoinconsensus*": "lib", "src/test/test_bitcoin.exe": "bin/test_bitcoin.exe", "src/qt/test/test_bitcoin-qt.exe": "bin/test_bitcoin-qt.exe", 
"src/qt/test/test_bitcoin-qt.log": "log/qt/test_bitcoin-qt.log", "bitcoin-abc-*-x86_64-w64-mingw32.exe": "bitcoin-abc-x86_64-w64-mingw32.exe" } }, "build-without-cli": { "script": "builds/build-without-cli.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1200 }, "build-without-wallet": { "runOnDiff": true, "script": "builds/build-without-wallet.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1200 }, "build-without-zmq": { "script": "builds/build-without-zmq.sh", "templates": [ "common_unix_artifacts" ], "timeout": 1800 }, "check-seeds": { "script": "builds/check-seeds.sh", "templates": [ "common_unix_artifacts" ], "timeout": 600 }, "gitian-linux": { "templates": [ "gitian_builds" ], "env": { "OS_NAME": "linux" } }, "gitian-osx": { "templates": [ "gitian_builds" ], "env": { "OS_NAME": "osx" } }, "gitian-win": { "templates": [ "gitian_builds" ], "env": { "OS_NAME": "win" } } } } diff --git a/contrib/teamcity/build-configurations.py b/contrib/teamcity/build-configurations.py index f2a55b132..1d6c353a0 100755 --- a/contrib/teamcity/build-configurations.py +++ b/contrib/teamcity/build-configurations.py @@ -1,382 +1,384 @@ #!/usr/bin/env python3 # Copyright (c) 2020 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. import argparse import asyncio from deepmerge import always_merger import json import os from pathlib import Path, PurePath import shutil import subprocess import sys from teamcity import is_running_under_teamcity from teamcity.messages import TeamcityServiceMessages # Default timeout value in seconds. Should be overridden by the # configuration file. 
DEFAULT_TIMEOUT = 1 * 60 * 60 if sys.version_info < (3, 6): raise SystemError("This script requires python >= 3.6") class BuildConfiguration: def __init__(self, script_root, config_file, build_name=None): self.script_root = script_root self.config_file = config_file self.name = None self.config = {} self.script_path = Path() if not config_file.is_file(): raise FileNotFoundError( "The configuration file does not exist {}".format( str(config_file) ) ) if build_name is not None: self.load(build_name) def load(self, build_name): self.name = build_name # Read the configuration with open(self.config_file, encoding="utf-8") as f: config = json.load(f) # The configuration root should contain a mandatory element "builds", and # it should not be empty. if not config.get("builds", None): raise AssertionError( "Invalid configuration file {}: the \"builds\" element is missing or empty".format( str(self.config_file) ) ) # Check the target build has an entry in the configuration file build = config["builds"].get(self.name, None) if not build: raise AssertionError( "{} is not a valid build identifier. Valid identifiers are {}".format( self.name, list(config.keys()) ) ) # Get a list of the templates, if any templates = config.get("templates", {}) # If the build references some templates, merge all the configurations. # The merge is applied in the same order as the templates are declared # in the template list. template_config = {} template_names = build.get("templates", []) for template_name in template_names: # Raise an error if the template does not exist if template_name not in templates: raise AssertionError( "Build {} configuration inherits from template {}, but the template does not exist.".format( self.name, template_name ) ) always_merger.merge(template_config, templates.get(template_name)) self.config = always_merger.merge(template_config, build) # Make sure there is a script file associated with the build... 
script = self.config.get("script", None) if script is None: raise AssertionError( "No script provided for the build {}".format( self.name ) ) # ... and that the script file can be executed self.script_path = Path(self.script_root.joinpath(script)) if not self.script_path.is_file() or not os.access(self.script_path, os.X_OK): raise FileNotFoundError( "The script file {} does not exist or does not have execution permission".format( str(self.script_path) ) ) def get(self, key, default): return self.config.get(key, default) class UserBuild(): def __init__(self, configuration, project_root): self.configuration = configuration self.project_root = project_root # Create the build directory as needed self.build_directory = Path( self.project_root.joinpath( 'abc-ci-builds', self.configuration.name)) self.build_directory.mkdir(exist_ok=True, parents=True) self.artifact_dir = self.build_directory.joinpath("artifacts") self.junit_reports_dir = self.build_directory.joinpath("test/junit") + self.test_logs_dir = self.build_directory.joinpath("test/log") # We will provide the required environment variables self.environment_variables = { "BUILD_DIR": str(self.build_directory), "CMAKE_PLATFORMS_DIR": self.project_root.joinpath("cmake", "platforms"), "THREADS": str(os.cpu_count() or 1), "TOPLEVEL": str(self.project_root), } # Build 2 log files: # - the full log will contain all unfiltered content # - the clean log will contain the same filtered content as what is # printed to stdout. This filter is done in print_line_to_logs(). self.logs = {} self.logs["clean_log"] = self.build_directory.joinpath( "build.clean.log") if self.logs["clean_log"].is_file(): self.logs["clean_log"].unlink() self.logs["full_log"] = self.build_directory.joinpath("build.full.log") if self.logs["full_log"].is_file(): self.logs["full_log"].unlink() def copy_artifacts(self, artifacts): if self.artifact_dir.is_dir(): shutil.rmtree(self.artifact_dir) self.artifact_dir.mkdir(exist_ok=True) # Find and copy artifacts. 
# The source is relative to the build tree, the destination relative to # the artifact directory. # The artifact directory is located in the build directory tree, results # from it needs to be excluded from the glob matches to prevent infinite # recursion. for pattern, dest in artifacts.items(): matches = [m for m in sorted(self.build_directory.glob( pattern)) if self.artifact_dir not in m.parents and self.artifact_dir != m] dest = self.artifact_dir.joinpath(dest) # Pattern did not match if not matches: continue # If there is a single file, destination is the new file path if len(matches) == 1 and matches[0].is_file(): # Create the parent directories as needed dest.parent.mkdir(parents=True, exist_ok=True) shutil.copy2(matches[0], dest) continue # If there are multiple files or a single directory, destination is a # directory. dest.mkdir(parents=True, exist_ok=True) for match in matches: if match.is_file(): shutil.copy2(match, dest) else: shutil.copytree(match, dest.joinpath(match.name)) def print_line_to_logs(self, line): # Always print to the full log with open(self.logs["full_log"], 'a', encoding='utf-8') as log: log.write(line) # Discard the set -x bash output for stdout and the clean log if not line.startswith("+"): with open(self.logs["clean_log"], 'a', encoding='utf-8') as log: log.write(line) print(line.rstrip()) async def process_stdout(self, stdout): while True: line = await stdout.readline() line = line.decode('utf-8') if not line: break self.print_line_to_logs(line) async def run_build(self, args=[]): proc = await asyncio.create_subprocess_exec( *([str(self.configuration.script_path)] + args), # Buffer limit is 64KB by default, but we need a larger buffer: limit=1024 * 128, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT, cwd=self.build_directory, env={ **os.environ, **self.environment_variables, **self.configuration.get("env", {}) }, ) await asyncio.wait([ self.process_stdout(proc.stdout) ]) return await proc.wait() async def 
wait_for_build(self, timeout, args=[]): message = "Build {} completed successfully".format( self.configuration.name ) try: return_code = await asyncio.wait_for(self.run_build(args), timeout) if return_code != 0: message = "Build {} failed with exit code {}".format( self.configuration.name, return_code ) except asyncio.TimeoutError: message = "Build {} timed out after {:.1f}s".format( self.configuration.name, round(timeout, 1) ) # The process is killed, set return code to 128 + 9 (SIGKILL) = 137 return_code = 137 finally: self.print_line_to_logs(message) # Always add the build logs to the root of the artifacts artifacts = { **self.configuration.get("artifacts", {}), str(self.logs["full_log"].relative_to(self.build_directory)): "", str(self.logs["clean_log"].relative_to(self.build_directory)): "", str(self.junit_reports_dir.relative_to(self.build_directory)): "", + str(self.test_logs_dir.relative_to(self.build_directory)): "", } self.copy_artifacts(artifacts) return (return_code, message) def run(self, args=[]): return_code, message = asyncio.run( self.wait_for_build( self.configuration.get( "timeout", DEFAULT_TIMEOUT)) ) return (return_code, message) class TeamcityBuild(UserBuild): def __init__(self, configuration, project_root): super().__init__(configuration, project_root) # This accounts for the volume mapping from the container. # Our local /results is mapped to some relative ./results on the host, # so we use /results/artifacts to copy our files but results/artifacts as # an artifact path for teamcity. # TODO abstract out the volume mapping self.artifact_dir = Path("/results/artifacts") self.teamcity_messages = TeamcityServiceMessages() def copy_artifacts(self, artifacts): super().copy_artifacts(artifacts) # Start loading the junit reports. 
junit_reports_pattern = "{}/junit/*.xml".format( str(self.artifact_dir.relative_to("/")) ) self.teamcity_messages.importData("junit", junit_reports_pattern) # Instruct teamcity to upload our artifact directory artifact_path_pattern = "+:{}=>artifacts.tar.gz".format( str(self.artifact_dir.relative_to("/")) ) self.teamcity_messages.publishArtifacts(artifact_path_pattern) def run(self, args=[]): # Let the user know what build is being run. # This makes it easier to retrieve the info from the logs. self.teamcity_messages.customMessage( "Starting build {}".format(self.configuration.name), status="NORMAL" ) return_code, message = super().run() # Since we are aborting the build, make sure to flush everything first os.sync() if return_code != 0: # Add a build problem to the report self.teamcity_messages.buildProblem( message, # Let Teamcity calculate an ID from our message None ) # Change the final build message self.teamcity_messages.buildStatus( # Don't change the status, let Teamcity set it to failure None, message ) else: # Change the final build message but keep the original one as well self.teamcity_messages.buildStatus( # Don't change the status, let Teamcity set it to success None, "{} ({{build.status.text}})".format(message) ) return (return_code, message) def main(): script_dir = PurePath(os.path.realpath(__file__)).parent # By default search for a configuration file in the same directory as this # script. 
default_config_path = Path( script_dir.joinpath("build-configurations.json") ) parser = argparse.ArgumentParser(description="Run a CI build") parser.add_argument( "build", help="The name of the build to run" ) parser.add_argument( "--config", "-c", help="Path to the builds configuration file (default to {})".format( str(default_config_path) ) ) args, unknown_args = parser.parse_known_args() # Check the configuration file exists config_path = Path(args.config) if args.config else default_config_path build_configuration = BuildConfiguration( script_dir, config_path, args.build) git_root = PurePath( subprocess.run( ['git', 'rev-parse', '--show-toplevel'], capture_output=True, check=True, encoding='utf-8', text=True, ).stdout.strip() ) if is_running_under_teamcity(): build = TeamcityBuild(build_configuration, git_root) else: build = UserBuild(build_configuration, git_root) sys.exit(build.run(unknown_args)[0]) if __name__ == '__main__': main()