diff --git a/contrib/buildbot/requirements.txt b/contrib/buildbot/requirements.txt index 31250cfb0..8236c659e 100644 --- a/contrib/buildbot/requirements.txt +++ b/contrib/buildbot/requirements.txt @@ -1,5 +1,6 @@ mock flask phabricator==0.7.0 +pyyaml requests slackclient diff --git a/contrib/buildbot/server.py b/contrib/buildbot/server.py index 9beb1b70e..7f2dd3ce8 100755 --- a/contrib/buildbot/server.py +++ b/contrib/buildbot/server.py @@ -1,906 +1,906 @@ #!/usr/bin/env python3 # # Copyright (c) 2019 The Bitcoin ABC developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from build import BuildStatus, BuildTarget from flask import abort, Flask, request from functools import wraps import hashlib import hmac -import json import os from phabricator_wrapper import ( BITCOIN_ABC_PROJECT_PHID, ) import re from shieldio import RasterBadge from shlex import quote from teamcity import TeamcityRequestException +import yaml # Some keywords used by TeamCity and tcWebHook SUCCESS = "success" FAILURE = "failure" RUNNING = "running" UNRESOLVED = "UNRESOLVED" LANDBOT_BUILD_TYPE = "BitcoinAbcLandBot" with open(os.path.join(os.path.dirname(__file__), 'resources', 'teamcity-icon-16.base64'), 'rb') as icon: BADGE_TC_BASE = RasterBadge( label='TC build', logo='data:image/png;base64,{}'.format( icon.read().strip().decode('utf-8')), ) BADGE_TRAVIS_BASE = RasterBadge( label='Travis build', logo='travis' ) def create_server(tc, phab, slackbot, travis, jsonEncoder=None): # Create Flask app for use as decorator app = Flask("abcbot") # json_encoder can be overridden for testing if jsonEncoder: app.json_encoder = jsonEncoder phab.setLogger(app.logger) tc.set_logger(app.logger) travis.set_logger(app.logger) # A collection of the known build targets create_server.diff_targets = {} # Build status panel data create_server.panel_data = {} # Whether the last status check of master was green 
create_server.master_is_green = True # This decorator specifies an HMAC secret environment variable to use for verifying # requests for the given route. Currently, we're using Phabricator to trigger these # routes as webhooks, and a separate HMAC secret is required for each hook. # Phabricator does not support basic auth for webhooks, so HMAC must be # used instead. def verify_hmac(secret_env): def decorator(fn): @wraps(fn) def decorated_function(*args, **kwargs): secret = os.getenv(secret_env, None) if not secret: app.logger.info( "Error: HMAC env variable '{}' does not exist".format(secret_env)) abort(401) data = request.get_data() digest = hmac.new( secret.encode(), data, hashlib.sha256).hexdigest() hmac_header = request.headers.get( 'X-Phabricator-Webhook-Signature') if not hmac_header: abort(401) if not hmac.compare_digest( digest.encode(), hmac_header.encode()): abort(401) return fn(*args, **kwargs) return decorated_function return decorator def get_json_request_data(request): if not request.is_json: abort(415, "Expected content-type is 'application/json'") return request.get_json() @app.route("/getCurrentUser", methods=['GET']) def getCurrentUser(): return request.authorization.username if request.authorization else None @app.route("/backportCheck", methods=['POST']) @verify_hmac('HMAC_BACKPORT_CHECK') def backportCheck(): data = get_json_request_data(request) revisionId = data['object']['phid'] revisionSearchArgs = { "constraints": { "phids": [revisionId], }, } data_list = phab.differential.revision.search( **revisionSearchArgs).data assert len(data_list) == 1, "differential.revision.search({}): Expected 1 revision, got: {}".format( revisionSearchArgs, data_list) summary = data_list[0]['fields']['summary'] foundPRs = 0 multilineCodeBlockDelimiters = 0 newSummary = "" for line in summary.splitlines(keepends=True): multilineCodeBlockDelimiters += len(re.findall(r'```', line)) # Only link PRs that do not reside in code blocks if multilineCodeBlockDelimiters % 
2 == 0: def replacePRWithLink(baseUrl): def repl(match): nonlocal foundPRs # This check matches identation-based code blocks (2+ spaces) # and common cases for single-line code blocks (using # both single and triple backticks) if match.string.startswith(' ') or len( re.findall(r'`', match.string[:match.start()])) % 2 > 0: # String remains unchanged return match.group(0) else: # Backport PR is linked inline foundPRs += 1 PRNum = match.group(1) remaining = '' if len(match.groups()) >= 2: remaining = match.group(2) return '[[{}/{} | PR{}]]{}'.format( baseUrl, PRNum, PRNum, remaining) return repl line = re.sub( r'PR(\d{3}\d+)', replacePRWithLink( 'https://github.com/bitcoin/bitcoin/pull'), line) # Be less aggressive about serving libsecp256k1 links. Check # for some reference to the name first. if re.search('secp', line, re.IGNORECASE): line = re.sub(r'PR(\d{2}\d?)([^\d]|$)', replacePRWithLink( 'https://github.com/bitcoin-core/secp256k1/pull'), line) newSummary += line if foundPRs > 0: phab.updateRevisionSummary(revisionId, newSummary) commentMessage = ("[Bot Message]\n" "One or more PR numbers were detected in the summary.\n" "Links to those PRs have been inserted into the summary for reference.") phab.commentOnRevision(revisionId, commentMessage) return SUCCESS, 200 @app.route("/build", methods=['POST']) def build(): buildTypeId = request.args.get('buildTypeId', None) ref = request.args.get('ref', 'master') PHID = request.args.get('PHID', None) abcBuildName = request.args.get('abcBuildName', None) properties = None if abcBuildName: properties = [{ 'name': 'env.ABC_BUILD_NAME', 'value': abcBuildName, }] build_id = tc.trigger_build(buildTypeId, ref, PHID, properties)['id'] if PHID in create_server.diff_targets: build_target = create_server.diff_targets[PHID] else: build_target = BuildTarget(PHID) build_target.queue_build(build_id, abcBuildName) create_server.diff_targets[PHID] = build_target return SUCCESS, 200 @app.route("/buildDiff", methods=['POST']) def 
build_diff(): def get_mandatory_argument(argument): value = request.args.get(argument, None) if value is None: raise AssertionError( "Calling /buildDiff endpoint with missing mandatory argument {}:\n{}".format( argument, request.args ) ) return value staging_ref = get_mandatory_argument('stagingRef') target_phid = get_mandatory_argument('targetPHID') # Get the configuration from master - config = json.loads(phab.get_file_content_from_master( - "contrib/teamcity/build-configurations.json")) + config = yaml.safe_load(phab.get_file_content_from_master( + "contrib/teamcity/build-configurations.yml")) # Get a list of the builds that should run on diffs builds = [ k for k, v in config.get( 'builds', {}).items() if v.get( 'runOnDiff', False)] if target_phid in create_server.diff_targets: build_target = create_server.diff_targets[target_phid] else: build_target = BuildTarget(target_phid) for build_name in builds: properties = [{ 'name': 'env.ABC_BUILD_NAME', 'value': build_name, }] build_id = tc.trigger_build( 'BitcoinABC_BitcoinAbcStaging', staging_ref, target_phid, properties)['id'] build_target.queue_build(build_id, build_name) create_server.diff_targets[target_phid] = build_target return SUCCESS, 200 @app.route("/land", methods=['POST']) def land(): data = get_json_request_data(request) revision = data['revision'] if not revision: return FAILURE, 400 # conduitToken is expected to be encrypted and will be decrypted by the # land bot. 
conduitToken = data['conduitToken'] if not conduitToken: return FAILURE, 400 committerName = data['committerName'] if not committerName: return FAILURE, 400 committerEmail = data['committerEmail'] if not committerEmail: return FAILURE, 400 properties = [{ 'name': 'env.ABC_REVISION', 'value': revision, }, { 'name': 'env.ABC_CONDUIT_TOKEN', 'value': conduitToken, }, { 'name': 'env.ABC_COMMITTER_NAME', 'value': committerName, }, { 'name': 'env.ABC_COMMITTER_EMAIL', 'value': committerEmail, }] output = tc.trigger_build( LANDBOT_BUILD_TYPE, 'master', UNRESOLVED, properties) if output: return output return FAILURE, 500 @app.route("/triggerCI", methods=['POST']) @verify_hmac('HMAC_TRIGGER_CI') def triggerCI(): data = get_json_request_data(request) app.logger.info("Received /triggerCI POST:\n{}".format(data)) # We expect a webhook with an edited object and a list of transactions. if "object" not in data or "transactions" not in data: return FAILURE, 400 data_object = data["object"] if "type" not in data_object or "phid" not in data_object: return FAILURE, 400 # We are searching for a specially crafted comment to trigger a CI # build. Only comments on revision should be parsed. Also if there is # no transaction, or the object is not what we expect, just return. if data_object["type"] != "DREV" or not data.get('transactions', []): return SUCCESS, 200 revision_PHID = data_object["phid"] # Retrieve the transactions details from their PHIDs transaction_PHIDs = [t["phid"] for t in data["transactions"] if "phid" in t] transactions = phab.transaction.search( objectIdentifier=revision_PHID, constraints={ "phids": transaction_PHIDs, } ).data # Extract the comments from the transaction list. Each transaction # contains a list of comments. 
comments = [c for t in transactions if t["type"] == "comment" for c in t["comments"]] # If there is no comment we have no interest in this webhook if not comments: return SUCCESS, 200 # In order to prevent DoS, only ABC members are allowed to call the bot # to trigger builds. # FIXME implement a better anti DoS filter. abc_members = phab.get_project_members(BITCOIN_ABC_PROJECT_PHID) comments = [c for c in comments if c["authorPHID"] in abc_members] # Check if there is a specially crafted comment that should trigger a # CI build. Format: # @bot [build_name ...] def get_builds_from_comment(comment): tokens = comment.split() if not tokens or tokens.pop(0) != "@bot": return [] # Escape to prevent shell injection and remove duplicates return [quote(token) for token in list(set(tokens))] builds = [] for comment in comments: builds += get_builds_from_comment(comment["content"]["raw"]) # If there is no build provided, this request is not what we are after, # just return. # TODO return an help command to explain how to use the bot. if not builds: return SUCCESS, 200 staging_ref = phab.get_latest_diff_staging_ref(revision_PHID) # Trigger the requested builds for build in builds: # FIXME the hardcoded infos here should be gathered from somewhere tc.trigger_build( "BitcoinABC_BitcoinAbcStaging", staging_ref, properties=[{ 'name': 'env.ABC_BUILD_NAME', 'value': build, }] ) # If we reach this point, trigger_build did not raise an exception. return SUCCESS, 200 @app.route("/status", methods=['POST']) def buildStatus(): out = get_json_request_data(request) app.logger.info("Received /status POST with data: {}".format(out)) return handle_build_result(**out) def send_harbormaster_build_link_if_required( build_link, build_target, build_name): # Check if a link to the build server has already been sent by searching # the artifacts. 
artifacts = phab.harbormaster.artifact.search( constraints={ "buildTargetPHIDs": [build_target.phid], } ).data build_link_artifact_key = build_name + "-" + build_target.phid # Search for the appropriated artifact key in the artifact list. # If found then the link is already set and there is no need to send it # again. for artifact in artifacts: if "artifactKey" in (artifact["fields"] or { }) and artifact["fields"]["artifactKey"] == build_link_artifact_key: return phab.harbormaster.createartifact( buildTargetPHID=build_target.phid, artifactKey=build_link_artifact_key, artifactType="uri", artifactData={ "uri": build_link, "name": build_name, "ui.external": True, } ) def update_build_status_panel(updated_build_type_id): # Perform a XOR like operation on the dicts: # - if a key from target is missing from reference, remove it from # target. # - if a key from reference is missing from target, add it to target. # The default value is the output of the default_value_callback(key). # - if the key exist in both, don't update it. # where target is a dictionary updated in place and reference a list of # keys. # Returns a tuple of (removed keys, added keys) def dict_xor(target, reference_keys, default_value_callback): removed_keys = [ k for k in list( target.keys()) if k not in reference_keys] for key in removed_keys: del target[key] added_keys = [ k for k in reference_keys if k not in list( target.keys())] for key in added_keys: target[key] = default_value_callback(key) return (removed_keys, added_keys) panel_content = '' def add_line_to_panel(line): return panel_content + line + '\n' def add_project_header_to_panel(project_name): return panel_content + ( '| {} | Status |\n' '|---|---|\n' ).format(project_name) # secp256k1 is a special case because it has a Travis build from a - # Github repo that is not managed by the build-configurations.json config. + # Github repo that is not managed by the build-configurations.yml config. # The status always need to be fetched. 
sepc256k1_default_branch = 'master' sepc256k1_travis_status = travis.get_branch_status( 27431354, sepc256k1_default_branch) travis_badge_url = BADGE_TRAVIS_BASE.get_badge_url( message=sepc256k1_travis_status.value, color='brightgreen' if sepc256k1_travis_status == BuildStatus.Success else 'red', ) # Add secp256k1 Travis to the status panel. panel_content = add_project_header_to_panel( 'secp256k1 ([[https://github.com/Bitcoin-ABC/secp256k1 | Github]])') panel_content = add_line_to_panel( '| [[{} | {}]] | {{image uri="{}", alt="{}"}} |'.format( 'https://travis-ci.org/github/bitcoin-abc/secp256k1', sepc256k1_default_branch, travis_badge_url, sepc256k1_travis_status.value, ) ) panel_content = add_line_to_panel('') # Download the build configuration from master - config = json.loads(phab.get_file_content_from_master( - "contrib/teamcity/build-configurations.json")) + config = yaml.safe_load(phab.get_file_content_from_master( + "contrib/teamcity/build-configurations.yml")) # Get a list of the builds to display config_build_names = [ k for k, v in config.get( 'builds', {}).items() if not v.get( 'hideOnStatusPanel', False)] # If there is no build to display, don't update the panel with teamcity # data if not config_build_names: phab.set_text_panel_content(17, panel_content) return # Associate with Teamcity data from the BitcoinABC project associated_builds = tc.associate_configuration_names( "BitcoinABC", config_build_names) # Get a unique list of the project ids project_ids = [build["teamcity_project_id"] for build in list(associated_builds.values())] project_ids = list(set(project_ids)) # Construct a dictionary from teamcity project id to project name. # This will make it easier to navigate from one to the other. 
project_name_map = {} for build in list(associated_builds.values()): project_name_map[build['teamcity_project_id'] ] = build['teamcity_project_name'] # If the list of project names has changed (project was added, deleted # or renamed, update the panel data accordingly. (removed_projects, added_projects) = dict_xor( create_server.panel_data, project_ids, lambda key: {}) # Log the project changes if any if (len(removed_projects) + len(added_projects)) > 0: app.logger.info( "Teamcity project list has changed.\nRemoved: {}\nAdded: {}".format( removed_projects, added_projects, ) ) # Construct a dictionary from teamcity build type id to build name. # This will make it easier to navigate from one to the other. build_name_map = {} for build in list(associated_builds.values()): build_name_map[build['teamcity_build_type_id'] ] = build['teamcity_build_name'] def get_build_status_and_message(build_type_id): latest_build = tc.getLatestCompletedBuild(build_type_id) # If no build completed, set the status to unknown if not latest_build: build_status = BuildStatus.Unknown build_status_message = build_status.value else: build_info = tc.getBuildInfo(latest_build['id']) build_status = BuildStatus(build_info['status'].lower()) build_status_message = build_info.get( 'statusText', build_status.value) if build_status == BuildStatus.Failure else build_status.value return (build_status, build_status_message) # Update the builds for project_id, project_builds in sorted( create_server.panel_data.items()): build_type_ids = [build['teamcity_build_type_id'] for build in list( associated_builds.values()) if build['teamcity_project_id'] == project_id] # If the list of builds has changed (build was added, deleted, # renamed, added to or removed from the items to display), update # the panel data accordingly. 
(removed_builds, added_builds) = dict_xor( project_builds, build_type_ids, # We need to fetch the satus for each added build lambda key: get_build_status_and_message(key) ) # Log the build changes if any if (len(removed_builds) + len(added_builds)) > 0: app.logger.info( "Teamcity build list has changed for project {}.\nRemoved: {}\nAdded: {}".format( project_id, removed_builds, added_builds, ) ) # From here only the build that triggered the call need to be # updated. Note that it might already be up-to-date if the build was # part of the added ones. # Other data remains valid from the previous calls. if updated_build_type_id not in added_builds and updated_build_type_id in list( project_builds.keys()): project_builds[updated_build_type_id] = get_build_status_and_message( updated_build_type_id) # Create a table view of the project: # # | | Status | # |------------------------------------| # | Link to latest build | Status icon | # | Link to latest build | Status icon | # | Link to latest build | Status icon | panel_content = add_project_header_to_panel( project_name_map[project_id]) for build_type_id, (build_status, build_status_message) in project_builds.items(): url = tc.build_url( "viewLog.html", { "buildTypeId": build_type_id, "buildId": "lastFinished" } ) # TODO insert Teamcity build failure message badge_url = BADGE_TC_BASE.get_badge_url( message=build_status_message, color=( 'lightgrey' if build_status == BuildStatus.Unknown else 'brightgreen' if build_status == BuildStatus.Success else 'red' ), ) panel_content = add_line_to_panel( '| [[{} | {}]] | {{image uri="{}", alt="{}"}} |'.format( url, build_name_map[build_type_id], badge_url, build_status_message, ) ) panel_content = add_line_to_panel('') phab.set_text_panel_content(17, panel_content) def update_coverage_panel(coverage_summary): # FIXME don't harcode the permalink but pull it from some configuration coverage_permalink = "**[[ 
https://build.bitcoinabc.org/viewLog.html?buildId=lastSuccessful&buildTypeId=BitcoinABC_Master_BitcoinAbcMasterCoverage&tab=report__Root_Code_Coverage&guest=1 | HTML coverage report ]]**\n\n" coverage_report = "| Granularity | % hit | # hit | # total |\n" coverage_report += "| ----------- | ----- | ----- | ------- |\n" # Convert the textual coverage summary report to a pretty remarkup # content. # # The content loooks like this: # # Summary coverage rate: # lines......: 82.3% (91410 of 111040 lines) # functions..: 74.1% (6686 of 9020 functions) # branches...: 45.0% (188886 of 420030 branches) pattern = r"^\s*(?P\w+)\.+: (?P[0-9.]+%) \((?P\d+) of (?P\d+) .+$" for line in coverage_summary.splitlines(): match = re.match(pattern, line.strip()) if not match: continue coverage_report += "| {} | {} | {} | {} |\n".format( match.group('granularity').capitalize(), match.group('percent'), match.group('hit'), match.group('total'), ) # Update the coverage panel with our remarkup content phab.set_text_panel_content(21, coverage_permalink + coverage_report) def handle_build_result(buildName, buildTypeId, buildResult, buildURL, branch, buildId, buildTargetPHID, **kwargs): # Do not report build status for ignored builds if phab.getIgnoreKeyword() in buildTypeId: return SUCCESS, 200 # Build didn't have a branch if branch == "UNRESOLVED": return FAILURE, 400 guest_url = tc.convert_to_guest_url(buildURL) status = BuildStatus(buildResult) isMaster = (branch == "refs/heads/master" or branch == "") # If a build completed on master, update the build status panel. if isMaster and ( status == BuildStatus.Success or status == BuildStatus.Failure): update_build_status_panel(buildTypeId) # If the build succeeded and there is a coverage report in the build # artifacts, update the coverage panel. 
if status == BuildStatus.Success: try: coverage_summary = tc.get_coverage_summary(buildId) except TeamcityRequestException: # The coverage report is not guaranteed to exist, in this # case teamcity will raise an exception. coverage_summary = None if coverage_summary: update_coverage_panel(coverage_summary) # If we have a buildTargetPHID, report the status. build_target = create_server.diff_targets.get(buildTargetPHID, None) if build_target is not None: phab.update_build_target_status(build_target, buildId, status) send_harbormaster_build_link_if_required( guest_url, build_target, build_target.builds[buildId].name ) if build_target.is_finished(): del create_server.diff_targets[buildTargetPHID] revisionPHID = phab.get_revisionPHID(branch) buildInfo = tc.getBuildInfo(buildId) isAutomated = tc.checkBuildIsAutomated(buildInfo) if isAutomated and status == BuildStatus.Failure: # Check if this failure is infrastructure-related buildFailures = tc.getBuildProblems(buildId) if len(buildFailures) > 0: # If any infrastructure-related failures occurred, ping the right # people with a useful message. buildLog = tc.getBuildLog(buildId) if re.search(re.escape("[Infrastructure Error]"), buildLog): slackbot.postMessage('infra', " There was an infrastructure failure in '{}': {}".format( buildName, guest_url)) # Normally a comment of the build status is provided on diffs. Since no useful debug # info can be provided that is actionable to the user, we # give them a short message. if not isMaster: phab.commentOnRevision(revisionPHID, "(IMPORTANT) The build failed due to an unexpected infrastructure outage. " "The administrators have been notified to investigate. 
Sorry for the inconvenience.", buildName) return SUCCESS, 200 # Handle land bot builds if buildTypeId == LANDBOT_BUILD_TYPE: if status == BuildStatus.Success or status == BuildStatus.Failure: properties = buildInfo.getProperties() revisionId = properties.get( 'env.ABC_REVISION', 'MISSING REVISION ID') author = phab.getRevisionAuthor(revisionId) landBotMessage = "Failed to land your change:" if status == BuildStatus.Success: landBotMessage = "Successfully landed your change:" landBotMessage = "{}\nRevision: https://reviews.bitcoinabc.org/{}\nBuild: {}".format( landBotMessage, revisionId, guest_url) # Send a direct message to the revision author authorSlackUsername = phab.getAuthorSlackUsername(author) authorSlackUser = slackbot.getUserByName(authorSlackUsername) slackChannel = authorSlackUser['id'] if authorSlackUser else None if not slackChannel: slackChannel = 'dev' landBotMessage = "{}: Please set your slack username in your Phabricator profile so the landbot can send you direct messages: {}\n{}".format( authorSlackUsername, "https://reviews.bitcoinabc.org/people/editprofile/{}".format( author['id']), landBotMessage) slackbot.postMessage(slackChannel, landBotMessage) return SUCCESS, 200 # Open/update an associated task and message developers with relevant information if this build was # the latest completed, automated, master build of its type. 
if isMaster and isAutomated: latestBuild = tc.getLatestCompletedBuild(buildTypeId) latestBuildId = None if latestBuild: latestBuildId = latestBuild.get('id', None) logLatestBuildId = 'None' if latestBuildId is None else latestBuildId app.logger.info( "Latest completed build ID of type '{}': {}".format( buildTypeId, logLatestBuildId)) if latestBuildId == buildId: if status == BuildStatus.Success: updatedTask = phab.updateBrokenBuildTaskStatus( buildName, 'resolved') if updatedTask: # Only message once all of master is green (buildFailures, testFailures) = tc.getLatestBuildAndTestFailures( 'BitcoinABC') if len(buildFailures) == 0 and len(testFailures) == 0: if not create_server.master_is_green: create_server.master_is_green = True slackbot.postMessage( 'dev', "Master is green again.") if status == BuildStatus.Failure: shortBuildUrl = tc.build_url( "viewLog.html", { "buildId": buildId, } ) # Get number of build failures over the last few days numRecentFailures = tc.getNumAggregateFailuresSince( buildTypeId, 60 * 60 * 24 * 5) if numRecentFailures >= 3: # This build is likely flaky and the channel has # already been notified. return SUCCESS, 200 if numRecentFailures >= 2: # This build may be flaky. Ping the channel with a # less-noisy message. slackbot.postMessage('dev', "Build '{}' appears to be flaky: {}".format(buildName, shortBuildUrl)) return SUCCESS, 200 # Only mark master as red for failures that are not flaky create_server.master_is_green = False commitHashes = buildInfo.getCommits() newTask = phab.createBrokenBuildTask( buildName, guest_url, branch, commitHashes, 'rABC') if newTask: # TODO: Add 'Reviewed by: ' line # Do not point to a specific change for scheduled builds, as this generates noise for # the author of a change that is unlikely to contain # the root cause of the issue. 
if tc.checkBuildIsScheduled(buildInfo): slackbot.postMessage('dev', "Scheduled build '{}' appears to be broken: {}\n" "Task: https://reviews.bitcoinabc.org/T{}".format( buildName, shortBuildUrl, newTask['id'])) else: commitMap = phab.getRevisionPHIDsFromCommits( commitHashes) decoratedCommits = phab.decorateCommitMap( commitMap) decoratedCommit = decoratedCommits[commitHashes[0]] changeLink = decoratedCommit['link'] authorSlackUsername = decoratedCommit['authorSlackUsername'] authorSlackId = slackbot.formatMentionByName( authorSlackUsername) if not authorSlackId: authorSlackId = authorSlackUsername slackbot.postMessage('dev', "Committer: {}\n" "Build '{}' appears to be broken: {}\n" "Task: https://reviews.bitcoinabc.org/T{}\n" "Diff: {}".format( authorSlackId, buildName, shortBuildUrl, newTask['id'], changeLink)) if not isMaster: revisionId, authorPHID = phab.get_revision_info(revisionPHID) properties = buildInfo.getProperties() buildConfig = properties.get('env.ABC_BUILD_NAME', None) if not buildConfig: buildConfig = properties.get('env.OS_NAME', 'UNKNOWN') buildName = "{} ({})".format(buildName, buildConfig) if status == BuildStatus.Failure: msg = phab.createBuildStatusMessage( status, guest_url, buildName) # Append a snippet of the log if there are build failures, # attempting to focus on the first build failure. buildFailures = tc.getBuildProblems(buildId) if len(buildFailures) > 0: buildLog = tc.getBuildLog(buildId) logLines = [] for line in buildLog.splitlines(keepends=True): logLines.append(line) # If this line contains any of the build failures, # append the last N log lines to the message. foundBuildFailure = None for failure in buildFailures: if re.search(re.escape(failure['details']), line): foundBuildFailure = failure break if foundBuildFailure: # Recreate the build status message to point to the full build log # to make the build failure more accessible. 
msg = phab.createBuildStatusMessage( status, foundBuildFailure['logUrl'], buildName) # We add two newlines to break away from the # (IMPORTANT) callout. msg += "\n\nSnippet of first build failure:\n```lines=16,COUNTEREXAMPLE\n{}```".format( ''.join(logLines[-60:])) break # Append detailed links when there are test failures. testFailures = tc.getFailedTests(buildId) if len(testFailures) > 0: # We add two newlines to break away from the (IMPORTANT) # callout. msg += '\n\nEach failure log is accessible here:' for failure in testFailures: msg += "\n[[{} | {}]]".format(failure['logUrl'], failure['name']) phab.commentOnRevision(revisionPHID, msg, buildName) return SUCCESS, 200 return app diff --git a/contrib/teamcity/build-configurations.json b/contrib/teamcity/build-configurations.json deleted file mode 100644 index ad2c234e0..000000000 --- a/contrib/teamcity/build-configurations.json +++ /dev/null @@ -1,324 +0,0 @@ -{ - "templates": { - "common_unix_artifacts": { - "artifacts": { - "CMakeCache.txt": "CMakeCache.txt", - "src/bitcoind": "bin/bitcoind", - "src/bitcoin-*": "bin", - "src/qt/bitcoin-qt": "bin/bitcoin-qt", - "src/bench/bitcoin-bench": "bin/bitcoin-bench", - "src/seeder/bitcoin-seeder": "bin/bitcoin-seeder", - "src/libbitcoinconsensus.*": "lib", - "src/test/test_bitcoin": "bin/test_bitcoin", - "src/qt/test/test_bitcoin-qt": "bin/test_bitcoin-qt", - "src/seeder/test/test-seeder": "bin/test-seeder", - "test/tmp/test_runner_*": "functional" - } - }, - "gitian_builds": { - "script": "gitian.sh", - "timeout": 7200, - "artifacts": { - "gitian-results": "" - } - } - }, - "builds": { - "build-asan": { - "Werror": true, - "clang": true, - "cmake_flags": [ - "-DCMAKE_CXX_FLAGS=-DARENA_DEBUG", - "-DCMAKE_BUILD_TYPE=Debug", - "-DCRYPTO_USE_ASM=OFF", - "-DENABLE_SANITIZERS=address" - ], - "targets": [ - ["all", "install", "install-secp256k1"], - ["check", "check-secp256k1", "check-functional"] - ], - "timeout": 1800, - "env": { - "ASAN_OPTIONS": "log_path=stdout", - 
"LSAN_OPTIONS": "log_path=stdout" - } - }, - "build-bench": { - "Werror": true, - "cmake_flags": [ - "-DSECP256K1_ENABLE_MODULE_ECDH=ON", - "-DSECP256K1_ENABLE_MODULE_MULTISET=ON" - ], - "targets": [ - ["all", "install-bitcoin-bench", "install-secp256k1-bench"], - ["bench-bitcoin"], - ["bench-secp256k1"] - ], - "timeout": 1200 - }, - "build-clang-10": { - "runOnDiff": true, - "Werror": true, - "cmake_flags": [ - "-DCMAKE_C_COMPILER=clang-10", - "-DCMAKE_CXX_COMPILER=clang++-10" - ], - "targets": [ - ["all", "install", "install-secp256k1"], - ["check", "check-secp256k1"] - ], - "timeout": 1200 - }, - "build-clang-tidy": { - "runOnDiff": true, - "script": "builds/build-clang-tidy.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 600, - "artifacts": { - "clang-tidy-warnings.txt": "clang-tidy-warnings.txt" - } - }, - "build-coverage": { - "script": "builds/build-coverage.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 4800, - "artifacts": { - "coverage.tar.gz": "coverage.tar.gz" - } - }, - "build-diff": { - "runOnDiff": true, - "Werror": true, - "targets": [ - ["all", "install", "install-secp256k1"], - ["check-all", "check-upgrade-activated"] - ], - "timeout": 1200 - }, - "build-docs": { - "script": "builds/build-docs.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 600, - "artifacts": { - "doc/*": "doc" - } - }, - "build-ibd": { - "script": "builds/build-ibd.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 14400, - "artifacts": { - "ibd/debug.log": "log/debug.log" - } - }, - "build-ibd-no-assumevalid-checkpoint": { - "script": "builds/build-ibd-no-assumevalid-checkpoint.sh", - "template": [ - "common_unix_artifacts" - ], - "timeout": 21600, - "artifacts": { - "ibd/debug.log": "log/debug.log" - } - }, - "build-linux32": { - "script": "builds/build-linux32.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 3600 - }, - "build-linux64": { - "script": "builds/build-linux64.sh", - 
"templates": [ - "common_unix_artifacts" - ], - "timeout": 3600 - }, - "build-linux-aarch64": { - "script": "builds/build-linux-aarch64.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 3600, - "env": { - "QEMU_LD_PREFIX": "/usr/aarch64-linux-gnu" - } - }, - "build-linux-arm": { - "script": "builds/build-linux-arm.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 3600, - "env": { - "QEMU_LD_PREFIX": "/usr/arm-linux-gnueabihf" - } - }, - "build-make-generator": { - "script": "builds/build-make-generator.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 1200 - }, - "build-master": { - "Werror": true, - "targets": [ - ["all", "install", "install-secp256k1"], - ["check-extended", "check-upgrade-activated-extended"] - ], - "timeout": 4800 - }, - "build-osx": { - "script": "builds/build-osx.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 3600, - "artifacts": { - "src/qt/BitcoinABC-Qt.app": "bin", - "Bitcoin-ABC.dmg": "Bitcoin-ABC.dmg" - } - }, - "build-secp256k1": { - "script": "builds/build-secp256k1.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 900, - "artifacts": { - "src/secp256k1/libsecp256k1*": "lib" - } - }, - "build-tsan": { - "Werror": true, - "clang": true, - "cmake_flags": [ - "-DENABLE_SANITIZERS=thread" - ], - "targets": [ - ["all", "install"], - ["check", "check-functional"] - ], - "timeout": 1800, - "env": { - "TSAN_OPTIONS": "log_path=stdout" - } - }, - "build-ubsan": { - "Werror": true, - "clang": true, - "cmake_flags": [ - "-DCMAKE_BUILD_TYPE=Debug", - "-DENABLE_SANITIZERS=undefined" - ], - "targets": [ - ["all", "install", "install-secp256k1"], - ["check", "check-secp256k1", "check-functional"] - ], - "timeout": 1800, - "env": { - "UBSAN_OPTIONS": "log_path=stdout" - } - }, - "build-win64": { - "script": "builds/build-win64.sh", - "timeout": 3600, - "artifacts": { - "CMakeCache.txt": "CMakeCache.txt", - "src/bitcoind.exe": "bin/bitcoind.exe", - "src/bitcoin-*.exe": 
"bin", - "src/qt/bitcoin-qt.exe": "bin/bitcoin-qt.exe", - "src/bench/bitcoin-bench.exe": "bin/bitcoin-bench.exe", - "src/libbitcoinconsensus*": "lib", - "src/test/test_bitcoin.exe": "bin/test_bitcoin.exe", - "src/qt/test/test_bitcoin-qt.exe": "bin/test_bitcoin-qt.exe", - "src/qt/test/test_bitcoin-qt.log": "log/qt/test_bitcoin-qt.log", - "bitcoin-abc-*-x86_64-w64-mingw32.exe": "bitcoin-abc-x86_64-w64-mingw32.exe" - } - }, - "build-without-cli": { - "Werror": true, - "cmake_flags": [ - "-DBUILD_BITCOIN_CLI=OFF" - ], - "targets": [ - ["all", "install"], - ["check-functional"] - ], - "timeout": 1200 - }, - "build-without-wallet": { - "runOnDiff": true, - "Werror": true, - "cmake_flags": [ - "-DBUILD_BITCOIN_WALLET=OFF" - ], - "targets": [ - ["all", "install"], - ["check", "check-functional"] - ], - "timeout": 1200 - }, - "build-without-zmq": { - "Werror": true, - "cmake_flags": [ - "-DBUILD_BITCOIN_ZMQ=OFF" - ], - "targets": [ - ["all", "install"], - ["check", "check-functional"] - ], - "timeout": 1800 - }, - "check-seeds": { - "script": "builds/check-seeds.sh", - "templates": [ - "common_unix_artifacts" - ], - "timeout": 600 - }, - "check-source-control-tools": { - "cmake_flags": [ - "-DBUILD_SOURCE_CONTROL_TOOLS=ON" - ], - "targets": [ - ["check-source-control-tools"] - ], - "timeout": 600 - }, - "gitian-linux": { - "templates": [ - "gitian_builds" - ], - "env": { - "OS_NAME": "linux" - } - }, - "gitian-osx": { - "templates": [ - "gitian_builds" - ], - "env": { - "OS_NAME": "osx" - } - }, - "gitian-win": { - "templates": [ - "gitian_builds" - ], - "env": { - "OS_NAME": "win" - } - } - } -} diff --git a/contrib/teamcity/build-configurations.py b/contrib/teamcity/build-configurations.py index 7747e0375..1381ab375 100755 --- a/contrib/teamcity/build-configurations.py +++ b/contrib/teamcity/build-configurations.py @@ -1,474 +1,474 @@ #!/usr/bin/env python3 # Copyright (c) 2020 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # 
file COPYING or http://www.opensource.org/licenses/mit-license.php. import argparse import asyncio from deepmerge import always_merger -import json import os from pathlib import Path, PurePath import shutil import subprocess import sys from teamcity import is_running_under_teamcity from teamcity.messages import TeamcityServiceMessages +import yaml # Default timeout value in seconds. Should be overridden by the # configuration file. DEFAULT_TIMEOUT = 1 * 60 * 60 if sys.version_info < (3, 6): raise SystemError("This script requires python >= 3.6") class BuildConfiguration: def __init__(self, script_root, config_file, build_name=None): self.script_root = script_root self.config_file = config_file self.name = None self.config = {} self.cmake_flags = [] self.build_steps = [] self.build_directory = None self.junit_reports_dir = None self.test_logs_dir = None self.project_root = PurePath( subprocess.run( ['git', 'rev-parse', '--show-toplevel'], capture_output=True, check=True, encoding='utf-8', text=True, ).stdout.strip() ) if not config_file.is_file(): raise FileNotFoundError( "The configuration file does not exist {}".format( str(config_file) ) ) if build_name is not None: self.load(build_name) def load(self, build_name): self.name = build_name # Read the configuration with open(self.config_file, encoding="utf-8") as f: - config = json.load(f) + config = yaml.safe_load(f) # The configuration root should contain a mandatory element "builds", and # it should not be empty. if not config.get("builds", None): raise AssertionError( "Invalid configuration file {}: the \"builds\" element is missing or empty".format( str(self.config_file) ) ) # Check the target build has an entry in the configuration file build = config["builds"].get(self.name, None) if not build: raise AssertionError( "{} is not a valid build identifier. 
Valid identifiers are {}".format( self.name, list(config.keys()) ) ) # Get a list of the templates, if any templates = config.get("templates", {}) # If the build references some templates, merge all the configurations. # The merge is applied in the same order as the templates are declared # in the template list. template_config = {} template_names = build.get("templates", []) for template_name in template_names: # Raise an error if the template does not exist if template_name not in templates: raise AssertionError( "Build {} configuration inherits from template {}, but the template does not exist.".format( self.name, template_name ) ) always_merger.merge(template_config, templates.get(template_name)) self.config = always_merger.merge(template_config, build) # Create the build directory as needed self.build_directory = Path( self.project_root.joinpath( 'abc-ci-builds', self.name)) self.build_directory.mkdir(exist_ok=True, parents=True) # Define the junit and logs directories self.junit_reports_dir = self.build_directory.joinpath("test/junit") self.test_logs_dir = self.build_directory.joinpath("test/log") def create_build_steps(self, artifact_dir): # There are 2 possibilities to define the build steps: # - By defining a script to run. If such a script is set and is # executable, it is the only thing to run. # - By defining the configuration options and a list of target groups to # run. The configuration step should be run once then all the targets # groups. Each target group can contain 1 or more targets which # should be run parallel. script = self.config.get("script", None) if script: script_path = Path(self.script_root.joinpath(script)) if not script_path.is_file() or not os.access(script_path, os.X_OK): raise FileNotFoundError( "The script file {} does not exist or does not have execution permission".format( str(script_path) ) ) self.build_steps = [ { "bin": str(script_path), "args": [], } ] return # Get the cmake configuration definitions. 
self.cmake_flags = self.config.get("cmake_flags", []) self.cmake_flags.append("-DCMAKE_INSTALL_PREFIX={}".format( str(artifact_dir))) # Get the targets to build. If none is provided then raise an error. targets = self.config.get("targets", None) if not targets: raise AssertionError( "No build target has been provided for build {} and no script is defined, aborting".format( self.name ) ) # Some more flags for the build_cmake.sh script build_cmake_flags = [] if self.config.get("Werror", False): build_cmake_flags.append("--Werror") if self.config.get("junit", True): build_cmake_flags.append("--junit") if self.config.get("clang", False): build_cmake_flags.append("--clang") # Some generator flags generator_flags = [] if self.config.get("fail_fast", False): generator_flags.append("-k0") # First call should use the build_cmake.sh script in order to run # cmake. self.build_steps = [ { "bin": str(self.project_root.joinpath("contrib/devtools/build_cmake.sh")), "args": targets[0] + build_cmake_flags, } ] for target_group in targets[1:]: self.build_steps.append( { # TODO: let the generator be configurable "bin": "ninja", "args": generator_flags + target_group, } ) def get(self, key, default): return self.config.get(key, default) class UserBuild(): def __init__(self, configuration): self.configuration = configuration project_root = self.configuration.project_root build_directory = self.configuration.build_directory self.artifact_dir = build_directory.joinpath("artifacts") # We will provide the required environment variables self.environment_variables = { "BUILD_DIR": str(build_directory), "CMAKE_PLATFORMS_DIR": project_root.joinpath("cmake", "platforms"), "THREADS": str(os.cpu_count() or 1), "TOPLEVEL": str(project_root), } # Build 2 log files: # - the full log will contain all unfiltered content # - the clean log will contain the same filtered content as what is # printed to stdout. This filter is done in print_line_to_logs(). 
self.logs = {} self.logs["clean_log"] = build_directory.joinpath( "build.clean.log") if self.logs["clean_log"].is_file(): self.logs["clean_log"].unlink() self.logs["full_log"] = build_directory.joinpath("build.full.log") if self.logs["full_log"].is_file(): self.logs["full_log"].unlink() def copy_artifacts(self, artifacts): # Make sure the artifact directory always exists. It is created before # the build is run (to let the build install things to it) but since we # have no control on what is being executed, it might very well be # deleted by the build as well. This can happen when the artifacts # are located in the build directory and the build calls git clean. self.artifact_dir.mkdir(exist_ok=True) # Find and copy artifacts. # The source is relative to the build tree, the destination relative to # the artifact directory. # The artifact directory is located in the build directory tree, results # from it needs to be excluded from the glob matches to prevent infinite # recursion. for pattern, dest in artifacts.items(): matches = [m for m in sorted(self.configuration.build_directory.glob( pattern)) if self.artifact_dir not in m.parents and self.artifact_dir != m] dest = self.artifact_dir.joinpath(dest) # Pattern did not match if not matches: continue # If there is a single file, destination is the new file path if len(matches) == 1 and matches[0].is_file(): # Create the parent directories as needed dest.parent.mkdir(parents=True, exist_ok=True) shutil.copy2(matches[0], dest) continue # If there are multiple files or a single directory, destination is a # directory. dest.mkdir(parents=True, exist_ok=True) for match in matches: if match.is_file(): shutil.copy2(match, dest) else: # FIXME after python => 3.8 is enforced, avoid the # try/except block and use dirs_exist_ok=True instead. 
try: shutil.copytree(match, dest.joinpath(match.name)) except FileExistsError: pass def print_line_to_logs(self, line): # Always print to the full log with open(self.logs["full_log"], 'a', encoding='utf-8') as log: log.write(line) # Discard the set -x bash output for stdout and the clean log if not line.startswith("+"): with open(self.logs["clean_log"], 'a', encoding='utf-8') as log: log.write(line) print(line.rstrip()) async def process_stdout(self, stdout): while True: try: line = await stdout.readline() line = line.decode('utf-8') if not line: break self.print_line_to_logs(line) except ValueError: self.print_line_to_logs( "--- Line discarded due to StreamReader overflow ---" ) continue def run_process(self, bin, args=[]): return asyncio.create_subprocess_exec( *([bin] + args), # Buffer limit is 64KB by default, but we need a larger buffer: limit=1024 * 256, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT, cwd=self.configuration.build_directory, env={ **os.environ, **self.environment_variables, **self.configuration.get("env", {}), "CMAKE_FLAGS": " ".join(self.configuration.cmake_flags), }, ) async def run_build(self, bin, args=[]): proc = await self.run_process(bin, args) await asyncio.wait([ self.process_stdout(proc.stdout) ]) return await proc.wait() async def wait_for_build(self, timeout, args=[]): message = "Build {} completed successfully".format( self.configuration.name ) try: for step in self.configuration.build_steps: return_code = await asyncio.wait_for(self.run_build(step["bin"], step["args"]), timeout) if return_code != 0: message = "Build {} failed with exit code {}".format( self.configuration.name, return_code ) return except asyncio.TimeoutError: message = "Build {} timed out after {:.1f}s".format( self.configuration.name, round(timeout, 1) ) # The process is killed, set return code to 128 + 9 (SIGKILL) = 137 return_code = 137 finally: self.print_line_to_logs(message) build_directory = self.configuration.build_directory # Always add 
the build logs to the root of the artifacts artifacts = { **self.configuration.get("artifacts", {}), str(self.logs["full_log"].relative_to(build_directory)): "", str(self.logs["clean_log"].relative_to(build_directory)): "", str(self.configuration.junit_reports_dir.relative_to(build_directory)): "", str(self.configuration.test_logs_dir.relative_to(build_directory)): "", } self.copy_artifacts(artifacts) return (return_code, message) def run(self, args=[]): if self.artifact_dir.is_dir(): shutil.rmtree(self.artifact_dir) self.artifact_dir.mkdir(exist_ok=True) self.configuration.create_build_steps(self.artifact_dir) return_code, message = asyncio.run( self.wait_for_build( self.configuration.get( "timeout", DEFAULT_TIMEOUT)) ) return (return_code, message) class TeamcityBuild(UserBuild): def __init__(self, configuration): super().__init__(configuration) # This accounts for the volume mapping from the container. # Our local /results is mapped to some relative ./results on the host, # so we use /results/artifacts to copy our files but results/artifacts as # an artifact path for teamcity. # TODO abstract out the volume mapping self.artifact_dir = Path("/results/artifacts") self.teamcity_messages = TeamcityServiceMessages() def copy_artifacts(self, artifacts): super().copy_artifacts(artifacts) # Start loading the junit reports. junit_reports_pattern = "{}/junit/*.xml".format( str(self.artifact_dir.relative_to("/")) ) self.teamcity_messages.importData("junit", junit_reports_pattern) # Instruct teamcity to upload our artifact directory artifact_path_pattern = "+:{}=>artifacts.tar.gz".format( str(self.artifact_dir.relative_to("/")) ) self.teamcity_messages.publishArtifacts(artifact_path_pattern) def run(self, args=[]): # Let the user know what build is being run. # This makes it easier to retrieve the info from the logs. 
self.teamcity_messages.customMessage( "Starting build {}".format(self.configuration.name), status="NORMAL" ) return_code, message = super().run() # Since we are aborting the build, make sure to flush everything first os.sync() if return_code != 0: # Add a build problem to the report self.teamcity_messages.buildProblem( message, # Let Teamcity calculate an ID from our message None ) # Change the final build message self.teamcity_messages.buildStatus( # Don't change the status, let Teamcity set it to failure None, message ) else: # Change the final build message but keep the original one as well self.teamcity_messages.buildStatus( # Don't change the status, let Teamcity set it to success None, "{} ({{build.status.text}})".format(message) ) return (return_code, message) def main(): script_dir = PurePath(os.path.realpath(__file__)).parent # By default search for a configuration file in the same directory as this # script. default_config_path = Path( - script_dir.joinpath("build-configurations.json") + script_dir.joinpath("build-configurations.yml") ) parser = argparse.ArgumentParser(description="Run a CI build") parser.add_argument( "build", help="The name of the build to run" ) parser.add_argument( "--config", "-c", help="Path to the builds configuration file (default to {})".format( str(default_config_path) ) ) args, unknown_args = parser.parse_known_args() # Check the configuration file exists config_path = Path(args.config) if args.config else default_config_path build_configuration = BuildConfiguration( script_dir, config_path, args.build) if is_running_under_teamcity(): build = TeamcityBuild(build_configuration) else: build = UserBuild(build_configuration) sys.exit(build.run(unknown_args)[0]) if __name__ == '__main__': main() diff --git a/contrib/teamcity/build-configurations.yml b/contrib/teamcity/build-configurations.yml new file mode 100644 index 000000000..85e720535 --- /dev/null +++ b/contrib/teamcity/build-configurations.yml @@ -0,0 +1,305 @@ +--- +# 
Templates can be referenced in builds to avoid duplication +templates: + common_unix_artifacts: + artifacts: + CMakeCache.txt: CMakeCache.txt + src/bitcoind: bin/bitcoind + src/bitcoin-*: bin + src/qt/bitcoin-qt: bin/bitcoin-qt + src/bench/bitcoin-bench: bin/bitcoin-bench + src/seeder/bitcoin-seeder: bin/bitcoin-seeder + src/libbitcoinconsensus.*: lib + src/test/test_bitcoin: bin/test_bitcoin + src/qt/test/test_bitcoin-qt: bin/test_bitcoin-qt + src/seeder/test/test-seeder: bin/test-seeder + test/tmp/test_runner_*: functional + + gitian_builds: + script: gitian.sh + timeout: 7200 + artifacts: + gitian-results: '' + +# The build descriptions. +# If a script is defined, then this will be the only step to run. +# Otherwise a list of targets can be specified, grouped by parallel runs. +# Example: +# targets: +# - - build11 +# - build12 +# - - build21 +# - build22 +# Will run: +# ninja build11 build12 +# ninja build21 build22 +builds: + build-asan: + Werror: true + clang: true + cmake_flags: + - '-DCMAKE_CXX_FLAGS=-DARENA_DEBUG' + - '-DCMAKE_BUILD_TYPE=Debug' + - '-DCRYPTO_USE_ASM=OFF' + - '-DENABLE_SANITIZERS=address' + targets: + - - all + - install + - install-secp256k1 + - - check + - check-secp256k1 + - check-functional + timeout: 1800 + env: + ASAN_OPTIONS: log_path=stdout + LSAN_OPTIONS: log_path=stdout + + build-bench: + Werror: true + cmake_flags: + - '-DSECP256K1_ENABLE_MODULE_ECDH=ON' + - '-DSECP256K1_ENABLE_MODULE_MULTISET=ON' + targets: + - - all + - install-bitcoin-bench + - install-secp256k1-bench + - - bench-bitcoin + - - bench-secp256k1 + timeout: 1200 + + build-clang-10: + runOnDiff: true + Werror: true + cmake_flags: + - '-DCMAKE_C_COMPILER=clang-10' + - '-DCMAKE_CXX_COMPILER=clang++-10' + targets: + - - all + - install + - install-secp256k1 + - - check + - check-secp256k1 + timeout: 1200 + + build-clang-tidy: + runOnDiff: true + script: builds/build-clang-tidy.sh + templates: + - common_unix_artifacts + timeout: 600 + artifacts: + 
clang-tidy-warnings.txt: clang-tidy-warnings.txt + + build-coverage: + script: builds/build-coverage.sh + templates: + - common_unix_artifacts + timeout: 4800 + artifacts: + coverage.tar.gz: coverage.tar.gz + + build-diff: + runOnDiff: true + Werror: true + targets: + - - all + - install + - install-secp256k1 + - - check-all + - check-upgrade-activated + timeout: 1200 + + build-docs: + script: builds/build-docs.sh + templates: + - common_unix_artifacts + timeout: 600 + artifacts: + doc/*: doc + + build-ibd: + script: builds/build-ibd.sh + templates: + - common_unix_artifacts + timeout: 14400 + artifacts: + ibd/debug.log: log/debug.log + + build-ibd-no-assumevalid-checkpoint: + script: builds/build-ibd-no-assumevalid-checkpoint.sh + templates: + - common_unix_artifacts + timeout: 21600 + artifacts: + ibd/debug.log: log/debug.log + + build-linux32: + script: builds/build-linux32.sh + templates: + - common_unix_artifacts + timeout: 3600 + + build-linux64: + script: builds/build-linux64.sh + templates: + - common_unix_artifacts + timeout: 3600 + + build-linux-aarch64: + script: builds/build-linux-aarch64.sh + templates: + - common_unix_artifacts + timeout: 3600 + env: + QEMU_LD_PREFIX: /usr/aarch64-linux-gnu + + build-linux-arm: + script: builds/build-linux-arm.sh + templates: + - common_unix_artifacts + timeout: 3600 + env: + QEMU_LD_PREFIX: /usr/arm-linux-gnueabihf + + build-make-generator: + script: builds/build-make-generator.sh + templates: + - common_unix_artifacts + timeout: 1200 + + build-master: + Werror: true + targets: + - - all + - install + - install-secp256k1 + - - check-extended + - check-upgrade-activated-extended + timeout: 4800 + + build-osx: + script: builds/build-osx.sh + templates: + - common_unix_artifacts + timeout: 3600 + artifacts: + src/qt/BitcoinABC-Qt.app: bin + Bitcoin-ABC.dmg: Bitcoin-ABC.dmg + + build-secp256k1: + script: builds/build-secp256k1.sh + templates: + - common_unix_artifacts + timeout: 900 + artifacts: + 
src/secp256k1/libsecp256k1*: lib + + build-tsan: + Werror: true + clang: true + cmake_flags: + - '-DENABLE_SANITIZERS=thread' + targets: + - - all + - install + - - check + - check-functional + timeout: 1800 + env: + TSAN_OPTIONS: log_path=stdout + + build-ubsan: + Werror: true + clang: true + cmake_flags: + - '-DCMAKE_BUILD_TYPE=Debug' + - '-DENABLE_SANITIZERS=undefined' + targets: + - - all + - install + - install-secp256k1 + - - check + - check-secp256k1 + - check-functional + timeout: 1800 + env: + UBSAN_OPTIONS: log_path=stdout + + build-win64: + script: builds/build-win64.sh + timeout: 3600 + artifacts: + CMakeCache.txt: CMakeCache.txt + src/bitcoind.exe: bin/bitcoind.exe + src/bitcoin-*.exe: bin + src/qt/bitcoin-qt.exe: bin/bitcoin-qt.exe + src/bench/bitcoin-bench.exe: bin/bitcoin-bench.exe + src/libbitcoinconsensus*: lib + src/test/test_bitcoin.exe: bin/test_bitcoin.exe + src/qt/test/test_bitcoin-qt.exe: bin/test_bitcoin-qt.exe + src/qt/test/test_bitcoin-qt.log: log/qt/test_bitcoin-qt.log + bitcoin-abc-*-x86_64-w64-mingw32.exe: bitcoin-abc-x86_64-w64-mingw32.exe + + build-without-cli: + Werror: true + cmake_flags: + - '-DBUILD_BITCOIN_CLI=OFF' + targets: + - - all + - install + - - check-functional + timeout: 1200 + + build-without-wallet: + runOnDiff: true + Werror: true + cmake_flags: + - '-DBUILD_BITCOIN_WALLET=OFF' + targets: + - - all + - install + - - check + - check-functional + timeout: 1200 + + build-without-zmq: + Werror: true + cmake_flags: + - '-DBUILD_BITCOIN_ZMQ=OFF' + targets: + - - all + - install + - - check + - check-functional + timeout: 1800 + + check-seeds: + script: builds/check-seeds.sh + templates: + - common_unix_artifacts + timeout: 600 + + check-source-control-tools: + cmake_flags: + - '-DBUILD_SOURCE_CONTROL_TOOLS=ON' + targets: + - - check-source-control-tools + timeout: 600 + + gitian-linux: + templates: + - gitian_builds + env: + OS_NAME: linux + + gitian-osx: + templates: + - gitian_builds + env: + OS_NAME: osx + + 
gitian-win: + templates: + - gitian_builds + env: + OS_NAME: win diff --git a/contrib/teamcity/setup-debian-buster.sh b/contrib/teamcity/setup-debian-buster.sh index 033b92415..793a1732d 100755 --- a/contrib/teamcity/setup-debian-buster.sh +++ b/contrib/teamcity/setup-debian-buster.sh @@ -1,132 +1,133 @@ #!/usr/bin/env bash export LC_ALL=C.UTF-8 set -euxo pipefail dpkg --add-architecture i386 PACKAGES=( arcanist automake autotools-dev binutils bsdmainutils build-essential ccache cppcheck curl flake8 g++-aarch64-linux-gnu g++-arm-linux-gnueabihf git golang g++-mingw-w64 gnupg gperf imagemagick jq lcov less lib32stdc++-8-dev libboost-all-dev libbz2-dev libc6-dev:i386 libcap-dev libdb++-dev libdb-dev libevent-dev libjemalloc-dev libminiupnpc-dev libprotobuf-dev libqrencode-dev libqt5core5a libqt5dbus5 libqt5gui5 librsvg2-bin libssl-dev libtiff-tools libtinfo5 libtool libzmq3-dev make ninja-build nsis php-codesniffer pkg-config protobuf-compiler python3 python3-autopep8 python3-pip python3-setuptools + python3-yaml python3-zmq qemu-user-static qttools5-dev qttools5-dev-tools software-properties-common tar wget yamllint wine ) function join_by() { local IFS="$1" shift echo "$*" } apt-get update DEBIAN_FRONTEND=noninteractive apt-get install -y $(join_by ' ' "${PACKAGES[@]}") BACKPORTS=( cmake shellcheck ) echo "deb http://deb.debian.org/debian buster-backports main" | tee -a /etc/apt/sources.list apt-get update DEBIAN_FRONTEND=noninteractive apt-get -t buster-backports install -y $(join_by ' ' "${BACKPORTS[@]}") TEAMCITY_DIR=$(dirname "$0") # FIXME this should no longer be needed starting with Teamcity 2020.1, which # supports Java 11. 
"${TEAMCITY_DIR}/install_openjdk8.sh" # Install llvm-8 and clang-10 apt-key add "${TEAMCITY_DIR}"/llvm.pub add-apt-repository "deb https://apt.llvm.org/buster/ llvm-toolchain-buster-8 main" add-apt-repository "deb https://apt.llvm.org/buster/ llvm-toolchain-buster-10 main" apt-get update LLVM_PACKAGES=( clang-8 clang-10 clang-format-8 clang-tidy-8 clang-tools-8 ) DEBIAN_FRONTEND=noninteractive apt-get install -y $(join_by ' ' "${LLVM_PACKAGES[@]}") update-alternatives --install /usr/bin/clang clang "$(command -v clang-8)" 100 update-alternatives --install /usr/bin/clang++ clang++ "$(command -v clang++-8)" 100 update-alternatives --install /usr/bin/llvm-symbolizer llvm-symbolizer "$(command -v llvm-symbolizer-8)" 100 # Use a lower priority to keep clang-8 the default update-alternatives --install /usr/bin/clang clang "$(command -v clang-10)" 50 update-alternatives --install /usr/bin/clang++ clang++ "$(command -v clang++-10)" 50 update-alternatives --install /usr/bin/llvm-symbolizer llvm-symbolizer "$(command -v llvm-symbolizer-10)" 50 # Use the mingw posix variant update-alternatives --set x86_64-w64-mingw32-g++ $(command -v x86_64-w64-mingw32-g++-posix) update-alternatives --set x86_64-w64-mingw32-gcc $(command -v x86_64-w64-mingw32-gcc-posix) # Python library for interacting with teamcity pip3 install teamcity-messages # Python library for merging nested structures pip3 install deepmerge # Install pandoc. The version from buster is outdated, so get a more recent one # from github. wget https://github.com/jgm/pandoc/releases/download/2.10.1/pandoc-2.10.1-1-amd64.deb echo "4515d6fe2bf8b82765d8dfa1e1b63ccb0ff3332d60389f948672eaa37932e936 pandoc-2.10.1-1-amd64.deb" | sha256sum -c DEBIAN_FRONTEND=noninteractive dpkg -i pandoc-2.10.1-1-amd64.deb