diff --git a/contrib/buildbot/phabricator_wrapper.py b/contrib/buildbot/phabricator_wrapper.py index cc0882a3d..a76faa57b 100755 --- a/contrib/buildbot/phabricator_wrapper.py +++ b/contrib/buildbot/phabricator_wrapper.py @@ -1,526 +1,528 @@ #!/usr/bin/env python3 # # Copyright (c) 2019-2020 The Bitcoin ABC developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from base64 import b64decode import os from build import BuildStatus from constants import Deployment from python_phabricator.phabricator import Phabricator BUILDNAME_IGNORE_KEYWORD = "__BOTIGNORE" BITCOIN_ABC_PROJECT_PHID = "PHID-PROJ-z2wrchs62yicqvwlgc5r" BITCOIN_ABC_REPO = "PHID-REPO-usc6skybawqxzw64opvi" class PhabWrapper(Phabricator): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.logger = None self.deployment = Deployment( os.getenv( "DEPLOYMENT_ENV", Deployment.DEV)) self.phid = None self.file_cache = {} def get_current_user_phid(self): # The current user PHID is not expected to change, so cache the result if self.phid is None: self.phid = self.user.whoami()["phid"] return self.phid def getIgnoreKeyword(self): return BUILDNAME_IGNORE_KEYWORD def setLogger(self, logger): self.logger = logger def get_revisionPHID(self, branch): branch_info = branch.split("/") # Either refs/tags/* or refs/heads/* if len(branch_info) < 3: return False if branch_info[-3] != "phabricator" and branch_info[-2] != "diff": return False diffId = int(branch_info[-1]) diffSearchArgs = { "constraints": { "ids": [diffId], }, } data_list = self.differential.diff.search(**diffSearchArgs).data assert len(data_list) == 1, "differential.diff.search({}): Expected 1 diff, got: {}".format( diffSearchArgs, data_list) diffdata = data_list[0] revisionPHID = diffdata['fields']['revisionPHID'] return revisionPHID def get_revision_info(self, revisionPHID): revisionSearchArgs = { "constraints": { "phids": [revisionPHID], }, } data_list = self.differential.revision.search( **revisionSearchArgs).data assert len(data_list) == 1, "differential.revision.search({}): Expected 1 revision, got: {}".format( revisionSearchArgs, data_list) diffdata = data_list[0] revisionId = diffdata['id'] authorPHID = diffdata['fields']['authorPHID'] return revisionId, authorPHID def getRevisionAuthor(self, revisionId): # Fetch revision revisionSearchArgs = { "constraints": { "ids": [int(revisionId.strip('D'))], }, } rev_list = self.differential.revision.search(**revisionSearchArgs).data assert len(rev_list) == 1, "differential.revision.search({}): Expected 1 revision, got: {}".format( revisionSearchArgs, rev_list) # Fetch revision author userSearchArgs = { "constraints": { "phids": [rev_list[0]['fields']['authorPHID']], }, } author_list = self.user.search(**userSearchArgs).data assert len(author_list) == 1, "user.search({}): Expected 1 user, got: {}".format( userSearchArgs, author_list) return author_list[0] def getRevisionPHIDsFromCommits(self, commitHashes): # Fetch commit objects using commit hashes commitSearchArgs = { "constraints": { "repositories": [BITCOIN_ABC_REPO], "identifiers": commitHashes, }, } commits = self.diffusion.commit.search(**commitSearchArgs).data expectedNumCommits = len(commitHashes) assert len(commits) == expectedNumCommits, "diffusion.commit.search({}): Expected {} commits, got: {}".format( expectedNumCommits, commitSearchArgs, commits) # Attempt to get revisions for all commit objects (not all commits have # revisions) commitPHIDs = [commit['phid'] 
for commit in commits] edgeSearchArgs = { "types": ["commit.revision"], "sourcePHIDs": commitPHIDs, } revisionEdges = self.edge.search(**edgeSearchArgs).data m = {} for commit in commits: commitHash = commit['fields']['identifier'] m[commitHash] = None for edge in (revisionEdges or {}): if commit['phid'] == edge['sourcePHID']: m[commitHash] = edge['destinationPHID'] break return m def getAuthorSlackUsername(self, author): # If slack-username is non-empty, use it. Otherwise default to the # author's Phabricator username authorSlackUsername = "" if 'fields' in author: if 'custom.abc:slack-username' in author['fields']: authorSlackUsername = author['fields']['custom.abc:slack-username'] if not authorSlackUsername and 'username' in author['fields']: authorSlackUsername = author['fields']['username'] return authorSlackUsername def decorateCommitMap(self, commitMapIn): # For commits that have revisions, get their revision IDs (Dxxxx) revisionPHIDs = [rev for rev in commitMapIn.values() if rev] revisionSearchArgs = { "constraints": { "phids": revisionPHIDs, }, } revs = self.differential.revision.search(**revisionSearchArgs).data assert len(revs) == len(revisionPHIDs), "differential.revision.search({}): Expected {} revisions, got: {}".format( revisionSearchArgs, len(revisionPHIDs), revs) # Decorate revision authors authorPHIDs = [rev['fields']['authorPHID'] for rev in revs] authors = self.user.search(constraints={ 'phids': authorPHIDs, }).data # Build map of decorated data commitMap = {} for commitHash, revisionPHID in commitMapIn.items(): decoratedCommit = { # TODO: Find a better way to get the commit link from # Phabricator 'link': "https://reviews.bitcoinabc.org/rABC{}".format(commitHash), } if revisionPHID: for rev in revs: if revisionPHID == rev['phid']: decoratedCommit['revision'] = rev decoratedCommit['link'] = "https://reviews.bitcoinabc.org/D{}".format( rev['id']) break for author in authors: if author['phid'] == rev['fields']['authorPHID']: decoratedCommit['author'] = author decoratedCommit['authorSlackUsername'] = self.getAuthorSlackUsername( author) break commitMap[commitHash] = decoratedCommit return commitMap def createBuildStatusMessage(self, build_status, buildUrl, buildName): if not buildUrl: buildUrl = "#" msg = "" if build_status == BuildStatus.Failure: msg = "(IMPORTANT) Build [[{} | {}]] failed.".format( buildUrl, buildName) elif build_status == BuildStatus.Success: msg = "Build [[{} | {}]] passed.".format(buildUrl, buildName) else: msg = "Build [[{} | {}]] started.".format(buildUrl, buildName) return msg def commentOnRevision(self, revisionID, msg, buildName=""): self.logger.info( "Comment on objectIdentifier '{}': '{}'".format( revisionID, msg)) # Production build-bot posts live comments for builds that are not staging-specific # FIXME: Currently all builds kick off a completion hook in Teamcity. The bot doesn't # have a better mechanism for knowing if that build is high value (worth commenting on) # or low value (staging builds, etc.) to end users. Until there is a more streamlined # way to define Teamcity webhooks to exclude these builds, we are going to look at the # buildName for an ignore keyword. 
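        # Illustrative effect of the keyword check below (hypothetical build names):
        # a buildName like "build-diff__BOTIGNORE" is skipped, while "build-diff"
        # on a PROD deployment gets a live comment on the revision.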
if self.deployment == Deployment.PROD and BUILDNAME_IGNORE_KEYWORD not in buildName: self.differential.revision.edit(transactions=[ {"type": "comment", "value": msg} ], objectIdentifier=revisionID) else: self.logger.info( "Comment creation skipped due to deployment environment: '{}'".format( self.deployment)) def getBrokenBuildTaskTitle(self, buildName): return "Build {} is broken.".format(buildName) def getBrokenBuildTask(self, taskTitle): response = self.maniphest.search(constraints={ "query": "\"{}\"".format(taskTitle), "statuses": ["open"], }) self.logger.info( "Response from 'maniphest.search' querying for title '{}': {}".format( taskTitle, response)) return response def updateBrokenBuildTaskStatus(self, buildName, status): title = self.getBrokenBuildTaskTitle(buildName) task_data = self.getBrokenBuildTask(title).data if len(task_data) == 0: self.logger.info( "No existing broken build task with title '{}'. Skipping.".format(title)) return None self.logger.info( "Updating broken build task T{} status to '{}'.".format( task_data[0]['id'], status)) updatedTask = self.maniphest.edit(transactions=[{ 'type': 'status', 'value': status, }], objectIdentifier=task_data[0]['phid']) self.logger.info( "Response from 'maniphest.edit' updating status to '{}': {}".format( status, updatedTask)) return updatedTask['object'] def createBrokenBuildTask( self, buildName, buildURL, branch, gitCommitsIn, repoCallsign): gitCommits = [repoCallsign + commit for commit in gitCommitsIn] title = self.getBrokenBuildTaskTitle(buildName) res = self.getBrokenBuildTask(title) if len(res.data) != 0: self.logger.info( "Open broken build task (T{}) exists. Skipping creation of a new one.".format( res.data[0]['id'])) return None task_body = "[[ {} | {} ]] is broken on branch '{}'\n\nAssociated commits:\n{}".format( buildURL, buildName, branch, "\n".join(gitCommits)) newTask = self.maniphest.edit(transactions=[ {"type": "title", "value": title}, {"type": "priority", "value": "unbreak"}, {"type": "description", "value": task_body} ]) self.logger.info( "Response from 'maniphest.edit' creating new task with title '{}': {}".format( title, newTask)) return newTask['object'] def updateRevisionSummary(self, revisionId, summary): self.logger.info( "Updated summary on objectIdentifier '{}': '{}'".format( revisionId, summary)) if self.deployment == Deployment.PROD: self.differential.revision.edit(transactions=[{ "type": "summary", "value": summary, }], objectIdentifier=revisionId) else: self.logger.info( "Update of revision summary skipped due to deployment environment: '{}'".format( self.deployment)) def get_project_members(self, project_PHID): """ Return a list of user PHIDs corresponding to the ABC members """ project_data = self.project.search( constraints={ "phids": [project_PHID], }, attachments={ "members": True, }, ).data if len(project_data) != 1: self.logger.info( "Found {} project(s) while searching for Bitcoin ABC: '{}'".format( len(project_data), project_data)) return [] return [m["phid"] for m in project_data[0]["attachments"]["members"]["members"]] def get_latest_diff_staging_ref(self, revision_PHID): diff_data = self.differential.diff.search( constraints={ "revisionPHIDs": [revision_PHID], }, order="newest" ).data if not diff_data: self.logger.info( "Failed to retrieve diff data from revision {}".format(revision_PHID)) return "" # FIXME don't hardcode the staging branch mechanism return "refs/tags/phabricator/diff/{}".format(diff_data[0]["id"]) def get_user_roles(self, user_PHID): """ Return a list of the user roles 
for the target user PHID """ user_data = self.user.search( constraints={ "phids": [user_PHID], } ).data if not user_data: return [] if len(user_data) != 1: self.logger.info( "Found {} user(s) while searching for {}: '{}'".format( len(user_data), user_PHID, user_data ) ) return [] return user_data[0]["fields"]["roles"] def get_latest_master_commit_hash(self): commit_data = self.diffusion.commit.search( constraints={ "repositories": [BITCOIN_ABC_REPO], }, limit=1, ).data if not commit_data: raise AssertionError( "Failed to get last master commit for repository {}".format(BITCOIN_ABC_REPO)) return commit_data[0]["fields"]["identifier"] def get_revision_changed_files(self, revision_id): return list(self.differential.getcommitpaths( revision_id=int(revision_id))) def get_file_content_from_master(self, path): latest_commit_hash = self.get_latest_master_commit_hash() # Level 1 cache: check if the file is cached from the same commit if path in self.file_cache and self.file_cache[path]['commit'] == latest_commit_hash: return self.file_cache[path]['content'] def file_not_found(data): raise AssertionError("File {} not found in master commit {} for repository {}:\n{}".format( path, latest_commit_hash, BITCOIN_ABC_REPO, data, )) # Browse the parent directory to extract the file hash. # Use a Diffusion browsequery on the parent directory because the # API will fail if a filename is given. If path is not set the root # directory is browsed. browse_data = self.diffusion.browsequery( path=os.path.dirname(path) or None, commit=latest_commit_hash, repository=BITCOIN_ABC_REPO, branch="master", ) # No file in the directory if not browse_data or 'paths' not in browse_data: file_not_found("diffusion.browsequery returned no path data") # Loop over the directory content to find our file file_hash = None for file in browse_data['paths']: if file['fullPath'] == path: file_hash = file['hash'] # File not found in it's directory if not file_hash: file_not_found(browse_data) # Level 2 cache: check if the file did not change since last download if path in self.file_cache and self.file_cache[path]['hash'] == file_hash: return self.file_cache[path]['content'] # Limit to 5s or 1MB download file_data = self.diffusion.filecontentquery( path=path, commit=latest_commit_hash, timeout=5, byteLimit=1024 * 1024, repository=BITCOIN_ABC_REPO, branch="master", ) if "filePHID" not in file_data: file_not_found(file_data) if file_data["tooSlow"] or file_data["tooHuge"]: raise AssertionError("File {} from commit {} for repository {} is oversized or took too long to download: {}".format( path, latest_commit_hash, BITCOIN_ABC_REPO, file_data, )) file_content = self.file.download( phid=file_data["filePHID"] ).response if not file_content: self.logger.info("File {} appear to be empty in commit {} for repository {}".format( file_data["filePHID"], latest_commit_hash, BITCOIN_ABC_REPO, )) self.file_cache.update({ path: { 'commit': latest_commit_hash, 'hash': file_hash, 'content': b64decode(file_content), } }) return self.file_cache[path]['content'] def set_text_panel_content(self, panel_id, content): response = self.dashboard.panel.edit( objectIdentifier=panel_id, transactions=[ { "type": "text", "value": content } ] ) if response.get("error", None): raise AssertionError( "Failed to edit panel {} with content:\n{}\n\nPhabricator responded:\n{}\n".format( panel_id, content, response ) ) - def update_build_target_status(self, build_target, build_id, status): + def update_build_target_status( + self, build_target, build_id=None, status=None): 
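        # build_id and status are now optional: when either is omitted the per-build
        # update is skipped and only the aggregate target status is reported to
        # Harbormaster. Per the new unit test, a target with no queued builds reports "pass".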
harbormaster_build_status_mapping = { BuildStatus.Queued: "work", BuildStatus.Running: "work", BuildStatus.Success: "pass", BuildStatus.Failure: "fail", } - build_target.update_build_status(build_id, status) + if build_id and status: + build_target.update_build_status(build_id, status) self.harbormaster.sendmessage( buildTargetPHID=build_target.phid, type=harbormaster_build_status_mapping[build_target.status()] ) def get_object_token(self, object_PHID): """ Return the current token set by the current user on target object """ tokens = self.token.given( authorPHIDs=[self.get_current_user_phid()], objectPHIDs=[object_PHID], tokenPHIDs=[], ) if not tokens: return "" # There should be no more than a single token from the same user for the # same object. if len(tokens) > 1: self.logger.info( "Found {} tokens for user {} on object {}: {}".format( len(tokens), self.get_current_user_phid(), object_PHID, tokens, ) ) return tokens[0]["tokenPHID"] def set_object_token(self, object_PHID, token_PHID=None): """ Award or rescind a token for the target object """ # If no token is given, rescind any previously awarded token if token_PHID is None: token_PHID = "" self.token.give( objectPHID=object_PHID, tokenPHID=token_PHID, ) diff --git a/contrib/buildbot/server.py b/contrib/buildbot/server.py index f1de9c13b..1e518e47c 100755 --- a/contrib/buildbot/server.py +++ b/contrib/buildbot/server.py @@ -1,1027 +1,1031 @@ #!/usr/bin/env python3 # # Copyright (c) 2019 The Bitcoin ABC developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from build import BuildStatus, BuildTarget from flask import abort, Flask, request from functools import wraps import hashlib import hmac import logging import os from phabricator_wrapper import ( BITCOIN_ABC_PROJECT_PHID, ) import re import shelve from shieldio import RasterBadge from shlex import quote from teamcity_wrapper import TeamcityRequestException import yaml # Some keywords used by TeamCity and tcWebHook SUCCESS = "success" FAILURE = "failure" RUNNING = "running" UNRESOLVED = "UNRESOLVED" LANDBOT_BUILD_TYPE = "BitcoinAbcLandBot" with open(os.path.join(os.path.dirname(__file__), 'resources', 'teamcity-icon-16.base64'), 'rb') as icon: BADGE_TC_BASE = RasterBadge( label='TC build', logo='data:image/png;base64,{}'.format( icon.read().strip().decode('utf-8')), ) BADGE_TRAVIS_BASE = RasterBadge( label='Travis build', logo='travis' ) def create_server(tc, phab, slackbot, travis, db_file_no_ext=None, jsonEncoder=None): # Create Flask app for use as decorator app = Flask("abcbot") app.logger.setLevel(logging.INFO) # json_encoder can be overridden for testing if jsonEncoder: app.json_encoder = jsonEncoder phab.setLogger(app.logger) tc.set_logger(app.logger) travis.set_logger(app.logger) # Optionally persistable database create_server.db = { # A collection of the known build targets 'diff_targets': {}, # Build status panel data 'panel_data': {}, # Whether the last status check of master was green 'master_is_green': True, } # If db_file_no_ext is not None, attempt to restore old database state if db_file_no_ext: app.logger.info( "Loading persisted state database with base name '{}'...".format(db_file_no_ext)) try: with shelve.open(db_file_no_ext, flag='r') as db: for key in create_server.db.keys(): if key in db: create_server.db[key] = db[key] app.logger.info( "Restored key '{}' from persisted state".format(key)) except BaseException: app.logger.info( "Persisted state database with base name 
'{}' could not be opened. A new one will be created when written to.".format(db_file_no_ext)) app.logger.info("Done") else: app.logger.warning( "No database file specified. State will not be persisted.") def persistDatabase(fn): @wraps(fn) def decorated_function(*args, **kwargs): fn_ret = fn(*args, **kwargs) # Persist database after executed decorated function if db_file_no_ext: with shelve.open(db_file_no_ext) as db: for key in create_server.db.keys(): db[key] = create_server.db[key] app.logger.debug("Persisted current state") else: app.logger.debug( "No database file specified. Persisting state is being skipped.") return fn_ret return decorated_function # This decorator specifies an HMAC secret environment variable to use for verifying # requests for the given route. Currently, we're using Phabricator to trigger these # routes as webhooks, and a separate HMAC secret is required for each hook. # Phabricator does not support basic auth for webhooks, so HMAC must be # used instead. def verify_hmac(secret_env): def decorator(fn): @wraps(fn) def decorated_function(*args, **kwargs): secret = os.getenv(secret_env, None) if not secret: app.logger.info( "Error: HMAC env variable '{}' does not exist".format(secret_env)) abort(401) data = request.get_data() digest = hmac.new( secret.encode(), data, hashlib.sha256).hexdigest() hmac_header = request.headers.get( 'X-Phabricator-Webhook-Signature') if not hmac_header: abort(401) if not hmac.compare_digest( digest.encode(), hmac_header.encode()): abort(401) return fn(*args, **kwargs) return decorated_function return decorator def get_json_request_data(request): if not request.is_json: abort(415, "Expected content-type is 'application/json'") return request.get_json() @app.route("/getCurrentUser", methods=['GET']) def getCurrentUser(): return request.authorization.username if request.authorization else None @app.route("/backportCheck", methods=['POST']) @verify_hmac('HMAC_BACKPORT_CHECK') def backportCheck(): data = get_json_request_data(request) revisionId = data['object']['phid'] revisionSearchArgs = { "constraints": { "phids": [revisionId], }, } data_list = phab.differential.revision.search( **revisionSearchArgs).data assert len(data_list) == 1, "differential.revision.search({}): Expected 1 revision, got: {}".format( revisionSearchArgs, data_list) summary = data_list[0]['fields']['summary'] foundPRs = 0 multilineCodeBlockDelimiters = 0 newSummary = "" for line in summary.splitlines(keepends=True): multilineCodeBlockDelimiters += len(re.findall(r'```', line)) # Only link PRs that do not reside in code blocks if multilineCodeBlockDelimiters % 2 == 0: def replacePRWithLink(baseUrl): def repl(match): nonlocal foundPRs # This check matches identation-based code blocks (2+ spaces) # and common cases for single-line code blocks (using # both single and triple backticks) if match.string.startswith(' ') or len( re.findall(r'`', match.string[:match.start()])) % 2 > 0: # String remains unchanged return match.group(0) else: # Backport PR is linked inline foundPRs += 1 PRNum = match.group(1) remaining = '' if len(match.groups()) >= 2: remaining = match.group(2) return '[[{}/{} | PR{}]]{}'.format( baseUrl, PRNum, PRNum, remaining) return repl line = re.sub( r'PR[ #]*(\d{3}\d+)', replacePRWithLink( 'https://github.com/bitcoin/bitcoin/pull'), line) # Be less aggressive about serving libsecp256k1 links. Check # for some reference to the name first. 
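            # Illustrative effect of the bitcoin/bitcoin substitution a few lines above
            # (hypothetical PR number): "Backport of core PR12345" becomes
            # "Backport of core [[https://github.com/bitcoin/bitcoin/pull/12345 | PR12345]]".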
if re.search('secp', line, re.IGNORECASE): line = re.sub(r'PR[ #]*(\d{2}\d?)([^\d]|$)', replacePRWithLink( 'https://github.com/bitcoin-core/secp256k1/pull'), line) newSummary += line if foundPRs > 0: phab.updateRevisionSummary(revisionId, newSummary) return SUCCESS, 200 @app.route("/build", methods=['POST']) @persistDatabase def build(): buildTypeId = request.args.get('buildTypeId', None) ref = request.args.get('ref', 'refs/heads/master') PHID = request.args.get('PHID', None) abcBuildName = request.args.get('abcBuildName', None) properties = None if abcBuildName: properties = [{ 'name': 'env.ABC_BUILD_NAME', 'value': abcBuildName, }] build_id = tc.trigger_build(buildTypeId, ref, PHID, properties)['id'] if PHID in create_server.db['diff_targets']: build_target = create_server.db['diff_targets'][PHID] else: build_target = BuildTarget(PHID) build_target.queue_build(build_id, abcBuildName) create_server.db['diff_targets'][PHID] = build_target return SUCCESS, 200 @app.route("/buildDiff", methods=['POST']) @persistDatabase def build_diff(): def get_mandatory_argument(argument): value = request.args.get(argument, None) if value is None: raise AssertionError( "Calling /buildDiff endpoint with missing mandatory argument {}:\n{}".format( argument, request.args ) ) return value staging_ref = get_mandatory_argument('stagingRef') target_phid = get_mandatory_argument('targetPHID') revision_id = get_mandatory_argument('revisionId') # Get the configuration from master config = yaml.safe_load(phab.get_file_content_from_master( "contrib/teamcity/build-configurations.yml")) # Get the list of changed files changedFiles = phab.get_revision_changed_files( revision_id=revision_id) # Get a list of the builds that should run on diffs builds = [] for build_name, v in config.get('builds', {}).items(): diffRegexes = v.get('runOnDiffRegex', None) if v.get('runOnDiff', False) or diffRegexes is not None: if diffRegexes: # If the regex matches at least one changed file, add this # build to the list. def regexesMatchAnyFile(regexes, files): for regex in regexes: for filename in files: if re.match(regex, filename): return True return False if regexesMatchAnyFile(diffRegexes, changedFiles): builds.append(build_name) else: builds.append(build_name) if target_phid in create_server.db['diff_targets']: build_target = create_server.db['diff_targets'][target_phid] else: build_target = BuildTarget(target_phid) for build_name in builds: properties = [{ 'name': 'env.ABC_BUILD_NAME', 'value': build_name, }, { 'name': 'env.ABC_REVISION', 'value': revision_id, }] build_id = tc.trigger_build( 'BitcoinABC_BitcoinAbcStaging', staging_ref, target_phid, properties)['id'] build_target.queue_build(build_id, build_name) - create_server.db['diff_targets'][target_phid] = build_target + if len(build_target.builds) > 0: + create_server.db['diff_targets'][target_phid] = build_target + else: + phab.update_build_target_status(build_target) + return SUCCESS, 200 @app.route("/land", methods=['POST']) def land(): data = get_json_request_data(request) revision = data['revision'] if not revision: return FAILURE, 400 # conduitToken is expected to be encrypted and will be decrypted by the # land bot. 
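        # Shape of the expected JSON body (illustrative values only):
        #   {"revision": "D1234", "conduitToken": "<encrypted>",
        #    "committerName": "A. Dev", "committerEmail": "adev@example.com"}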
conduitToken = data['conduitToken'] if not conduitToken: return FAILURE, 400 committerName = data['committerName'] if not committerName: return FAILURE, 400 committerEmail = data['committerEmail'] if not committerEmail: return FAILURE, 400 properties = [{ 'name': 'env.ABC_REVISION', 'value': revision, }, { 'name': 'env.ABC_CONDUIT_TOKEN', 'value': conduitToken, }, { 'name': 'env.ABC_COMMITTER_NAME', 'value': committerName, }, { 'name': 'env.ABC_COMMITTER_EMAIL', 'value': committerEmail, }] output = tc.trigger_build( LANDBOT_BUILD_TYPE, 'refs/heads/master', UNRESOLVED, properties) if output: return output return FAILURE, 500 @app.route("/triggerCI", methods=['POST']) @verify_hmac('HMAC_TRIGGER_CI') def triggerCI(): data = get_json_request_data(request) app.logger.info("Received /triggerCI POST:\n{}".format(data)) # We expect a webhook with an edited object and a list of transactions. if "object" not in data or "transactions" not in data: return FAILURE, 400 data_object = data["object"] if "type" not in data_object or "phid" not in data_object: return FAILURE, 400 # We are searching for a specially crafted comment to trigger a CI # build. Only comments on revision should be parsed. Also if there is # no transaction, or the object is not what we expect, just return. if data_object["type"] != "DREV" or not data.get('transactions', []): return SUCCESS, 200 revision_PHID = data_object["phid"] # Retrieve the transactions details from their PHIDs transaction_PHIDs = [t["phid"] for t in data["transactions"] if "phid" in t] transactions = phab.transaction.search( objectIdentifier=revision_PHID, constraints={ "phids": transaction_PHIDs, } ).data # Extract the comments from the transaction list. Each transaction # contains a list of comments. comments = [c for t in transactions if t["type"] == "comment" for c in t["comments"]] # If there is no comment we have no interest in this webhook if not comments: return SUCCESS, 200 # Check if there is a specially crafted comment that should trigger a # CI build. Format: # @bot [build_name ...] def get_builds_from_comment(comment): tokens = comment.split() if not tokens or tokens.pop(0) != "@bot": return [] # Escape to prevent shell injection and remove duplicates return [quote(token) for token in list(set(tokens))] def next_token(current_token): next_token = { "": "PHID-TOKN-coin-1", "PHID-TOKN-coin-1": "PHID-TOKN-coin-2", "PHID-TOKN-coin-2": "PHID-TOKN-coin-3", "PHID-TOKN-coin-3": "PHID-TOKN-coin-4", "PHID-TOKN-coin-4": "PHID-TOKN-like-1", "PHID-TOKN-like-1": "PHID-TOKN-heart-1", "PHID-TOKN-heart-1": "PHID-TOKN-like-1", } return next_token[current_token] if current_token in next_token else "PHID-TOKN-like-1" def is_user_allowed_to_trigger_builds(user_PHID, current_token): if current_token not in [ "", "PHID-TOKN-coin-1", "PHID-TOKN-coin-2", "PHID-TOKN-coin-3"]: return False return all(role in phab.get_user_roles(user_PHID) for role in [ "verified", "approved", "activated", ]) # Anti DoS filter # # Users are allowed to trigger builds if these conditions are met: # - It is an ABC member # OR # | - It is a "verified", "approved" and "activated" user # | AND # | - The maximum number of requests for this revision has not been # | reached yet. # # The number of requests is tracked by awarding a coin token to the # revision each time a build request is submitted (the number of build # in that request is not taken into account). # The awarded coin token is graduated as follow: # "Haypence" => "Piece of Eight" => "Dubloon" => "Mountain of Wealth". 
# If the "Mountain of Wealth" token is reached, the next request will be # refused by the bot. At this stage only ABC members will be able to # trigger new builds. abc_members = phab.get_project_members(BITCOIN_ABC_PROJECT_PHID) current_token = phab.get_object_token(revision_PHID) builds = [] for comment in comments: comment_builds = get_builds_from_comment(comment["content"]["raw"]) # Parsing the string is cheaper than phabricator requests, so check # if the comment is for us prior to filtering on the user. if not comment_builds: continue user = comment["authorPHID"] # ABC members can always trigger builds if user in abc_members: builds += comment_builds continue if is_user_allowed_to_trigger_builds(user, current_token): builds += comment_builds # If there is no build provided, this request is not what we are after, # just return. # TODO return an help command to explain how to use the bot. if not builds: return SUCCESS, 200 # Give (only positive) feedback to user. If several comments are part of # the same transaction then there is no way to differentiate what the # token is for; however this is very unlikely to happen in real life. phab.set_object_token(revision_PHID, next_token(current_token)) staging_ref = phab.get_latest_diff_staging_ref(revision_PHID) # Trigger the requested builds for build in builds: # FIXME the hardcoded infos here should be gathered from somewhere tc.trigger_build( "BitcoinABC_BitcoinAbcStaging", staging_ref, properties=[{ 'name': 'env.ABC_BUILD_NAME', 'value': build, }] ) # If we reach this point, trigger_build did not raise an exception. return SUCCESS, 200 @app.route("/status", methods=['POST']) @persistDatabase def buildStatus(): out = get_json_request_data(request) app.logger.info("Received /status POST with data: {}".format(out)) return handle_build_result(**out) def send_harbormaster_build_link_if_required( build_link, build_target, build_name): # Check if a link to the build server has already been sent by searching # the artifacts. artifacts = phab.harbormaster.artifact.search( constraints={ "buildTargetPHIDs": [build_target.phid], } ).data build_link_artifact_key = build_name + "-" + build_target.phid # Search for the appropriated artifact key in the artifact list. # If found then the link is already set and there is no need to send it # again. for artifact in artifacts: if "artifactKey" in (artifact["fields"] or { }) and artifact["fields"]["artifactKey"] == build_link_artifact_key: return phab.harbormaster.createartifact( buildTargetPHID=build_target.phid, artifactKey=build_link_artifact_key, artifactType="uri", artifactData={ "uri": build_link, "name": build_name, "ui.external": True, } ) def update_build_status_panel(updated_build_type_id): # Perform a XOR like operation on the dicts: # - if a key from target is missing from reference, remove it from # target. # - if a key from reference is missing from target, add it to target. # The default value is the output of the default_value_callback(key). # - if the key exist in both, don't update it. # where target is a dictionary updated in place and reference a list of # keys. 
# Returns a tuple of (removed keys, added keys) def dict_xor(target, reference_keys, default_value_callback): removed_keys = [ k for k in list( target.keys()) if k not in reference_keys] for key in removed_keys: del target[key] added_keys = [ k for k in reference_keys if k not in list( target.keys())] for key in added_keys: target[key] = default_value_callback(key) return (removed_keys, added_keys) panel_content = '' def add_line_to_panel(line): return panel_content + line + '\n' def add_project_header_to_panel(project_name): return panel_content + ( '| {} | Status |\n' '|---|---|\n' ).format(project_name) # secp256k1 is a special case because it has a Travis build from a # Github repo that is not managed by the build-configurations.yml config. # The status always need to be fetched. sepc256k1_default_branch = 'master' sepc256k1_travis_status = travis.get_branch_status( 27431354, sepc256k1_default_branch) travis_badge_url = BADGE_TRAVIS_BASE.get_badge_url( message=sepc256k1_travis_status.value, color='brightgreen' if sepc256k1_travis_status == BuildStatus.Success else 'red', ) # Add secp256k1 Travis to the status panel. panel_content = add_project_header_to_panel( 'secp256k1 ([[https://github.com/Bitcoin-ABC/secp256k1 | Github]])') panel_content = add_line_to_panel( '| [[{} | {}]] | {{image uri="{}", alt="{}"}} |'.format( 'https://travis-ci.org/github/bitcoin-abc/secp256k1', sepc256k1_default_branch, travis_badge_url, sepc256k1_travis_status.value, ) ) panel_content = add_line_to_panel('') # Download the build configuration from master config = yaml.safe_load(phab.get_file_content_from_master( "contrib/teamcity/build-configurations.yml")) # Get a list of the builds to display config_build_names = [ k for k, v in config.get( 'builds', {}).items() if not v.get( 'hideOnStatusPanel', False)] # If there is no build to display, don't update the panel with teamcity # data if not config_build_names: phab.set_text_panel_content(17, panel_content) return # Associate with Teamcity data from the BitcoinABC project associated_builds = tc.associate_configuration_names( "BitcoinABC", config_build_names) # Get a unique list of the project ids project_ids = [build["teamcity_project_id"] for build in list(associated_builds.values())] project_ids = list(set(project_ids)) # Construct a dictionary from teamcity project id to project name. # This will make it easier to navigate from one to the other. project_name_map = {} for build in list(associated_builds.values()): project_name_map[build['teamcity_project_id'] ] = build['teamcity_project_name'] # If the list of project names has changed (project was added, deleted # or renamed, update the panel data accordingly. (removed_projects, added_projects) = dict_xor( create_server.db['panel_data'], project_ids, lambda key: {}) # Log the project changes if any if (len(removed_projects) + len(added_projects)) > 0: app.logger.info( "Teamcity project list has changed.\nRemoved: {}\nAdded: {}".format( removed_projects, added_projects, ) ) # Construct a dictionary from teamcity build type id to build name. # This will make it easier to navigate from one to the other. 
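        # e.g. {"BitcoinABC_MasterLinux": "build-linux"} (hypothetical id/name pair)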
build_name_map = {} for build in list(associated_builds.values()): build_name_map[build['teamcity_build_type_id'] ] = build['teamcity_build_name'] def get_build_status_and_message(build_type_id): latest_build = tc.getLatestCompletedBuild(build_type_id) # If no build completed, set the status to unknown if not latest_build: build_status = BuildStatus.Unknown build_status_message = build_status.value else: build_info = tc.getBuildInfo(latest_build['id']) build_status = BuildStatus(build_info['status'].lower()) build_status_message = build_info.get( 'statusText', build_status.value) if build_status == BuildStatus.Failure else build_status.value return (build_status, build_status_message) # Update the builds for project_id, project_builds in sorted( create_server.db['panel_data'].items()): build_type_ids = [build['teamcity_build_type_id'] for build in list( associated_builds.values()) if build['teamcity_project_id'] == project_id] # If the list of builds has changed (build was added, deleted, # renamed, added to or removed from the items to display), update # the panel data accordingly. (removed_builds, added_builds) = dict_xor( project_builds, build_type_ids, # We need to fetch the status for each added build lambda key: get_build_status_and_message(key) ) # Log the build changes if any if (len(removed_builds) + len(added_builds)) > 0: app.logger.info( "Teamcity build list has changed for project {}.\nRemoved: {}\nAdded: {}".format( project_id, removed_builds, added_builds, ) ) # From here only the build that triggered the call needs to be # updated. Note that it might already be up-to-date if the build was # part of the added ones. # Other data remains valid from the previous calls. if updated_build_type_id not in added_builds and updated_build_type_id in list( project_builds.keys()): project_builds[updated_build_type_id] = get_build_status_and_message( updated_build_type_id) # Create a table view of the project: # # | | Status | # |------------------------------------| # | Link to latest build | Status icon | # | Link to latest build | Status icon | # | Link to latest build | Status icon | panel_content = add_project_header_to_panel( project_name_map[project_id]) for build_type_id, (build_status, build_status_message) in project_builds.items(): url = tc.build_url( "viewLog.html", { "buildTypeId": build_type_id, "buildId": "lastFinished" } ) # TODO insert Teamcity build failure message badge_url = BADGE_TC_BASE.get_badge_url( message=build_status_message, color=( 'lightgrey' if build_status == BuildStatus.Unknown else 'brightgreen' if build_status == BuildStatus.Success else 'red' ), ) panel_content = add_line_to_panel( '| [[{} | {}]] | {{image uri="{}", alt="{}"}} |'.format( url, build_name_map[build_type_id], badge_url, build_status_message, ) ) panel_content = add_line_to_panel('') phab.set_text_panel_content(17, panel_content) def update_coverage_panel(coverage_summary): # FIXME don't hardcode the permalink but pull it from some configuration coverage_permalink = "**[[ https://build.bitcoinabc.org/viewLog.html?buildId=lastSuccessful&buildTypeId=BitcoinABC_Master_BitcoinAbcMasterCoverage&tab=report__Root_Code_Coverage&guest=1 | HTML coverage report ]]**\n\n" coverage_report = "| Granularity | % hit | # hit | # total |\n" coverage_report += "| ----------- | ----- | ----- | ------- |\n" # Convert the textual coverage summary report to a pretty remarkup # content.
# # The content looks like this: # # Summary coverage rate: # lines......: 82.3% (91410 of 111040 lines) # functions..: 74.1% (6686 of 9020 functions) # branches...: 45.0% (188886 of 420030 branches) pattern = r"^\s*(?P<granularity>\w+)\.+: (?P<percent>[0-9.]+%) \((?P<hit>\d+) of (?P<total>\d+) .+$" for line in coverage_summary.splitlines(): match = re.match(pattern, line.strip()) if not match: continue coverage_report += "| {} | {} | {} | {} |\n".format( match.group('granularity').capitalize(), match.group('percent'), match.group('hit'), match.group('total'), ) # Update the coverage panel with our remarkup content phab.set_text_panel_content(21, coverage_permalink + coverage_report) def handle_build_result(buildName, buildTypeId, buildResult, buildURL, branch, buildId, buildTargetPHID, **kwargs): # Do not report build status for ignored builds if phab.getIgnoreKeyword() in buildTypeId: return SUCCESS, 200 # Build didn't have a branch if branch == "UNRESOLVED": return FAILURE, 400 guest_url = tc.convert_to_guest_url(buildURL) status = BuildStatus(buildResult) isMaster = (branch == "refs/heads/master" or branch == "") # If a build completed on master, update the build status panel. if isMaster and ( status == BuildStatus.Success or status == BuildStatus.Failure): update_build_status_panel(buildTypeId) # If the build succeeded and there is a coverage report in the build # artifacts, update the coverage panel. if status == BuildStatus.Success: try: coverage_summary = tc.get_coverage_summary(buildId) except TeamcityRequestException: # The coverage report is not guaranteed to exist, in this # case teamcity will raise an exception. coverage_summary = None if coverage_summary: update_coverage_panel(coverage_summary) # If we have a buildTargetPHID, report the status. build_target = create_server.db['diff_targets'].get( buildTargetPHID, None) if build_target is not None: phab.update_build_target_status(build_target, buildId, status) send_harbormaster_build_link_if_required( guest_url, build_target, build_target.builds[buildId].name ) if build_target.is_finished(): del create_server.db['diff_targets'][buildTargetPHID] revisionPHID = phab.get_revisionPHID(branch) buildInfo = tc.getBuildInfo(buildId) isAutomated = tc.checkBuildIsAutomated(buildInfo) if isAutomated and status == BuildStatus.Failure: # Check if this failure is infrastructure-related buildFailures = tc.getBuildProblems(buildId) if len(buildFailures) > 0: # If any infrastructure-related failures occurred, ping the right # people with a useful message. buildLog = tc.getBuildLog(buildId) if re.search(re.escape("[Infrastructure Error]"), buildLog): slackbot.postMessage('infra', " There was an infrastructure failure in '{}': {}".format( buildName, guest_url)) # Normally a comment with the build status is provided on diffs. Since no useful debug # info can be provided that is actionable to the user, we # give them a short message. if not isMaster: phab.commentOnRevision(revisionPHID, "(IMPORTANT) The build failed due to an unexpected infrastructure outage. " "The administrators have been notified to investigate. 
Sorry for the inconvenience.", buildName) return SUCCESS, 200 # Handle land bot builds if buildTypeId == LANDBOT_BUILD_TYPE: if status == BuildStatus.Success or status == BuildStatus.Failure: properties = buildInfo.getProperties() revisionId = properties.get( 'env.ABC_REVISION', 'MISSING REVISION ID') author = phab.getRevisionAuthor(revisionId) landBotMessage = "Failed to land your change:" if status == BuildStatus.Success: landBotMessage = "Successfully landed your change:" landBotMessage = "{}\nRevision: https://reviews.bitcoinabc.org/{}\nBuild: {}".format( landBotMessage, revisionId, guest_url) # Send a direct message to the revision author authorSlackUsername = phab.getAuthorSlackUsername(author) authorSlackUser = slackbot.getUserByName(authorSlackUsername) slackChannel = authorSlackUser['id'] if authorSlackUser else None if not slackChannel: slackChannel = 'dev' landBotMessage = "{}: Please set your slack username in your Phabricator profile so the landbot can send you direct messages: {}\n{}".format( authorSlackUsername, "https://reviews.bitcoinabc.org/people/editprofile/{}".format( author['id']), landBotMessage) slackbot.postMessage(slackChannel, landBotMessage) return SUCCESS, 200 # Open/update an associated task and message developers with relevant information if this build was # the latest completed, automated, master build of its type. if isMaster and isAutomated: latestBuild = tc.getLatestCompletedBuild(buildTypeId) latestBuildId = None if latestBuild: latestBuildId = latestBuild.get('id', None) logLatestBuildId = 'None' if latestBuildId is None else latestBuildId app.logger.info( "Latest completed build ID of type '{}': {}".format( buildTypeId, logLatestBuildId)) if latestBuildId == buildId: if status == BuildStatus.Success: updatedTask = phab.updateBrokenBuildTaskStatus( buildName, 'resolved') if updatedTask: # Only message once all of master is green (buildFailures, testFailures) = tc.getLatestBuildAndTestFailures( 'BitcoinABC') if len(buildFailures) == 0 and len(testFailures) == 0: if not create_server.db['master_is_green']: create_server.db['master_is_green'] = True slackbot.postMessage( 'dev', "Master is green again.") if status == BuildStatus.Failure: shortBuildUrl = tc.build_url( "viewLog.html", { "buildId": buildId, } ) # Explicitly ignored log lines. Use with care. buildLog = tc.getBuildLog(buildId) for line in tc.getIgnoreList(): # Skip empty lines and comments in the ignore file if not line or line.decode().strip()[0] == '#': continue # If any of the ignore patterns match any line in the # build log, ignore this failure if re.search(line.decode(), buildLog): return SUCCESS, 200 # Get number of build failures over the last few days numRecentFailures = tc.getNumAggregateFailuresSince( buildTypeId, 60 * 60 * 24 * 5) if numRecentFailures >= 3: # This build is likely flaky and the channel has # already been notified. return SUCCESS, 200 if numRecentFailures >= 2: # This build may be flaky. Ping the channel with a # less-noisy message. 
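                        # Failure-count ladder over the 5-day window queried above:
                        #   >= 3 recent failures -> assumed flaky, stay silent;
                        #   == 2                 -> post the short "flaky" notice below;
                        #   <= 1                 -> fall through and treat master as broken.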
slackbot.postMessage('dev', "Build '{}' appears to be flaky: {}".format(buildName, shortBuildUrl)) return SUCCESS, 200 # Only mark master as red for failures that are not flaky create_server.db['master_is_green'] = False commitHashes = buildInfo.getCommits() newTask = phab.createBrokenBuildTask( buildName, guest_url, branch, commitHashes, 'rABC') if newTask: # TODO: Add 'Reviewed by: ' line # Do not point to a specific change for scheduled builds, as this generates noise for # the author of a change that is unlikely to contain # the root cause of the issue. if tc.checkBuildIsScheduled(buildInfo): slackbot.postMessage('dev', "Scheduled build '{}' appears to be broken: {}\n" "Task: https://reviews.bitcoinabc.org/T{}".format( buildName, shortBuildUrl, newTask['id'])) else: commitMap = phab.getRevisionPHIDsFromCommits( commitHashes) decoratedCommits = phab.decorateCommitMap( commitMap) decoratedCommit = decoratedCommits[commitHashes[0]] changeLink = decoratedCommit['link'] authorSlackUsername = decoratedCommit['authorSlackUsername'] authorSlackId = slackbot.formatMentionByName( authorSlackUsername) if not authorSlackId: authorSlackId = authorSlackUsername slackbot.postMessage('dev', "Committer: {}\n" "Build '{}' appears to be broken: {}\n" "Task: https://reviews.bitcoinabc.org/T{}\n" "Diff: {}".format( authorSlackId, buildName, shortBuildUrl, newTask['id'], changeLink)) if not isMaster: revisionId, authorPHID = phab.get_revision_info(revisionPHID) properties = buildInfo.getProperties() buildConfig = properties.get('env.ABC_BUILD_NAME', None) if not buildConfig: buildConfig = properties.get('env.OS_NAME', 'UNKNOWN') buildName = "{} ({})".format(buildName, buildConfig) if status == BuildStatus.Failure: msg = phab.createBuildStatusMessage( status, guest_url, buildName) # We add two newlines to break away from the (IMPORTANT) # callout. msg += '\n\n' testFailures = tc.getFailedTests(buildId) if len(testFailures) == 0: # If no test failure is available, print the tail of the # build log buildLog = tc.getBuildLog(buildId) logLines = [] for line in buildLog.splitlines(keepends=True): logLines.append(line) msg += "Tail of the build log:\n```lines=16,COUNTEREXAMPLE\n{}```".format( ''.join(logLines[-60:])) else: # Print the failure log for each test msg += 'Failed tests logs:\n' msg += '```lines=16,COUNTEREXAMPLE' for failure in testFailures: msg += "\n====== {} ======\n{}".format( failure['name'], failure['details']) msg += '```' msg += '\n\n' msg += 'Each failure log is accessible here:' for failure in testFailures: msg += "\n[[{} | {}]]".format( failure['logUrl'], failure['name']) phab.commentOnRevision(revisionPHID, msg, buildName) return SUCCESS, 200 return app diff --git a/contrib/buildbot/test/test_endpoint_buildDiff.py b/contrib/buildbot/test/test_endpoint_buildDiff.py index 5203e6cc4..1c43ea299 100755 --- a/contrib/buildbot/test/test_endpoint_buildDiff.py +++ b/contrib/buildbot/test/test_endpoint_buildDiff.py @@ -1,161 +1,165 @@ #!/usr/bin/env python3 # # Copyright (c) 2020 The Bitcoin ABC developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
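# These tests exercise the /buildDiff endpoint. With this change, when the build
# configuration yields no builds for a diff (runOnDiff is false and no
# runOnDiffRegex matches), the bot reports "pass" for the Harbormaster build
# target right away instead of leaving it pending.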
import json import mock import requests import unittest from unittest.mock import call from build import Build, BuildStatus from test.abcbot_fixture import ABCBotFixture import test.mocks.teamcity from testutil import AnyWith class buildDiffRequestQuery(): def __init__(self): self.stagingRef = "refs/tags/phabricator/diff/1234" self.targetPHID = "PHID-HMBT-123456" self.revisionId = "1234" def __str__(self): return "?{}".format("&".join("{}={}".format(key, value) for key, value in self.__dict__.items())) class EndpointBuildDiffTestCase(ABCBotFixture): def test_buildDiff(self): data = buildDiffRequestQuery() def set_build_configuration(buildConfig): # add some build configs that we expect to always be skipped mergedConfig = dict() mergedConfig.update({ "build-skip-1": { "runOnDiff": False, }, "build-skip-2": {}, }) mergedConfig.update(buildConfig) config = { "builds": mergedConfig, } self.phab.get_file_content_from_master = mock.Mock() self.phab.get_file_content_from_master.return_value = json.dumps( config) def call_buildDiff(expectedBuilds): self.teamcity.session.send.side_effect = [ test.mocks.teamcity.buildInfo(build_id=build.build_id, buildqueue=True) for build in expectedBuilds ] self.phab.differential.getcommitpaths = mock.Mock() self.phab.differential.getcommitpaths.return_value = [ "dir/subdir/file.h", "dir/subdir/file.cpp", "someotherdir/file2.txt", ] response = self.app.post( '/buildDiff{}'.format(data), headers=self.headers) self.assertEqual(response.status_code, 200) self.phab.differential.getcommitpaths.assert_called() self.phab.get_file_content_from_master.assert_called() + if len(expectedBuilds) == 0: + self.phab.harbormaster.sendmessage.assert_called_with( + buildTargetPHID=data.targetPHID, type="pass") + expected_calls = [ call(AnyWith(requests.PreparedRequest, { "url": "https://teamcity.test/app/rest/buildQueue", "body": json.dumps({ "branchName": data.stagingRef, "buildType": { "id": "BitcoinABC_BitcoinAbcStaging", }, 'properties': { 'property': [ { 'name': 'env.ABC_BUILD_NAME', 'value': build.name, }, { 'name': 'env.ABC_REVISION', 'value': data.revisionId, }, { 'name': 'env.harborMasterTargetPHID', 'value': data.targetPHID, }, ], }, }), })) for build in expectedBuilds ] self.teamcity.session.send.assert_has_calls( expected_calls, any_order=True) self.teamcity.session.send.reset_mock() # No diff to run builds = [] set_build_configuration({}) call_buildDiff(builds) self.teamcity.session.send.assert_not_called() # Single diff build builds.append(Build(1, BuildStatus.Queued, "build-1")) set_build_configuration({ "build-1": { "runOnDiff": True, }, }) call_buildDiff(builds) # With matching file regex set_build_configuration({ "build-1": { "runOnDiffRegex": ["dir/subdir/.*"], }, }) call_buildDiff(builds) # With non-matching file regex set_build_configuration({ "build-1": { "runOnDiffRegex": ["dir/nonmatching/.*"], }, }) call_buildDiff([]) # Some builds match the file regex builds.append(Build(1, BuildStatus.Queued, "build-2")) set_build_configuration({ "build-1": { "runOnDiffRegex": ["dir/nonmatching/.*"], }, "build-2": { "runOnDiffRegex": ["someotherdir/file2.txt"], }, }) call_buildDiff([builds[1]]) # Lot of builds builds = [Build(i, BuildStatus.Queued, "build-{}".format(i)) for i in range(10)] buildConfig = {} for build in builds: buildConfig[build.name] = { "runOnDiff": True, } set_build_configuration(buildConfig) call_buildDiff(builds) if __name__ == '__main__': unittest.main() diff --git a/contrib/buildbot/test/test_phabricator.py b/contrib/buildbot/test/test_phabricator.py 
index e6ced1393..9d0f4eeec 100755 --- a/contrib/buildbot/test/test_phabricator.py +++ b/contrib/buildbot/test/test_phabricator.py @@ -1,516 +1,552 @@ #!/usr/bin/env python3 # # Copyright (c) 2020 The Bitcoin ABC developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from base64 import b64encode import mock import os import unittest +from build import BuildStatus, BuildTarget from phabricator_wrapper import BITCOIN_ABC_PROJECT_PHID, BITCOIN_ABC_REPO import test.mocks.phabricator class PhabricatorTests(unittest.TestCase): def setUp(self): self.phab = test.mocks.phabricator.instance() def tearDown(self): pass def test_get_project_members(self): self.phab.project.search.return_value = test.mocks.phabricator.Result([ { "id": 1, "type": "PROJ", "phid": BITCOIN_ABC_PROJECT_PHID, "attachments": { "members": { "members": [ { "phid": "PHID-USER-usernumber1" }, { "phid": "PHID-USER-usernumber2" }, { "phid": "PHID-USER-usernumber3" }, ] } } } ]) abc_members = self.phab.get_project_members(BITCOIN_ABC_PROJECT_PHID) self.phab.project.search.assert_called_with( constraints={ "phids": [BITCOIN_ABC_PROJECT_PHID], }, attachments={ "members": True, }, ) self.assertEqual( abc_members, [ "PHID-USER-usernumber1", "PHID-USER-usernumber2", "PHID-USER-usernumber3", ] ) def test_get_latest_diff_staging_ref(self): revision_PHID = "PHID-DREV-987645" def assert_diff_searched_called(): return self.phab.differential.diff.search.assert_called_with( constraints={ "revisionPHIDs": [revision_PHID], }, order="newest" ) # No diff associated to the revision ref = self.phab.get_latest_diff_staging_ref(revision_PHID) assert_diff_searched_called() self.assertEqual(ref, "") # 2 diffs associated with the revision. Ordering is guaranteed by the # "order" request parameter. self.phab.differential.diff.search.return_value = test.mocks.phabricator.Result([ { "id": 42, "type": "DIFF", "phid": "PHID-DIFF-123456", }, { "id": 41, "type": "DIFF", "phid": "PHID-DIFF-abcdef", }, ]) ref = self.phab.get_latest_diff_staging_ref(revision_PHID) assert_diff_searched_called() self.assertEqual(ref, "refs/tags/phabricator/diff/42") def test_get_current_user_phid(self): user_PHID = "PHID-USER-foobarbaz" self.phab.user.whoami.return_value = { "phid": user_PHID, "userName": "foo", "realName": "Foo Bar", } # The whoami result should be cached. Call the method a few times and # check the call occurs once and the result is always as expected. 
for i in range(10): phid = self.phab.get_current_user_phid() self.phab.user.whoami.assert_called_once() self.assertEqual(phid, user_PHID) def test_getRevisionAuthor(self): self.phab.differential.revision.search.return_value = test.mocks.phabricator.Result([{ 'fields': { 'authorPHID': 'PHID-USER-2345', }, }]) expectedAuthor = { "phid": 'PHID-USER-2345', } self.phab.user.search.return_value = test.mocks.phabricator.Result([ expectedAuthor]) actualAuthor = self.phab.getRevisionAuthor('D1234') self.assertEqual(actualAuthor, expectedAuthor) def test_getAuthorSlackUsername(self): self.assertEqual("", self.phab.getAuthorSlackUsername({})) self.assertEqual("", self.phab.getAuthorSlackUsername({'fields': {}})) self.assertEqual("test-slack-name", self.phab.getAuthorSlackUsername({ 'fields': { 'custom.abc:slack-username': 'test-slack-name', 'username': 'test-username', }, })) self.assertEqual("test-username", self.phab.getAuthorSlackUsername({ 'fields': { 'username': 'test-username', }, })) def test_user_roles(self): user_PHID = "PHID-USER-abcdef" def assert_user_search_called(): return self.phab.user.search.assert_called_with( constraints={ "phids": [user_PHID], } ) # User not found user_roles = self.phab.get_user_roles(user_PHID) assert_user_search_called() self.assertEqual(user_roles, []) # User found self.phab.user.search.return_value = test.mocks.phabricator.Result([ { "id": 1, "type": "USER", "phid": user_PHID, "fields": { "username": "foobar", "realName": "Foo Bar", "roles": [ "admin", "verified", "approved", "activated", ], "dateCreated": 0, "dateModified": 0, "custom.abc:slack-username": "Foobar", }, }, ]) user_roles = self.phab.get_user_roles(user_PHID) assert_user_search_called() self.assertEqual( user_roles, [ "admin", "verified", "approved", "activated", ] ) # If more than 1 user is returned (should never occur), check no role is # returned to prevent privilege exploits. 
self.phab.user.search.return_value = test.mocks.phabricator.Result([ { "id": 1, "type": "USER", "phid": user_PHID, "fields": { "roles": [ "verified", ], }, }, { "id": 2, "type": "USER", "phid": user_PHID, "fields": { "roles": [ "admin", ], }, }, ]) user_roles = self.phab.get_user_roles(user_PHID) assert_user_search_called() self.assertEqual(user_roles, []) def test_get_laster_master_commit_hash(self): with self.assertRaises(AssertionError): self.phab.get_latest_master_commit_hash() self.phab.diffusion.commit.search.return_value = test.mocks.phabricator.Result([ { "id": 1234, "type": "CMIT", "phid": "PHID-CMIT-abcdef", "fields": { "identifier": "0000000000000000000000000000000123456789", "repositoryPHID": "PHID-REPO-abcrepo", }, } ]) commit_hash = self.phab.get_latest_master_commit_hash() self.phab.diffusion.commit.search.assert_called_with( constraints={ "repositories": [BITCOIN_ABC_REPO], }, limit=1, ) self.assertEqual( commit_hash, "0000000000000000000000000000000123456789") def test_get_revision_changed_files(self): self.phab.differential.getcommitpaths.return_value = [ "file1", "dir/file2", ] self.assertEqual( self.phab.get_revision_changed_files(1234), [ "file1", "dir/file2", ]) def test_get_file_content_from_master(self): commit_hash = "0000000000000000000000000000000123456789" file_phid = "PHID-FILE-somefile" path = "some/file" self.phab.get_latest_master_commit_hash = mock.Mock() self.phab.get_latest_master_commit_hash.return_value = commit_hash self.phab.diffusion.browsequery = mock.Mock() def configure_browsequery(file_path=path, hash="abcdef"): self.phab.diffusion.browsequery.return_value = { "paths": [ { "fullPath": "some/file/1", "hash": "1234" }, { "fullPath": "some/file/2", "hash": "5678" }, { "fullPath": file_path, "hash": hash }, ] } def assert_diffusion_browsequery_called(): self.phab.get_latest_master_commit_hash.assert_called() self.phab.diffusion.browsequery.assert_called_with( path=os.path.dirname(path) or None, commit=commit_hash, repository=BITCOIN_ABC_REPO, branch="master", ) def configure_file_content_query( file_phid=file_phid, too_slow=False, too_huge=False): output = { "tooSlow": too_slow, "tooHuge": too_huge, } if file_phid is not None: output["filePHID"] = file_phid self.phab.diffusion.filecontentquery.return_value = output def assert_file_commit_and_file_searched(): self.phab.get_latest_master_commit_hash.assert_called() self.phab.diffusion.filecontentquery.assert_called_with( path=path, commit=commit_hash, timeout=5, byteLimit=1024 * 1024, repository=BITCOIN_ABC_REPO, branch="master", ) # Browse query failure self.phab.diffusion.browsequery.return_value = {} with self.assertRaisesRegex(AssertionError, "File .+ not found in master"): self.phab.get_file_content_from_master(path) assert_diffusion_browsequery_called() # Browse query returns no file self.phab.diffusion.browsequery.return_value = {'paths': []} with self.assertRaisesRegex(AssertionError, "File .+ not found in master"): self.phab.get_file_content_from_master(path) assert_diffusion_browsequery_called() # Browse query failed to find our file configure_browsequery(file_path='something/else') with self.assertRaisesRegex(AssertionError, "File .+ not found in master"): self.phab.get_file_content_from_master(path) assert_diffusion_browsequery_called() configure_browsequery() # Missing file PHID configure_file_content_query(file_phid=None) with self.assertRaisesRegex(AssertionError, "File .+ not found in master"): self.phab.get_file_content_from_master(path) assert_file_commit_and_file_searched() # Too 
        # Too long
        configure_file_content_query(too_slow=True)
        with self.assertRaisesRegex(AssertionError, "is oversized or took too long to download"):
            self.phab.get_file_content_from_master(path)
        assert_file_commit_and_file_searched()

        # Too huge
        configure_file_content_query(too_huge=True)
        with self.assertRaisesRegex(AssertionError, "is oversized or took too long to download"):
            self.phab.get_file_content_from_master(path)
        assert_file_commit_and_file_searched()

        # Check the file content can be retrieved
        expected_content = b'Some nice content'
        result = test.mocks.phabricator.Result([])
        result.response = b64encode(expected_content)
        self.phab.file.download.return_value = result
        configure_file_content_query()
        file_content = self.phab.get_file_content_from_master(path)
        assert_file_commit_and_file_searched()
        self.phab.file.download.assert_called_with(phid=file_phid)
        self.assertEqual(file_content, expected_content)

        # With later calls the content is returned directly thanks to the cache
        self.phab.diffusion.filecontentquery.reset_mock()
        self.phab.file.download.reset_mock()
        for i in range(10):
            file_content = self.phab.get_file_content_from_master(path)
            self.assertEqual(file_content, expected_content)
            self.phab.diffusion.filecontentquery.assert_not_called()
            self.phab.file.download.assert_not_called()

        # If the master commit changes, the file content is still valid in cache
        # as long as its file hash is unchanged
        for i in range(10):
            commit_hash = str(int(commit_hash) + 1)
            self.phab.get_latest_master_commit_hash.return_value = commit_hash

            file_content = self.phab.get_file_content_from_master(path)
            self.assertEqual(file_content, expected_content)
            self.phab.diffusion.filecontentquery.assert_not_called()
            self.phab.file.download.assert_not_called()

        # But if the file hash changes, the file content needs to be updated...
        configure_browsequery(hash="defghi")
        file_content = self.phab.get_file_content_from_master(path)
        assert_file_commit_and_file_searched()
        self.phab.file.download.assert_called_with(phid=file_phid)
        self.assertEqual(file_content, expected_content)

        # ... only once.
        self.phab.diffusion.filecontentquery.reset_mock()
        self.phab.file.download.reset_mock()
        for i in range(10):
            file_content = self.phab.get_file_content_from_master(path)
            self.assertEqual(file_content, expected_content)
            self.phab.diffusion.filecontentquery.assert_not_called()
            self.phab.file.download.assert_not_called()

    def test_set_text_panel_content(self):
        panel_id = 42
        panel_content = "My wonderful panel content"

        self.phab.dashboard.panel.edit.return_value = {
            "error": None,
            "errorMessage": None,
            "response": {
                "object": {
                    "id": panel_id,
                    "phid": "PHID-DSHP-123456789",
                    "transactions": [
                        {
                            "phid": "PHID-XACT-DSHP-abcdefghi"
                        }
                    ]
                }
            }
        }

        def call_set_text_panel_content():
            self.phab.set_text_panel_content(panel_id, panel_content)
            self.phab.dashboard.panel.edit.assert_called_with(
                objectIdentifier=panel_id,
                transactions=[
                    {
                        "type": "text",
                        "value": panel_content
                    }
                ]
            )

        # Happy path
        call_set_text_panel_content()

        # Error
        self.phab.dashboard.panel.edit.return_value["error"] = "You shall not pass !"
        with self.assertRaisesRegex(AssertionError, "Failed to edit panel"):
            call_set_text_panel_content()

+    def test_update_build_target_status(self):
+        build_target = BuildTarget("PHID-HMBT-1234")
+
+        # With no builds queued, default to pass
+        self.phab.update_build_target_status(build_target)
+        self.phab.harbormaster.sendmessage.assert_called_with(
+            buildTargetPHID=build_target.phid, type="pass")
+
+        # Queue a build
+        build_target.queue_build("build-1", "build-name")
+        self.phab.update_build_target_status(build_target)
+        self.phab.harbormaster.sendmessage.assert_called_with(
+            buildTargetPHID=build_target.phid, type="work")
+
+        # Test various statuses
+        self.phab.update_build_target_status(
+            build_target, "build-1", BuildStatus.Queued)
+        self.phab.harbormaster.sendmessage.assert_called_with(
+            buildTargetPHID=build_target.phid, type="work")
+
+        self.phab.update_build_target_status(
+            build_target, "build-1", BuildStatus.Running)
+        self.phab.harbormaster.sendmessage.assert_called_with(
+            buildTargetPHID=build_target.phid, type="work")
+
+        self.phab.update_build_target_status(
+            build_target, "build-1", BuildStatus.Failure)
+        self.phab.harbormaster.sendmessage.assert_called_with(
+            buildTargetPHID=build_target.phid, type="fail")
+
+        self.phab.update_build_target_status(
+            build_target, "build-1", BuildStatus.Success)
+        self.phab.harbormaster.sendmessage.assert_called_with(
+            buildTargetPHID=build_target.phid, type="pass")
+
    def test_get_object_token(self):
        user_PHID = "PHID-USER-foobarbaz"
        self.phab.user.whoami.return_value = {
            "phid": user_PHID,
        }

        object_PHID = "PHID-DREV-abcdef"

        def assert_token_given_called():
            self.phab.token.given.assert_called_with(
                authorPHIDs=[user_PHID],
                objectPHIDs=[object_PHID],
                tokenPHIDs=[],
            )

        # There is no token for this object
        self.phab.token.given.return_value = []
        token = self.phab.get_object_token(object_PHID)
        assert_token_given_called()
        self.assertEqual(token, "")

        # There is exactly 1 token for this object
        self.phab.token.given.return_value = [
            {
                "authorPHID": user_PHID,
                "objectPHID": object_PHID,
                "tokenPHID": "PHID-TOKN-like-1",
                "dateCreated": 0,
            },
        ]
        token = self.phab.get_object_token(object_PHID)
        assert_token_given_called()
        self.assertEqual(token, "PHID-TOKN-like-1")

        # If there is more than a single token only the first one is returned
        self.phab.token.given.return_value = [
            {
                "authorPHID": user_PHID,
                "objectPHID": object_PHID,
                "tokenPHID": "PHID-TOKN-like-1",
                "dateCreated": 0,
            },
            {
                "authorPHID": user_PHID,
                "objectPHID": object_PHID,
                "tokenPHID": "PHID-TOKN-like-2",
                "dateCreated": 1,
            },
        ]
        token = self.phab.get_object_token(object_PHID)
        assert_token_given_called()
        self.assertEqual(token, "PHID-TOKN-like-1")

    def test_set_object_token(self):
        object_PHID = "PHID-DREV-abcdef"

        def assert_token_give_called(token_PHID):
            self.phab.token.give.assert_called_with(
                objectPHID=object_PHID,
                tokenPHID=token_PHID,
            )

        # Rescind any previously awarded token
        self.phab.set_object_token(object_PHID)
        assert_token_give_called("")

        token_PHID = "PHID-TOKN-like-1"
        self.phab.set_object_token(object_PHID, token_PHID)
        assert_token_give_called(token_PHID)


if __name__ == '__main__':
    unittest.main()
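
Reviewer note: the new test_update_build_target_status case only pins down the expected harbormaster.sendmessage calls; the wrapper method it exercises is not part of this hunk. Below is a minimal sketch reconstructed purely from the test's assertions, not the actual phabricator_wrapper.py code: the BuildTarget helpers it relies on (update_build_status() and an aggregate status()) are assumptions for illustration, and the wrapper is taken as a plain phab argument where the real code would be a method on PhabWrapper.

# Illustrative sketch only -- reconstructed from the test assertions, the real
# implementation may differ.
from build import BuildStatus

# Assumed mapping from bot build states to Harbormaster message types,
# inferred from test_update_build_target_status.
HARBORMASTER_MESSAGE_TYPE = {
    BuildStatus.Queued: "work",
    BuildStatus.Running: "work",
    BuildStatus.Failure: "fail",
    BuildStatus.Success: "pass",
}


def update_build_target_status(phab, build_target, build_id=None, status=None):
    if build_id and status:
        # Hypothetical BuildTarget helper recording the status of one build.
        build_target.update_build_status(build_id, status)

    # Hypothetical aggregate status: Success when nothing is queued (so the
    # target defaults to "pass"); a queued or running build keeps it at "work".
    overall_status = build_target.status()

    phab.harbormaster.sendmessage(
        buildTargetPHID=build_target.phid,
        type=HARBORMASTER_MESSAGE_TYPE[overall_status],
    )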