def get(rev_id):
    assert DEPLOYMENT_URL is not None
    assert API_KEY is not None
    phabricator_api = PhabricatorAPI(API_KEY, DEPLOYMENT_URL)
    return phabricator_api.load_revision(rev_id=rev_id, attachments={"projects": True})
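A minimal usage sketch for the helper above. The environment-variable names and the revision id are hypothetical; the snippet only assumes the module-level DEPLOYMENT_URL and API_KEY globals that get() asserts on.

import os

# Hypothetical configuration: get() asserts these globals are set.
DEPLOYMENT_URL = os.environ.get("PHABRICATOR_URL")  # e.g. "https://phabricator.example.com/api/"
API_KEY = os.environ.get("PHABRICATOR_API_KEY")

revision = get(1234)  # loads revision D1234 along with its project attachments
print(revision["fields"]["title"])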
def apply_phab(self, hg, diff_id):
    phabricator_api = PhabricatorAPI(
        api_key=get_secret("PHABRICATOR_TOKEN"), url=get_secret("PHABRICATOR_URL")
    )

    diffs = phabricator_api.search_diffs(diff_id=diff_id)
    assert len(diffs) == 1, "No diff available for {}".format(diff_id)
    diff = diffs[0]

    # Get the stack of patches
    base, patches = phabricator_api.load_patches_stack(hg, diff)
    assert len(patches) > 0, "No patches to apply"

    # Load all the diffs details with commits messages
    diffs = phabricator_api.search_diffs(
        diff_phid=[p[0] for p in patches], attachments={"commits": True}
    )
    commits = {
        diff["phid"]: diff["attachments"]["commits"].get("commits", [])
        for diff in diffs
    }

    # First apply patches on local repo
    for diff_phid, patch in patches:
        commit = commits.get(diff_phid)

        message = ""
        if commit:
            message += "{}\n".format(commit[0]["message"])

        logger.info(f"Applying {diff_phid}")
        hg.import_(
            patches=io.BytesIO(patch.encode("utf-8")),
            message=message,
            user="******",
        )
def from_autoland(autoland_task: dict, phabricator: PhabricatorAPI):
    """
    Build a revision from a Mozilla autoland decision task
    """
    assert (
        autoland_task["payload"]["env"]["GECKO_HEAD_REPOSITORY"] == REPO_AUTOLAND
    ), "Not an autoland decision task"

    # Load mercurial revision
    mercurial_revision = autoland_task["payload"]["env"]["GECKO_HEAD_REV"]

    # Search phabricator revision from commit message
    commit_url = (
        f"https://hg.mozilla.org/integration/autoland/json-rev/{mercurial_revision}"
    )
    response = requests.get(commit_url)
    response.raise_for_status()
    description = response.json()["desc"]
    match = REGEX_PHABRICATOR_COMMIT.search(description)
    if match is not None:
        url, revision_id = match.groups()
        revision_id = int(revision_id)
        logger.info("Found phabricator revision", id=revision_id, url=url)
    else:
        raise Exception(f"No phabricator revision found in commit {commit_url}")

    # Lookup the Phabricator revision to get details (phid, title, bugzilla_id, ...)
    revision = phabricator.load_revision(rev_id=revision_id)

    # Search the Phabricator diff with same commit identifier
    diffs = phabricator.search_diffs(
        revision_phid=revision["phid"], attachments={"commits": True}
    )
    diff = next(
        iter(
            d
            for d in diffs
            if d["attachments"]["commits"]["commits"][0]["identifier"]
            == mercurial_revision
        ),
        None,
    )
    assert (
        diff is not None
    ), f"No Phabricator diff found for D{revision_id} and mercurial revision {mercurial_revision}"
    logger.info("Found phabricator diff", id=diff["id"])

    return Revision(
        id=revision_id,
        phid=revision["phid"],
        diff_id=diff["id"],
        diff_phid=diff["phid"],
        mercurial_revision=mercurial_revision,
        repository=REPO_AUTOLAND,
        target_repository=REPO_MOZILLA_CENTRAL,
        revision=revision,
        diff=diff,
        url=url,
    )
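from_autoland relies on REGEX_PHABRICATOR_COMMIT, whose definition is not shown above. A hedged sketch of an equivalent pattern, based only on the two groups (url, revision_id) the code unpacks and on the "Differential Revision:" trailer that autoland appends to commit messages:

import re

# Illustrative only; the real REGEX_PHABRICATOR_COMMIT may differ.
REGEX_PHABRICATOR_COMMIT = re.compile(
    r"^Differential Revision: (https://[^/\s]+/D(\d+))$", re.MULTILINE
)

match = REGEX_PHABRICATOR_COMMIT.search(
    "Bug 1 - Fix frobnication r=reviewer\n\n"
    "Differential Revision: https://phabricator.services.mozilla.com/D12345"
)
url, revision_id = match.groups()  # -> (".../D12345", "12345")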
def from_try(try_task: dict, phabricator: PhabricatorAPI):
    """
    Load identifiers from Phabricator, using the remote task description
    """
    # Load build target phid from the task env
    code_review = try_task["extra"]["code-review"]
    build_target_phid = code_review.get("phabricator-diff") or code_review.get(
        "phabricator-build-target"
    )
    assert (
        build_target_phid is not None
    ), "Missing phabricator-build-target or phabricator-diff declaration"
    assert build_target_phid.startswith("PHID-HMBT-")

    # And get the diff from the phabricator api
    buildable = phabricator.find_target_buildable(build_target_phid)
    diff_phid = buildable["fields"]["objectPHID"]
    assert diff_phid.startswith("PHID-DIFF-")

    # Load diff details to get the diff revision
    # We also load the commits list in order to get the email of the author of the
    # patch for sending email if builds are failing.
    diffs = phabricator.search_diffs(
        diff_phid=diff_phid, attachments={"commits": True}
    )
    assert len(diffs) == 1, "No diff available for {}".format(diff_phid)
    diff = diffs[0]
    diff_id = diff["id"]
    phid = diff["revisionPHID"]

    revision = phabricator.load_revision(phid)

    # Load repository detailed information
    repos = phabricator.request(
        "diffusion.repository.search",
        constraints={"phids": [revision["fields"]["repositoryPHID"]]},
    )
    assert len(repos["data"]) == 1, "Repository not found on Phabricator"

    # Load target patch from Phabricator for Try mode
    patch = phabricator.load_raw_diff(diff_id)

    # Build a revision without repositories as they are retrieved later
    # when analyzing the full task group
    return Revision(
        id=revision["id"],
        phid=phid,
        diff_id=diff_id,
        diff_phid=diff_phid,
        build_target_phid=build_target_phid,
        revision=revision,
        phabricator_repository=repos["data"][0],
        diff=diff,
        url="https://{}/D{}".format(phabricator.hostname, revision["id"]),
        patch=patch,
    )
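For reference, the shape of the try task payload that from_try reads. The keys come from the code above; the PHID value is hypothetical:

try_task = {
    "extra": {
        "code-review": {
            # Either key works; "phabricator-diff" takes precedence when present.
            "phabricator-build-target": "PHID-HMBT-abcdef123456789012345678",
        }
    }
}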
def upload(self, report, changesets):
    results = self.generate(report, changesets)

    if secrets[secrets.PHABRICATOR_ENABLED]:
        phabricator = PhabricatorAPI(
            secrets[secrets.PHABRICATOR_TOKEN], secrets[secrets.PHABRICATOR_URL]
        )
    else:
        phabricator = None

    for rev_id, coverage in results.items():
        # Only upload raw coverage data to Phabricator, not stats
        coverage = {path: cov["coverage"] for path, cov in coverage.items()}
        logger.info("{} coverage: {}".format(rev_id, coverage))

        if not phabricator or not coverage:
            continue

        try:
            rev_data = phabricator.load_revision(rev_id=rev_id)
            phabricator.upload_coverage_results(
                rev_data["fields"]["diffPHID"], coverage
            )
            # XXX: This is only necessary until https://bugzilla.mozilla.org/show_bug.cgi?id=1487843 is resolved.
            phabricator.upload_lint_results(
                rev_data["fields"]["diffPHID"], BuildState.Pass, []
            )
        except PhabricatorRevisionNotFoundException:
            logger.warn("Phabricator revision not found", rev_id=rev_id)

    return results
def main(taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         cache_root,
         phab_build_target,
         ):
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'PULSE_USER',
            'PULSE_PASSWORD',
            'HOOKS',
            'ADMINS',
            'PHABRICATOR',
            'repositories',
        ),
        existing=dict(
            HOOKS=[],
            ADMINS=['*****@*****.**', '*****@*****.**'],
            repositories=[],
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    task_monitoring.emails = secrets['ADMINS']

    phabricator = PhabricatorAPI(
        api_key=secrets['PHABRICATOR']['token'],
        url=secrets['PHABRICATOR']['url'],
    )

    pl = PulseListener(
        secrets['PULSE_USER'],
        secrets['PULSE_PASSWORD'],
        secrets['HOOKS'],
        secrets['repositories'],
        phabricator,
        cache_root,
        secrets['PHABRICATOR'].get('publish', False),
        taskcluster_client_id,
        taskcluster_access_token,
    )
    click.echo('Listening to pulse messages...')
    if phab_build_target:
        pl.add_build(phab_build_target)
    pl.run()
def __init__(self, url, api_key, retries=5, sleep=10):
    self.api = PhabricatorAPI(url=url, api_key=api_key)

    # Phabricator secure revision retries configuration
    assert isinstance(retries, int)
    assert isinstance(sleep, int)
    self.retries = collections.defaultdict(lambda: (retries, None))
    self.sleep = sleep
    logger.info('Will retry Phabricator secure revision queries',
                retries=retries,
                sleep=sleep)

    # Load secure projects
    projects = self.api.search_projects(slugs=['secure-revision'])
    self.secure_projects = {
        p['phid']: p['fields']['name']
        for p in projects
    }
    logger.info('Loaded secure projects',
                projects=self.secure_projects.values())
def __init__(self, url, api_key, retries=5, sleep=10):
    self.api = PhabricatorAPI(url=url, api_key=api_key)

    # Phabricator secure revision retries configuration
    assert isinstance(retries, int)
    assert isinstance(sleep, int)
    self.max_retries = retries
    self.retries = collections.defaultdict(lambda: (retries, None))
    self.sleep = sleep
    logger.info(
        "Will retry Phabricator secure revision queries",
        retries=retries,
        sleep=sleep,
    )

    # Load secure projects
    projects = self.api.search_projects(slugs=["secure-revision"])
    self.secure_projects = {
        p["phid"]: p["fields"]["name"] for p in projects
    }
    logger.info("Loaded secure projects", projects=self.secure_projects.values())
def apply_phab(self, hg, diff_id):
    phabricator_api = PhabricatorAPI(
        api_key=get_secret("PHABRICATOR_TOKEN"), url=get_secret("PHABRICATOR_URL")
    )

    diffs = phabricator_api.search_diffs(diff_id=diff_id)
    assert len(diffs) == 1, f"No diff available for {diff_id}"
    diff = diffs[0]

    # Get the stack of patches
    base, patches = phabricator_api.load_patches_stack(hg, diff)
    assert len(patches) > 0, "No patches to apply"

    # Load all the diffs details with commits messages
    diffs = phabricator_api.search_diffs(
        diff_phid=[p[0] for p in patches], attachments={"commits": True}
    )

    diffs_data = {}
    for diff in diffs:
        revision = phabricator_api.load_revision(rev_phid=diff["revisionPHID"])
        logger.info(
            "Diff {} linked to Revision {}".format(diff["id"], revision["id"])
        )

        diffs_data[diff["phid"]] = {
            "commits": diff["attachments"]["commits"].get("commits", []),
            "revision": revision,
        }

    # First apply patches on local repo
    for diff_phid, patch in patches:
        diff_data = diffs_data.get(diff_phid)
        commits = diff_data["commits"]
        revision = diff_data["revision"]

        if commits and commits[0]["message"]:
            message = commits[0]["message"]
        else:
            message = revision["fields"]["title"]

        logger.info(f"Applying {diff_phid}")
        hg.import_(
            patches=io.BytesIO(patch.encode("utf-8")),
            message=message,
            user="******",
        )
def upload(self, report, changesets=None):
    results = self.generate(report, changesets)

    if secrets[secrets.PHABRICATOR_ENABLED]:
        phabricator = PhabricatorAPI(
            secrets[secrets.PHABRICATOR_TOKEN], secrets[secrets.PHABRICATOR_URL]
        )
    else:
        phabricator = None

    for rev_id, coverage in results.items():
        logger.info('{} coverage: {}'.format(rev_id, coverage))

        if not phabricator or not coverage:
            continue

        try:
            rev_data = phabricator.load_revision(rev_id=rev_id)
            phabricator.upload_coverage_results(
                rev_data['fields']['diffPHID'], coverage
            )
            # XXX: This is only necessary until https://bugzilla.mozilla.org/show_bug.cgi?id=1487843 is resolved.
            phabricator.upload_lint_results(
                rev_data['fields']['diffPHID'], BuildState.Pass, []
            )
        except PhabricatorRevisionNotFoundException:
            logger.warn('Phabricator revision not found', rev_id=rev_id)
def main():
    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)

    taskcluster.load_secrets(
        args.taskcluster_secret,
        prefixes=["common", "code-review-bot", "bot"],
        required=(
            "APP_CHANNEL",
            "REPORTERS",
            "PHABRICATOR",
            "ALLOWED_PATHS",
            "repositories",
        ),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
            "task_failures_ignored": [],
        },
        local_secrets=yaml.safe_load(args.configuration)
        if args.configuration
        else None,
    )

    init_logger(
        "bot",
        channel=taskcluster.secrets.get("APP_CHANNEL", "dev"),
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Setup settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["ALLOWED_PATHS"],
        taskcluster.secrets["repositories"],
    )

    # Setup statistics
    influx_conf = taskcluster.secrets.get("influxdb")
    if influx_conf:
        stats.auth(influx_conf)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load unique revision
    try:
        if settings.autoland_group_id:
            revision = Revision.from_autoland(
                queue_service.task(settings.autoland_group_id), phabricator_api
            )
        else:
            revision = Revision.from_try(
                queue_service.task(settings.try_task_id), phabricator_api
            )
    except Exception as e:
        # Report revision loading failure on production only
        # On testing or dev instances, we can use different Phabricator
        # configuration that do not match all the pulse messages sent
        if settings.on_production:
            raise
        else:
            logger.info(
                "Failed to load revision",
                task=settings.try_task_id,
                error=str(e),
                phabricator=phabricator["url"],
            )
            return 1

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
        task_failures_ignored=taskcluster.secrets["task_failures_ignored"],
    )

    try:
        if revision.repository == REPO_AUTOLAND:
            w.ingest_autoland(revision)
        else:
            w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        w.update_status(revision, state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise

    return 0
def build_phabricator_api(name: str, url: str, token: str):
    assert url.endswith("/api/"), f"{name} URL {url} does not end with /api/"
    assert token is not None, f"Missing {name} Phabricator token"
    assert len(token) == 32, f"{name} Phabricator token must be 32 characters long"
    return PhabricatorAPI(token, url)
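A hedged usage sketch for build_phabricator_api. The deployment name, URL, and token below are placeholders; Conduit API tokens have the 32-character "api-..." shape the assertion expects:

prod = build_phabricator_api(
    "Production",
    url="https://phabricator.example.com/api/",  # must end with /api/
    token="api-abcdefghijklmnopqrstuvwxyz12",    # placeholder, 32 characters long
)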
def mock_phabricator(mock_config):
    """
    Mock phabricator authentication process
    """

    def _response(name):
        path = os.path.join(MOCK_DIR, "phabricator_{}.json".format(name))
        assert os.path.exists(path)
        return open(path).read()

    responses.add(
        responses.POST,
        "http://phabricator.test/api/user.whoami",
        body=_response("auth"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.diff.search",
        body=_response("diff_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.revision.search",
        body=_response("revision_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.query",
        body=_response("diff_query"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.getrawdiff",
        body=_response("diff_raw"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.createinline",
        body=_response("createinline"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/edge.search",
        body=_response("edge_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/transaction.search",
        body=_response("transaction_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.target.search",
        body=_response("target_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.build.search",
        body=_response("build_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.buildable.search",
        body=_response("buildable_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.sendmessage",
        body=_response("send_message"),
        content_type="application/json",
    )

    yield PhabricatorAPI(url="http://phabricator.test/api/", api_key="deadbeef")
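A sketch of how a test might consume the fixture above, assuming mock_phabricator is registered as a pytest fixture with the responses mock active (the decorators are not shown in the snippet):

def test_client_builds_against_mock(mock_phabricator):
    api = mock_phabricator
    # Constructing the client already exercised the canned user.whoami
    # response; hostname is the attribute other snippets here rely on.
    assert api.hostname == "phabricator.test"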
class PhabricatorActions(object):
    """
    Common Phabricator actions shared across clients
    """

    def __init__(self, url, api_key, retries=5, sleep=10):
        self.api = PhabricatorAPI(url=url, api_key=api_key)

        # Phabricator secure revision retries configuration
        assert isinstance(retries, int)
        assert isinstance(sleep, int)
        self.max_retries = retries
        self.retries = collections.defaultdict(lambda: (retries, None))
        self.sleep = sleep
        logger.info(
            "Will retry Phabricator secure revision queries",
            retries=retries,
            sleep=sleep,
        )

        # Load secure projects
        projects = self.api.search_projects(slugs=["secure-revision"])
        self.secure_projects = {
            p["phid"]: p["fields"]["name"] for p in projects
        }
        logger.info("Loaded secure projects", projects=self.secure_projects.values())

    def update_state(self, build):
        """
        Check the visibility of the revision, by retrying N times with an
        exponential backoff time
        This method is executed regularly by the client application to check on the
        status evolution as the BMO daemon can take several minutes to update the status
        """
        assert isinstance(build, PhabricatorBuild)

        # Only when queued
        if build.state != PhabricatorBuildState.Queued:
            return

        # Check this build has some retries left
        retries_left, last_try = self.retries[build.target_phid]
        if retries_left <= 0:
            return

        # Check this build has been awaited between tries
        exp_backoff = (2 ** (self.max_retries - retries_left)) * self.sleep
        now = time.time()
        if last_try is not None and now - last_try < exp_backoff:
            return

        # Now we can check if this revision is public
        retries_left -= 1
        self.retries[build.target_phid] = (retries_left, now)
        logger.info(
            "Checking visibility status", build=str(build), retries_left=retries_left
        )

        if self.is_visible(build):
            build.state = PhabricatorBuildState.Public
            build.revision_url = self.build_revision_url(build)
            logger.info("Revision is public", build=str(build))

        elif retries_left <= 0:
            # Mark as secured when no retries are left
            build.state = PhabricatorBuildState.Secured
            logger.info("Revision is marked as secure", build=str(build))

        else:
            # Enqueue back to retry later
            build.state = PhabricatorBuildState.Queued

    def is_visible(self, build):
        """
        Check the visibility of the revision by loading its details
        """
        assert isinstance(build, PhabricatorBuild)
        assert build.state == PhabricatorBuildState.Queued
        try:
            # Load revision with projects
            build.revision = self.api.load_revision(
                rev_id=build.revision_id,
                attachments={"projects": True, "reviewers": True},
            )
            if not build.revision:
                raise Exception("Not found")

            # Check against secure projects
            projects = set(build.revision["attachments"]["projects"]["projectPHIDs"])
            if projects.intersection(self.secure_projects):
                raise Exception("Secure revision")
        except Exception as e:
            logger.info("Revision not accessible", build=str(build), error=str(e))
            return False

        return True

    def load_patches_stack(self, build):
        """
        Load a stack of patches for a public Phabricator build
        without hitting a local mercurial repository
        """
        build.stack = self.api.load_patches_stack(build.diff_id, build.diff)

    def load_reviewers(self, build):
        """
        Load details for reviewers found on a build
        """
        assert isinstance(build, PhabricatorBuild)
        assert build.state == PhabricatorBuildState.Public
        assert build.revision is not None

        def load_user(phid):
            if phid.startswith("PHID-USER"):
                return self.api.load_user(user_phid=phid)
            elif phid.startswith("PHID-PROJ"):
                logger.info(f"Skipping group reviewer {phid}")
            else:
                raise Exception(f"Unsupported reviewer {phid}")

        reviewers = build.revision["attachments"]["reviewers"]["reviewers"]
        build.reviewers = list(
            filter(
                None,
                [load_user(reviewer["reviewerPHID"]) for reviewer in reviewers],
            )
        )

    def build_revision_url(self, build):
        """
        Build a Phabricator frontend url for a build's revision
        """
        return "https://{}/D{}".format(self.api.hostname, build.revision_id)
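A worked example of the exponential backoff that update_state applies, with the default retries=5 and sleep=10 seconds:

max_retries, sleep = 5, 10
delays = [2 ** (max_retries - r) * sleep for r in range(max_retries, 0, -1)]
print(delays)  # [10, 20, 40, 80, 160]: each visibility check waits twice as long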
def __init__(self):
    super(NotLanded, self).__init__()
    self.nweeks = utils.get_config(self.name(), "number_of_weeks", 2)
    self.nyears = utils.get_config(self.name(), "number_of_years", 2)
    self.phab = PhabricatorAPI(utils.get_login_info()["phab_api_key"])
    self.extra_ni = {}
def apply_phab(self, hg, diff_id):
    def has_revision(revision):
        if not revision:
            return False
        try:
            hg.identify(revision)
            return True
        except hglib.error.CommandError:
            return False

    phabricator_api = PhabricatorAPI(
        api_key=get_secret("PHABRICATOR_TOKEN"), url=get_secret("PHABRICATOR_URL")
    )

    # Get the stack of patches
    stack = phabricator_api.load_patches_stack(diff_id)
    assert len(stack) > 0, "No patches to apply"

    # Find the first unknown base revision
    needed_stack = []
    revisions = {}
    for patch in reversed(stack):
        needed_stack.insert(0, patch)

        # Stop as soon as a base revision is available
        if has_revision(patch.base_revision):
            logger.info(
                f"Stopping at diff {patch.id} and revision {patch.base_revision}"
            )
            break

    if not needed_stack:
        logger.info("All the patches are already applied")
        return

    # Load all the diff revisions
    diffs = phabricator_api.search_diffs(diff_phid=[p.phid for p in stack])
    revisions = {
        diff["phid"]: phabricator_api.load_revision(rev_phid=diff["revisionPHID"])
        for diff in diffs
    }

    # Update repo to base revision
    hg_base = needed_stack[0].base_revision
    if hg_base:
        hg.update(rev=hg_base, clean=True)
        logger.info(f"Updated repo to {hg_base}")

    for patch in needed_stack:
        revision = revisions[patch.phid]

        if patch.commits:
            message = patch.commits[0]["message"]
        else:
            message = revision["fields"]["title"]

        logger.info(
            f"Applying {patch.phid} from revision {revision['id']}: {message}"
        )
        hg.import_(
            patches=io.BytesIO(patch.patch.encode("utf-8")),
            message=message,
            user="******",
        )
def set_api_key(url: str, api_key: str) -> None:
    global PHABRICATOR_API
    PHABRICATOR_API = PhabricatorAPI(api_key, url)
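A minimal usage sketch (the URL and token are placeholders; set_api_key simply rebinds the module-level PHABRICATOR_API client):

set_api_key("https://phabricator.example.com/api/", "api-abcdefghijklmnopqrstuvwxyz12")
# Subsequent module code can then call PHABRICATOR_API.load_revision(...), etc.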
def apply_phab(self, hg, diff_id):
    def has_revision(revision):
        if not revision:
            return False
        try:
            hg.identify(revision)
            return True
        except hglib.error.CommandError:
            return False

    phabricator_api = PhabricatorAPI(
        api_key=get_secret("PHABRICATOR_TOKEN"), url=get_secret("PHABRICATOR_URL")
    )

    # Get the stack of patches
    stack = phabricator_api.load_patches_stack(diff_id)
    assert len(stack) > 0, "No patches to apply"

    # Find the first unknown base revision
    needed_stack = []
    revisions = {}
    for patch in reversed(stack):
        needed_stack.insert(0, patch)

        # Stop as soon as a base revision is available
        if has_revision(patch.base_revision):
            logger.info(
                f"Stopping at diff {patch.id} and revision {patch.base_revision}"
            )
            break

    if not needed_stack:
        logger.info("All the patches are already applied")
        return

    # Load all the diff revisions
    diffs = phabricator_api.search_diffs(diff_phid=[p.phid for p in stack])
    revisions = {
        diff["phid"]: phabricator_api.load_revision(rev_phid=diff["revisionPHID"])
        for diff in diffs
    }

    # Update repo to base revision
    hg_base = needed_stack[0].base_revision
    if not has_revision(hg_base):
        logger.warning("Missing base revision {} from Phabricator".format(hg_base))
        hg_base = "tip"

    if hg_base:
        hg.update(rev=hg_base, clean=True)
        logger.info(f"Updated repo to {hg_base}")

        try:
            self.git_base = vcs_map.mercurial_to_git(hg_base)
            subprocess.run(
                ["git", "checkout", "-b", "analysis_branch", self.git_base],
                check=True,
                cwd=self.git_repo_dir,
            )
            logger.info(f"Updated git repo to {self.git_base}")
        except Exception as e:
            logger.info(f"Updating git repo to Mercurial {hg_base} failed: {e}")

    for patch in needed_stack:
        revision = revisions[patch.phid]

        if patch.commits:
            message = patch.commits[0]["message"]
            author_name = patch.commits[0]["author"]["name"]
            author_email = patch.commits[0]["author"]["email"]
        else:
            message = revision["fields"]["title"]
            author_name = "bugbug"
            author_email = "*****@*****.**"

        logger.info(
            f"Applying {patch.phid} from revision {revision['id']}: {message}"
        )

        hg.import_(
            patches=io.BytesIO(patch.patch.encode("utf-8")),
            message=message.encode("utf-8"),
            user=f"{author_name} <{author_email}>".encode("utf-8"),
        )

        with tempfile.TemporaryDirectory() as tmpdirname:
            temp_file = os.path.join(tmpdirname, "temp.patch")
            with open(temp_file, "w") as f:
                f.write(patch.patch)

            subprocess.run(
                ["git", "apply", "--3way", temp_file],
                check=True,
                cwd=self.git_repo_dir,
            )

        subprocess.run(
            [
                "git",
                "-c",
                f"user.name={author_name}",
                "-c",
                f"user.email={author_email}",
                "commit",
                "-am",
                message,
            ],
            check=True,
            cwd=self.git_repo_dir,
        )
def mock_phabricator(mock_config):
    '''
    Mock phabricator authentication process
    '''
    def _response(name):
        path = os.path.join(MOCK_DIR, 'phabricator_{}.json'.format(name))
        assert os.path.exists(path)
        return open(path).read()

    responses.add(
        responses.POST,
        'http://phabricator.test/api/user.whoami',
        body=_response('auth'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.diff.search',
        body=_response('diff_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.revision.search',
        body=_response('revision_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.query',
        body=_response('diff_query'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.getrawdiff',
        body=_response('diff_raw'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.createinline',
        body=_response('createinline'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/edge.search',
        body=_response('edge_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/transaction.search',
        body=_response('transaction_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/harbormaster.target.search',
        body=_response('target_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/harbormaster.build.search',
        body=_response('build_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/harbormaster.buildable.search',
        body=_response('buildable_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/harbormaster.sendmessage',
        body=_response('send_message'),
        content_type='application/json',
    )

    yield PhabricatorAPI(
        url='http://phabricator.test/api/',
        api_key='deadbeef',
    )
class PhabricatorActions(object):
    '''
    Common Phabricator actions shared across clients
    '''
    def __init__(self, url, api_key, retries=5, sleep=10):
        self.api = PhabricatorAPI(url=url, api_key=api_key)

        # Phabricator secure revision retries configuration
        assert isinstance(retries, int)
        assert isinstance(sleep, int)
        self.retries = collections.defaultdict(lambda: (retries, None))
        self.sleep = sleep
        logger.info('Will retry Phabricator secure revision queries',
                    retries=retries,
                    sleep=sleep)

        # Load secure projects
        projects = self.api.search_projects(slugs=['secure-revision'])
        self.secure_projects = {
            p['phid']: p['fields']['name']
            for p in projects
        }
        logger.info('Loaded secure projects',
                    projects=self.secure_projects.values())

    def update_state(self, build):
        '''
        Check the visibility of the revision, by retrying N times with a specified time
        This method is executed regularly by the client application to check on the
        status evolution as the BMO daemon can take several minutes to update the status
        '''
        assert isinstance(build, PhabricatorBuild)

        # Only when queued
        if build.state != PhabricatorBuildState.Queued:
            return

        # Check this build has some retries left
        retries_left, last_try = self.retries[build.target_phid]
        if retries_left <= 0:
            return

        # Check this build has been awaited between tries
        now = time.time()
        if last_try is not None and now - last_try < self.sleep:
            return

        # Now we can check if this revision is public
        retries_left -= 1
        self.retries[build.target_phid] = (retries_left, now)
        logger.info('Checking visibility status',
                    build=str(build),
                    retries_left=retries_left)

        if self.is_visible(build):
            build.state = PhabricatorBuildState.Public
            logger.info('Revision is public', build=str(build))

        elif retries_left <= 0:
            # Mark as secured when no retries are left
            build.state = PhabricatorBuildState.Secured
            logger.info('Revision is marked as secure', build=str(build))

        else:
            # Enqueue back to retry later
            build.state = PhabricatorBuildState.Queued

    def is_visible(self, build):
        '''
        Check the visibility of the revision by loading its details
        '''
        assert isinstance(build, PhabricatorBuild)
        assert build.state == PhabricatorBuildState.Queued
        try:
            # Load revision with projects
            build.revision = self.api.load_revision(
                rev_id=build.revision_id,
                attachments={
                    'projects': True,
                    'reviewers': True,
                },
            )
            if not build.revision:
                raise Exception('Not found')

            # Check against secure projects
            projects = set(build.revision['attachments']['projects']['projectPHIDs'])
            if projects.intersection(self.secure_projects):
                raise Exception('Secure revision')
        except Exception as e:
            logger.info('Revision not accessible', build=str(build), error=str(e))
            return False

        return True

    def load_patches_stack(self, build):
        '''
        Load a stack of patches for a public Phabricator build
        without hitting a local mercurial repository
        '''
        build.stack = self.api.load_patches_stack(build.diff_id, build.diff)

    def load_reviewers(self, build):
        '''
        Load details for reviewers found on a build
        '''
        assert isinstance(build, PhabricatorBuild)
        assert build.state == PhabricatorBuildState.Public
        assert build.revision is not None

        reviewers = build.revision['attachments']['reviewers']['reviewers']
        build.reviewers = [
            self.api.load_user(user_phid=reviewer['reviewerPHID'])
            for reviewer in reviewers
        ]
class NotLanded(BzCleaner):
    def __init__(self):
        super(NotLanded, self).__init__()
        self.nweeks = utils.get_config(self.name(), "number_of_weeks", 2)
        self.nyears = utils.get_config(self.name(), "number_of_years", 2)
        self.phab = PhabricatorAPI(utils.get_login_info()["phab_api_key"])
        self.extra_ni = {}

    def description(self):
        return "Open bugs with no activity for {} weeks and a r+ patch which hasn't landed".format(
            self.nweeks
        )

    def has_assignee(self):
        return True

    def get_extra_for_template(self):
        return {"nweeks": self.nweeks}

    def get_extra_for_needinfo_template(self):
        self.extra_ni.update(self.get_extra_for_template())
        return self.extra_ni

    def columns(self):
        return ["id", "summary", "assignee"]

    def handle_bug(self, bug, data):
        if self.has_bot_set_ni(bug):
            return None

        bugid = str(bug["id"])
        assignee = bug.get("assigned_to", "")
        if utils.is_no_assignee(assignee):
            assignee = ""
            nickname = ""
        else:
            nickname = bug["assigned_to_detail"]["nick"]

        data[bugid] = {
            "assigned_to": assignee,
            "nickname": nickname,
            "deps": set(bug["depends_on"]),
        }
        return bug

    def filter_bugs(self, bugs):
        # We must remove bugs which have open dependencies (except meta bugs)
        # because devs may wait for those bugs to be fixed before their patch
        # can land.
        all_deps = set(dep for info in bugs.values() for dep in info["deps"])

        def bug_handler(bug, data):
            if (
                bug["status"] in {"RESOLVED", "VERIFIED", "CLOSED"}
                or "meta" in bug["keywords"]
            ):
                data.add(bug["id"])

        useless = set()
        Bugzilla(
            bugids=list(all_deps),
            include_fields=["id", "keywords", "status"],
            bughandler=bug_handler,
            bugdata=useless,
        ).get_data().wait()

        for bugid, info in bugs.items():
            # finally deps will contain open bugs which are not meta
            info["deps"] -= useless

        # keep bugs with no deps
        bugs = {bugid: info for bugid, info in bugs.items() if not info["deps"]}

        return bugs

    def check_phab(self, attachment, reviewers_phid):
        """Check if the patch in Phabricator has been r+"""
        if attachment["is_obsolete"] == 1:
            return None

        phab_url = base64.b64decode(attachment["data"]).decode("utf-8")

        # extract the revision
        rev = PHAB_URL_PAT.search(phab_url).group(1)
        try:
            data = self.phab.load_revision(
                rev_id=int(rev), queryKey="all", attachments={"reviewers": 1}
            )
        except PhabricatorRevisionNotFoundException:
            return None

        # this is a timestamp
        last_modified = data["fields"]["dateModified"]
        last_modified = lmdutils.get_date_from_timestamp(last_modified)
        if (self.date - last_modified).days <= self.nweeks * 7:
            # Do not do anything if recent changes in the bug
            return False

        reviewers = data["attachments"]["reviewers"]["reviewers"]

        if not reviewers:
            return False

        for reviewer in reviewers:
            if reviewer["status"] != "accepted":
                return False
            reviewers_phid.add(reviewer["reviewerPHID"])

        value = data["fields"]["status"].get("value", "")
        if value == "changes-planned":
            # even if the patch is r+ and not published, some changes may be required
            # so with the value 'changes-planned', the dev can say it's still a wip
            return False

        if value != "published":
            return True

        return False

    def handle_attachment(self, attachment, res):
        ct = attachment["content_type"]
        c = None
        if ct == "text/x-phabricator-request":
            if "phab" not in res or res["phab"]:
                c = self.check_phab(attachment, res["reviewers_phid"])
                if c is not None:
                    res["phab"] = c

        if c is not None:
            attacher = attachment["creator"]
            if "author" in res:
                if attacher in res["author"]:
                    res["author"][attacher] += 1
                else:
                    res["author"][attacher] = 1
            else:
                res["author"] = {attacher: 1}

            if "count" in res:
                res["count"] += 1
            else:
                res["count"] = 1

    def get_patch_data(self, bugs):
        """Get patch information in bugs"""
        nightly_pat = Bugzilla.get_landing_patterns(channels=["nightly"])[0][0]

        def comment_handler(bug, bugid, data):
            # if a comment contains a backout: don't nag
            for comment in bug["comments"]:
                comment = comment["text"].lower()
                if nightly_pat.match(comment) and (
                    "backed out" in comment or "backout" in comment
                ):
                    data[bugid]["backout"] = True

        def attachment_id_handler(attachments, bugid, data):
            for a in attachments:
                if (
                    a["content_type"] == "text/x-phabricator-request"
                    and a["is_obsolete"] == 0
                ):
                    data.append(a["id"])

        def attachment_handler(attachments, data):
            for attachment in attachments:
                bugid = str(attachment["bug_id"])
                if bugid in data:
                    data[bugid].append(attachment)
                else:
                    data[bugid] = [attachment]

        bugids = list(bugs.keys())
        data = {
            bugid: {"backout": False, "author": None, "count": 0} for bugid in bugids
        }

        # Get the ids of the attachments of interest
        # to avoid to download images, videos, ...
        attachment_ids = []
        Bugzilla(
            bugids=bugids,
            attachmenthandler=attachment_id_handler,
            attachmentdata=attachment_ids,
            attachment_include_fields=["is_obsolete", "content_type", "id"],
        ).get_data().wait()

        # Once we've the ids we can get the data
        attachments_by_bug = {}
        Bugzilla(
            attachmentids=attachment_ids,
            attachmenthandler=attachment_handler,
            attachmentdata=attachments_by_bug,
            attachment_include_fields=[
                "bug_id",
                "data",
                "is_obsolete",
                "content_type",
                "id",
                "creator",
            ],
        ).get_data().wait()

        for bugid, attachments in attachments_by_bug.items():
            res = {"reviewers_phid": set()}
            for attachment in attachments:
                self.handle_attachment(attachment, res)

            if "phab" in res:
                if res["phab"]:
                    data[bugid]["reviewers_phid"] = res["reviewers_phid"]
                    data[bugid]["author"] = res["author"]
                    data[bugid]["count"] = res["count"]

        data = {bugid: v for bugid, v in data.items() if v["author"]}

        if not data:
            return data

        Bugzilla(
            bugids=list(data.keys()),
            commenthandler=comment_handler,
            commentdata=data,
            comment_include_fields=["text"],
        ).get_data().wait()

        data = {bugid: v for bugid, v in data.items() if not v["backout"]}

        return data

    def get_bz_userid(self, phids):
        if not phids:
            return {}

        try:
            data = self.phab.load_bz_account(user_phids=list(phids))
            users = {x["phid"]: x["id"] for x in data}
        except PhabricatorBzNotFoundException:
            return {}

        def handler(user, data):
            data[str(user["id"])] = user["name"]

        data = {}
        BugzillaUser(
            user_names=list(users.values()),
            include_fields=["id", "name"],
            user_handler=handler,
            user_data=data,
        ).wait()

        return {phid: data[id] for phid, id in users.items()}

    def get_nicks(self, nicknames):
        def handler(user, data):
            data[user["name"]] = user["nick"]

        users = set(nicknames.values())
        data = {}

        if users:
            BugzillaUser(
                user_names=list(users),
                include_fields=["name", "nick"],
                user_handler=handler,
                user_data=data,
            ).wait()

        for bugid, name in nicknames.items():
            nicknames[bugid] = (name, data[name])

        return nicknames

    def get_bz_params(self, date):
        self.date = lmdutils.get_date_ymd(date)
        fields = ["flags", "depends_on"]
        params = {
            "include_fields": fields,
            "resolution": "---",
            "f1": "attachment.ispatch",
            "n2": 1,
            "f2": "attachments.isobsolete",
            "f3": "attachments.mimetype",
            "o3": "anywordssubstr",
            "v3": "text/x-phabricator-request",
            "f4": "creation_ts",
            "o4": "greaterthan",
            "v4": f"-{self.nyears}y",
            "f5": "days_elapsed",
            "o5": "greaterthaneq",
            "v5": self.nweeks * 7,
            "n6": 1,
            "f6": "longdesc",
            "o6": "casesubstring",
            "v6": "which didn't land and no activity in this bug for",
        }

        return params

    def get_bugs(self, date="today", bug_ids=[]):
        bugs = super(NotLanded, self).get_bugs(date=date, bug_ids=bug_ids)
        bugs = self.filter_bugs(bugs)
        bugs_patch = self.get_patch_data(bugs)
        res = {}

        reviewers_phid = set()
        nicknames = {}
        for bugid, data in bugs_patch.items():
            reviewers_phid |= data["reviewers_phid"]
            assignee = bugs[bugid]["assigned_to"]
            if not assignee:
                assignee = max(data["author"], key=data["author"].get)
                nicknames[bugid] = assignee

        bz_reviewers = self.get_bz_userid(reviewers_phid)
        all_reviewers = set(bz_reviewers.keys())
        nicknames = self.get_nicks(nicknames)

        for bugid, data in bugs_patch.items():
            res[bugid] = d = bugs[bugid]
            self.extra_ni[bugid] = data["count"]
            assignee = d["assigned_to"]
            nickname = d["nickname"]

            if not assignee:
                assignee, nickname = nicknames[bugid]

            if not assignee:
                continue

            self.add_auto_ni(bugid, {"mail": assignee, "nickname": nickname})

            common = all_reviewers & data["reviewers_phid"]
            if common:
                reviewer = random.choice(list(common))
                self.add_auto_ni(
                    bugid, {"mail": bz_reviewers[reviewer], "nickname": None}
                )

        return res
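check_phab above extracts the revision number with PHAB_URL_PAT, whose definition is not part of the snippet. A hedged, illustrative equivalent that satisfies the .group(1) usage:

import re

# Illustrative only; the real PHAB_URL_PAT may differ.
PHAB_URL_PAT = re.compile(r"phabricator\.[^/]+/D(\d+)")

rev = PHAB_URL_PAT.search("https://phabricator.services.mozilla.com/D4242").group(1)
# rev == "4242"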
def apply_phab(self, hg, diff_id):
    def has_revision(revision):
        if not revision:
            return False
        try:
            hg.identify(revision)
            return True
        except hglib.error.CommandError:
            return False

    phabricator_api = PhabricatorAPI(
        api_key=get_secret("PHABRICATOR_TOKEN"), url=get_secret("PHABRICATOR_URL")
    )

    # Get the stack of patches
    stack = phabricator_api.load_patches_stack(diff_id)
    assert len(stack) > 0, "No patches to apply"

    # Find the first unknown base revision
    needed_stack = []
    revisions = {}
    for patch in reversed(stack):
        needed_stack.insert(0, patch)

        # Stop as soon as a base revision is available
        if has_revision(patch.base_revision):
            logger.info(
                f"Stopping at diff {patch.id} and revision {patch.base_revision}"
            )
            break

    if not needed_stack:
        logger.info("All the patches are already applied")
        return

    # Load all the diff revisions
    diffs = phabricator_api.search_diffs(diff_phid=[p.phid for p in stack])
    revisions = {
        diff["phid"]: phabricator_api.load_revision(
            rev_phid=diff["revisionPHID"], attachments={"reviewers": True}
        )
        for diff in diffs
    }

    # Update repo to base revision
    hg_base = needed_stack[0].base_revision
    if not has_revision(hg_base):
        logger.warning("Missing base revision {} from Phabricator".format(hg_base))
        hg_base = "tip"

    if hg_base:
        hg.update(rev=hg_base, clean=True)
        logger.info(f"Updated repo to {hg_base}")

        try:
            self.git_base = vcs_map.mercurial_to_git(hg_base)
            subprocess.run(
                ["git", "checkout", "-b", "analysis_branch", self.git_base],
                check=True,
                cwd=self.git_repo_dir,
            )
            logger.info(f"Updated git repo to {self.git_base}")
        except Exception as e:
            logger.info(f"Updating git repo to Mercurial {hg_base} failed: {e}")

    def load_user(phid):
        if phid.startswith("PHID-USER"):
            return phabricator_api.load_user(user_phid=phid)
        elif phid.startswith("PHID-PROJ"):
            # TODO: Support group reviewers somehow.
            logger.info(f"Skipping group reviewer {phid}")
        else:
            raise Exception(f"Unsupported reviewer {phid}")

    for patch in needed_stack:
        revision = revisions[patch.phid]

        message = "{}\n\n{}".format(
            revision["fields"]["title"], revision["fields"]["summary"]
        )

        author_name = None
        author_email = None

        if patch.commits:
            author_name = patch.commits[0]["author"]["name"]
            author_email = patch.commits[0]["author"]["email"]

        if author_name is None:
            author = load_user(revision["fields"]["authorPHID"])
            author_name = author["fields"]["realName"]
            # XXX: Figure out a way to know the email address of the author.
            author_email = author["fields"]["username"]

        reviewers = list(
            filter(
                None,
                (
                    load_user(reviewer["reviewerPHID"])
                    for reviewer in revision["attachments"]["reviewers"]["reviewers"]
                ),
            )
        )
        reviewers = set(reviewer["fields"]["username"] for reviewer in reviewers)

        if len(reviewers):
            message = replace_reviewers(message, reviewers)

        logger.info(
            f"Applying {patch.phid} from revision {revision['id']}: {message}"
        )

        hg.import_(
            patches=io.BytesIO(patch.patch.encode("utf-8")),
            message=message.encode("utf-8"),
            user=f"{author_name} <{author_email}>".encode("utf-8"),
        )

        with tempfile.TemporaryDirectory() as tmpdirname:
            temp_file = os.path.join(tmpdirname, "temp.patch")
            with open(temp_file, "w") as f:
                f.write(patch.patch)

            subprocess.run(
                ["git", "apply", "--3way", temp_file],
                check=True,
                cwd=self.git_repo_dir,
            )

        subprocess.run(
            [
                "git",
                "-c",
                f"user.name={author_name}",
                "-c",
                f"user.email={author_email}",
                "commit",
                "-am",
                message,
            ],
            check=True,
            cwd=self.git_repo_dir,
        )
def PhabricatorMock():
    '''
    Mock phabricator authentication process
    '''
    json_headers = {
        'Content-Type': 'application/json',
    }

    def _response(name):
        path = os.path.join(MOCK_DIR, 'phabricator', '{}.json'.format(name))
        assert os.path.exists(path), 'Missing mock {}'.format(path)
        return open(path).read()

    def _phab_params(request):
        # What a weird way to send parameters
        return json.loads(urllib.parse.parse_qs(request.body)['params'][0])

    def _diff_search(request):
        params = _phab_params(request)
        assert 'constraints' in params
        if 'revisionPHIDs' in params['constraints']:
            # Search from revision
            mock_name = 'search-{}'.format(params['constraints']['revisionPHIDs'][0])
        elif 'phids' in params['constraints']:
            # Search from diffs
            diffs = '-'.join(params['constraints']['phids'])
            mock_name = 'search-{}'.format(diffs)
        else:
            raise Exception('Unsupported diff mock {}'.format(params))
        return (200, json_headers, _response(mock_name))

    def _diff_raw(request):
        params = _phab_params(request)
        assert 'diffID' in params
        return (200, json_headers, _response('raw-{}'.format(params['diffID'])))

    def _edges(request):
        params = _phab_params(request)
        assert 'sourcePHIDs' in params
        return (200, json_headers, _response('edges-{}'.format(params['sourcePHIDs'][0])))

    def _create_artifact(request):
        params = _phab_params(request)
        assert 'buildTargetPHID' in params
        return (200, json_headers, _response('artifact-{}'.format(params['buildTargetPHID'])))

    def _send_message(request):
        params = _phab_params(request)
        assert 'buildTargetPHID' in params
        name = 'message-{}-{}'.format(params['buildTargetPHID'], params['type'])
        if params['unit']:
            name += '-unit'
        if params['lint']:
            name += '-lint'
        return (200, json_headers, _response(name))

    with responses.RequestsMock(assert_all_requests_are_fired=False) as resp:
        resp.add(
            responses.POST,
            'http://phabricator.test/api/user.whoami',
            body=_response('auth'),
            content_type='application/json',
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/edge.search',
            callback=_edges,
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/differential.diff.search',
            callback=_diff_search,
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/differential.getrawdiff',
            callback=_diff_raw,
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/harbormaster.createartifact',
            callback=_create_artifact,
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/harbormaster.sendmessage',
            callback=_send_message,
        )

        resp.add(
            responses.POST,
            'http://phabricator.test/api/diffusion.repository.search',
            body=_response('repositories'),
            content_type='application/json',
        )

        api = PhabricatorAPI(
            url='http://phabricator.test/api/',
            api_key='deadbeef',
        )
        api.mocks = resp  # used to assert in tests on callbacks
        yield api
def apply_phab(self, hg, phabricator_deployment, diff_id):
    if phabricator_deployment == PHAB_PROD:
        api_key = get_secret("PHABRICATOR_TOKEN")
        url = get_secret("PHABRICATOR_URL")
    else:
        api_key = get_secret("PHABRICATOR_DEV_TOKEN")
        url = get_secret("PHABRICATOR_DEV_URL")

    phabricator_api = PhabricatorAPI(api_key=api_key, url=url)

    # Get the stack of patches
    stack = phabricator_api.load_patches_stack(diff_id)
    assert len(stack) > 0, "No patches to apply"

    # Find the first unknown base revision
    needed_stack = []
    revisions = {}
    for patch in reversed(stack):
        needed_stack.insert(0, patch)

        # Stop as soon as a base revision is available
        if self.has_revision(hg, patch.base_revision):
            logger.info(
                f"Stopping at diff {patch.id} and revision {patch.base_revision}"
            )
            break

    if not needed_stack:
        logger.info("All the patches are already applied")
        return

    # Load all the diff revisions
    diffs = phabricator_api.search_diffs(diff_phid=[p.phid for p in stack])
    revisions = {
        diff["phid"]: phabricator_api.load_revision(
            rev_phid=diff["revisionPHID"], attachments={"reviewers": True}
        )
        for diff in diffs
    }

    # Update repo to base revision
    hg_base = needed_stack[0].base_revision
    if not self.has_revision(hg, hg_base):
        logger.warning("Missing base revision {} from Phabricator".format(hg_base))
        hg_base = "tip"

    if hg_base:
        hg.update(rev=hg_base, clean=True)
        logger.info(f"Updated repo to {hg_base}")

        if self.git_repo_dir and hg_base != "tip":
            try:
                self.git_base = tuple(
                    vcs_map.mercurial_to_git(self.git_repo_dir, [hg_base])
                )[0]
                subprocess.run(
                    ["git", "checkout", "-b", "analysis_branch", self.git_base],
                    check=True,
                    cwd=self.git_repo_dir,
                )
                logger.info(f"Updated git repo to {self.git_base}")
            except Exception as e:
                logger.info(f"Updating git repo to Mercurial {hg_base} failed: {e}")

    def load_user(phid):
        if phid.startswith("PHID-USER"):
            return phabricator_api.load_user(user_phid=phid)
        elif phid.startswith("PHID-PROJ"):
            # TODO: Support group reviewers somehow.
            logger.info(f"Skipping group reviewer {phid}")
        else:
            raise Exception(f"Unsupported reviewer {phid}")

    for patch in needed_stack:
        revision = revisions[patch.phid]

        message = "{}\n\n{}".format(
            revision["fields"]["title"], revision["fields"]["summary"]
        )

        author_name = None
        author_email = None

        if patch.commits:
            author_name = patch.commits[0]["author"]["name"]
            author_email = patch.commits[0]["author"]["email"]

        if author_name is None:
            author = load_user(revision["fields"]["authorPHID"])
            author_name = author["fields"]["realName"]
            # XXX: Figure out a way to know the email address of the author.
            author_email = author["fields"]["username"]

        reviewers = list(
            filter(
                None,
                (
                    load_user(reviewer["reviewerPHID"])
                    for reviewer in revision["attachments"]["reviewers"]["reviewers"]
                ),
            )
        )
        reviewers = set(reviewer["fields"]["username"] for reviewer in reviewers)

        if len(reviewers):
            message = replace_reviewers(message, reviewers)

        logger.info(
            f"Applying {patch.phid} from revision {revision['id']}: {message}"
        )

        hg.import_(
            patches=io.BytesIO(patch.patch.encode("utf-8")),
            message=message.encode("utf-8"),
            user=f"{author_name} <{author_email}>".encode("utf-8"),
        )

        if self.git_repo_dir:
            patch_proc = subprocess.Popen(
                ["patch", "-p1", "--no-backup-if-mismatch", "--force"],
                stdin=subprocess.PIPE,
                cwd=self.git_repo_dir,
            )
            patch_proc.communicate(patch.patch.encode("utf-8"))
            assert patch_proc.returncode == 0, "Failed to apply patch"

            subprocess.run(
                [
                    "git",
                    "-c",
                    f"user.name={author_name}",
                    "-c",
                    f"user.email={author_email}",
                    "commit",
                    "-am",
                    message,
                ],
                check=True,
                cwd=self.git_repo_dir,
            )
def main():
    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)

    taskcluster.load_secrets(
        name=args.taskcluster_secret,
        project_name=config.PROJECT_NAME,
        required=("APP_CHANNEL", "REPORTERS", "PHABRICATOR", "ALLOWED_PATHS"),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "PUBLICATION": "IN_PATCH",
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
        },
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Setup settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["PUBLICATION"],
        taskcluster.secrets["ALLOWED_PATHS"],
    )

    # Setup statistics
    datadog_api_key = taskcluster.secrets.get("DATADOG_API_KEY")
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load unique revision
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
    )
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise
def mock_phabricator(mock_config):
    """
    Mock phabricator authentication process
    """

    def _response(name):
        path = os.path.join(MOCK_DIR, "phabricator_{}.json".format(name))
        assert os.path.exists(path)
        return open(path).read()

    def diff_search(request):
        payload = dict(urllib.parse.parse_qsl(request.body))
        assert "params" in payload
        params = json.loads(payload["params"])

        name = ["diff_search"]
        for values in params.get("constraints", {}).values():
            name += values

        content = _response("_".join(name))

        return (200, {"Content-Type": "application/json"}, content)

    responses.add(
        responses.POST,
        "http://phabricator.test/api/user.whoami",
        body=_response("auth"),
        content_type="application/json",
    )

    responses.add_callback(
        responses.POST,
        "http://phabricator.test/api/differential.diff.search",
        callback=diff_search,
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.revision.search",
        body=_response("revision_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.query",
        body=_response("diff_query"),
        content_type="application/json",
    )

    with open(os.path.join(MOCK_DIR, "phabricator_patch.diff")) as f:
        test_patch = f.read()

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.getrawdiff",
        body=json.dumps(
            {"error_code": None, "error_info": None, "result": test_patch}
        ),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.createinline",
        body=_response("createinline"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/edge.search",
        body=_response("edge_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/transaction.search",
        body=_response("transaction_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.target.search",
        body=_response("target_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.build.search",
        body=_response("build_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.buildable.search",
        body=_response("buildable_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.sendmessage",
        body=_response("send_message"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/diffusion.repository.search",
        body=_response("repository_search"),
        content_type="application/json",
    )

    yield PhabricatorAPI(url="http://phabricator.test/api/", api_key="deadbeef")
def main(
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'PHABRICATOR',
            'ALLOWED_PATHS',
        ),
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'PUBLICATION': 'IN_PATCH',
            'ZERO_COVERAGE_ENABLED': True,
            'ALLOWED_PATHS': [
                '*',
            ],
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
        timestamp=True,
    )

    # Setup settings before stats
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
    )

    # Setup statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load queue service
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_reporting_enabled = 'phabricator' in reporters
    phabricator_api = PhabricatorAPI(phabricator['api_key'], phabricator['url'])
    if phabricator_reporting_enabled:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        secrets['ZERO_COVERAGE_ENABLED'],
    )
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        w.index(revision, state='error', **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise