def post(data):
    """Land a stack of revisions via transplant.

    Unmarshals the landing request, re-assesses that the requested landing
    path is still landable, builds and uploads one patch per revision, then
    either creates a local ``LandingJob`` (for repos configured with
    ``transplant_locally``) or submits the patch URLs to the remote
    Autoland Transplant service.

    Args:
        data: Deserialized JSON request body containing the landing path
            and an optional confirmation token acknowledging warnings.

    Returns:
        ``({"id": <job or transplant id>}, 202)`` on success, or a
        ``problem`` response tuple when the transplant service fails.

    Raises:
        ValueError: If the assessment reported no blocker but returned
            incomplete data (should not happen; defensive check).
    """
    phab = g.phabricator
    landing_path, confirmation_token = _unmarshal_transplant_request(data)
    logger.info(
        "transplant requested by user",
        extra={
            "has_confirmation_token": confirmation_token is not None,
            "landing_path": landing_path,
        },
    )
    assessment, to_land, landing_repo, stack_data = _assess_transplant_request(
        phab, landing_path
    )
    # Aborts the request with a problem response if there is a blocker or
    # an unacknowledged warning.
    assessment.raise_if_blocked_or_unacknowledged(confirmation_token)

    if not all((to_land, landing_repo, stack_data)):
        raise ValueError(
            "One or more values missing in access transplant request: "
            f"{to_land}, {landing_repo}, {stack_data}"
        )

    if assessment.warnings:
        # Log any warnings that were acknowledged, for auditing.
        logger.info(
            "Transplant with acknowledged warnings is being requested",
            extra={
                "landing_path": landing_path,
                "warnings": [
                    {"i": w.i, "revision_id": w.revision_id, "details": w.details}
                    for w in assessment.warnings
                ],
            },
        )

    # Gather every user/project PHID involved in the revisions to land so
    # reviewer information can be resolved in bulk.
    involved_phids = set()
    revisions = [r[0] for r in to_land]
    for revision in revisions:
        involved_phids.update(gather_involved_phids(revision))
    involved_phids = list(involved_phids)
    users = user_search(phab, involved_phids)
    projects = project_search(phab, involved_phids)
    secure_project_phid = get_secure_project_phid(phab)

    # Take note of any revisions that the checkin project tag must be
    # removed from.
    checkin_phid = get_checkin_project_phid(phab)
    checkin_revision_phids = [
        r["phid"]
        for r in revisions
        if checkin_phid in phab.expect(r, "attachments", "projects", "projectPHIDs")
    ]

    sec_approval_project_phid = get_sec_approval_project_phid(phab)

    # Build the patches to land.
    patch_urls = []
    for revision, diff in to_land:
        reviewers = get_collated_reviewers(revision)
        accepted_reviewers = reviewers_for_commit_message(
            reviewers, users, projects, sec_approval_project_phid
        )
        secure = revision_is_secure(revision, secure_project_phid)
        commit_description = find_title_and_summary_for_landing(phab, revision, secure)
        # format_commit_message returns (title, full message); only the full
        # message is needed here.
        commit_message = format_commit_message(
            commit_description.title,
            get_bugzilla_bug(revision),
            accepted_reviewers,
            commit_description.summary,
            urllib.parse.urljoin(
                current_app.config["PHABRICATOR_URL"], "D{}".format(revision["id"])
            ),
        )[1]
        author_name, author_email = select_diff_author(diff)
        date_modified = phab.expect(revision, "fields", "dateModified")

        # Construct the patch that will be sent to transplant.
        raw_diff = phab.call_conduit("differential.getrawdiff", diffID=diff["id"])
        patch = build_patch_for_revision(
            raw_diff, author_name, author_email, commit_message, date_modified
        )

        # Upload the patch to S3
        patch_url = upload(
            revision["id"],
            diff["id"],
            patch,
            current_app.config["PATCH_BUCKET_NAME"],
            aws_access_key=current_app.config["AWS_ACCESS_KEY"],
            aws_secret_key=current_app.config["AWS_SECRET_KEY"],
        )
        patch_urls.append(patch_url)

    ldap_username = g.auth0_user.email
    revision_to_diff_id = {str(r["id"]): d["id"] for r, d in to_land}
    revision_order = [str(r["id"]) for r in revisions]
    stack_ids = [r["id"] for r in stack_data.revisions.values()]

    # Pre-built blocker raised if another request for the same stack won the
    # race inside the critical sections below.
    submitted_assessment = TransplantAssessment(
        blocker=(
            "This stack was submitted for landing by another user at the same time."
        )
    )

    if landing_repo.transplant_locally:
        with db.session.begin_nested():
            _lock_table_for(db.session, model=LandingJob)
            if (
                LandingJob.revisions_query(stack_ids)
                .filter(
                    LandingJob.status.in_(
                        [LandingJobStatus.SUBMITTED, LandingJobStatus.IN_PROGRESS]
                    )
                )
                .count()
                != 0
            ):
                submitted_assessment.raise_if_blocked_or_unacknowledged(None)

            # Trigger a local transplant
            job = LandingJob(
                status=LandingJobStatus.SUBMITTED,
                requester_email=ldap_username,
                repository_name=landing_repo.tree,
                repository_url=landing_repo.url,
                revision_to_diff_id=revision_to_diff_id,
                revision_order=revision_order,
            )
            db.session.add(job)

        db.session.commit()
        # BUG FIX: this message was missing the ``f`` prefix, so the
        # placeholders were previously logged literally.
        logger.info(f"New landing job {job.id} created for {landing_repo.tree} repo")

        # NOTE: the response body is not being used anywhere.
        return {"id": job.id}, 202

    trans = TransplantClient(
        current_app.config["TRANSPLANT_URL"],
        current_app.config["TRANSPLANT_USERNAME"],
        current_app.config["TRANSPLANT_PASSWORD"],
    )

    # We pass the revision id of the base of our landing path to
    # transplant in rev as it must be unique until the request
    # has been serviced. While this doesn't use Autoland Transplant
    # to enforce not requesting from the same stack again, Lando
    # ensures this itself.
    root_revision_id = to_land[0][0]["id"]

    try:
        # WARNING: Entering critical section, do not add additional
        # code unless absolutely necessary. Acquires a lock on the
        # transplants table which gives exclusive write access and
        # prevents readers who are entering this critical section.
        # See https://www.postgresql.org/docs/9.3/static/explicit-locking.html
        # for more details on the specifics of the lock mode.
        with db.session.begin_nested():
            _lock_table_for(db.session, model=Transplant)
            if (
                Transplant.revisions_query(stack_ids)
                .filter_by(status=TransplantStatus.submitted)
                .first()
                is not None
            ):
                submitted_assessment.raise_if_blocked_or_unacknowledged(None)

            transplant_request_id = trans.land(
                revision_id=root_revision_id,
                ldap_username=ldap_username,
                patch_urls=patch_urls,
                tree=landing_repo.tree,
                pingback=current_app.config["PINGBACK_URL"],
                push_bookmark=landing_repo.push_bookmark,
            )
            transplant = Transplant(
                request_id=transplant_request_id,
                revision_to_diff_id=revision_to_diff_id,
                revision_order=revision_order,
                requester_email=ldap_username,
                tree=landing_repo.tree,
                repository_url=landing_repo.url,
                status=TransplantStatus.submitted,
            )
            db.session.add(transplant)
    except TransplantError:
        logger.exception(
            "error creating transplant", extra={"landing_path": landing_path}
        )
        # BUG FIX: the two concatenated string literals previously ran
        # together ("failed.Please"); a separating space was added.
        return problem(
            502,
            "Transplant not created",
            "The requested landing_path is valid, but transplant failed. "
            "Please retry your request at a later time.",
            type="https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/502",
        )

    # Transaction succeeded, commit the session.
    db.session.commit()

    logger.info(
        "transplant created",
        extra={"landing_path": landing_path, "transplant_id": transplant.id},
    )

    # Asynchronously remove the checkin project from any of the landing
    # revisions that had it.
    for r_phid in checkin_revision_phids:
        try:
            admin_remove_phab_project.apply_async(
                args=(r_phid, checkin_phid),
                kwargs=dict(comment=f"#{CHECKIN_PROJ_SLUG} handled, landing queued."),
            )
        except kombu.exceptions.OperationalError:
            # Best effort is acceptable here, Transplant *is* going to land
            # these changes so it's better to return properly from the request.
            pass

    return {"id": transplant.id}, 202
def _assess_transplant_request(phab, landing_path):
    """Assess whether a landing path can be landed.

    Builds the stack graph for the requested path, checks for landing
    blockers, and — only when there are no blockers — resolves the
    (revision, diff) pairs to land and checks for landing warnings.

    Args:
        phab: A ``PhabricatorClient`` instance.
        landing_path: The requested landing path, as a list of
            (revision id, diff id) pairs.

    Returns:
        A 4-tuple ``(assessment, to_land, landing_repo, stack_data)``.
        When the assessment has a blocker, the last three elements are
        ``None``.
    """
    nodes, edges = _find_stack_from_landing_path(phab, landing_path)
    # Idiom fix: was ``[phid for phid in nodes]`` — a manual copy.
    stack_data = request_extended_revision_data(phab, list(nodes))
    landing_path = _convert_path_id_to_phid(landing_path, stack_data)

    supported_repos = get_repos_for_env(current_app.config.get("ENVIRONMENT"))
    landable_repos = get_landable_repos_for_revision_data(stack_data, supported_repos)
    other_checks = get_blocker_checks(
        repositories=supported_repos, relman_group_phid=get_relman_group_phid(phab)
    )
    landable, blocked = calculate_landable_subgraphs(
        stack_data, edges, landable_repos, other_checks=other_checks
    )

    assessment = check_landing_blockers(
        g.auth0_user, landing_path, stack_data, landable, landable_repos
    )
    if assessment.blocker is not None:
        return (assessment, None, None, None)

    # We have now verified that landable_path is valid and is indeed
    # landable (in the sense that it is a landable_subgraph, with no
    # revisions being blocked). Make this clear by using a different
    # value, and assume it going forward.
    valid_path = landing_path

    # Now that we know this is a valid path we can convert it into a list
    # of (revision, diff) tuples.
    to_land = [stack_data.revisions[r_phid] for r_phid, _ in valid_path]
    to_land = [
        (r, stack_data.diffs[PhabricatorClient.expect(r, "fields", "diffPHID")])
        for r in to_land
    ]

    # To be a landable path the entire path must have the same
    # repository, so we can get away with checking only one.
    repo = stack_data.repositories[to_land[0][0]["fields"]["repositoryPHID"]]
    landing_repo = landable_repos[repo["phid"]]

    # Resolve all involved users/projects in bulk for the warning checks.
    involved_phids = set()
    for revision, _ in to_land:
        involved_phids.update(gather_involved_phids(revision))
    involved_phids = list(involved_phids)
    users = user_search(phab, involved_phids)
    projects = project_search(phab, involved_phids)

    reviewers = {
        revision["phid"]: get_collated_reviewers(revision) for revision, _ in to_land
    }

    assessment = check_landing_warnings(
        g.auth0_user,
        to_land,
        repo,
        landing_repo,
        reviewers,
        users,
        projects,
        get_secure_project_phid(phab),
    )
    return (assessment, to_land, landing_repo, stack_data)
def request_sec_approval(data=None):
    """Update a Revision with a sanitized commit message.

    Kicks off the sec-approval process. See
    https://wiki.mozilla.org/Security/Bug_Approval_Process.

    Args:
        data: Deserialized JSON request body with keys:
            revision_id: The ID of the revision that will have a
                sanitized commit message. e.g. D1234.
            sanitized_message: The sanitized commit message.

    Returns:
        ``({}, 200)`` on success, or a ``problem`` response for an empty
        message, an unknown revision, or a non-secure revision.
    """
    phab = g.phabricator

    revision_id = revision_id_to_int(data["revision_id"])
    alt_message = data["sanitized_message"]
    # BUG FIX: the extra key was previously ``revision_phid`` although the
    # value is the integer revision id, not a PHID.
    logger.info(
        "Got request for sec-approval review of revision",
        extra=dict(revision_id=revision_id),
    )

    if not alt_message:
        return problem(
            400,
            "Empty commit message text",
            "The sanitized commit message text cannot be empty",
            type="https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400",
        )

    # FIXME: this is repeated in numerous places in the code. Needs refactoring!
    revision = phab.call_conduit(
        "differential.revision.search",
        constraints={"ids": [revision_id]},
        attachments={"projects": True},
    )
    revision = phab.single(revision, "data", none_when_empty=True)
    if revision is None:
        return problem(
            404,
            "Revision not found",
            "The requested revision does not exist",
            type="https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404",
        )

    # Only secure revisions are allowed to follow the sec-approval process.
    if not revision_is_secure(revision, get_secure_project_phid(phab)):
        return problem(
            400,
            "Operation only allowed for secure revisions",
            "Only security-sensitive revisions can be given sanitized commit messages",
            type="https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400",
        )

    resulting_transactions = send_sanitized_commit_message_for_review(
        revision["phid"], alt_message, phab
    )

    # Save the transactions that added the sec-approval comment so we can
    # quickly fetch the comment from Phabricator later in the process.
    #
    # NOTE: Each call to Phabricator returns two transactions: one for adding the
    # comment and one for adding the reviewer. We don't know which transaction is
    # which at this point so we record both of them.
    sa_request = SecApprovalRequest.build(revision, resulting_transactions)
    db.session.add(sa_request)
    db.session.commit()

    return {}, 200
def get(revision_id):
    """Get the stack a revision is part of.

    Builds the full stack graph around the revision, computes which
    subgraphs are landable, and serializes every revision and repository
    in the stack for the API response.

    Args:
        revision_id: (string) ID of the revision in 'D{number}' format

    Returns:
        A dict with ``repositories``, ``revisions``, ``edges``,
        ``landable_paths`` and ``uplift_repositories`` keys, or a 404
        ``problem`` response when the revision does not exist.
    """
    revision_id = revision_id_to_int(revision_id)

    phab = g.phabricator
    revision = phab.call_conduit(
        "differential.revision.search", constraints={"ids": [revision_id]}
    )
    revision = phab.single(revision, "data", none_when_empty=True)
    if revision is None:
        return problem(
            404,
            "Revision not found",
            "The requested revision does not exist",
            type="https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404",
        )

    # TODO: This assumes that all revisions and related objects in the stack
    # have uniform view permissions for the requesting user. Some revisions
    # being restricted could cause this to fail.
    nodes, edges = build_stack_graph(phab, phab.expect(revision, "phid"))
    # Idiom fix: was ``[phid for phid in nodes]`` — a manual copy.
    stack_data = request_extended_revision_data(phab, list(nodes))

    supported_repos = get_repos_for_env(current_app.config.get("ENVIRONMENT"))
    landable_repos = get_landable_repos_for_revision_data(stack_data, supported_repos)
    other_checks = get_blocker_checks(
        repositories=supported_repos, relman_group_phid=get_relman_group_phid(phab)
    )
    landable, blocked = calculate_landable_subgraphs(
        stack_data, edges, landable_repos, other_checks=other_checks
    )

    uplift_repos = [
        name for name, repo in supported_repos.items() if repo.approval_required
    ]

    # Resolve every involved user/project PHID in bulk.
    involved_phids = set()
    for revision in stack_data.revisions.values():
        involved_phids.update(gather_involved_phids(revision))
    involved_phids = list(involved_phids)
    users = user_search(phab, involved_phids)
    projects = project_search(phab, involved_phids)

    secure_project_phid = get_secure_project_phid(phab)
    sec_approval_project_phid = get_sec_approval_project_phid(phab)

    revisions_response = []
    for _phid, revision in stack_data.revisions.items():
        revision_phid = PhabricatorClient.expect(revision, "phid")
        fields = PhabricatorClient.expect(revision, "fields")
        diff_phid = PhabricatorClient.expect(fields, "diffPHID")
        diff = stack_data.diffs[diff_phid]
        human_revision_id = "D{}".format(PhabricatorClient.expect(revision, "id"))
        revision_url = urllib.parse.urljoin(
            current_app.config["PHABRICATOR_URL"], human_revision_id
        )
        secure = revision_is_secure(revision, secure_project_phid)
        commit_description = find_title_and_summary_for_display(phab, revision, secure)
        bug_id = get_bugzilla_bug(revision)
        reviewers = get_collated_reviewers(revision)
        accepted_reviewers = reviewers_for_commit_message(
            reviewers, users, projects, sec_approval_project_phid
        )
        commit_message_title, commit_message = format_commit_message(
            commit_description.title,
            bug_id,
            accepted_reviewers,
            commit_description.summary,
            revision_url,
        )
        author_response = serialize_author(phab.expect(fields, "authorPHID"), users)

        revisions_response.append(
            {
                "id": human_revision_id,
                "phid": revision_phid,
                "status": serialize_status(revision),
                "blocked_reason": blocked.get(revision_phid, ""),
                "bug_id": bug_id,
                "title": commit_description.title,
                "url": revision_url,
                "date_created": PhabricatorClient.to_datetime(
                    PhabricatorClient.expect(revision, "fields", "dateCreated")
                ).isoformat(),
                "date_modified": PhabricatorClient.to_datetime(
                    PhabricatorClient.expect(revision, "fields", "dateModified")
                ).isoformat(),
                "summary": commit_description.summary,
                "commit_message_title": commit_message_title,
                "commit_message": commit_message,
                "repo_phid": PhabricatorClient.expect(fields, "repositoryPHID"),
                "diff": serialize_diff(diff),
                "author": author_response,
                "reviewers": serialize_reviewers(reviewers, users, projects, diff_phid),
                "is_secure": secure,
                "is_using_secure_commit_message": commit_description.sanitized,
            }
        )

    repositories = []
    for phid in stack_data.repositories.keys():
        short_name = PhabricatorClient.expect(
            stack_data.repositories[phid], "fields", "shortName"
        )
        repo = supported_repos.get(short_name)
        if repo is None:
            landing_supported, approval_required = False, None
        else:
            landing_supported, approval_required = True, repo.approval_required
        # Unsupported repos fall back to their Phabricator source URL.
        url = (
            "{phabricator_url}/source/{short_name}".format(
                phabricator_url=current_app.config["PHABRICATOR_URL"],
                short_name=short_name,
            )
            if not landing_supported
            else supported_repos[short_name].url
        )
        repositories.append(
            {
                "phid": phid,
                "short_name": short_name,
                "url": url,
                "landing_supported": landing_supported,
                "approval_required": approval_required,
            }
        )

    return {
        "repositories": repositories,
        "revisions": revisions_response,
        # Idiom fix: was ``[e for e in edges]`` — a manual copy.
        "edges": list(edges),
        "landable_paths": landable,
        "uplift_repositories": uplift_repos,
    }
def get(revision_id):
    """Get the stack a revision is part of.

    Builds the full stack graph around the revision, computes which
    subgraphs are landable, and serializes every revision and repository
    in the stack for the API response. Any ``PhabricatorAPIException``
    raised while walking the stack is treated as "not found".

    Args:
        revision_id: (string) ID of the revision in 'D{number}' format

    Returns:
        A dict with ``repositories``, ``revisions``, ``edges``,
        ``landable_paths`` and ``uplift_repositories`` keys, or the
        module-level ``not_found_problem`` response.
    """
    revision_id = revision_id_to_int(revision_id)

    phab = g.phabricator
    revision = phab.call_conduit(
        "differential.revision.search", constraints={"ids": [revision_id]}
    )
    revision = phab.single(revision, "data", none_when_empty=True)
    if revision is None:
        return not_found_problem

    try:
        nodes, edges = build_stack_graph(phab, phab.expect(revision, "phid"))
    except PhabricatorAPIException:
        # If a revision within the stack causes an API exception, treat the whole stack
        # as not found.
        return not_found_problem

    # Idiom fix: was ``[phid for phid in nodes]`` — a manual copy.
    stack_data = request_extended_revision_data(phab, list(nodes))

    supported_repos = get_repos_for_env(current_app.config.get("ENVIRONMENT"))
    landable_repos = get_landable_repos_for_revision_data(stack_data, supported_repos)
    other_checks = get_blocker_checks(
        repositories=supported_repos, relman_group_phid=get_relman_group_phid(phab)
    )
    landable, blocked = calculate_landable_subgraphs(
        stack_data, edges, landable_repos, other_checks=other_checks
    )

    uplift_repos = [
        name for name, repo in supported_repos.items() if repo.approval_required
    ]

    # Resolve every involved user/project PHID in bulk.
    involved_phids = set()
    for revision in stack_data.revisions.values():
        involved_phids.update(gather_involved_phids(revision))
    involved_phids = list(involved_phids)
    users = user_search(phab, involved_phids)
    projects = project_search(phab, involved_phids)

    secure_project_phid = get_secure_project_phid(phab)
    sec_approval_project_phid = get_sec_approval_project_phid(phab)

    revisions_response = []
    for _phid, revision in stack_data.revisions.items():
        revision_phid = PhabricatorClient.expect(revision, "phid")
        fields = PhabricatorClient.expect(revision, "fields")
        diff_phid = PhabricatorClient.expect(fields, "diffPHID")
        diff = stack_data.diffs[diff_phid]
        human_revision_id = "D{}".format(PhabricatorClient.expect(revision, "id"))
        revision_url = urllib.parse.urljoin(
            current_app.config["PHABRICATOR_URL"], human_revision_id
        )
        secure = revision_is_secure(revision, secure_project_phid)
        commit_description = find_title_and_summary_for_display(phab, revision, secure)
        bug_id = get_bugzilla_bug(revision)
        reviewers = get_collated_reviewers(revision)
        accepted_reviewers = reviewers_for_commit_message(
            reviewers, users, projects, sec_approval_project_phid
        )
        commit_message_title, commit_message = format_commit_message(
            commit_description.title,
            bug_id,
            accepted_reviewers,
            commit_description.summary,
            revision_url,
        )
        author_response = serialize_author(phab.expect(fields, "authorPHID"), users)

        revisions_response.append(
            {
                "id": human_revision_id,
                "phid": revision_phid,
                "status": serialize_status(revision),
                "blocked_reason": blocked.get(revision_phid, ""),
                "bug_id": bug_id,
                "title": commit_description.title,
                "url": revision_url,
                "date_created": PhabricatorClient.to_datetime(
                    PhabricatorClient.expect(revision, "fields", "dateCreated")
                ).isoformat(),
                "date_modified": PhabricatorClient.to_datetime(
                    PhabricatorClient.expect(revision, "fields", "dateModified")
                ).isoformat(),
                "summary": commit_description.summary,
                "commit_message_title": commit_message_title,
                "commit_message": commit_message,
                "repo_phid": PhabricatorClient.expect(fields, "repositoryPHID"),
                "diff": serialize_diff(diff),
                "author": author_response,
                "reviewers": serialize_reviewers(reviewers, users, projects, diff_phid),
                "is_secure": secure,
                "is_using_secure_commit_message": commit_description.sanitized,
            }
        )

    repositories = []
    for phid in stack_data.repositories.keys():
        short_name = PhabricatorClient.expect(
            stack_data.repositories[phid], "fields", "shortName"
        )
        repo = supported_repos.get(short_name)
        landing_supported = repo is not None
        # Unsupported repos fall back to their Phabricator source URL.
        url = (
            repo.url
            if landing_supported
            else f"{current_app.config['PHABRICATOR_URL']}/source/{short_name}"
        )
        repositories.append(
            {
                "approval_required": landing_supported and repo.approval_required,
                "commit_flags": repo.commit_flags if repo else [],
                "landing_supported": landing_supported,
                "phid": phid,
                "short_name": short_name,
                "url": url,
            }
        )

    return {
        "repositories": repositories,
        "revisions": revisions_response,
        # Idiom fix: was ``[e for e in edges]`` — a manual copy.
        "edges": list(edges),
        "landable_paths": landable,
        "uplift_repositories": uplift_repos,
    }