Example #1
0
def test_augment_commits_from_body():
    # Parsing itself is covered by test_commit_parsing; here we only verify
    # that augment_commits_from_body writes the parsed fields back into the
    # commit dicts it was given.
    commits = [
        {
            "title": "Bug 1 - test r?reviewer",
            "body": "Differential Revision: https://example.com/D101",
        },
        {
            "title": "WIP: Bug 2 - blah r=blocker!",
            "body": "Differential Revision: https://example.com/D102",
        },
    ]
    helpers.augment_commits_from_body(commits)

    first, second = commits

    # First commit: requested reviewer, not WIP.
    assert first["rev-id"] == "101"
    assert first["bug-id"] == "1"
    assert first["bug-id-orig"] == "1"
    assert first["reviewers"]["request"] == ["reviewer"]
    assert first["reviewers"]["granted"] == []
    assert first["title-preview"] == "Bug 1 - test r?reviewer"
    assert not first["wip"]

    # Second commit: blocking (granted) reviewer, flagged WIP by its title.
    assert second["rev-id"] == "102"
    assert second["bug-id"] == "2"
    assert second["bug-id-orig"] == "2"
    assert second["reviewers"]["request"] == []
    assert second["reviewers"]["granted"] == ["blocker!"]
    assert second["title-preview"] == "WIP: Bug 2 - blah r=blocker!"
    assert second["wip"]
Example #2
0
def reorganise(repo, args):
    """Reorder the remote (Phabricator) revision stack to match local commits.

    Computes the transactions needed to make the remote revision stack mirror
    the local commit stack, previews them, asks for confirmation (unless
    ``args.yes``), then applies them via the Conduit API.

    Args:
        repo: repository object providing the local commit stack.
        args: parsed command-line arguments (reads ``args.yes``).

    Raises:
        Error: if the Conduit API is unusable, no commits are found, the local
            stack contains never-submitted commits, the remote stack is not
            linear, transactions cannot be prepared, or no change is needed.
    """
    telemetry.metrics.mozphab.submission.preparation_time.start()

    with wait_message("Checking connection to Phabricator."):
        # Check if raw Conduit API can be used
        if not conduit.check():
            raise Error("Failed to use Conduit API")

    # Find and preview commits to submits.
    with wait_message("Looking for commits.."):
        commits = repo.commit_stack()

    if not commits:
        raise Error("Failed to find any commits to reorganise.")

    with wait_message("Loading commits.."):
        # Parse metadata (e.g. "rev-id") out of each commit's body text.
        augment_commits_from_body(commits)

    # Every local commit must already have a revision on Phabricator; a None
    # rev-id means the commit has never been submitted and reorg can't run.
    localstack_ids = [c["rev-id"] for c in commits]
    if None in localstack_ids:
        names = [c["name"] for c in commits if c["rev-id"] is None]
        plural = len(names) > 1
        raise Error(
            "Found new commit{plural} in the local stack: {names}.\n"
            "Please submit {them} separately and call the `reorg` again.".
            format(
                plural="s" if plural else "",
                them="them" if plural else "it",
                names=", ".join(names),
            ))

    logger.warning("Reorganisation based on {} commit{}:".format(
        len(commits),
        "" if len(commits) == 1 else "s",
    ))

    # Get PhabricatorStack
    # Errors will be raised later in the `walk_llist` method
    with wait_message("Detecting the remote stack..."):
        try:
            phabstack = conduit.get_stack(localstack_ids)
        except Error:
            logger.error("Remote stack is not linear.")
            raise
    # NOTE(review): phabstack appears to be a phid-keyed linked-list mapping
    # (see walk_llist below) — confirm against conduit.get_stack.

    # Preload the phabricator stack
    with wait_message("Preloading Phabricator stack revisions..."):
        conduit.get_revisions(phids=list(phabstack.keys()))

    if phabstack:
        try:
            # Flatten the remote stack into an ordered list of PHIDs.
            phabstack_phids = walk_llist(phabstack)
        except Error:
            logger.error("Remote stack is not linear.\n"
                         "Detected stack:\n{}".format(" <- ".join(
                             conduit.phids_to_ids(list(phabstack.keys())))))
            raise
    else:
        phabstack_phids = []

    localstack_phids = conduit.ids_to_phids(localstack_ids)
    try:
        # Diff remote order vs. local order into per-revision transactions.
        transactions = stack_transactions(phabstack_phids, localstack_phids)
    except Error:
        logger.error("Unable to prepare stack transactions.")
        raise

    if not transactions:
        raise Error("Reorganisation is not needed.")

    # Preview every planned transaction before asking for confirmation.
    logger.warning("Stack will be reorganised:")
    for phid, rev_transactions in transactions.items():
        node_id = conduit.phid_to_id(phid)
        if "abandon" in [t["type"] for t in rev_transactions]:
            logger.info(" * {} will be abandoned".format(node_id))
        else:
            for t in rev_transactions:
                if t["type"] == "children.set":
                    logger.info(" * {child} will depend on {parent}".format(
                        child=conduit.phid_to_id(t["value"][0]),
                        parent=node_id,
                    ))
                if t["type"] == "children.remove":
                    logger.info(
                        " * {child} will no longer depend on {parent}".format(
                            child=conduit.phid_to_id(t["value"][0]),
                            parent=node_id,
                        ))

    telemetry.metrics.mozphab.submission.preparation_time.stop()

    # Confirmation prompt; --yes skips it.
    if args.yes:
        pass
    else:
        res = prompt("Perform reorganisation", ["Yes", "No"])
        if res == "No":
            sys.exit(1)

    telemetry.metrics.mozphab.submission.process_time.start()

    with wait_message("Applying transactions..."):
        for phid, rev_transactions in transactions.items():
            conduit.edit_revision(rev_id=phid, transactions=rev_transactions)

    telemetry.metrics.mozphab.submission.process_time.stop()
    logger.info("Stack has been reorganised.")
Example #3
0
def submit(repo, args):
    """Submit the local commit stack to Phabricator for review.

    For each commit in the stack, either creates a new revision (no "rev-id")
    or updates the existing one, using the raw Conduit API when ``args.no_arc``
    is set and shelling out to Arcanist otherwise.  Commit messages are amended
    afterwards with the resulting revision URL.

    Args:
        repo: repository object used to read, check out, and amend commits.
        args: parsed command-line arguments (reads e.g. ``single``, ``wip``,
            ``no_arc``, ``no_bug``, ``force``, ``message``, ``yes``,
            ``interactive``, ``check_in_needed``, ``no_stack``, ``nolint``,
            ``lesscontext``).

    Raises:
        Error: when the Conduit API is unusable, arc cannot be run, no commits
            are found, validation fails (unless ``args.force``), or arc output
            lacks a revision URL.
    """
    if environment.DEBUG:
        # Surface arc's own trace output when running in debug mode.
        arcanist.ARC.append("--trace")

    telemetry.metrics.mozphab.submission.preparation_time.start()
    with wait_message("Checking connection to Phabricator."):
        # Check if raw Conduit API can be used
        if not conduit.check():
            raise Error("Failed to use Conduit API")

        # Check if local and remote VCS matches
        repo.check_vcs()

        # Check if arc is configured (only needed when not using raw Conduit).
        if not args.no_arc and not repo.check_arc():
            raise Error("Failed to run %s." % arcanist.ARC_COMMAND)

    repo.before_submit()

    # Find and preview commits to submits.
    with wait_message("Looking for commits.."):
        commits = repo.commit_stack(single=args.single)
    if not commits:
        raise Error("Failed to find any commits to submit")
    logger.warning(
        "Submitting %s commit%s %s:",
        len(commits),
        "" if len(commits) == 1 else "s",
        "as Work In Progress" if args.wip else "for review",
    )

    with wait_message("Loading commits.."):
        # Pre-process to load metadata.
        morph_blocking_reviewers(commits)
        augment_commits_from_body(commits)
        update_commits_from_args(commits, args)

    # Validate commit stack is suitable for review.
    show_commit_stack(commits,
                      wip=args.wip,
                      validate=True,
                      ignore_reviewers=args.wip)
    try:
        with wait_message("Checking commits.."):
            repo.check_commits_for_submit(
                commits,
                validate_reviewers=not args.wip,
                require_bug=not args.no_bug,
            )
    except Error as e:
        # --force downgrades validation failures to a logged warning.
        if not args.force:
            raise Error("Unable to submit commits:\n\n%s" % e)
        logger.error("Ignoring issues found with commits:\n\n%s", e)

    # Show a warning if there are untracked files.
    if config.warn_untracked:
        untracked = repo.untracked()
        if untracked:
            logger.warning(
                "Warning: found %s untracked file%s (will not be submitted):",
                len(untracked),
                "" if len(untracked) == 1 else "s",
            )
            # Only list the files when the list is short enough to be useful.
            if len(untracked) <= 5:
                for filename in untracked:
                    logger.info("  %s", filename)

    # Show a warning if -m is used and there are new commits.
    if args.message and any([c for c in commits if not c["rev-id"]]):
        logger.warning(
            "Warning: --message works with updates only, and will not\n"
            "be result in a comment on new revisions.")

    telemetry.metrics.mozphab.submission.preparation_time.stop()
    telemetry.metrics.mozphab.submission.commits_count.add(len(commits))

    # Confirmation prompt.
    if args.yes:
        pass
    elif config.auto_submit and not args.interactive:
        logger.info(
            "Automatically submitting (as per submit.auto_submit in %s)",
            config.name)
    else:
        res = prompt(
            "Submit to %s" %
            PHABRICATOR_URLS.get(repo.phab_url, repo.phab_url),
            ["Yes", "No", "Always"],
        )
        if res == "No":
            return
        if res == "Always":
            # Persist the choice so future runs skip the prompt.
            config.auto_submit = True
            config.write()

    # Process.
    telemetry.metrics.mozphab.submission.process_time.start()
    previous_commit = None
    # Collect all existing revisions to get reviewers info.
    rev_ids_to_update = [int(c["rev-id"]) for c in commits if c.get("rev-id")]
    revisions_to_update = None
    if rev_ids_to_update:
        with wait_message("Loading revision data..."):
            list_to_update = conduit.get_revisions(ids=rev_ids_to_update)

        # Keyed by stringified revision id to match commit["rev-id"] lookups.
        revisions_to_update = {str(r["id"]): r for r in list_to_update}

    last_node = commits[-1]["orig-node"]
    for commit in commits:
        diff = None

        # --check-in-needed applies only to the topmost commit of the stack.
        check_in_needed = args.check_in_needed and commit[
            "orig-node"] == last_node
        # Only revisions being updated have an ID.  Newly created ones don't.
        if not commit["submit"]:
            previous_commit = commit
            continue

        is_update = bool(commit["rev-id"])
        revision_to_update = (revisions_to_update[commit["rev-id"]]
                              if is_update else None)
        existing_reviewers = (
            revision_to_update["attachments"]["reviewers"]["reviewers"]
            if revision_to_update else None)
        has_commit_reviewers = bool(commit["reviewers"]["granted"] +
                                    commit["reviewers"]["request"])

        # Let the user know something's happening.
        if is_update:
            logger.info("\nUpdating revision D%s:", commit["rev-id"])
        else:
            logger.info("\nCreating new revision:")

        logger.info("%s %s", commit["name"], commit["title-preview"])
        repo.checkout(commit["node"])

        # WIP submissions shouldn't set reviewers on phabricator.
        if args.wip:
            reviewers = ""
        else:
            reviewers = ", ".join(commit["reviewers"]["granted"] +
                                  commit["reviewers"]["request"])

        # Create arc-annotated commit description.
        template_vars = dict(
            title=commit["title-preview"],
            body=commit["body"],
            reviewers=reviewers,
            bug_id=commit["bug-id"],
        )
        summary = commit["body"]
        if previous_commit and not args.no_stack:
            # Record the stack dependency on the previously submitted commit.
            template_vars[
                "depends_on"] = "Depends on D%s" % previous_commit["rev-id"]
            summary = "%s\n\n%s" % (summary, template_vars["depends_on"])

        message = arc_message(template_vars)

        if args.no_arc:
            # Raw Conduit path: build and upload the diff ourselves.
            # Create a diff if needed
            with wait_message("Creating local diff..."):
                diff = repo.get_diff(commit)

            if diff:
                telemetry.metrics.mozphab.submission.files_count.add(
                    len(diff.changes))
                with wait_message("Uploading binary file(s)..."):
                    diff.upload_files()

                with wait_message("Submitting the diff..."):
                    diff.submit(commit)

            if is_update:
                with wait_message("Updating revision..."):
                    rev = conduit.update_revision(
                        commit,
                        has_commit_reviewers,
                        existing_reviewers,
                        diff_phid=diff.phid,
                        wip=args.wip,
                        comment=args.message,
                        check_in_needed=check_in_needed,
                    )
            else:
                with wait_message("Creating a new revision..."):
                    rev = conduit.create_revision(
                        commit,
                        commit["title-preview"],
                        summary,
                        diff.phid,
                        has_commit_reviewers,
                        wip=args.wip,
                        check_in_needed=check_in_needed,
                    )

            revision_url = "%s/D%s" % (repo.phab_url, rev["object"]["id"])

        else:
            # Run arc.
            with temporary_file(message) as message_file:
                arc_args = (["diff"] + ["--base", "arc:this"] +
                            ["--allow-untracked", "--no-amend", "--no-ansi"] +
                            ["--message-file", message_file])
                if args.nolint:
                    arc_args.append("--nolint")
                if args.wip:
                    arc_args.append("--plan-changes")
                if args.lesscontext:
                    arc_args.append("--less-context")
                if is_update:
                    message = args.message if args.message else DEFAULT_UPDATE_MESSAGE
                    arc_args.extend(["--message", message] +
                                    ["--update", commit["rev-id"]])
                else:
                    arc_args.append("--create")

                # Stream arc's output, scraping the revision URL from it.
                revision_url = None
                for line in check_call_by_line(arcanist.ARC + arc_args,
                                               cwd=repo.path,
                                               never_log=True):
                    print(line)
                    revision_url = extract_revision_url(line) or revision_url

            if not revision_url:
                raise Error("Failed to find 'Revision URL' in arc output")

            if is_update:
                # Arc does not update title/bug/reviewers itself; do it via
                # explicit Conduit transactions.
                current_status = revision_to_update["fields"]["status"][
                    "value"]
                with wait_message("Updating D%s.." % commit["rev-id"]):
                    transactions = []
                    revision = conduit.get_revisions(
                        ids=[int(commit["rev-id"])])[0]

                    update_revision_description(transactions, commit, revision)
                    update_revision_bug_id(transactions, commit, revision)

                    # Add reviewers only if revision lacks them
                    if not args.wip and has_commit_reviewers and not existing_reviewers:
                        conduit.update_revision_reviewers(transactions, commit)
                        if current_status != "needs-review":
                            transactions.append(dict(type="request-review"))

                    if transactions:
                        arcanist.call_conduit(
                            "differential.revision.edit",
                            {
                                "objectIdentifier": "D%s" % commit["rev-id"],
                                "transactions": transactions,
                            },
                            repo.path,
                        )

        # Append/replace div rev url to/in commit description.
        body = amend_revision_url(commit["body"], revision_url)

        # Amend the commit if required.
        if commit["title-preview"] != commit["title"] or body != commit["body"]:
            commit["title"] = commit["title-preview"]
            commit["body"] = body
            commit["rev-id"] = parse_arc_diff_rev(commit["body"])
            with wait_message("Updating commit.."):
                repo.amend_commit(commit, commits)

        # Diff property has to be set after potential SHA1 change.
        if args.no_arc and diff:
            with wait_message("Setting diff metadata..."):
                diff.set_property(commit, message)

        previous_commit = commit

    # Cleanup (eg. strip nodes) and refresh to ensure the stack is right for the
    # final showing.
    with wait_message("Cleaning up.."):
        repo.finalize(commits)
        repo.after_submit()
        repo.cleanup()
        repo.refresh_commit_stack(commits)

    logger.warning("\nCompleted")
    show_commit_stack(commits,
                      validate=False,
                      show_rev_urls=True,
                      show_updated_only=True)
    telemetry.metrics.mozphab.submission.process_time.stop()
Example #4
0
def submit(repo, args):
    """Submit the local commit stack to Phabricator for review.

    For each commit in the stack, creates a new revision (no "rev-id") or
    updates the existing one through the Conduit API, then amends the commit
    message with the resulting revision URL.  Also handles the ``uplift``
    command's local preparation step.

    Args:
        repo: repository object used to read, check out, and amend commits.
        args: parsed command-line arguments (reads e.g. ``single``,
            ``command``, ``no_bug``, ``force``, ``message``, ``yes``,
            ``interactive``, ``check_in_needed``, ``no_stack``).

    Raises:
        Error: when the Conduit API is unusable, no commits are found, or the
            commits fail validation (unless ``args.force``).
    """
    telemetry().submission.preparation_time.start()
    with wait_message("Checking connection to Phabricator."):
        # Check if raw Conduit API can be used
        if not conduit.check():
            raise Error("Failed to use Conduit API")

        # Check if local and remote VCS matches
        repo.check_vcs()

    repo.before_submit()

    # Find and preview commits to submits.
    with wait_message("Looking for commits.."):
        commits = repo.commit_stack(single=args.single)
    if not commits:
        raise Error("Failed to find any commits to submit")

    if args.command == "uplift":
        # Perform uplift logic during submission.
        avoid_local_changes = local_uplift_if_possible(args, repo, commits)
    else:
        avoid_local_changes = False

    with wait_message("Loading commits.."):
        # Pre-process to load metadata.
        morph_blocking_reviewers(commits)
        augment_commits_from_body(commits)
        update_commits_from_args(commits, args)

    # Display a one-line summary of commit and WIP count.
    commit_count = len(commits)
    wip_commit_count = sum(1 for commit in commits if commit["wip"])

    if wip_commit_count == commit_count:
        status = "as Work In Progress"
    elif wip_commit_count:
        status = f"{wip_commit_count} as Work In Progress"
    else:
        status = "for review"

    # 's'[:commit_count^1]: XOR with 1 is 0 only when count == 1, so the slice
    # is "" for a single commit and "s" otherwise.
    logger.warning(
        f"Submitting {commit_count} commit{'s'[:commit_count^1]} {status}")

    # Validate commit stack is suitable for review.
    show_commit_stack(commits, validate=True)
    try:
        with wait_message("Checking commits.."):
            repo.check_commits_for_submit(commits, require_bug=not args.no_bug)
    except Error as e:
        # --force downgrades validation failures to a logged warning.
        if not args.force:
            raise Error("Unable to submit commits:\n\n%s" % e)
        logger.error("Ignoring issues found with commits:\n\n%s", e)

    if not any(commit["submit"] for commit in commits):
        logger.warning("No changes to submit.")
        return

    # Show a warning if there are untracked files.
    if config.warn_untracked:
        untracked = repo.untracked()
        if untracked:
            logger.warning(
                "Warning: found %s untracked file%s (will not be submitted):",
                len(untracked),
                "" if len(untracked) == 1 else "s",
            )
            # Only list the files when the list is short enough to be useful.
            if len(untracked) <= 5:
                for filename in untracked:
                    logger.info("  %s", filename)

    # Show a warning if -m is used and there are new commits.
    if args.message and any([c for c in commits if not c["rev-id"]]):
        logger.warning(
            "Warning: --message works with updates only, and will not\n"
            "be result in a comment on new revisions.")

    telemetry().submission.preparation_time.stop()
    telemetry().submission.commits_count.add(len(commits))

    # Confirmation prompt.
    if args.yes:
        pass
    elif config.auto_submit and not args.interactive:
        logger.info(
            "Automatically submitting (as per submit.auto_submit in %s)",
            config.name)
    else:
        res = prompt(
            "Submit to %s" %
            PHABRICATOR_URLS.get(repo.phab_url, repo.phab_url),
            ["Yes", "No", "Always"],
        )
        if res == "No":
            return
        if res == "Always":
            # Persist the choice so future runs skip the prompt.
            config.auto_submit = True
            config.write()

    # Process.
    telemetry().submission.process_time.start()
    previous_commit = None
    # Collect all existing revisions to get reviewers info.
    rev_ids_to_update = [int(c["rev-id"]) for c in commits if c.get("rev-id")]
    revisions_to_update = None
    if rev_ids_to_update:
        with wait_message("Loading revision data..."):
            list_to_update = conduit.get_revisions(ids=rev_ids_to_update)

        # Keyed by stringified revision id to match commit["rev-id"] lookups.
        revisions_to_update = {str(r["id"]): r for r in list_to_update}

    last_node = commits[-1]["orig-node"]
    for commit in commits:
        diff = None

        # --check-in-needed applies only to the topmost commit of the stack.
        check_in_needed = args.check_in_needed and commit[
            "orig-node"] == last_node
        # Only revisions being updated have an ID.  Newly created ones don't.
        if not commit["submit"]:
            previous_commit = commit
            continue

        is_update = bool(commit["rev-id"])
        revision_to_update = (revisions_to_update[commit["rev-id"]]
                              if is_update else None)
        existing_reviewers = (
            revision_to_update["attachments"]["reviewers"]["reviewers"]
            if revision_to_update else None)

        # Let the user know something's happening.
        if is_update:
            logger.info("\nUpdating revision D%s:", commit["rev-id"])
        else:
            logger.info("\nCreating new revision:")

        logger.info("%s %s", commit["name"],
                    revision_title_from_commit(commit))
        repo.checkout(commit["node"])

        # WIP submissions shouldn't set reviewers on phabricator.
        if commit["wip"]:
            reviewers = ""
        else:
            reviewers = ", ".join(commit["reviewers"]["granted"] +
                                  commit["reviewers"]["request"])

        # Create arc-annotated commit description.
        template_vars = dict(
            title=revision_title_from_commit(commit),
            body=commit["body"],
            reviewers=reviewers,
            bug_id=commit["bug-id"],
        )
        summary = commit["body"]
        if previous_commit and not args.no_stack:
            # Record the stack dependency on the previously submitted commit.
            template_vars[
                "depends_on"] = "Depends on D%s" % previous_commit["rev-id"]
            summary = "%s\n\n%s" % (summary, template_vars["depends_on"])

        message = arc_message(template_vars)

        # Create a diff if needed
        with wait_message("Creating local diff..."):
            diff = repo.get_diff(commit)

        if diff:
            telemetry().submission.files_count.add(len(diff.changes))
            with wait_message("Uploading binary file(s)..."):
                diff.upload_files()

            with wait_message("Submitting the diff..."):
                diff.submit(commit, message)

        if is_update:
            with wait_message("Updating revision..."):
                rev = conduit.update_revision(
                    commit,
                    existing_reviewers,
                    diff_phid=diff.phid,
                    comment=args.message,
                    check_in_needed=check_in_needed,
                )
        else:
            with wait_message("Creating a new revision..."):
                rev = conduit.create_revision(
                    commit,
                    summary,
                    diff.phid,
                    check_in_needed=check_in_needed,
                )

        revision_url = "%s/D%s" % (repo.phab_url, rev["object"]["id"])

        # Append/replace div rev url to/in commit description.
        body = amend_revision_url(commit["body"], revision_url)

        # Amend the commit if required.
        # As commit rewriting can be expensive we avoid it in some circumstances, such
        # as pre-pending "WIP: " to commits submitted as WIP to Phabricator.
        if commit["title-preview"] != commit["title"] or body != commit["body"]:
            commit["title"] = commit["title-preview"]
            commit["body"] = body
            commit["rev-id"] = parse_arc_diff_rev(commit["body"])

            if not avoid_local_changes:
                with wait_message("Updating commit.."):
                    repo.amend_commit(commit, commits)

        # Diff property has to be set after potential SHA1 change.
        if diff:
            with wait_message("Setting diff metadata..."):
                diff.set_property(commit, message)

        previous_commit = commit

    # Cleanup (eg. strip nodes) and refresh to ensure the stack is right for the
    # final showing.
    with wait_message("Cleaning up.."):
        repo.finalize(commits)
        repo.after_submit()
        repo.cleanup()
        repo.refresh_commit_stack(commits)

    logger.warning("\nCompleted")
    show_commit_stack(commits,
                      validate=False,
                      show_rev_urls=True,
                      show_updated_only=True)
    telemetry().submission.process_time.stop()