Code Example #1
File: queue.py Project: Madhu-1/mergify-engine
def remove_pull(pull):
    redis = utils.get_redis_for_cache()
    queue = _get_queue_cache_key(pull)
    redis.zrem(queue, pull.number)
    redis.delete(_get_update_method_cache_key(pull))
    pull.log.debug("pull request removed from merge queue", queue=queue)
Code Example #2
def _do_rebase(ctxt: context.Context, token: str) -> None:
    # NOTE(sileht):
    # $ curl https://api.github.com/repos/sileht/repotest/pulls/2 | jq .commits
    # 2
    # $ git clone https://[email protected]/sileht-tester/repotest \
    #           --depth=$((2 + 1)) -b sileht/testpr
    # $ cd repotest
    # $ git remote add upstream https://[email protected]/sileht/repotest.git
    # $ git log | grep Date | tail -1
    # Date:   Fri Mar 30 21:30:26 2018 (10 days ago)
    # $ git fetch upstream master --shallow-since="Fri Mar 30 21:30:26 2018"
    # $ git rebase upstream/master
    # $ git push origin sileht/testpr:sileht/testpr

    head_repo = (ctxt.pull["head"]["repo"]["owner"]["login"] + "/" +
                 ctxt.pull["head"]["repo"]["name"])
    base_repo = (ctxt.pull["base"]["repo"]["owner"]["login"] + "/" +
                 ctxt.pull["base"]["repo"]["name"])

    head_branch = ctxt.pull["head"]["ref"]
    base_branch = ctxt.pull["base"]["ref"]
    git = utils.Gitter(ctxt.log)
    try:
        git("init")
        git.configure()
        git.add_cred(token, "", head_repo)
        git.add_cred(token, "", base_repo)
        git("remote", "add", "origin", f"{config.GITHUB_URL}/{head_repo}")
        git("remote", "add", "upstream", f"{config.GITHUB_URL}/{base_repo}")

        depth = len(ctxt.commits) + 1
        git("fetch", "--quiet", "--depth=%d" % depth, "origin", head_branch)
        git("checkout", "-q", "-b", head_branch, "origin/%s" % head_branch)

        out = git("log", "--format=%cI")
        last_commit_date = [
            d for d in out.decode("utf8").split("\n") if d.strip()
        ][-1]

        git(
            "fetch",
            "--quiet",
            "upstream",
            base_branch,
            "--shallow-since='%s'" % last_commit_date,
        )

        # Try to find the merge base, but don't fetch more than 1000 commits.
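        # (The 1000-commit budget comes from at most 20 rounds of "--deepen=50" below.)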
        for _ in range(20):
            git("repack", "-d")
            if git("merge-base", f"upstream/{base_branch}",
                   f"origin/{head_branch}"):
                break
            git("fetch", "-q", "--deepen=50", "upsteam", base_branch)

        try:
            git("rebase", "upstream/%s" % base_branch)
            git("push", "--verbose", "origin", head_branch, "-f")
        except subprocess.CalledProcessError as e:  # pragma: no cover
            for message in GIT_MESSAGE_TO_UNSHALLOW:
                if message in e.output:
                    ctxt.log.info("Complete history cloned")
                    # NOTE(sileht): We currently assume we have only one parent
                    # commit in common. Since Git is a graph, in some cases the
                    # graph can be more complicated, so we retry with the whole
                    # Git history for now.
                    git("fetch", "--unshallow")
                    git("fetch", "--quiet", "origin", head_branch)
                    git("fetch", "--quiet", "upstream", base_branch)
                    git("rebase", "upstream/%s" % base_branch)
                    git("push", "--verbose", "origin", head_branch, "-f")
                    break
            else:
                raise

        expected_sha = git("log", "-1", "--format=%H").decode().strip()
        # NOTE(sileht): We store this for dismissal action
        with utils.get_redis_for_cache() as redis:  # type: ignore
            redis.setex("branch-update-%s" % expected_sha, 60 * 60,
                        expected_sha)
    except subprocess.CalledProcessError as in_exception:  # pragma: no cover
        if in_exception.output == b"":
            # SIGKILL...
            raise BranchUpdateNeedRetry()

        for message, out_exception in GIT_MESSAGE_TO_EXCEPTION.items():
            if message in in_exception.output:
                raise out_exception(
                    "Git reported the following error:\n"
                    f"```\n{in_exception.output.decode()}\n```\n")
        else:
            ctxt.log.error(
                "update branch failed: %s",
                in_exception.output.decode(),
                exc_info=True,
            )
            raise BranchUpdateFailure()

    except Exception:  # pragma: no cover
        ctxt.log.error("update branch failed", exc_info=True)
        raise BranchUpdateFailure()
    finally:
        git.cleanup()
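
The GIT_MESSAGE_TO_UNSHALLOW and GIT_MESSAGE_TO_EXCEPTION tables used above are defined elsewhere in the project and are not shown on this page. As a purely hypothetical sketch of the pattern (the actual messages differ), they map substrings of git's output either to "unshallow and retry" or to a typed failure, using the exception classes already referenced in the snippet:

# Hypothetical illustration only -- not the project's actual tables.
GIT_MESSAGE_TO_UNSHALLOW = {
    b"shallow update not allowed",                     # assumed message
    b"fatal: refusing to merge unrelated histories",   # assumed message
}
GIT_MESSAGE_TO_EXCEPTION = {
    b"Authentication failed": BranchUpdateFailure,     # assumed mapping
    b"Connection timed out": BranchUpdateNeedRetry,    # assumed mapping
}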
Code Example #3
def collect_metrics():
    redis = utils.get_redis_for_cache()
    integration = github.GithubIntegration(config.INTEGRATION_ID,
                                           config.PRIVATE_KEY)

    installations = collections.defaultdict(int)
    repositories_per_installation = collections.defaultdict(int)
    users_per_installation = collections.defaultdict(int)

    LOG.info("GitHub Polling started")

    redis.delete("badges.tmp")

    for installation in utils.get_installations(integration):
        try:
            _id = installation["id"]
            target_type = installation["target_type"]
            account = installation["account"]["login"]

            LOG.info("Get subscription", account=account)
            subscribed = utils.get_subscription(redis, _id)["subscribed"]

            installations[(subscribed, target_type)] += 1

            token = integration.get_access_token(_id).token
            g = github.Github(token,
                              base_url="https://api.%s" % config.GITHUB_DOMAIN)

            if target_type == "Organization":  # pragma: no cover
                LOG.info("Get members",
                         install=installation["account"]["login"])
                org = g.get_organization(installation["account"]["login"])
                value = len(list(org.get_members()))

                users_per_installation[(subscribed, target_type,
                                        account)] = value
            else:
                users_per_installation[(subscribed, target_type, account)] = 1

            LOG.info("Get repos", account=account)

            repositories = sorted(g.get_installation(_id).get_repos(),
                                  key=operator.attrgetter("private"))
            for private, repos in itertools.groupby(
                    repositories, key=operator.attrgetter("private")):

                configured_repos = 0
                unconfigured_repos = 0
                for repo in repos:
                    try:
                        repo.get_contents(".mergify.yml")
                        configured_repos += 1
                        redis.sadd("badges.tmp", repo.full_name)
                    except github.GithubException as e:
                        if e.status >= 500:  # pragma: no cover
                            raise
                        unconfigured_repos += 1

                repositories_per_installation[(subscribed, target_type,
                                               account, private,
                                               True)] = configured_repos
                repositories_per_installation[(subscribed, target_type,
                                               account, private,
                                               False)] = unconfigured_repos
        except github.GithubException as e:
            # Ignore rate limit/abuse
            if e.status != 403:
                raise

    LOG.info("GitHub Polling finished")

    # NOTE(sileht): Prometheus can scrape data during our loop, so keep this
    # fast to ensure we always expose the right values.
    # Also, we can't know which labels we should delete from the Gauge,
    # which is why we delete all of them and re-add them.
    # prometheus_client doesn't provide an API for that, so we just
    # override _metrics
    set_gauges(INSTALLATIONS, installations)
    set_gauges(USERS_PER_INSTALLATION, users_per_installation)
    set_gauges(REPOSITORIES_PER_INSTALLATION, repositories_per_installation)

    if redis.exists("badges.tmp"):
        redis.rename("badges.tmp", "badges")

    LOG.info("Gauges and badges cache updated")
Code Example #4
    def setUp(self):
        super(FunctionalTestBase, self).setUp()
        self.existing_labels = []
        self.pr_counter = 0
        self.git_counter = 0
        self.cassette_library_dir = os.path.join(CASSETTE_LIBRARY_DIR_BASE,
                                                 self.__class__.__name__,
                                                 self._testMethodName)

        # Recording stuff
        if RECORD:
            if os.path.exists(self.cassette_library_dir):
                shutil.rmtree(self.cassette_library_dir)
            os.makedirs(self.cassette_library_dir)

        self.recorder = vcr.VCR(
            cassette_library_dir=self.cassette_library_dir,
            record_mode="all" if RECORD else "none",
            match_on=["method", "uri"],
            filter_headers=[
                ("Authorization", "<TOKEN>"),
                ("X-Hub-Signature", "<SIGNATURE>"),
                ("User-Agent", None),
                ("Accept-Encoding", None),
                ("Connection", None),
            ],
            before_record_response=self.response_filter,
            custom_patches=((pygithub.MainClass, "HTTPSConnection",
                             vcr.stubs.VCRHTTPSConnection), ),
        )

        if RECORD:
            github.CachedToken.STORAGE = {}
        else:
            # Never expire token during replay
            mock.patch.object(github_app,
                              "get_or_create_jwt",
                              return_value="<TOKEN>").start()
            mock.patch.object(
                github.GithubAppInstallationAuth,
                "get_access_token",
                return_value="<TOKEN>",
            ).start()

            # NOTE(sileht): the httpx vcr stubs do not replay auth_flow, as they directly patch client.send(),
            # so anything occurring during auth_flow has to be mocked during replay
            def get_auth(owner=None, repo=None, auth=None):
                if auth is None:
                    auth = github.get_auth(owner, repo)
                    auth.installation = {
                        "id": config.INSTALLATION_ID,
                    }
                    auth.permissions_need_to_be_updated = False
                    auth.owner_id = config.TESTING_ORGANIZATION_ID
                return auth

            async def github_aclient(owner=None, repo=None, auth=None):
                return github.AsyncGithubInstallationClient(
                    get_auth(owner, repo, auth))

            def github_client(owner=None, repo=None, auth=None):
                return github.GithubInstallationClient(
                    get_auth(owner, repo, auth))

            mock.patch.object(github, "get_client", github_client).start()
            mock.patch.object(github, "aget_client", github_aclient).start()

        with open(engine.mergify_rule_path, "r") as f:
            engine.MERGIFY_RULE = yaml.safe_load(f.read().replace(
                "mergify[bot]", "mergify-test[bot]"))

        mock.patch.object(branch_updater.utils, "Gitter",
                          self.get_gitter).start()
        mock.patch.object(duplicate_pull.utils, "Gitter",
                          self.get_gitter).start()

        if not RECORD:
            # NOTE(sileht): Don't wait exponentially during replay
            mock.patch.object(context.Context._ensure_complete.retry, "wait",
                              None).start()

        # Web authentication always passes
        mock.patch("hmac.compare_digest", return_value=True).start()

        branch_prefix_path = os.path.join(self.cassette_library_dir,
                                          "branch_prefix")

        if RECORD:
            self.BRANCH_PREFIX = datetime.datetime.utcnow().strftime(
                "%Y%m%d%H%M%S")
            with open(branch_prefix_path, "w") as f:
                f.write(self.BRANCH_PREFIX)
        else:
            with open(branch_prefix_path, "r") as f:
                self.BRANCH_PREFIX = f.read()

        self.master_branch_name = self.get_full_branch_name("master")

        self.git = self.get_gitter(LOG)
        self.addCleanup(self.git.cleanup)

        loop = asyncio.get_event_loop()
        loop.run_until_complete(web.startup())
        self.app = testclient.TestClient(web.app)

        # NOTE(sileht): Prepare a fresh redis
        self.redis = utils.get_redis_for_cache()
        self.redis.flushall()
        self.subscription = {
            "tokens": {
                "mergify-test-1": config.ORG_ADMIN_GITHUB_APP_OAUTH_TOKEN
            },
            "subscription_active": self.SUBSCRIPTION_ACTIVE,
            "subscription_reason": "You're not nice",
        }
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            sub_utils.save_subscription_to_cache(
                config.INSTALLATION_ID,
                self.subscription,
            ))

        # Let's start recording
        cassette = self.recorder.use_cassette("http.json")
        cassette.__enter__()
        self.addCleanup(cassette.__exit__)

        integration = pygithub.GithubIntegration(config.INTEGRATION_ID,
                                                 config.PRIVATE_KEY)
        self.installation_token = integration.get_access_token(
            config.INSTALLATION_ID).token

        base_url = config.GITHUB_API_URL
        self.g_integration = pygithub.Github(self.installation_token,
                                             base_url=base_url)
        self.g_admin = pygithub.Github(config.ORG_ADMIN_PERSONAL_TOKEN,
                                       base_url=base_url)
        self.g_fork = pygithub.Github(self.FORK_PERSONAL_TOKEN,
                                      base_url=base_url)

        self.o_admin = self.g_admin.get_organization(
            config.TESTING_ORGANIZATION)
        self.o_integration = self.g_integration.get_organization(
            config.TESTING_ORGANIZATION)
        self.u_fork = self.g_fork.get_user()
        assert self.o_admin.login == "mergifyio-testing"
        assert self.o_integration.login == "mergifyio-testing"
        assert self.u_fork.login in ["mergify-test2", "mergify-test3"]

        self.r_o_admin = self.o_admin.get_repo(self.REPO_NAME)
        self.r_o_integration = self.o_integration.get_repo(self.REPO_NAME)
        self.r_fork = self.u_fork.get_repo(self.REPO_NAME)

        self.url_main = f"{config.GITHUB_URL}/{self.r_o_integration.full_name}"
        self.url_fork = (
            f"{config.GITHUB_URL}/{self.u_fork.login}/{self.r_o_integration.name}"
        )

        self.cli_integration = github.get_client(
            config.TESTING_ORGANIZATION,
            self.REPO_NAME,
        )

        real_get_subscription = sub_utils.get_subscription

        async def fake_retrieve_subscription_from_db(owner_id):
            if owner_id == config.TESTING_ORGANIZATION_ID:
                return self.subscription
            else:
                return {
                    "tokens": {},
                    "subscription_active": False,
                    "subscription_reason": "We're just testing",
                }

        async def fake_subscription(owner_id):
            if owner_id == config.TESTING_ORGANIZATION_ID:
                return await real_get_subscription(owner_id)
            else:
                return {
                    "tokens": {},
                    "subscription_active": False,
                    "subscription_reason": "We're just testing",
                }

        mock.patch(
            "mergify_engine.branch_updater.sub_utils.get_subscription",
            side_effect=fake_subscription,
        ).start()

        mock.patch(
            "mergify_engine.branch_updater.sub_utils._retrieve_subscription_from_db",
            side_effect=fake_retrieve_subscription_from_db,
        ).start()

        mock.patch(
            "mergify_engine.sub_utils.get_subscription",
            side_effect=fake_subscription,
        ).start()

        mock.patch(
            "github.MainClass.Installation.Installation.get_repos",
            return_value=[self.r_o_integration],
        ).start()

        self._event_reader = EventReader(self.app)
        self._event_reader.drain()
Code Example #5
def subscription_cache(installation_id):  # pragma: no cover
    authentification()
    r = utils.get_redis_for_cache()
    r.delete("subscription-cache-%s" % installation_id)
    return "Cache cleaned", 200
Code Example #6
def run(event_type, data):
    """Everything starts here."""
    installation_id = data["installation"]["id"]
    installation_token = utils.get_installation_token(installation_id)
    if not installation_token:
        return

    g = github.Github(installation_token,
                      base_url="https://api.%s" % config.GITHUB_DOMAIN)

    if config.LOG_RATELIMIT:  # pragma: no cover
        rate = g.get_rate_limit().rate
        LOG.info("ratelimit: %s/%s, reset at %s",
                 rate.remaining, rate.limit, rate.reset,
                 repository=data["repository"]["name"])

    repo = g.get_repo(data["repository"]["owner"]["login"] + "/" +
                      data["repository"]["name"])

    event_pull = get_github_pull_from_event(repo, event_type, data)

    if not event_pull:  # pragma: no cover
        LOG.info("No pull request found in the event %s, "
                 "ignoring", event_type)
        return

    LOG.info("Pull request found in the event %s", event_type,
             repo=repo.full_name,
             pull_request=event_pull)

    if ("base" not in event_pull.raw_data or
            "repo" not in event_pull.raw_data["base"] or
            len(list(event_pull.raw_data["base"]["repo"].keys())) < 70):
        LOG.warning("the pull request payload looks suspicious",
                    event_type=event_type,
                    data=data,
                    pull_request=event_pull.raw_data,
                    repo=repo.full_name)

    if (event_type == "status" and
            event_pull.head.sha != data["sha"]):  # pragma: no cover
        LOG.info("No need to proceed queue (got status of an old commit)",
                 repo=repo.full_name,
                 pull_request=event_pull)
        return

    elif (event_type in ["status", "check_suite", "check_run"] and
          event_pull.merged):  # pragma: no cover
        LOG.info("No need to proceed queue (got status of a merged "
                 "pull request)",
                 repo=repo.full_name,
                 pull_request=event_pull)
        return
    elif (event_type in ["check_suite", "check_run"] and
          event_pull.head.sha != data[event_type]["head_sha"]
          ):  # pragma: no cover
        LOG.info("No need to proceed queue (got %s of an old "
                 "commit)", event_type,
                 repo=repo.full_name,
                 pull_request=event_pull)
        return

    if check_configuration_changes(event_pull):
        LOG.info("Configuration changed, ignoring",
                 repo=repo.full_name,
                 pull_request=event_pull)
        return

    # BRANCH CONFIGURATION CHECKING
    try:
        mergify_config = rules.get_mergify_config(repo)
    except rules.NoRules:  # pragma: no cover
        LOG.info("No need to proceed queue (.mergify.yml is missing)",
                 repo=repo.full_name,
                 pull_request=event_pull)
        return
    except rules.InvalidRules as e:  # pragma: no cover
        # Not configured, post status check with the error message
        if (event_type == "pull_request" and
                data["action"] in ["opened", "synchronize"]):
            check_api.set_check_run(
                event_pull, "Summary", "completed",
                "failure", output={
                    "title": "The Mergify configuration is invalid",
                    "summary": str(e)
                })
        return

    subscription = sub_utils.get_subscription(utils.get_redis_for_cache(),
                                              installation_id)

    if repo.private and not subscription["subscription_active"]:
        check_api.set_check_run(
            event_pull, "Summary",
            "completed", "failure", output={
                "title": "Mergify is disabled",
                "summary": subscription["subscription_reason"],
            })
        return

    create_metrics(event_type, data)

    v2.handle.s(
        installation_id,
        mergify_config["pull_request_rules"].as_dict(),
        event_type, data, event_pull.raw_data
    ).apply_async()
Code Example #7
def report(url):
    path = url.replace("https://github.com/", "")
    try:
        owner, repo, _, pull_number = path.split("/")
    except ValueError:
        pull_number = None
        try:
            owner, repo = path.split("/")
        except ValueError:
            print(f"Wrong URL: {url}")
            return

    slug = owner + "/" + repo

    try:
        client = github.get_client(owner, repo)
    except exceptions.MergifyNotInstalled:
        print("* Mergify is not installed there")
        return

    print("* INSTALLATION ID: %s" % client.auth.installation["id"])

    cached_sub, db_sub = utils.async_run(
        sub_utils.get_subscription(client.auth.owner_id),
        sub_utils._retrieve_subscription_from_db(client.auth.owner_id),
    )
    print("* SUBSCRIBED (cache/db): %s / %s" %
          (cached_sub["subscription_active"], db_sub["subscription_active"]))
    report_sub(client.auth.installation["id"], slug, cached_sub,
               "ENGINE-CACHE")
    report_sub(client.auth.installation["id"], slug, db_sub, "DASHBOARD")

    repo = client.item(f"/repos/{owner}/{repo}")
    print(f"* REPOSITORY IS {'PRIVATE' if repo['private'] else 'PUBLIC'}")

    utils.async_run(report_worker_status(client.auth.owner))

    if pull_number:
        pull_raw = client.item(f"pulls/{pull_number}")
        ctxt = context.Context(
            client,
            pull_raw,
            cached_sub,
            [{
                "event_type": "mergify-debugger",
                "data": {}
            }],
        )

        q = queue.Queue.from_context(ctxt)
        print("* QUEUES: %s" % ", ".join([f"#{p}" for p in q.get_pulls()]))

    else:
        for branch in client.items("branches"):
            q = queue.Queue(
                utils.get_redis_for_cache(),
                client.auth.installation["id"],
                client.auth.owner,
                client.auth.repo,
                branch["name"],
            )
            pulls = q.get_pulls()
            if not pulls:
                continue

            print(f"* QUEUES {branch['name']}:")

            for priority, grouped_pulls in itertools.groupby(
                    pulls, key=lambda v: q.get_config(v)["priority"]):
                try:
                    fancy_priority = helpers.PriorityAliases(priority).name
                except ValueError:
                    fancy_priority = priority
                formatted_pulls = ", ".join((f"#{p}" for p in grouped_pulls))
                print(f"** {formatted_pulls} (priority: {fancy_priority})")

    print("* CONFIGURATION:")
    try:
        filename, mergify_config_content = rules.get_mergify_config_content(
            client)
    except rules.NoRules:  # pragma: no cover
        print(".mergify.yml is missing")
        pull_request_rules = None
    else:
        print(f"Config filename: {filename}")
        print(mergify_config_content.decode())
        try:
            mergify_config = rules.UserConfigurationSchema(
                mergify_config_content)
        except rules.InvalidRules as e:  # pragma: no cover
            print("configuration is invalid %s" % str(e))
        else:
            pull_request_rules_raw = mergify_config[
                "pull_request_rules"].as_dict()
            pull_request_rules = rules.PullRequestRules.from_list(
                pull_request_rules_raw["rules"] + engine.MERGIFY_RULE["rules"])

    if pull_number:
        print("* PULL REQUEST:")
        pr_data = dict(ctxt.pull_request.items())
        pprint.pprint(pr_data, width=160)

        print("is_behind: %s" % ctxt.is_behind)

        print("mergeable_state: %s" % ctxt.pull["mergeable_state"])

        print("* MERGIFY LAST CHECKS:")
        for c in ctxt.pull_engine_check_runs:
            print("[%s]: %s | %s" %
                  (c["name"], c["conclusion"], c["output"].get("title")))
            print("> " + "\n> ".join(c["output"].get("summary").split("\n")))

        if pull_request_rules is not None:
            print("* MERGIFY LIVE MATCHES:")
            match = pull_request_rules.get_pull_request_rule(ctxt)
            summary_title, summary = actions_runner.gen_summary(ctxt, match)
            print("> %s" % summary_title)
            print(summary)

        return ctxt
    else:
        return client
Code Example #8
def collect_metrics():
    redis = utils.get_redis_for_cache()
    integration = github.GithubIntegration(config.INTEGRATION_ID,
                                           config.PRIVATE_KEY)

    installations_lock = threading.Lock()
    installations = collections.defaultdict(int)
    repositories_per_installation = collections.defaultdict(int)
    users_per_installation = collections.defaultdict(int)
    costs = collections.defaultdict(int)

    LOG.info("GitHub Polling started")

    redis.delete("badges.tmp")

    @tenacity.retry(
        retry=_exception_need_retry,
        wait=_wait_time_for_exception,
        stop=tenacity.stop_after_attempt(3),
    )
    def handle_installation(installation):
        try:
            _id = installation["id"]
            target_type = installation["target_type"]
            account = installation["account"]["login"]

            LOG.info("Get subscription", account=account)
            subs = sub_utils.get_subscription(redis, _id)
            subscribed = subs["subscription_active"]

            costs[(subscribed, target_type,
                   account)] = subs["subscription_cost"]

            with installations_lock:
                installations[(subscribed, target_type)] += 1

            token = integration.get_access_token(_id).token
            g = github.Github(token,
                              base_url="https://api.%s" % config.GITHUB_DOMAIN)

            if installation["target_type"] == "Organization":
                LOG.info("Get members",
                         install=installation["account"]["login"])
                org = g.get_organization(installation["account"]["login"])
                value = len(list(org.get_members()))

                users_per_installation[(subscribed, target_type,
                                        account)] = value
            else:
                users_per_installation[(subscribed, target_type, account)] = 1

            LOG.info("Get repos", account=account)

            repositories = sorted(g.get_installation(_id).get_repos(),
                                  key=operator.attrgetter("private"))
            for private, repos in itertools.groupby(
                    repositories, key=operator.attrgetter("private")):

                configured_repos = 0
                unconfigured_repos = 0
                for repo in repos:
                    try:
                        rules.get_mergify_config(repo)
                        configured_repos += 1
                        redis.sadd("badges.tmp", repo.full_name)
                    except github.GithubException as e:
                        if e.status >= 500:  # pragma: no cover
                            raise
                        unconfigured_repos += 1
                    except (rules.InvalidRules, rules.NoRules):
                        unconfigured_repos += 1

                repositories_per_installation[(subscribed, target_type,
                                               account, private,
                                               True)] = configured_repos
                repositories_per_installation[(subscribed, target_type,
                                               account, private,
                                               False)] = unconfigured_repos
        except github.GithubException as e:  # pragma: no cover
            # Ignore rate limit/abuse, authorization issues
            # and GitHub malfunction
            if e.status not in (403, 401) and e.status < 500:
                raise

    with futures.ThreadPoolExecutor() as executor:
        list(
            executor.map(handle_installation,
                         utils.get_installations(integration)))

    LOG.info("GitHub Polling finished")

    # NOTE(sileht): Prometheus can scrape data during our loop, so keep this
    # fast to ensure we always expose the right values.
    # Also, we can't know which labels we should delete from the Gauge,
    # which is why we delete all of them and re-add them.
    # prometheus_client doesn't provide an API for that, so we just
    # override _metrics
    set_gauges(INSTALLATIONS, installations)
    set_gauges(USERS_PER_INSTALLATION, users_per_installation)
    set_gauges(REPOSITORIES_PER_INSTALLATION, repositories_per_installation)
    set_gauges(COSTS, costs)

    if redis.exists("badges.tmp"):
        redis.rename("badges.tmp", "badges")

    LOG.info("Gauges and badges cache updated")
Code Example #9
def run(event_type, data):
    """Everything starts here."""
    installation_id = data["installation"]["id"]
    owner = data["repository"]["owner"]["login"]
    repo = data["repository"]["name"]

    client = github.get_client(owner, repo, installation_id)

    raw_pull = get_github_pull_from_event(client, event_type, data)

    if not raw_pull:  # pragma: no cover
        LOG.info(
            "No pull request found in the event %s, ignoring",
            event_type,
            gh_owner=owner,
            gh_repo=repo,
        )
        return

    pull = mergify_pull.MergifyPull(client, raw_pull)
    # Override pull_request with the updated one
    data["pull_request"] = pull.data

    pull.log.info("Pull request found in the event %s", event_type)

    if ("base" not in pull.data or "repo" not in pull.data["base"]
            or len(list(pull.data["base"]["repo"].keys())) < 70):
        pull.log.warning(
            "the pull request payload looks suspicious",
            event_type=event_type,
            data=data,
        )

    if (event_type == "status"
            and pull.data["head"]["sha"] != data["sha"]):  # pragma: no cover
        pull.log.info(
            "No need to proceed queue (got status of an old commit)", )
        return

    elif (event_type in ["status", "check_suite", "check_run"]
          and pull.data["merged"]):  # pragma: no cover
        pull.log.info(
            "No need to proceed queue (got status of a merged pull request)", )
        return
    elif (event_type in ["check_suite", "check_run"]
          and pull.data["head"]["sha"] !=
          data[event_type]["head_sha"]):  # pragma: no cover
        pull.log.info(
            "No need to proceed queue (got %s of an old "
            "commit)",
            event_type,
        )
        return

    if check_configuration_changes(pull.g_pull):
        pull.log.info("Configuration changed, ignoring", )
        return

    # BRANCH CONFIGURATION CHECKING
    try:
        mergify_config = rules.get_mergify_config(pull.g_pull.base.repo)
    except rules.NoRules:  # pragma: no cover
        pull.log.info("No need to proceed queue (.mergify.yml is missing)", )
        return
    except rules.InvalidRules as e:  # pragma: no cover
        # Not configured, post status check with the error message
        if event_type == "pull_request" and data["action"] in [
                "opened", "synchronize"
        ]:
            check_api.set_check_run(
                pull.g_pull,
                "Summary",
                "completed",
                "failure",
                output={
                    "title": "The Mergify configuration is invalid",
                    "summary": str(e),
                },
            )
        return

    # Add global and mandatory rules
    mergify_config["pull_request_rules"].rules.extend(
        rules.load_pull_request_rules_schema(MERGIFY_RULE["rules"]))

    subscription = sub_utils.get_subscription(utils.get_redis_for_cache(),
                                              installation_id)

    if pull.data["base"]["repo"][
            "private"] and not subscription["subscription_active"]:
        check_api.set_check_run(
            pull.g_pull,
            "Summary",
            "completed",
            "failure",
            output={
                "title": "Mergify is disabled",
                "summary": subscription["subscription_reason"],
            },
        )
        return

    # Check runs are attached to the head sha, so when the user adds commits or
    # force-pushes we can't directly get the previous Mergify Summary. So we copy
    # it here; anything that looks for it in the next Celery tasks will find it.
    if event_type == "pull_request" and data["action"] == "synchronize":
        copy_summary_from_previous_head_sha(pull.g_pull, data["before"])

    sources = [{"event_type": event_type, "data": data}]

    commands_runner.spawn_pending_commands_tasks(pull, sources)

    if event_type == "issue_comment":
        commands_runner.run_command(pull, sources, data["comment"]["body"],
                                    data["comment"]["user"])
    else:
        actions_runner.handle(mergify_config["pull_request_rules"], pull,
                              sources)
Code Example #10
File: web.py Project: QualiNext/mergify-engine
def check_status_msg(key):  # pragma: no cover
    msg = utils.get_redis_for_cache().hget("status", key)
    if msg:
        return flask.render_template("msg.html", msg=msg)
    else:
        flask.abort(404)
Code Example #11
File: base.py Project: eladb/mergify-engine
    def setUp(self):
        super(FunctionalTestBase, self).setUp()
        self.pr_counter = 0
        self.git_counter = 0
        self.cassette_library_dir = os.path.join(
            CASSETTE_LIBRARY_DIR_BASE, self.__class__.__name__, self._testMethodName
        )

        # Recording stuff
        if RECORD:
            if os.path.exists(self.cassette_library_dir):
                shutil.rmtree(self.cassette_library_dir)
            os.makedirs(self.cassette_library_dir)

        self.recorder = vcr.VCR(
            cassette_library_dir=self.cassette_library_dir,
            record_mode="all" if RECORD else "none",
            match_on=["method", "uri"],
            filter_headers=[
                ("Authorization", "<TOKEN>"),
                ("X-Hub-Signature", "<SIGNATURE>"),
                ("User-Agent", None),
                ("Accept-Encoding", None),
                ("Connection", None),
            ],
            before_record_response=self.response_filter,
            custom_patches=(
                (pygithub.MainClass, "HTTPSConnection", vcr.stubs.VCRHTTPSConnection),
            ),
        )

        if RECORD:
            github.CachedToken.STORAGE = {}
        else:
            # Never expire token during replay
            mock.patch.object(
                github_app.GithubBearerAuth, "get_or_create_jwt", return_value="<TOKEN>"
            ).start()
            mock.patch.object(
                github.GithubInstallationAuth,
                "get_access_token",
                return_value="<TOKEN>",
            ).start()
            github.CachedToken.STORAGE = {}
            github.CachedToken(
                installation_id=config.INSTALLATION_ID,
                token="<TOKEN>",
                expiration=datetime.datetime.utcnow() + datetime.timedelta(minutes=10),
            )

        github_app_client = github_app._Client()

        mock.patch.object(github_app, "get_client", lambda: github_app_client).start()
        mock.patch.object(branch_updater.utils, "Gitter", self.get_gitter).start()
        mock.patch.object(duplicate_pull.utils, "Gitter", self.get_gitter).start()

        if not RECORD:
            # NOTE(sileht): Don't wait exponentially during replay
            mock.patch.object(
                context.Context._ensure_complete.retry, "wait", None
            ).start()

        # Web authentication always passes
        mock.patch("hmac.compare_digest", return_value=True).start()

        branch_prefix_path = os.path.join(self.cassette_library_dir, "branch_prefix")

        if RECORD:
            self.BRANCH_PREFIX = datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S")
            with open(branch_prefix_path, "w") as f:
                f.write(self.BRANCH_PREFIX)
        else:
            with open(branch_prefix_path, "r") as f:
                self.BRANCH_PREFIX = f.read()

        self.master_branch_name = self.get_full_branch_name("master")

        self.git = self.get_gitter(LOG)
        self.addCleanup(self.git.cleanup)

        self.app = testclient.TestClient(web.app)

        # NOTE(sileht): Prepare a fresh redis
        self.redis = utils.get_redis_for_cache()
        self.redis.flushall()
        self.subscription = {
            "tokens": {"mergifyio-testing": config.MAIN_TOKEN},
            "subscription_active": False,
            "subscription_reason": "You're not nice",
        }
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            sub_utils.save_subscription_to_cache(
                config.INSTALLATION_ID, self.subscription,
            )
        )

        # Let's start recording
        cassette = self.recorder.use_cassette("http.json")
        cassette.__enter__()
        self.addCleanup(cassette.__exit__)

        integration = pygithub.GithubIntegration(
            config.INTEGRATION_ID, config.PRIVATE_KEY
        )
        self.installation_token = integration.get_access_token(
            config.INSTALLATION_ID
        ).token

        base_url = config.GITHUB_API_URL
        self.g_integration = pygithub.Github(self.installation_token, base_url=base_url)
        self.g_admin = pygithub.Github(config.MAIN_TOKEN, base_url=base_url)
        self.g_fork = pygithub.Github(config.FORK_TOKEN, base_url=base_url)

        self.o_admin = self.g_admin.get_organization(config.TESTING_ORGANIZATION)
        self.o_integration = self.g_integration.get_organization(
            config.TESTING_ORGANIZATION
        )
        self.u_fork = self.g_fork.get_user()
        assert self.o_admin.login == "mergifyio-testing"
        assert self.o_integration.login == "mergifyio-testing"
        assert self.u_fork.login == "mergify-test2"

        # NOTE(sileht): The repository has been manually created in the
        # mergifyio-testing organization and then forked into the mergify-test2
        # user account
        self.name = "functional-testing-repo"

        self.r_o_admin = self.o_admin.get_repo(self.name)
        self.r_o_integration = self.o_integration.get_repo(self.name)
        self.r_fork = self.u_fork.get_repo(self.name)

        self.url_main = f"{config.GITHUB_URL}/{self.r_o_integration.full_name}"
        self.url_fork = (
            f"{config.GITHUB_URL}/{self.u_fork.login}/{self.r_o_integration.name}"
        )

        installation = {"id": config.INSTALLATION_ID}
        self.cli_integration = github.get_client(
            config.TESTING_ORGANIZATION, self.name, installation
        )

        real_get_subscription = sub_utils.get_subscription

        def fake_retrieve_subscription_from_db(install_id):
            if int(install_id) == config.INSTALLATION_ID:
                return self.subscription
            else:
                return {
                    "tokens": {},
                    "subscription_active": False,
                    "subscription_reason": "We're just testing",
                }

        def fake_subscription(r, install_id):
            if int(install_id) == config.INSTALLATION_ID:
                return real_get_subscription(r, install_id)
            else:
                return {
                    "tokens": {},
                    "subscription_active": False,
                    "subscription_reason": "We're just testing",
                }

        mock.patch(
            "mergify_engine.branch_updater.sub_utils.get_subscription",
            side_effect=fake_subscription,
        ).start()

        mock.patch(
            "mergify_engine.branch_updater.sub_utils._retrieve_subscription_from_db",
            side_effect=fake_retrieve_subscription_from_db,
        ).start()

        mock.patch(
            "mergify_engine.sub_utils.get_subscription", side_effect=fake_subscription,
        ).start()

        mock.patch(
            "github.MainClass.Installation.Installation.get_repos",
            return_value=[self.r_o_integration],
        ).start()

        self._event_reader = EventReader(self.app)
        self._event_reader.drain()
        self._worker_thread = WorkerThread()
        self._worker_thread.start()
Code Example #12
    def setUp(self):
        super(FunctionalTestBase, self).setUp()
        self.pr_counter = 0
        self.git_counter = 0
        self.cassette_library_dir = os.path.join(CASSETTE_LIBRARY_DIR_BASE,
                                                 self._testMethodName)

        # Recording stuff
        if RECORD:
            if os.path.exists(self.cassette_library_dir):
                shutil.rmtree(self.cassette_library_dir)
            os.makedirs(self.cassette_library_dir)

        self.recorder = vcr.VCR(
            cassette_library_dir=self.cassette_library_dir,
            record_mode="all" if RECORD else "none",
            match_on=['method', 'uri'],
            filter_headers=[
                ('Authorization', '<TOKEN>'),
                ('X-Hub-Signature', '<SIGNATURE>'),
                ('User-Agent', None),
                ('Accept-Encoding', None),
                ('Connection', None),
            ],
            before_record_response=self.response_filter,
            custom_patches=((github.MainClass, 'HTTPSConnection',
                             vcr.stubs.VCRHTTPSConnection), ))

        self.useFixture(
            fixtures.MockPatchObject(branch_updater.utils, 'Gitter',
                                     lambda: self.get_gitter()))

        self.useFixture(
            fixtures.MockPatchObject(backports.utils, 'Gitter',
                                     lambda: self.get_gitter()))

        # Web authentication always passes
        self.useFixture(
            fixtures.MockPatch('hmac.compare_digest', return_value=True))

        reponame_path = os.path.join(self.cassette_library_dir, "reponame")

        if RECORD:
            REPO_UUID = str(uuid.uuid4())
            with open(reponame_path, "w") as f:
                f.write(REPO_UUID)
        else:
            with open(reponame_path, "r") as f:
                REPO_UUID = f.read()

        self.name = "repo-%s-%s" % (REPO_UUID, self._testMethodName)

        utils.setup_logging()
        config.log()

        self.git = self.get_gitter()
        self.addCleanup(self.git.cleanup)

        web.app.testing = True
        self.app = web.app.test_client()

        # NOTE(sileht): Prepare a fresh redis
        self.redis = utils.get_redis_for_cache()
        self.redis.flushall()
        self.subscription = {
            "tokens": {
                "mergifyio-testing": config.MAIN_TOKEN
            },
            "subscription_active": False,
            "subscription_reason": "You're not nice"
        }
        self.redis.set("subscription-cache-%s" % config.INSTALLATION_ID,
                       sub_utils._encrypt(self.subscription))

        # Let's start recording
        cassette = self.recorder.use_cassette("http.json")
        cassette.__enter__()
        self.addCleanup(cassette.__exit__)

        self.session = requests.Session()
        self.session.trust_env = False

        # Cleanup the remote testing redis
        r = self.session.delete(
            "https://gh.mergify.io/events-testing",
            data=FAKE_DATA,
            headers={"X-Hub-Signature": "sha1=" + FAKE_HMAC})
        r.raise_for_status()

        integration = github.GithubIntegration(config.INTEGRATION_ID,
                                               config.PRIVATE_KEY)
        self.installation_token = integration.get_access_token(
            config.INSTALLATION_ID).token

        self.g_integration = github.Github(self.installation_token,
                                           base_url="https://api.%s" %
                                           config.GITHUB_DOMAIN)
        self.g_admin = github.Github(config.MAIN_TOKEN,
                                     base_url="https://api.%s" %
                                     config.GITHUB_DOMAIN)
        self.g_fork = github.Github(config.FORK_TOKEN,
                                    base_url="https://api.%s" %
                                    config.GITHUB_DOMAIN)

        self.o_admin = self.g_admin.get_organization(
            config.TESTING_ORGANIZATION)
        self.o_integration = self.g_integration.get_organization(
            config.TESTING_ORGANIZATION)
        self.u_fork = self.g_fork.get_user()
        assert self.o_admin.login == "mergifyio-testing"
        assert self.o_integration.login == "mergifyio-testing"
        assert self.u_fork.login == "mergify-test2"

        self.r_o_admin = self.o_admin.create_repo(self.name)
        self.r_o_integration = self.o_integration.get_repo(self.name)
        self.url_main = "https://%s/%s" % (config.GITHUB_DOMAIN,
                                           self.r_o_integration.full_name)
        self.url_fork = "https://%s/%s/%s" % (
            config.GITHUB_DOMAIN, self.u_fork.login, self.r_o_integration.name)

        # Limit installations/subscription API to the test account
        install = {
            "id": config.INSTALLATION_ID,
            "target_type": "Org",
            "account": {
                "login": "******"
            }
        }

        self.useFixture(
            fixtures.MockPatch('mergify_engine.utils.get_installations',
                               lambda integration: [install]))

        real_get_subscription = sub_utils.get_subscription

        def fake_subscription(r, install_id):
            if int(install_id) == config.INSTALLATION_ID:
                return real_get_subscription(r, install_id)
            else:
                return {
                    "tokens": {},
                    "subscription_active": False,
                    "subscription_reason": "We're just testing"
                }

        self.useFixture(
            fixtures.MockPatch(
                "mergify_engine.branch_updater.sub_utils.get_subscription",
                side_effect=fake_subscription))

        self.useFixture(
            fixtures.MockPatch("mergify_engine.sub_utils.get_subscription",
                               side_effect=fake_subscription))

        self.useFixture(
            fixtures.MockPatch(
                "github.MainClass.Installation.Installation.get_repos",
                return_value=[self.r_o_integration]))
Code Example #13
File: queue.py Project: Honda619/mergify-engine
def remove_pull(ctxt):
    redis = utils.get_redis_for_cache()
    queue = _get_queue_cache_key(ctxt)
    redis.zrem(queue, ctxt.pull["number"])
    redis.delete(_get_update_method_cache_key(ctxt))
    ctxt.log.debug("pull request removed from merge queue", queue=queue)
Code Example #14
File: queue.py Project: Honda619/mergify-engine
def _delete_queue(queue):
    redis = utils.get_redis_for_cache()
    redis.delete(queue)
Code Example #15
File: queue.py Project: Madhu-1/mergify-engine
def _get_pulls(queue):
    redis = utils.get_redis_for_cache()
    return redis.zrange(queue, 0, -1)
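
The queue helpers above only read from or remove entries of the Redis sorted set. For context, a hypothetical counterpart that enqueues a pull request into the same structure (the key layout and scoring are assumptions, not the project's actual code) might look like:

import time


def _add_pull(queue, pull_number):
    # Hypothetical sketch: the score orders the sorted set, so using the
    # insertion timestamp gives a FIFO queue; zrange(queue, 0, -1) above then
    # returns pull numbers oldest-first.
    # utils.get_redis_for_cache() is the same helper used in the snippets above.
    redis = utils.get_redis_for_cache()
    redis.zadd(queue, {pull_number: time.time()})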
Code Example #16
def run(event_type, data):
    """Everything starts here."""
    installation_id = data["installation"]["id"]
    installation_token = utils.get_installation_token(installation_id)
    if not installation_token:
        return

    g = github.Github(installation_token,
                      base_url="https://api.%s" % config.GITHUB_DOMAIN)

    if config.LOG_RATELIMIT:  # pragma: no cover
        rate = g.get_rate_limit().rate
        LOG.info(
            "ratelimit: %s/%s, reset at %s",
            rate.remaining,
            rate.limit,
            rate.reset,
            repository=data["repository"]["name"],
        )

    try:
        repo = g.get_repo(data["repository"]["owner"]["login"] + "/" +
                          data["repository"]["name"])
    except github.UnknownObjectException:  # pragma: no cover
        LOG.info("Repository not found in the event %s, ignoring", event_type)
        return

    event_pull = get_github_pull_from_event(repo, event_type, data)

    if not event_pull:  # pragma: no cover
        LOG.info("No pull request found in the event %s, "
                 "ignoring", event_type)
        return

    # Override pull_request with the updated one
    data["pull_request"] = event_pull.raw_data

    LOG.info(
        "Pull request found in the event %s",
        event_type,
        repo=repo.full_name,
        pull_request=event_pull,
    )

    if ("base" not in event_pull.raw_data
            or "repo" not in event_pull.raw_data["base"]
            or len(list(event_pull.raw_data["base"]["repo"].keys())) < 70):
        LOG.warning(
            "the pull request payload looks suspicious",
            event_type=event_type,
            data=data,
            pull_request=event_pull.raw_data,
            repo=repo.full_name,
        )

    if (event_type == "status"
            and event_pull.head.sha != data["sha"]):  # pragma: no cover
        LOG.info(
            "No need to proceed queue (got status of an old commit)",
            repo=repo.full_name,
            pull_request=event_pull,
        )
        return

    elif (event_type in ["status", "check_suite", "check_run"]
          and event_pull.merged):  # pragma: no cover
        LOG.info(
            "No need to proceed queue (got status of a merged "
            "pull request)",
            repo=repo.full_name,
            pull_request=event_pull,
        )
        return
    elif (event_type in ["check_suite", "check_run"] and event_pull.head.sha !=
          data[event_type]["head_sha"]):  # pragma: no cover
        LOG.info(
            "No need to proceed queue (got %s of an old "
            "commit)",
            event_type,
            repo=repo.full_name,
            pull_request=event_pull,
        )
        return

    if check_configuration_changes(event_pull):
        LOG.info(
            "Configuration changed, ignoring",
            repo=repo.full_name,
            pull_request=event_pull,
        )
        return

    # BRANCH CONFIGURATION CHECKING
    try:
        mergify_config = rules.get_mergify_config(repo)
    except rules.NoRules:  # pragma: no cover
        LOG.info(
            "No need to proceed queue (.mergify.yml is missing)",
            repo=repo.full_name,
            pull_request=event_pull,
        )
        return
    except rules.InvalidRules as e:  # pragma: no cover
        # Not configured, post status check with the error message
        if event_type == "pull_request" and data["action"] in [
                "opened", "synchronize"
        ]:
            check_api.set_check_run(
                event_pull,
                "Summary",
                "completed",
                "failure",
                output={
                    "title": "The Mergify configuration is invalid",
                    "summary": str(e),
                },
            )
        return

    subscription = sub_utils.get_subscription(utils.get_redis_for_cache(),
                                              installation_id)

    if repo.private and not subscription["subscription_active"]:
        check_api.set_check_run(
            event_pull,
            "Summary",
            "completed",
            "failure",
            output={
                "title": "Mergify is disabled",
                "summary": subscription["subscription_reason"],
            },
        )
        return

    # Check runs are attached to the head sha, so when the user adds commits or
    # force-pushes we can't directly get the previous Mergify Summary. So we copy
    # it here; anything that looks for it in the next Celery tasks will find it.
    if event_type == "pull_request" and data["action"] == "synchronize":
        copy_summary_from_previous_head_sha(event_pull, data["before"])

    commands_runner.spawn_pending_commands_tasks(installation_id, event_type,
                                                 data, event_pull)

    if event_type == "issue_comment":
        commands_runner.run_command.s(installation_id, event_type, data,
                                      data["comment"]["body"]).apply_async()
    else:
        actions_runner.handle.s(
            installation_id,
            mergify_config["pull_request_rules"].as_dict(),
            event_type,
            data,
        ).apply_async()
Code Example #17
def report(url):
    redis = utils.get_redis_for_cache()
    path = url.replace("https://github.com/", "")
    owner, repo, _, pull_number = path.split("/")

    integration = github.GithubIntegration(config.INTEGRATION_ID,
                                           config.PRIVATE_KEY)
    install_id = utils.get_installation_id(integration, owner, repo=repo)

    print("* INSTALLATION ID: %s" % install_id)

    cached_sub = sub_utils.get_subscription(redis, install_id)
    db_sub = sub_utils._retrieve_subscription_from_db(install_id)
    print("* SUBSCRIBED (cache/db): %s / %s" %
          (cached_sub["subscription_active"], db_sub["subscription_active"]))
    print("* SUB DETAIL: %s" % db_sub["subscription_reason"])

    print("* NUMBER OF CACHED TOKENS: %d" % len(cached_sub["tokens"]))

    try:
        for login, token in cached_sub["tokens"].items():
            try:
                repos = get_repositories_setuped(token, install_id)
            except github.BadCredentialsException:
                print("** token for %s invalid" % login)
            except github.GithubException as e:
                if e.status != 401:
                    raise
                print("** token for %s invalid" % login)
            else:
                if any((r["full_name"] == owner + "/" + repo) for r in repos):
                    print("* MERGIFY INSTALLED AND ENABLED ON THIS REPOSITORY")
                else:
                    print("* MERGIFY INSTALLED BUT DISABLED "
                          "ON THIS REPOSITORY")
                break
        else:
            print("* MERGIFY DOESN'T HAVE ANY VALID OAUTH TOKENS")
    except github.UnknownObjectException:
        print("* MERGIFY SEEMS NOT INSTALLED")

    installation_token = integration.get_access_token(install_id).token

    g = github.Github(installation_token,
                      base_url="https://api.%s" % config.GITHUB_DOMAIN)
    r = g.get_repo(owner + "/" + repo)
    print("* REPOSITORY IS %s" % "PRIVATE" if r.private else "PUBLIC")

    print("* CONFIGURATION:")
    try:
        mergify_config_content = rules.get_mergify_config_content(r)
    except rules.NoRules:  # pragma: no cover
        # The rest of the report needs a configuration, so stop here.
        print(".mergify.yml is missing")
        return g, None

    print(mergify_config_content.decode())

    try:
        mergify_config = rules.UserConfigurationSchema(mergify_config_content)
    except rules.InvalidRules as e:  # pragma: no cover
        print("configuration is invalid %s" % str(e))
    else:
        pull_request_rules_raw = mergify_config["pull_request_rules"].as_dict()
        pull_request_rules_raw["rules"].extend(
            actions_runner.MERGIFY_RULE["rules"])
        pull_request_rules = rules.PullRequestRules(**pull_request_rules_raw)

    try:
        p = r.get_pull(int(pull_number))
    except github.UnknownObjectException:
        print("Wrong pull request number")
        return g, None

    mp = mergify_pull.MergifyPull(g, p, install_id)
    print("* PULL REQUEST:")
    pprint.pprint(mp.to_dict(), width=160)
    try:
        print("is_behind: %s" % mp.is_behind())
    except github.GithubException as e:
        print("Unable to know if pull request branch is behind: %s" % e)

    print("mergeable_state: %s" % mp.g_pull.mergeable_state)

    print("* MERGIFY LAST CHECKS:")
    checks = list(check_api.get_checks(p))
    for c in checks:
        if c._rawData["app"]["id"] == config.INTEGRATION_ID:
            print("[%s]: %s | %s" %
                  (c.name, c.conclusion, c.output.get("title")))
            print("> " + "\n> ".join(c.output.get("summary").split("\n")))

    if pull_request_rules is not None:
        print("* MERGIFY LIVE MATCHES:")
        match = pull_request_rules.get_pull_request_rule(mp)
        summary_title, summary = actions_runner.gen_summary(
            "refresh", {}, mp, match)
        print("> %s" % summary_title)
        print(summary)

    return g, p
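
The report() helper above prints its findings and returns the GitHub client and the pull request object so a caller can keep inspecting them. Below is a minimal sketch of wiring it into a command-line entry point; the argparse wrapper and module layout are assumptions, not part of the original helper.

# Hypothetical CLI wrapper around report(); the argparse wiring and module
# layout are assumptions.
import argparse


def main():
    parser = argparse.ArgumentParser(description="Debug a Mergify pull request")
    parser.add_argument("url", help="e.g. https://github.com/owner/repo/pull/123")
    args = parser.parse_args()

    # report() prints its findings and returns (github_client, pull_or_None).
    g, p = report(args.url)
    if p is None:
        raise SystemExit(1)


if __name__ == "__main__":
    main()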
Code example #18
    def setUp(self):
        super(FunctionalTestBase, self).setUp()
        self.pr_counter = 0
        self.cassette_library_dir = os.path.join(CASSETTE_LIBRARY_DIR_BASE,
                                                 self._testMethodName)

        # Recording setup
        if RECORD_MODE != "none":
            if os.path.exists(self.cassette_library_dir):
                shutil.rmtree(self.cassette_library_dir)
            os.makedirs(self.cassette_library_dir)

        self.recorder = vcr.VCR(
            cassette_library_dir=self.cassette_library_dir,
            record_mode=RECORD_MODE,
            match_on=['method', 'uri'],
            filter_headers=[
                ('Authorization', '<TOKEN>'),
                ('X-Hub-Signature', '<SIGNATURE>'),
                ('User-Agent', None),
                ('Accept-Encoding', None),
                ('Connection', None),
            ],
            before_record_response=self.response_filter,
            custom_patches=((github.MainClass, 'HTTPSConnection',
                             vcr.stubs.VCRHTTPSConnection), ))

        self.useFixture(
            fixtures.MockPatchObject(
                branch_updater.utils, 'Gitter',
                lambda: GitterRecorder(self.cassette_library_dir)))

        self.useFixture(
            fixtures.MockPatchObject(
                backports.utils, 'Gitter',
                lambda: GitterRecorder(self.cassette_library_dir)))

        # Web authentication always passes
        self.useFixture(
            fixtures.MockPatch('hmac.compare_digest', return_value=True))

        reponame_path = os.path.join(self.cassette_library_dir, "reponame")

        gen_new_uuid = (RECORD_MODE == 'all'
                        or (RECORD_MODE == 'once'
                            and not os.path.exists(reponame_path)))

        if gen_new_uuid:
            REPO_UUID = str(uuid.uuid4())
            with open(reponame_path, "w") as f:
                f.write(REPO_UUID)
        else:
            with open(reponame_path, "r") as f:
                REPO_UUID = f.read()

        self.name = "repo-%s-%s" % (REPO_UUID, self._testMethodName)

        utils.setup_logging()
        config.log()

        self.git = GitterRecorder(self.cassette_library_dir, "tests")
        self.addCleanup(self.git.cleanup)

        web.app.testing = True
        self.app = web.app.test_client()

        # NOTE(sileht): Prepare a fresh redis
        self.redis = utils.get_redis_for_cache()
        self.redis.flushall()
        self.subscription = {"token": config.MAIN_TOKEN, "subscribed": False}
        self.redis.set("subscription-cache-%s" % config.INSTALLATION_ID,
                       json.dumps(self.subscription))

        # Let's start recording
        cassette = self.recorder.use_cassette("http.json")
        cassette.__enter__()
        self.addCleanup(cassette.__exit__)

        self.session = requests.Session()
        self.session.trust_env = False

        # Cleanup the remote testing redis
        r = self.session.delete(
            "https://gh.mergify.io/events-testing",
            data=FAKE_DATA,
            headers={"X-Hub-Signature": "sha1=" + FAKE_HMAC})
        r.raise_for_status()

        self.g_main = github.Github(config.MAIN_TOKEN)
        self.g_fork = github.Github(config.FORK_TOKEN)

        self.u_main = self.g_main.get_user()
        self.u_fork = self.g_fork.get_user()
        assert self.u_main.login == "mergify-test1"
        assert self.u_fork.login == "mergify-test2"

        self.r_main = self.u_main.create_repo(self.name)
        self.url_main = "https://github.com/%s" % self.r_main.full_name
        self.url_fork = "https://github.com/%s/%s" % (self.u_fork.login,
                                                      self.r_main.name)

        # Limit installations/subscription API to the test account
        install = {
            "id": config.INSTALLATION_ID,
            "target_type": "User",
            "account": {
                "login": "******"
            }
        }

        self.useFixture(
            fixtures.MockPatch('mergify_engine.utils.get_installations',
                               lambda integration: [install]))

        real_get_subscription = utils.get_subscription

        def fake_subscription(r, install_id):
            if install_id == config.INSTALLATION_ID:
                return real_get_subscription(r, install_id)
            else:
                return {"token": None, "subscribed": False}

        self.useFixture(
            fixtures.MockPatch("mergify_engine.web.utils.get_subscription",
                               side_effect=fake_subscription))

        self.useFixture(
            fixtures.MockPatch(
                "github.MainClass.Installation.Installation.get_repos",
                return_value=[self.r_main]))
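
This fixture relies on module-level constants such as RECORD_MODE and CASSETTE_LIBRARY_DIR_BASE that are not shown here. A minimal sketch of deriving them from the environment follows; the environment variable name and the defaults are assumptions.

# Hypothetical module-level test configuration; the environment variable
# name and the defaults are assumptions.
import os

# "none" replays existing cassettes; "once"/"all" re-record HTTP traffic.
RECORD_MODE = os.getenv("MERGIFYENGINE_RECORD_MODE", "none")
CASSETTE_LIBRARY_DIR_BASE = os.path.join(
    os.path.dirname(__file__), "fixtures", "cassettes")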
Code example #19
    def setUp(self):
        super(TestEngineScenario, self).setUp()

        self.cassette_library_dir = os.path.join(CASSETTE_LIBRARY_DIR_BASE,
                                                 self._testMethodName)

        if RECORD_MODE != "none":
            if os.path.exists(self.cassette_library_dir):
                shutil.rmtree(self.cassette_library_dir)
            os.makedirs(self.cassette_library_dir)

        self.recorder = vcr.VCR(
            cassette_library_dir=self.cassette_library_dir,
            record_mode=RECORD_MODE,
            match_on=['method', 'uri'],
            filter_headers=[
                ('Authorization', '<TOKEN>'),
                ('X-Hub-Signature', '<SIGNATURE>'),
                ('User-Agent', None),
                ('Accept-Encoding', None),
                ('Connection', None),
            ],
            before_record_response=self.response_filter,
            custom_patches=((github.MainClass, 'HTTPSConnection',
                             vcr.stubs.VCRHTTPSConnection), ))

        self.useFixture(
            fixtures.MockPatchObject(
                branch_updater.utils, 'Gitter',
                lambda: GitterRecorder(self.cassette_library_dir)))

        self.useFixture(
            fixtures.MockPatchObject(
                backports.utils, 'Gitter',
                lambda: GitterRecorder(self.cassette_library_dir)))

        # Web authentication always passes
        self.useFixture(
            fixtures.MockPatch('hmac.compare_digest', return_value=True))

        reponame_path = os.path.join(self.cassette_library_dir, "reponame")

        gen_new_uuid = (RECORD_MODE == 'all'
                        or (RECORD_MODE == 'once'
                            and not os.path.exists(reponame_path)))

        if gen_new_uuid:
            REPO_UUID = str(uuid.uuid4())
            with open(reponame_path, "w") as f:
                f.write(REPO_UUID)
        else:
            with open(reponame_path, "r") as f:
                REPO_UUID = f.read()

        self.name = "repo-%s-%s" % (REPO_UUID, self._testMethodName)

        self.pr_counter = 0
        self.remaining_events = []

        utils.setup_logging()
        config.log()

        self.git = GitterRecorder(self.cassette_library_dir, "tests")
        self.addCleanup(self.git.cleanup)

        web.app.testing = True
        self.app = web.app.test_client()

        # NOTE(sileht): Prepare a fresh redis
        self.redis = utils.get_redis_for_cache()
        self.redis.flushall()
        subscription = {"token": config.MAIN_TOKEN, "subscribed": False}
        self.redis.set("subscription-cache-%s" % config.INSTALLATION_ID,
                       json.dumps(subscription))

        # Let's start recording
        cassette = self.recorder.use_cassette("http.json")
        cassette.__enter__()
        self.addCleanup(cassette.__exit__)

        self.session = requests.Session()
        self.session.trust_env = False

        # Cleanup the remote testing redis
        r = self.session.delete(
            "https://gh.mergify.io/events-testing",
            data=FAKE_DATA,
            headers={"X-Hub-Signature": "sha1=" + FAKE_HMAC})
        r.raise_for_status()

        self.g_main = github.Github(config.MAIN_TOKEN)
        self.g_fork = github.Github(config.FORK_TOKEN)

        self.u_main = self.g_main.get_user()
        self.u_fork = self.g_fork.get_user()
        assert self.u_main.login == "mergify-test1"
        assert self.u_fork.login == "mergify-test2"

        self.r_main = self.u_main.create_repo(self.name)
        self.url_main = "https://github.com/%s" % self.r_main.full_name
        self.url_fork = "https://github.com/%s/%s" % (self.u_fork.login,
                                                      self.r_main.name)

        integration = github.GithubIntegration(config.INTEGRATION_ID,
                                               config.PRIVATE_KEY)

        access_token = integration.get_access_token(
            config.INSTALLATION_ID).token
        g = github.Github(access_token)
        user = g.get_user("mergify-test1")
        repo = user.get_repo(self.name)

        # Used to access the cache with its helper
        self.engine = engine.MergifyEngine(g, config.INSTALLATION_ID,
                                           access_token, subscription, user,
                                           repo)
        self.processor = self.engine.get_processor()

        self.rq_worker = rq.SimpleWorker([
            "incoming-events", "localhost-000-high", "localhost-001-high",
            "localhost-000-low", "localhost-001-low"
        ],
                                         connection=utils.get_redis_for_rq())

        if self._testMethodName != "test_creation_pull_of_initial_config":
            self.git("init")
            self.git.configure()
            self.git.add_cred(config.MAIN_TOKEN, "", self.r_main.full_name)
            self.git.add_cred(config.FORK_TOKEN, "",
                              "%s/%s" % (self.u_fork.login, self.r_main.name))
            self.git("config", "user.name", "%s-tester" % config.CONTEXT)
            self.git("remote", "add", "main", self.url_main)
            self.git("remote", "add", "fork", self.url_fork)

            with open(self.git.tmp + "/.mergify.yml", "w") as f:
                f.write(CONFIG)
            self.git("add", ".mergify.yml")
            self.git("commit", "--no-edit", "-m", "initial commit")
            self.git("push", "--quiet", "main", "master")

            self.git("checkout", "-b", "stable", "--quiet")
            self.git("push", "--quiet", "main", "stable")

            self.git("checkout", "-b", "nostrict", "--quiet")
            self.git("push", "--quiet", "main", "nostrict")

            self.git("checkout", "-b", "disabled", "--quiet")
            self.git("push", "--quiet", "main", "disabled")

            self.git("checkout", "-b", "enabling_label", "--quiet")
            self.git("push", "--quiet", "main", "enabling_label")

            self.r_fork = self.u_fork.create_fork(self.r_main)
            self.git("fetch", "--quiet", "fork")

            # NOTE(sileht): GitHub looks buggy here: for the new repo we
            # receive the expected events:
            # * installation_repositories
            # * integration_installation_repositories
            # but we receive them 6 times with the same sha1...
            self.push_events([(None, {"action": "added"})] * 12)
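
GitterRecorder, used throughout these fixtures, is not included in the examples. The sketch below only illustrates the record/replay idea it presumably implements around utils.Gitter: store each git command and its output as JSON when recording, replay the stored output otherwise. The class name, the no-argument utils.Gitter() constructor and the cassette format are assumptions, not the project's actual implementation.

# Illustrative record/replay wrapper around utils.Gitter (assumed behaviour).
import json
import os


class SimpleGitterRecorder(object):
    def __init__(self, cassette_library_dir, suffix="tests", record=False):
        self.cassette = os.path.join(cassette_library_dir,
                                     "git-%s.json" % suffix)
        self.record = record
        self.git = utils.Gitter() if record else None
        if record:
            self.calls = []
        else:
            with open(self.cassette) as f:
                self.calls = json.load(f)

    def __call__(self, *args):
        if self.record:
            out = self.git(*args)
            self.calls.append({"args": args, "out": out.decode("utf8")})
            return out
        # Replay mode: return the recorded output for the next call.
        return self.calls.pop(0)["out"].encode("utf8")

    def cleanup(self):
        if self.record:
            with open(self.cassette, "w") as f:
                json.dump(self.calls, f)
            self.git.cleanup()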
Code example #20
    def setUp(self):
        super(FunctionalTestBase, self).setUp()
        self.pr_counter = 0
        self.git_counter = 0
        self.cassette_library_dir = os.path.join(CASSETTE_LIBRARY_DIR_BASE,
                                                 self._testMethodName)

        # Recording setup
        if RECORD:
            if os.path.exists(self.cassette_library_dir):
                shutil.rmtree(self.cassette_library_dir)
            os.makedirs(self.cassette_library_dir)

        self.recorder = vcr.VCR(
            cassette_library_dir=self.cassette_library_dir,
            record_mode="all" if RECORD else "none",
            match_on=["method", "uri"],
            filter_headers=[
                ("Authorization", "<TOKEN>"),
                ("X-Hub-Signature", "<SIGNATURE>"),
                ("User-Agent", None),
                ("Accept-Encoding", None),
                ("Connection", None),
            ],
            before_record_response=self.response_filter,
            custom_patches=((github.MainClass, "HTTPSConnection",
                             vcr.stubs.VCRHTTPSConnection), ),
        )

        self.useFixture(
            fixtures.MockPatchObject(branch_updater.utils, "Gitter",
                                     lambda: self.get_gitter()))

        self.useFixture(
            fixtures.MockPatchObject(duplicate_pull.utils, "Gitter",
                                     lambda: self.get_gitter()))

        # Web authentication always passes
        self.useFixture(
            fixtures.MockPatch("hmac.compare_digest", return_value=True))

        reponame_path = os.path.join(self.cassette_library_dir, "reponame")

        if RECORD:
            REPO_UUID = str(uuid.uuid4())
            with open(reponame_path, "w") as f:
                f.write(REPO_UUID)
        else:
            with open(reponame_path, "r") as f:
                REPO_UUID = f.read()

        self.name = "repo-%s-%s" % (REPO_UUID, self._testMethodName)

        self.git = self.get_gitter()
        self.addCleanup(self.git.cleanup)

        web.app.testing = True
        self.app = web.app.test_client()

        # NOTE(sileht): Prepare a fresh redis
        self.redis = utils.get_redis_for_cache()
        self.redis.flushall()
        self.subscription = {
            "tokens": {
                "mergifyio-testing": config.MAIN_TOKEN
            },
            "subscription_active": False,
            "subscription_cost": 100,
            "subscription_reason": "You're not nice",
        }
        sub_utils.save_subscription_to_cache(
            self.redis,
            config.INSTALLATION_ID,
            self.subscription,
        )

        # Let's start recording
        cassette = self.recorder.use_cassette("http.json")
        cassette.__enter__()
        self.addCleanup(cassette.__exit__)

        integration = github.GithubIntegration(config.INTEGRATION_ID,
                                               config.PRIVATE_KEY)
        self.installation_token = integration.get_access_token(
            config.INSTALLATION_ID).token

        self.g_integration = github.Github(self.installation_token,
                                           base_url="https://api.%s" %
                                           config.GITHUB_DOMAIN)
        self.g_admin = github.Github(config.MAIN_TOKEN,
                                     base_url="https://api.%s" %
                                     config.GITHUB_DOMAIN)
        self.g_fork = github.Github(config.FORK_TOKEN,
                                    base_url="https://api.%s" %
                                    config.GITHUB_DOMAIN)

        self.o_admin = self.g_admin.get_organization(
            config.TESTING_ORGANIZATION)
        self.o_integration = self.g_integration.get_organization(
            config.TESTING_ORGANIZATION)
        self.u_fork = self.g_fork.get_user()
        assert self.o_admin.login == "mergifyio-testing"
        assert self.o_integration.login == "mergifyio-testing"
        assert self.u_fork.login == "mergify-test2"

        self.r_o_admin = self.o_admin.create_repo(self.name)
        self.r_o_integration = self.o_integration.get_repo(self.name)
        self.url_main = "https://%s/%s" % (
            config.GITHUB_DOMAIN,
            self.r_o_integration.full_name,
        )
        self.url_fork = "https://%s/%s/%s" % (
            config.GITHUB_DOMAIN,
            self.u_fork.login,
            self.r_o_integration.name,
        )

        # Limit installations/subscription API to the test account
        install = {
            "id": config.INSTALLATION_ID,
            "target_type": "Org",
            "account": {
                "login": "******"
            },
        }

        self.useFixture(
            fixtures.MockPatch("mergify_engine.utils.get_installations",
                               lambda integration: [install]))

        real_get_subscription = sub_utils.get_subscription

        def fake_retrieve_subscription_from_db(install_id):
            if int(install_id) == config.INSTALLATION_ID:
                return self.subscription
            else:
                return {
                    "tokens": {},
                    "subscription_active": False,
                    "subscription_reason": "We're just testing",
                }

        def fake_subscription(r, install_id):
            if int(install_id) == config.INSTALLATION_ID:
                return real_get_subscription(r, install_id)
            else:
                return {
                    "tokens": {},
                    "subscription_active": False,
                    "subscription_reason": "We're just testing",
                }

        self.useFixture(
            fixtures.MockPatch(
                "mergify_engine.branch_updater.sub_utils.get_subscription",
                side_effect=fake_subscription,
            ))

        self.useFixture(
            fixtures.MockPatch(
                "mergify_engine.branch_updater.sub_utils._retrieve_subscription_from_db",
                side_effect=fake_retrieve_subscription_from_db,
            ))

        self.useFixture(
            fixtures.MockPatch(
                "mergify_engine.sub_utils.get_subscription",
                side_effect=fake_subscription,
            ))

        self.useFixture(
            fixtures.MockPatch(
                "github.MainClass.Installation.Installation.get_repos",
                return_value=[self.r_o_integration],
            ))
        self._event_reader = EventReader(self.app)
        self._event_reader.drain()
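
Each of these fixtures passes a response_filter callback to VCR via before_record_response, but its body is not shown. A minimal sketch of what such a filter might do, scrubbing volatile or sensitive headers from recorded responses, is given below; the exact set of scrubbed headers is an assumption.

# Hypothetical VCR before_record_response hook; the scrubbed headers are
# assumptions about what should not be stored in the cassettes.
def response_filter(response):
    for header in ("X-GitHub-Request-Id", "Date", "ETag",
                   "X-RateLimit-Reset", "Set-Cookie"):
        response["headers"].pop(header, None)
    return response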
Code example #21
File: debug.py  Project: jd/mergify-engine
def report(
    url: str,
) -> typing.Union[context.Context, github.GithubInstallationClient, None]:
    path = url.replace("https://github.com/", "")

    pull_number: typing.Optional[str]
    repo: typing.Optional[str]

    try:
        owner, repo, _, pull_number = path.split("/")
    except ValueError:
        pull_number = None
        try:
            owner, repo = path.split("/")
        except ValueError:
            owner = path
            repo = None

    try:
        client = github.get_client(owner)
    except exceptions.MergifyNotInstalled:
        print(f"* Mergify is not installed on account {owner}")
        return None

    # Do a dumb request just to authenticate
    client.get("/")

    if client.auth.installation is None:
        print("No installation detected")
        return None

    print("* INSTALLATION ID: %s" % client.auth.installation["id"])

    if client.auth.owner_id is None:
        raise RuntimeError("Unable to get owner_id")

    cached_sub, db_sub = utils.async_run(
        subscription.Subscription.get_subscription(client.auth.owner_id),
        subscription.Subscription._retrieve_subscription_from_db(client.auth.owner_id),
    )

    if repo is None:
        slug = None
    else:
        slug = owner + "/" + repo

    print("* SUBSCRIBED (cache/db): %s / %s" % (cached_sub.active, db_sub.active))
    print("* Features (cache):")
    for f in cached_sub.features:
        print(f"  - {f.value}")
    report_sub(client.auth.installation["id"], cached_sub, "ENGINE-CACHE", slug)
    report_sub(client.auth.installation["id"], db_sub, "DASHBOARD", slug)

    utils.async_run(report_worker_status(client.auth.owner))

    if repo is not None:

        repo_info = client.item(f"/repos/{owner}/{repo}")
        print(f"* REPOSITORY IS {'PRIVATE' if repo_info['private'] else 'PUBLIC'}")

        print("* CONFIGURATION:")
        mergify_config = None
        try:
            filename, mergify_config_content = rules.get_mergify_config_content(
                client, repo
            )
        except rules.NoRules:  # pragma: no cover
            print(".mergify.yml is missing")
        else:
            print(f"Config filename: {filename}")
            print(mergify_config_content.decode())
            try:
                mergify_config = rules.UserConfigurationSchema(mergify_config_content)
            except rules.InvalidRules as e:  # pragma: no cover
                print("configuration is invalid %s" % str(e))
            else:
                mergify_config["pull_request_rules"].rules.extend(
                    engine.DEFAULT_PULL_REQUEST_RULES.rules
                )

        if pull_number is None:
            for branch in typing.cast(
                typing.List[github_types.GitHubBranch],
                client.items(f"/repos/{owner}/{repo}/branches"),
            ):
                q = queue.Queue(
                    utils.get_redis_for_cache(),
                    repo_info["owner"]["id"],
                    repo_info["owner"]["login"],
                    repo_info["id"],
                    repo_info["name"],
                    branch["name"],
                )
                pulls = q.get_pulls()
                if not pulls:
                    continue

                print(f"* QUEUES {branch['name']}:")

                for priority, grouped_pulls in itertools.groupby(
                    pulls, key=lambda v: q.get_config(v)["priority"]
                ):
                    try:
                        fancy_priority = merge_base.PriorityAliases(priority).name
                    except ValueError:
                        fancy_priority = str(priority)
                    formatted_pulls = ", ".join((f"#{p}" for p in grouped_pulls))
                    print(f"** {formatted_pulls} (priority: {fancy_priority})")
        else:
            pull_raw = client.item(f"/repos/{owner}/{repo}/pulls/{pull_number}")
            ctxt = context.Context(
                client,
                pull_raw,
                cached_sub,
                [],
            )

            # FIXME queues could also be printed if no pull number given
            q = queue.Queue.from_context(ctxt)
            print("* QUEUES: %s" % ", ".join([f"#{p}" for p in q.get_pulls()]))
            print("* PULL REQUEST:")
            pr_data = dict(ctxt.pull_request.items())
            pprint.pprint(pr_data, width=160)

            print("is_behind: %s" % ctxt.is_behind)

            print("mergeable_state: %s" % ctxt.pull["mergeable_state"])

            print("* MERGIFY LAST CHECKS:")
            for c in ctxt.pull_engine_check_runs:
                print(
                    "[%s]: %s | %s"
                    % (c["name"], c["conclusion"], c["output"].get("title"))
                )
                print(
                    "> "
                    + "\n> ".join(
                        ("No Summary",)
                        if c["output"]["summary"] is None
                        else c["output"]["summary"].split("\n")
                    )
                )

            if mergify_config is not None:
                print("* MERGIFY LIVE MATCHES:")
                match = mergify_config["pull_request_rules"].get_pull_request_rule(ctxt)
                summary_title, summary = actions_runner.gen_summary(ctxt, match)
                print("> %s" % summary_title)
                print(summary)

            return ctxt

    return client
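
This newer report() accepts an account URL, a repository URL, or a full pull request URL, and returns the context, the client, or None. A minimal invocation sketch follows; only the report() signature comes from the example above, the __main__ wiring is an assumption.

# Hypothetical manual invocation of the debug helper shown above.
if __name__ == "__main__":
    import sys

    result = report(sys.argv[1])  # e.g. https://github.com/owner/repo/pull/123
    if result is None:
        sys.exit(1)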
Code example #22
def job_filter_and_dispatch(event_type, event_id, data):
    subscription = utils.get_subscription(
        utils.get_redis_for_cache(), data["installation"]["id"])

    if not subscription["token"]:
        msg_action = "ignored (no token)"

    elif event_type == "installation" and data["action"] == "deleted":
        # TODO(sileht): move this engine V1 related code out
        # NOTE: the key pattern contains globs, so expand it with SCAN instead
        # of deleting a literal key name.
        redis = utils.get_redis_for_cache()
        pattern = "queues~%s~*~*~*~*" % data["installation"]["id"]
        for key in redis.scan_iter(pattern):
            redis.delete(key)
        msg_action = "handled, cache cleaned"

    elif (event_type == "installation_repositories" and
          data["action"] == "removed"):
        for repository in data["repositories_removed"]:
            if repository["private"] and not subscription["subscribed"]:  # noqa pragma: no cover
                continue

            # TODO(sileht): move this engine V1 related code out
            # NOTE: the key pattern contains globs, so expand it with SCAN
            # before deleting.
            redis = utils.get_redis_for_cache()
            pattern = "queues~%s~%s~%s~*~*" % (
                data["installation"]["id"],
                data["installation"]["account"]["login"].lower(),
                repository["name"].lower()
            )
            for key in redis.scan_iter(pattern):
                redis.delete(key)

        msg_action = "handled, cache cleaned"

    elif event_type in ["installation", "installation_repositories"]:
        msg_action = "ignored (action %s)" % data["action"]

    elif event_type in ["pull_request", "pull_request_review", "status",
                        "check_suite", "check_run"]:

        if data["repository"]["archived"]:  # pragma: no cover
            msg_action = "ignored (repository archived)"

        elif (data["repository"]["private"] and not
                subscription["subscribed"]):
            msg_action = "ignored (not public or subscribe)"

        elif event_type == "status" and data["state"] == "pending":
            msg_action = "ignored (state pending)"

        elif event_type == "status" and data["context"] == "mergify/pr":
            msg_action = "ignored (mergify status)"

        elif (event_type in ["check_run", "check_suite"] and
              data[event_type]["app"]["id"] == config.INTEGRATION_ID):
            msg_action = "ignored (mergify %s)" % event_type

        elif (event_type == "pull_request" and data["action"] not in [
                "opened", "reopened", "closed", "synchronize",
                "labeled", "unlabeled", "edited"]):
            msg_action = "ignored (action %s)" % data["action"]

        else:
            engine.run.s(event_type, data, subscription).apply_async()
            msg_action = "pushed to backend"

            if event_type == "pull_request":
                msg_action += ", action: %s" % data["action"]

            elif event_type == "pull_request_review":
                msg_action += ", action: %s, review-state: %s" % (
                    data["action"], data["review"]["state"])

            elif event_type == "pull_request_review_comment":
                msg_action += ", action: %s, review-state: %s" % (
                    data["action"], data["comment"]["position"])

            elif event_type == "status":
                msg_action += ", ci-status: %s, sha: %s" % (
                    data["state"], data["sha"])

            elif event_type in ["check_run", "check_suite"]:
                msg_action += (
                    ", action: %s, status: %s, conclusion: %s, sha: %s" % (
                        data["action"],
                        data[event_type]["status"],
                        data[event_type]["conclusion"],
                        data[event_type]["head_sha"]))
    else:
        msg_action = "ignored (unexpected event_type)"

    if "repository" in data:
        repo_name = data["repository"]["full_name"]
    else:
        repo_name = data["installation"]["account"]["login"]

    LOG.info('event %s', msg_action,
             event_type=event_type,
             event_id=event_id,
             install_id=data["installation"]["id"],
             sender=data["sender"]["login"],
             repository=repo_name,
             subscribed=subscription["subscribed"])
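
job_filter_and_dispatch() expects an already-validated webhook payload. The sketch below shows one way a web frontend could verify the X-Hub-Signature header with hmac.compare_digest (the same call the test fixtures above patch) before handing the event over. The Flask route, the config.WEBHOOK_SECRET name and the direct function call are assumptions about the surrounding code.

# Hypothetical Flask endpoint feeding job_filter_and_dispatch(); the route,
# secret name and invocation style are assumptions.
import hashlib
import hmac

import flask

app = flask.Flask(__name__)


@app.route("/events", methods=["POST"])
def events():
    payload = flask.request.get_data()
    expected = "sha1=" + hmac.new(
        config.WEBHOOK_SECRET.encode(), payload, hashlib.sha1).hexdigest()
    if not hmac.compare_digest(
            expected, flask.request.headers.get("X-Hub-Signature", "")):
        flask.abort(403)

    job_filter_and_dispatch(
        flask.request.headers["X-GitHub-Event"],
        flask.request.headers.get("X-GitHub-Delivery"),
        flask.request.get_json(),
    )
    return "", 202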
Code example #23
def _do_update(pull, token, method="merge"):
    # NOTE(sileht):
    # $ curl https://api.github.com/repos/sileht/repotest/pulls/2 | jq .commits
    # 2
    # $ git clone https://[email protected]/sileht-tester/repotest \
    #           --depth=$((2 + 1)) -b sileht/testpr
    # $ cd repotest
    # $ git remote add upstream https://[email protected]/sileht/repotest.git
    # $ git log | grep Date | tail -1
    # Date:   Fri Mar 30 21:30:26 2018 (10 days ago)
    # $ git fetch upstream master --shallow-since="Fri Mar 30 21:30:26 2018"
    # $ git rebase upstream/master
    # $ git push origin sileht/testpr:sileht/testpr

    head_repo = pull.head_repo_owner_login + "/" + pull.head_repo_name
    base_repo = pull.base_repo_owner_login + "/" + pull.base_repo_name

    head_branch = pull.head_ref
    base_branch = pull.base_ref
    git = utils.Gitter()
    try:
        git("init")
        git.configure()
        git.add_cred(token, "", head_repo)
        git.add_cred(token, "", base_repo)
        git(
            "remote",
            "add",
            "origin",
            "https://%s/%s" % (config.GITHUB_DOMAIN, head_repo),
        )
        git(
            "remote",
            "add",
            "upstream",
            "https://%s/%s" % (config.GITHUB_DOMAIN, base_repo),
        )

        depth = len(pull.commits) + 1
        git("fetch", "--quiet", "--depth=%d" % depth, "origin", head_branch)
        git("checkout", "-q", "-b", head_branch, "origin/%s" % head_branch)

        out = git("log", "--format=%cI")
        last_commit_date = [
            d for d in out.decode("utf8").split("\n") if d.strip()
        ][-1]

        git(
            "fetch",
            "--quiet",
            "upstream",
            base_branch,
            "--shallow-since='%s'" % last_commit_date,
        )

        try:
            _do_update_branch(git, method, base_branch, head_branch)
        except subprocess.CalledProcessError as e:  # pragma: no cover
            for message in GIT_MESSAGE_TO_UNSHALLOW:
                if message in e.output:
                    pull.log.debug("Complete history cloned")
                    # NOTE(sileht): We currently assume we have only one parent
                    # commit in common. Since Git history is a graph, in some
                    # cases it can be more complicated, so we retry with the
                    # whole git history for now.
                    git("fetch", "--unshallow")
                    git("fetch", "--quiet", "origin", head_branch)
                    git("fetch", "--quiet", "upstream", base_branch)
                    _do_update_branch(git, method, base_branch, head_branch)
                    break
            else:
                raise

        expected_sha = git("log", "-1", "--format=%H").decode().strip()
        # NOTE(sileht): We store this for dismissal action
        redis = utils.get_redis_for_cache()
        redis.setex("branch-update-%s" % expected_sha, 60 * 60, expected_sha)
    except subprocess.CalledProcessError as in_exception:  # pragma: no cover
        for message, out_exception in GIT_MESSAGE_TO_EXCEPTION.items():
            if message in in_exception.output:
                raise out_exception(in_exception.output.decode())
        else:
            pull.log.error(
                "update branch failed: %s",
                in_exception.output.decode(),
                exc_info=True,
            )
            raise BranchUpdateFailure()

    except Exception:  # pragma: no cover
        pull.log.error("update branch failed",
                       pull_request=pull,
                       exc_info=True)
        raise BranchUpdateFailure()
    finally:
        git.cleanup()
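
_do_update_branch(), called twice above, is not shown in this example. Based on the method argument ("merge" by default), a plausible sketch is the following; treat it as an assumption about its behaviour rather than the project's actual implementation.

# Hypothetical _do_update_branch(): bring the head branch up to date with the
# base branch either by merging or by rebasing, then push the result back.
def _do_update_branch(git, method, base_branch, head_branch):
    if method == "merge":
        git("merge", "--quiet", "upstream/%s" % base_branch, "-m",
            "Merge branch '%s' into '%s'" % (base_branch, head_branch))
        git("push", "--quiet", "origin", head_branch)
    elif method == "rebase":
        git("rebase", "upstream/%s" % base_branch)
        git("push", "--quiet", "origin", head_branch, "-f")
    else:
        raise RuntimeError("Invalid branch update method")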
Code example #24
def job_refresh(owner, repo, refresh_ref):
    LOG.info("%s/%s/%s: refreshing", owner, repo, refresh_ref)

    integration = github.GithubIntegration(config.INTEGRATION_ID,
                                           config.PRIVATE_KEY)
    installation_id = utils.get_installation_id(integration, owner)
    if not installation_id:  # pragma: no cover
        LOG.warning("%s/%s/%s: mergify not installed",
                    owner, repo, refresh_ref)
        return

    token = integration.get_access_token(installation_id).token
    g = github.Github(token, base_url="https://api.%s" % config.GITHUB_DOMAIN)
    r = g.get_repo("%s/%s" % (owner, repo))
    try:
        r.get_contents(".mergify.yml")
    except github.GithubException as e:  # pragma: no cover
        if e.status == 404:
            LOG.warning("%s/%s/%s: mergify not configured",
                        owner, repo, refresh_ref)
            return
        else:
            raise

    if refresh_ref == "full" or refresh_ref.startswith("branch/"):
        if refresh_ref.startswith("branch/"):
            branch = refresh_ref[7:]
            pulls = r.get_pulls(base=branch)
        else:
            branch = '*'
            pulls = r.get_pulls()
        # NOTE: branch may be "*" for a full refresh, so expand the pattern
        # with SCAN instead of deleting a literal key.
        pattern = "queues~%s~%s~%s~%s~%s" % (installation_id, owner.lower(),
                                             repo.lower(), r.private, branch)
        redis = utils.get_redis_for_cache()
        for key in redis.scan_iter(pattern):
            redis.delete(key)
    else:
        try:
            pull_number = int(refresh_ref[5:])
        except ValueError:  # pragma: no cover
            LOG.info("%s/%s/%s: Invalid PR ref", owner, repo, refresh_ref)
            return
        pulls = [r.get_pull(pull_number)]

    subscription = utils.get_subscription(utils.get_redis_for_cache(),
                                          installation_id)

    if r.archived:  # pragma: no cover
        LOG.warning("%s/%s/%s: repository archived",
                    owner, repo, refresh_ref)
        return

    if not subscription["token"]:  # pragma: no cover
        LOG.warning("%s/%s/%s: installation without token",
                    owner, repo, refresh_ref)
        return

    if r.private and not subscription["subscribed"]:  # pragma: no cover
        LOG.warning("%s/%s/%s: mergify not installed",
                    owner, repo, refresh_ref)
        return

    for p in pulls:
        # Mimic the github event format
        data = {
            'repository': r.raw_data,
            'installation': {'id': installation_id},
            'pull_request': p.raw_data,
        }
        engine.run.s('refresh', data, subscription).apply_async()
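
job_refresh() distinguishes three refresh_ref formats: "full", "branch/<name>" and "pull/<number>" (the int(refresh_ref[5:]) call implies the last one). A small helper that builds such references could look like the sketch below; it is an illustration, not part of the original module.

# Hypothetical helper building the refresh_ref strings consumed above.
def make_refresh_ref(branch=None, pull_number=None):
    if pull_number is not None:
        return "pull/%d" % pull_number
    if branch is not None:
        return "branch/%s" % branch
    return "full"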
Code example #25
def _badge_color_mode(owner, repo):
    """Return badge (color, mode) for a repository."""
    redis = utils.get_redis_for_cache()
    if redis.sismember("badges", owner + "/" + repo):
        return "success", "enabled"
    return "critical", "disabled"
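
The (color, mode) pair returned above maps naturally onto a shields.io style static badge. A hedged sketch of a URL builder using it follows; the badge layout is an assumption for illustration.

# Hypothetical badge URL builder on top of _badge_color_mode(); the
# shields.io static badge layout is used here only as an illustration.
def badge_url(owner, repo):
    color, mode = _badge_color_mode(owner, repo)
    return "https://img.shields.io/badge/mergify-%s-%s.svg" % (mode, color)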
Code example #26
File: __init__.py  Project: eladb/mergify-engine
def run(client, pull, sources):
    LOG.debug("engine get sub")
    subscription = sub_utils.get_subscription(
        utils.get_redis_for_cache(), client.installation["id"]
    )

    LOG.debug("engine get context")
    ctxt = context.Context(client, pull, subscription)
    ctxt.log.debug("engine start processing context")

    issue_comment_sources = []

    for source in sources:
        if source["event_type"] == "issue_comment":
            issue_comment_sources.append(source)
        else:
            ctxt.sources.append(source)

    ctxt.log.debug("engine run pending commands")
    commands_runner.run_pending_commands_tasks(ctxt)

    if issue_comment_sources:
        ctxt.log.debug("engine handle commands")
        for source in issue_comment_sources:
            commands_runner.handle(
                ctxt,
                source["data"]["comment"]["body"],
                source["data"]["comment"]["user"],
            )

    if not ctxt.sources:
        return

    if ctxt.client.installation["permissions_need_to_be_updated"]:
        check_api.set_check_run(
            ctxt,
            "Summary",
            "completed",
            "failure",
            output={
                "title": "Required GitHub permissions are missing.",
                "summary": "You can accept them at https://dashboard.mergify.io/",
            },
        )
        return

    ctxt.log.debug("engine check configuration change")
    if check_configuration_changes(ctxt):
        ctxt.log.info("Configuration changed, ignoring")
        return

    ctxt.log.debug("engine get configuration")
    # BRANCH CONFIGURATION CHECKING
    try:
        filename, mergify_config = rules.get_mergify_config(ctxt)
    except rules.NoRules:  # pragma: no cover
        ctxt.log.info("No need to proceed queue (.mergify.yml is missing)")
        return
    except rules.InvalidRules as e:  # pragma: no cover
        # Not configured, post status check with the error message
        if any(
            (
                s["event_type"] == "pull_request"
                and s["data"]["action"] in ["opened", "synchronize"]
                for s in ctxt.sources
            )
        ):
            check_api.set_check_run(
                ctxt,
                actions_runner.SUMMARY_NAME,
                "completed",
                "failure",
                output={
                    "title": "The Mergify configuration is invalid",
                    "summary": str(e),
                    "annotations": e.get_annotations(e.filename),
                },
            )
        return

    # Add global and mandatory rules
    mergify_config["pull_request_rules"].rules.extend(
        rules.PullRequestRules.from_list(MERGIFY_RULE["rules"]).rules
    )

    if ctxt.pull["base"]["repo"]["private"] and not subscription["subscription_active"]:
        check_api.set_check_run(
            ctxt,
            actions_runner.SUMMARY_NAME,
            "completed",
            "failure",
            output={
                "title": "Mergify is disabled",
                "summary": subscription["subscription_reason"],
            },
        )
        return

    # Check runs are attached to the head sha, so when the user adds commits or
    # force-pushes we can't directly get the previous Mergify Summary. We copy it
    # here so that anything that looks for it in the next Celery tasks will
    # find it.

    synchronize_data = [
        s["data"]
        for s in ctxt.sources
        if s["event_type"] == "pull_request"
        and s["data"]["action"] == "synchronize"
        and s["data"]["after"] == ctxt.pull["head"]["sha"]
    ]
    if synchronize_data:
        ctxt.log.debug("engine synchronize summary")
        copy_summary_from_previous_head_sha(ctxt, synchronize_data[0]["before"])

    ctxt.log.debug("engine handle actions")
    actions_runner.handle(mergify_config["pull_request_rules"], ctxt)
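
run() expects each source to be a mapping with event_type and data keys, and it splits issue_comment events out for the command runner. The sketch below only shows that payload shape; the values are placeholders, and client and pull are provided by the surrounding engine code in practice.

# The source payload shape consumed by run(); all values are placeholders.
sources = [
    {"event_type": "pull_request",
     "data": {"action": "synchronize",
              "before": "<old head sha>", "after": "<new head sha>"}},
    {"event_type": "issue_comment",
     "data": {"comment": {"body": "@mergifyio refresh",
                          "user": {"login": "octocat"}}}},
]
# `client` and `pull` come from the engine's event processing:
# run(client, pull, sources)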