Пример #1
0
def test_retrier_can_alter_db_objects(db_session, default_config):
    """Mutations made by the retry callback must end up persisted in the DB."""
    pr = PullRequest('moby/moby', 34567)
    github = FakeGithubClient(
        pr,
        ['1' * 40],
        [[('coucou', 'pending', 12), ('blah', 'error', 28)]])

    proc = PullRequestProcessor(pr, github, default_config)

    def alter_db(pr_processor, checks_status):
        # simulate a retrier that rewrites the processed SHA and the
        # errored check's id before the processor persists them
        pr_processor.pull_request.last_processed_sha = '3' * 40
        assert len(checks_status.retrying) == 1
        checks_status.retrying[0].last_errored_id = 82

    proc.run(db_session,
             FakeRetrier(proc, retry_func=alter_db),
             FakeNotifier(proc))
    github.assert_exhausted()

    # let's look at what's in the DB: both mutations were saved
    assert_pr_equal(pr,
                    db_session.query(PullRequest).all()[0],
                    ('3' * 40, 'pending'))
    assert_checks_equal(pr,
                        db_session.query(Check).all(),
                        ['coucou', ('blah', 82, 1)])
Пример #2
0
def test_basic_retry(db_session, default_config):
    """A single run retries the one errored check and persists PR + checks."""
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(pull_request, ['1' * 40],
                                 [[('coucou', 'pending', 12),
                                   ('blah', 'error', 28)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()

    # the PR is still pending overall ('coucou' hasn't finished)
    assert_pr_equal(pull_request, pull_request, ('1' * 40, 'pending'))

    # only the errored check ('blah', last id 28) was retried, once
    assert len(retrier.retried) == 1
    assert_checks_equal(pull_request, retrier.retried[0].retrying,
                        [('blah', 28, 1)])
    assert_checks_equal(pull_request, retrier.retried[0].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2
    assert retrier.cleanup_count == 0

    # exactly one notification went out, and it carries that same status
    assert len(notifier.retrying()) == 1
    assert notifier.retrying()[0] is retrier.retried[0]
    assert len(notifier) == 1

    # let's look at what's in the DB
    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'pending'))
    assert_checks_equal(pull_request,
                        db_session.query(Check).all(),
                        ['coucou', ('blah', 28, 1)])
Пример #3
0
def test_pending_retry_checks_are_retriggered_after_a_while(
        db_session, default_config):
    """A check still erroring after the delay elapses gets retried again.

    `Datetime.now` is patched so the two runs appear 6 minutes apart
    (12:12 then 12:18), past the re-trigger threshold.
    """
    with patch.object(Datetime, 'now') as patched_now:
        generator = Generator(datetime.datetime(2019, 1, 1, 12, 12),
                              datetime.datetime(2019, 1, 1, 12, 18))
        patched_now.side_effect = generator.next

        pull_request = PullRequest('moby/moby', 34567)
        gh_client = FakeGithubClient(pull_request, ['1' * 40, '1' * 40],
                                     [[('coucou', 'pending', 12),
                                       ('blah', 'error', 28)], []])

        processor = PullRequestProcessor(pull_request, gh_client,
                                         default_config)
        retrier = FakeRetrier(processor)
        notifier = FakeNotifier(processor)

        # then we run twice
        processor.run(db_session, retrier, notifier)
        processor.run(db_session, retrier, notifier)
        gh_client.assert_exhausted()
        generator.assert_exhausted()

        # we should have retried twice, with the same status each time
        assert len(retrier.retried) == 2
        for retried in retrier.retried:
            assert_checks_equal(pull_request, retried.retrying,
                                [('blah', 28, 1)])
            assert_checks_equal(pull_request, retried.pending, ['coucou'])
            # BUG FIX: assert on the current iteration's status, not always
            # the first one (was `retrier.retried[0]`, a copy-paste slip)
            assert len(retried) == 2
        assert retrier.cleanup_count == 0
Пример #4
0
def test_it_ignores_checks_marked_as_such(db_session, default_config):
    """Checks on the ignore list are never retried and never stored.

    'codecov/patch' errors but is never retried nor written to the DB —
    presumably it is on default_config's ignore list (TODO confirm against
    the fixture definition).
    """
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(pull_request, ['1' * 40, '1' * 40],
                                 [[('coucou', 'success', 12),
                                   ('codecov/patch', 'error', 28)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    processor.run(db_session, retrier, notifier)

    # with the ignored check out of the picture, the PR counts as successful
    assert_pr_equal(pull_request, pull_request, ('1' * 40, 'successful'))

    # we shouldn't have retried anything
    assert len(retrier.retried) == 0
    # but we should have cleaned up
    assert retrier.cleanup_count == 1

    # and let's check in the DB
    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'successful'))
    assert_checks_equal(pull_request,
                        db_session.query(Check).all(), ['coucou'])

    # running again should not do anything
    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()
Пример #5
0
 def convert_pull_request(self, pr):
     """Map a GitHub pull-request object onto our internal PullRequest."""
     requested = pr.get_reviewer_requests()
     return PullRequest(
         reviewers=[reviewer.login for reviewer in requested],
         created_at=pr.created_at,
         url=pr.html_url,
         title=pr.title,
         creator=pr.user.name,
         config=self.config)
Пример #6
0
 def get_pull_request_neo4j(self, graph, pull_req_id):
     """
     Load an existing Neo4j pull-request node, or create a bare-minimum
     one in the database when none exists yet.
     :param graph: py2neo Graph used for the lookup / creation
     :param pull_req_id: identifier of the pull request to load
     :returns: from Neo4j either a new or existing PullRequest
     """
     pull = PullRequest.select(graph, pull_req_id).first()
     if pull is None:
         # not in the graph yet: create a node holding only the id
         pull = PullRequest()
         pull.Id = pull_req_id
         graph.create(pull)
     return pull
Пример #7
0
 def post(self, url_path):
     """Webhook handler: upsert the PullRequest (and its author) from a
     GitHub JSON payload.

     The request is only honoured when ``url_path`` matches the most
     recently created UploadURL row; otherwise it answers 404 (unknown
     path) or 500 (no upload URL configured yet), and 400 on a payload
     that fails to parse as JSON.
     """
     last_row = UploadURL.all().order("-created_at").get()
     if last_row:
         if last_row.url_path == url_path:
             try:
                 payload = json.loads(self.request.get("payload"))
                 logging.info(payload)
             except json.JSONDecodeError:
                 self.error(400)
                 self.response.out.write("Incorrect request format\n")
                 # BUG FIX: bail out — without a parsed payload the code
                 # below would hit an undefined `payload`
                 return
             user_repo = payload["repository"]["full_name"]
             # Download complete pull request with information about mergeability
             pull_request = github_get_pull_request(user_repo,
                                                    payload["number"])
             num = payload["number"]
             # Get the old entity or create a new one:
             p = PullRequest.all()
             p.filter("num =", int(num))
             p = p.get()
             if p is None:
                 p = PullRequest(num=num)
             # Update all data that we can from GitHub:
             p.url = pull_request["html_url"]
             p.state = pull_request["state"]
             p.title = pull_request["title"]
             p.body = pull_request["body"]
             p.mergeable = pull_request["mergeable"]
             if pull_request["head"]["repo"]:
                 p.repo = pull_request["head"]["repo"]["url"]
             p.branch = pull_request["head"]["ref"]
             p.author_name = pull_request["user"].get("name", "")
             p.author_email = pull_request["user"].get("email", "")
             created_at = pull_request["created_at"]
             created_at = datetime.strptime(created_at,
                                            "%Y-%m-%dT%H:%M:%SZ")
             p.created_at = created_at
             # Look the author up by login, creating the User row the
             # first time this login is seen.
             u = User.all()
             # NOTE(review): this line was corrupted in the source
             # ("login ="******...); reconstructed from the analogous
             # handler elsewhere in this file — confirm against history.
             u.filter("login =", pull_request["user"]["login"])
             u = u.get()
             if u is None:
                 u = User(login=pull_request["user"]["login"])
                 u.id = pull_request["user"]["id"]
                 u.avatar_url = pull_request["user"]['avatar_url']
                 u.url = pull_request["user"]["url"]
                 u.put()
             p.author = u
             p.put()
         else:
             self.error(404)
             self.response.out.write("Requesting URL doesn't exist\n")
     else:
         self.error(500)
         self.response.out.write("URL for posting data not defined yet\n")
Пример #8
0
def test_pending_retry_checks_are_left_alone(db_session, default_config):
    """A retry still pending is not re-triggered on the next run.

    Run 1 retries 'blah'; run 2 (empty check report) changes nothing;
    run 3 sees 'coucou' fail and retries it, with 'blah' still pending.
    """
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(
        pull_request, ['1' * 40, '1' * 40, '1' * 40],
        [[('coucou', 'pending', 12),
          ('blah', 'error', 28)], [], [('coucou', 'error', 12)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    # then we run twice
    processor.run(db_session, retrier, notifier)
    processor.run(db_session, retrier, notifier)

    assert_pr_equal(pull_request, pull_request, ('1' * 40, 'pending'))

    # everything should be the same as if we had just run once
    assert len(retrier.retried) == 1
    assert_checks_equal(pull_request, retrier.retried[0].retrying,
                        [('blah', 28, 1)])
    assert_checks_equal(pull_request, retrier.retried[0].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2
    assert retrier.cleanup_count == 0

    # one notification only, carrying the first run's status
    assert len(notifier.retrying()) == 1
    assert notifier.retrying()[0] is retrier.retried[0]
    assert len(notifier) == 1

    # let's look at what's in the DB
    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'pending'))
    assert_checks_equal(pull_request,
                        db_session.query(Check).all(),
                        ['coucou', ('blah', 28, 1)])

    # now let's run a 3rd time, 'coucou' fails
    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()

    # 'coucou' gets its first retry; 'blah' stays in retry_pending
    assert len(retrier.retried) == 2
    assert_checks_equal(pull_request, retrier.retried[1].retrying,
                        [('coucou', 12, 1)])
    assert_checks_equal(pull_request, retrier.retried[1].retry_pending,
                        [('blah', 28, 1)])
    assert len(retrier.retried[1]) == 2
    assert retrier.cleanup_count == 0
Пример #9
0
    def test_PrcoessRepoLargePullRequestCount(self):
        """Importing a repo with >100 PRs queues and drains paginated requests.

        NOTE(review): the method name has a typo ('Prcoess'); kept as-is
        since test runners discover it by name.
        """
        testRepo = 3638964
        testRepoFullName = 'ansible/ansible'

        # Process a known repo
        ProcessRepo(testRepoFullName)

        # verify that a pagination was queued on the pullrequest import
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 1)

        # process the queued pagination
        ProcessGitHubRequest(1)

        myRepo = Repo.objects.get(pk=testRepo)

        # test that pull requests got imported
        # this repo should have more than 100 pull requests.
        # importing pull requests requires pagination to go over 100 entries
        self.assertGreater(myRepo.pullrequest_set.count(), 100)

        # verify that another request was queued
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 1)

        #### Test hitting the data cache threshold for large pull request chains.
        # get a random user_id
        myUser = choice(User.objects.all())

        myPullRequest = PullRequest(number=10000,
                                    repo=myRepo,
                                    created_at='2013-01-01 01:01',
                                    user=myUser,
                                    updated_at='2015-01-01 01:01')
        myPullRequest.save()

        # process the queued pagination
        ProcessGitHubRequest(1)

        # Verify that there are no more queued paginations
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 0)
Пример #10
0
 def upload_task(num, result, interpreter, testcommand, log):
     """Record a test-run Task against PullRequest *num*, creating the PR
     row on first sight, and return the report URL payload."""
     query = PullRequest.all()
     query.filter("num =", int(num))
     pull = query.get()
     if pull is None:
         # Create the pull request:
         pull = PullRequest(num=num)
         pull.put()
     task = Task(pullrequest=pull)
     task.result = result
     task.interpreter = interpreter
     task.testcommand = testcommand
     task.log = log
     task.put()
     return {
         "ok": True,
         "task_url": "%s/report/%s" % (url_base, task.key())
     }
Пример #11
0
def test_resume_after_failure_if_new_patch(db_session, default_config):
    """After a permanent failure, pushing a new SHA restarts the retry cycle."""
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(
        pull_request, ['1' * 40, '1' * 40, '2' * 40],
        [[('coucou', 'pending', 12),
          ('fast_fail', 'error', 28)], [('fast_fail', 'error', 82)],
         [('coucou', 'pending', 13), ('fast_fail', 'error', 93)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    # then we run twice
    processor.run(db_session, retrier, notifier)
    processor.run(db_session, retrier, notifier)

    # we should only have retried once
    assert len(retrier.retried) == 1
    assert_checks_equal(pull_request, retrier.retried[0].retrying,
                        [('fast_fail', 28, 1)])
    assert_checks_equal(pull_request, retrier.retried[0].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2
    # and we should have cleaned up
    assert retrier.cleanup_count == 1

    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'failed'))

    # now let's run again, it's a new patch
    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()

    # we should have retried again
    assert len(retrier.retried) == 2
    assert_checks_equal(pull_request, retrier.retried[1].retrying,
                        [('fast_fail', 93, 1)])
    assert_checks_equal(pull_request, retrier.retried[1].pending, ['coucou'])
    # BUG FIX: assert on the second retry's status (was `retried[0]`, a
    # copy-paste slip; cf. the analogous assertion in
    # test_pending_retry_checks_are_left_alone)
    assert len(retrier.retried[1]) == 2

    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('2' * 40, 'pending'))
Пример #12
0
def test_too_many_failures(db_session, default_config):
    """A check failing again after its retry triggers a too-many-failures
    notification and marks the PR as failed."""
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(
        pull_request, ['1' * 40, '1' * 40, '1' * 40],
        [[('coucou', 'pending', 12),
          ('fast_fail', 'error', 28)], [('fast_fail', 'error', 82)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    # then we run twice
    processor.run(db_session, retrier, notifier)
    processor.run(db_session, retrier, notifier)

    # we should only have retried once
    assert len(retrier.retried) == 1
    assert_checks_equal(pull_request, retrier.retried[0].retrying,
                        [('fast_fail', 28, 1)])
    assert_checks_equal(pull_request, retrier.retried[0].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2
    # and we should have cleaned up
    assert retrier.cleanup_count == 1

    # one "retrying" notification from run 1, one "too many failures"
    # notification from run 2 ('fast_fail' errored again, count 2)
    assert len(notifier.retrying()) == 1
    assert notifier.retrying()[0] is retrier.retried[0]
    assert len(notifier.too_many_failures()) == 1
    assert_checks_equal(pull_request,
                        notifier.too_many_failures()[0].too_many_failures,
                        [('fast_fail', 82, 2)])
    assert_checks_equal(pull_request,
                        notifier.too_many_failures()[0].pending, ['coucou'])
    assert len(notifier.too_many_failures()[0]) == 2
    assert len(notifier) == 2

    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'failed'))

    # running again should not do anything
    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()
Пример #13
0
    def post(self):
        """Poll GitHub for every open pull request on the watched repo and
        upsert a PullRequest row (plus its author's User row) for each."""
        user_repo = polled_user + "/" + polled_repo
        payload = github_get_pull_request_all_v3(user_repo)
        # checkout mergeability (only the per-PR endpoint reports it)
        for pos in xrange(len(payload)):
            pull = github_get_pull_request(user_repo, payload[pos]["number"])
            payload[pos]["mergeable"] = pull["mergeable"]
        # Process each pull request from payload
        for pull in payload:
            p = PullRequest.all()
            num = pull["number"]
            p.filter("num =", num)
            p = p.get()
            if p is None:
                p = PullRequest(num=num)
            p.url = pull["html_url"]
            p.state = pull["state"]
            p.title = pull["title"]
            p.body = pull["body"]
            p.mergeable = pull["mergeable"]
            if pull["head"]["repo"]:
                p.repo = pull["head"]["repo"]["url"]
            p.branch = pull["head"]["ref"]
            created_at = pull["created_at"]
            created_at = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
            p.created_at = created_at

            # Collect public information about user
            u = User.all()
            login = pull["user"]["login"]
            # NOTE(review): the lookup below was corrupted in the source
            # ("login ="******...); reconstructed from the structurally
            # identical webhook handler in this file — confirm vs. history.
            u.filter("login =", login)
            u = u.get()
            if u is None:
                u = User(login=login)
            u.id = pull["user"]["id"]
            u.avatar_url = pull["user"]["avatar_url"]
            u.url = pull["user"]["url"]
            u.put()

            p.author = u
            p.put()
Пример #14
0
def send_pull_request(user_id, house_name, mode, card_key_str, new_key,
                      new_value):
    """Create and store a PullRequest entity tying a pigeon, house and card
    together, stamping its date_str from the entity's own date field."""
    pigeon_key = ndb.Key(Pigeon, user_id)
    houses = House.query(House.name == house_name).fetch()
    house_key = houses[0].key
    card_key = ndb.Key(Card, card_key_str, parent=house_key)

    request = PullRequest(pigeon_key=pigeon_key,
                          house_key=house_key,
                          card_key=card_key,
                          new_key=new_key,
                          new_value=new_value,
                          mode=mode,
                          date_str='')
    # The first put() populates request.date; copy it into date_str with
    # the fractional seconds stripped, then persist again.
    request.put()
    request.date_str = str(request.date).split('.')[0]
    request.put()
Пример #15
0
    def save(self, requester, repo, assigner=None):
        """Build and persist a PullRequest from this form's cleaned data."""
        data = self.cleaned_data

        pull = PullRequest()
        pull.commit_msg = data["commit_msg"]
        pull.from_head = data["from_head"]
        pull.to_head = data["to_head"]
        pull.create_commit_hexsha = data["commit"]
        pull.requester = requester
        pull.repo = repo

        # optional fields: only set when a value was actually supplied
        if assigner:
            pull.assigner = assigner
        comment = data["comment"]
        if comment:
            pull.comment = comment

        pull.save()
        return pull
Пример #16
0
def to_row(repo, author, pr):
    """
    Convert PR elements to a row of data.

    After processing the input repo, author and PR, the last part is to
    get the counts for each possible review action and add them as columns to
    the row (using zero as default value).

    :param github.Repository.Repository repo: GitHub repo object.
    :param github.NamedUser.NamedUser author: GitHub user object.
    :param github.PullRequest.PullRequest pr: GitHub PR object.

    :return dict out_row: dict of data around a PR's repo, the PR author and
        the PR itself. The status changed, created and updated date will be kept
        as datetime.datetime objects.
    """
    pr_data = PullRequest(pr)

    latest_commit_at = pr_data.latest_commit.datetime.date()
    oldest_commit_at = pr_data.oldest_commit.datetime.date()
    # ONE_DAY makes the span inclusive: commits on the same day count as 1.
    days_between_commits = (latest_commit_at - oldest_commit_at + ONE_DAY).days

    latest_commit_author = lib.display(pr_data.latest_commit.author)
    oldest_commit_author = lib.display(pr_data.oldest_commit.author)

    out_row = {
        "Repo Owner": lib.display(repo.owner),
        "Repo Name": repo.name,
        "Repo URL": repo.html_url,
        "Author": lib.display(author),
        "PR ID": f"#{pr_data.number}",
        "PR Title": pr_data.title,
        "PR From Branch": pr_data.from_branch_name,
        "PR To Branch": pr_data.to_branch_name,
        "PR URL": pr_data.url,
        "Jira Ticket": pr_data.jira_ticket,
        "PR Updated At": pr_data.updated_at,
        "PR Created At": pr_data.created_at,
        "Latest Commit At": latest_commit_at,
        "Latest Commit Author": latest_commit_author,
        "Oldest Commit At": oldest_commit_at,
        "Oldest Commit Author": oldest_commit_author,
        "Days Between Commits": days_between_commits,
        "Status": pr_data.status,
        "Merged/Closed WOY": pr_data.status_changed_week_of_year(),
        "Merged/Closed Date": pr_data.status_changed_at(),
        "Merged By": pr_data.merged_by_name(),
        "Reviewers": ", ".join(pr_data.reviewer_names()),
        "Comments": pr_data.comment_count,
        "Commits": pr_data.commit_count,
        "Changed Files": pr_data.changed_files,
        "Added Lines": pr_data.additions,
        "Deleted Lines": pr_data.deletions,
        # FIX: read deletions from the pr_data wrapper like every other
        # field (was the raw `pr.deletions`)
        "Changed Lines": pr_data.additions + pr_data.deletions,
    }

    # Count reviews per state, then ensure every possible state appears as
    # a column with a zero default.
    review_states = Counter(review.state for review in pr_data.reviews)
    for state in Review.get_states():
        review_states.setdefault(state, 0)
    out_row.update(review_states)

    return out_row
Пример #17
0
 def convert_pull_request(self, pr):
     """Map a Bitbucket pull-request object onto our internal PullRequest."""
     reviewer_names = [reviewer.username for reviewer in pr.reviewers]
     opened_at = dateutil.parser.parse(pr.created_on)
     return PullRequest(
         reviewers=reviewer_names,
         created_at=opened_at,
         url=pr.links['html']['href'],
         title=pr.title,
         creator=pr.author.display_name,
         config=self.config)
Пример #18
0
        for comment in cls._get_all_comments_by_user(pr_processor):
            if not comment.body.startswith(cls._PREFIX):
                continue
            context = comment.body[len(cls._PREFIX):]
            if ' ' in context:
                continue
            if retry_pending is None or context not in retry_pending:
                comment.delete()


if __name__ == '__main__':
    # Ad-hoc manual smoke test against the live GitHub API; flip the
    # `if False` guards below to exercise one scenario by hand.
    from github import Github

    from pr_processor import PullRequestProcessor

    pull_request = PullRequest('kubernetes/kubernetes', 77953)
    # pull_request = PullRequest('moby/moby', 38349)
    config = Config()
    gh_client = Github(config.get('github', 'api_token'))
    processor = PullRequestProcessor(pull_request, gh_client, config)

    if False:  # exercise the amend-and-push retrier
        GitAmendPushRetrier().retry(processor, None)

        print(pull_request.last_processed_sha)

    if False:  # exercise the comment-based retrier helpers
        print(CommentsRetrier._get_all_comments_by_user(processor))
        new_comment = CommentsRetrier._post_comment(processor, 'coucou')
        print(new_comment)
        new_comment.delete()