Example #1
 def get_pull_request_neo4j(self, graph, pull_req_id):
     """
     loads an existing Neo4j pull request or creates a new bare minimum one in the database
     :param graph: py2neo PullRequest
     :returns: from Neo4j either a new or existing PullRequest
     """
     pull = PullRequest.select(graph, pull_req_id).first()
     if pull is None:
         pull = PullRequest()
         pull.Id = pull_req_id
         graph.create(pull)
     return pull
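For reference, a minimal usage sketch of the helper above (not part of the original example), assuming py2neo is available and that the GraphBuilder helper seen in examples #2 and #15 returns a connected Graph; crawler is a hypothetical instance of the class defining the method:

# Hypothetical usage sketch; GraphBuilder and crawler are assumptions, see the note above.
graph = GraphBuilder().GetNewGraph()
pull = crawler.get_pull_request_neo4j(graph, 12345)  # existing node, or a freshly created bare one
print(pull.Id)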
Example #2
    def crawl(self, repository_id, pull_request_id):
        """
        Entry point for this class
        """
        if (repository_id is None) or (pull_request_id is None):
            print("could not get work item one of the id's was None")
            print(repository_id)
            print(pull_request_id)
            return

        graph = GraphBuilder().GetNewGraph()
        pull_request = PullRequest.select(graph, pull_request_id).first()
        if pull_request is None:
            print("Could not continue, pullrequest was not in db")
            return
        url = self.pull_request_workitems_url(repository_id, pull_request.Id)
        data = self.get_data(url)
        if data is None:
            return
        if "value" not in data:
            logging.info("no work items linked")
            return
        for raw in data["value"]:
            work_item = self.make_work_item(raw)
            if work_item is not None:
                self.link_to_pull_request(work_item, pull_request)
                self.fill_in_the_rest(work_item, graph)
                transaction = graph.begin()
                transaction.merge(work_item)
                transaction.graph.push(work_item)
Example #3
File: main.py Project: asmeurer/sympy-bot
 def get(self):
     p_closed = PullRequest.all()
     p_closed.filter("state =", "closed")
     p_closed.order("-created_at")
     self.render("closed_pullrequests.html", {
         "pullrequests_closed": p_closed,
     })
Example #4
def approve_pull_request(house_name, user_id, date):
    pigeon_key = ndb.Key(Pigeon, user_id)
    house_list = House.query(House.name == house_name).fetch()
    house_key = house_list[0].key

    pr_list = PullRequest.query(PullRequest.pigeon_key == pigeon_key,
                                PullRequest.house_key == house_key,
                                PullRequest.date_str == date).fetch()
    if pr_list:
        pr = pr_list[0]
        if pr.mode == 'add':
            add_card(pr.house_key.get().name, pr.new_key, pr.new_value)
            pr.key.delete()
            return
        elif pr.mode == 'remove':
            remove_card(pr.house_key.get().name, pr.card_key.get().card_key)
            pr.key.delete()
            return
        elif pr.mode == 'key':
            edit_card_key(pr.house_key.get().name,
                          pr.card_key.get().card_key, pr.new_key)
            pr.key.delete()
            return
        elif pr.mode == 'content':
            edit_card_content(pr.house_key.get().name,
                              pr.card_key.get().card_key, pr.new_value)
            pr.key.delete()
            return
    else:
        return
Example #5
def test_it_ignores_checks_marked_as_such(db_session, default_config):
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(pull_request, ['1' * 40, '1' * 40],
                                 [[('coucou', 'success', 12),
                                   ('codecov/patch', 'error', 28)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    processor.run(db_session, retrier, notifier)

    assert_pr_equal(pull_request, pull_request, ('1' * 40, 'successful'))

    # we shouldn't have retried anything
    assert len(retrier.retried) == 0
    # but we should have cleaned up
    assert retrier.cleanup_count == 1

    # and let's check in the DB
    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'successful'))
    assert_checks_equal(pull_request,
                        db_session.query(Check).all(), ['coucou'])

    # running again should not do anything
    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()
Example #6
File: main.py Project: harishma/sympy-bot
 def upload_task(num, result, interpreter, testcommand, log):
     p = PullRequest.all()
     p.filter("num =", int(num))
     p = p.get()
     if p is None:
         # Create the pull request:
         p = PullRequest(num=num)
         p.put()
     t = Task(pullrequest=p)
     t.result = result
     t.interpreter = interpreter
     t.testcommand = testcommand
     t.log = log
     t.put()
     result = {"ok": True, "task_url": "%s/report/%s" % (url_base, t.key())}
     return result
Example #7
def test_retrier_can_alter_db_objects(db_session, default_config):
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(pull_request, ['1' * 40],
                                 [[('coucou', 'pending', 12),
                                   ('blah', 'error', 28)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)

    def retry_func(pr_processor, pr_checks_status):
        pr_processor.pull_request.last_processed_sha = '3' * 40
        assert len(pr_checks_status.retrying) == 1
        pr_checks_status.retrying[0].last_errored_id = 82

    retrier = FakeRetrier(processor, retry_func=retry_func)
    notifier = FakeNotifier(processor)

    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()

    # let's look at what's in the DB
    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('3' * 40, 'pending'))
    assert_checks_equal(pull_request,
                        db_session.query(Check).all(),
                        ['coucou', ('blah', 82, 1)])
Example #8
 def get(self, num):
     p = PullRequest.all()
     p.filter("num =", int(num))
     p = p.get()
     t = p.task_set
     t.order("uploaded_at")
     self.render("pullrequest.html", {'p': p, 'tasks': t})
Example #9
 def get(self):
     p_closed = PullRequest.all()
     p_closed.filter("state =", "closed")
     p_closed.order("-created_at")
     self.render("closed_pullrequests.html", {
         "pullrequests_closed": p_closed,
     })
Example #10
def test_pending_retry_checks_are_retriggered_after_a_while(
        db_session, default_config):
    with patch.object(Datetime, 'now') as patched_now:
        generator = Generator(datetime.datetime(2019, 1, 1, 12, 12),
                              datetime.datetime(2019, 1, 1, 12, 18))
        patched_now.side_effect = generator.next

        pull_request = PullRequest('moby/moby', 34567)
        gh_client = FakeGithubClient(pull_request, ['1' * 40, '1' * 40],
                                     [[('coucou', 'pending', 12),
                                       ('blah', 'error', 28)], []])

        processor = PullRequestProcessor(pull_request, gh_client,
                                         default_config)
        retrier = FakeRetrier(processor)
        notifier = FakeNotifier(processor)

        # then we run twice
        processor.run(db_session, retrier, notifier)
        processor.run(db_session, retrier, notifier)
        gh_client.assert_exhausted()
        generator.assert_exhausted()

        # we should have retried twice
        assert len(retrier.retried) == 2
        for retried in retrier.retried:
            assert_checks_equal(pull_request, retried.retrying,
                                [('blah', 28, 1)])
            assert_checks_equal(pull_request, retried.pending, ['coucou'])
            assert len(retrier.retried[0]) == 2
        assert retrier.cleanup_count == 0
Example #11
File: main.py Project: asmeurer/sympy-bot
 def get(self, num):
     p = PullRequest.all()
     p.filter("num =", int(num))
     p = p.get()
     t = p.task_set
     t.order("uploaded_at")
     self.render("pullrequest.html", {'p': p, 'tasks': t})
Example #12
def test_basic_retry(db_session, default_config):
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(pull_request, ['1' * 40],
                                 [[('coucou', 'pending', 12),
                                   ('blah', 'error', 28)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()

    assert_pr_equal(pull_request, pull_request, ('1' * 40, 'pending'))

    assert len(retrier.retried) == 1
    assert_checks_equal(pull_request, retrier.retried[0].retrying,
                        [('blah', 28, 1)])
    assert_checks_equal(pull_request, retrier.retried[0].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2
    assert retrier.cleanup_count == 0

    assert len(notifier.retrying()) == 1
    assert notifier.retrying()[0] is retrier.retried[0]
    assert len(notifier) == 1

    # let's look at what's in the DB
    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'pending'))
    assert_checks_equal(pull_request,
                        db_session.query(Check).all(),
                        ['coucou', ('blah', 28, 1)])
Example #13
 def convert_pull_request(self, pr):
     reviewers = [r.login for r in pr.get_reviewer_requests()]
     return PullRequest(reviewers=reviewers,
                        created_at=pr.created_at,
                        url=pr.html_url,
                        title=pr.title,
                        creator=pr.user.name,
                        config=self.config)
Example #14
    def test_PrcoessRepoLargePullRequestCount(self):
        testRepo = 3638964
        testRepoFullName = 'ansible/ansible'

        # Process a known repo
        ProcessRepo(testRepoFullName)

        # verify that a pagination was queued on the pullrequest import
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 1)

        # process the queued pagination
        ProcessGitHubRequest(1)

        myRepo = Repo.objects.get(pk=testRepo)

        # test that pull requests got imported
        # this repo should have more than 100 pull requests.
        # importing pull requests requires pagination to go over 100 entries
        self.assertGreater(myRepo.pullrequest_set.count(), 100)

        #verify that another request was queued
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 1)

        #### Test hitting the data cache threshold for large pull request chains.
        # get a random user_id
        myUser = choice(User.objects.all())

        myPullRequest = PullRequest(number=10000,
                                    repo=myRepo,
                                    created_at='2013-01-01 01:01',
                                    user=myUser,
                                    updated_at='2015-01-01 01:01')
        myPullRequest.save()

        # process the queued pagination
        ProcessGitHubRequest(1)

        # Verify that there are no more queued paginations
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 0)
Example #15
 def crawl(self, pull_request_id):
     '''
     Crawls the comments and puts them in Neo4J
     '''
     graph = GraphBuilder().GetNewGraph()
     pull_request = PullRequest.select(graph, pull_request_id).first()
     for repo in pull_request.ForRepository:
         self.copy_over_comments(repo.Id, pull_request)
     print("finished adding comments")
Example #16
File: main.py Project: asmeurer/sympy-bot
 def upload_task(num, result, interpreter, testcommand, log):
     p = PullRequest.all()
     p.filter("num =", int(num))
     p = p.get()
     if p is None:
         # Create the pull request:
         p = PullRequest(num=num)
         p.put()
     t = Task(pullrequest=p)
     t.result = result
     t.interpreter = interpreter
     t.testcommand = testcommand
     t.log = log
     t.put()
     result = {
         "ok": True,
         "task_url": "%s/report/%s" % (url_base, t.key())
     }
     return result
Example #17
File: main.py Project: wk8/github_retry
    def _fetch_prs(gh_client, config):
        user = config.get('github', 'user')
        if not user:
            raise RuntimeError('Missing Github username!')

        query = 'is:open is:pr author:%s archived:false' % (user, )

        return [
            PullRequest.from_url(issue.html_url)
            for issue in gh_client.search_issues(query)
        ]
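As a usage note, a rough sketch of how this query might be driven, modeled on the __main__ block shown in example #31 later in this listing; Config and the enclosing class name (PrFetcher below) are assumptions, while Github and search_issues come from PyGithub:

# Hypothetical driver; Config and PrFetcher are stand-ins, not names taken from the project.
from github import Github  # PyGithub

config = Config()
gh_client = Github(config.get('github', 'api_token'))
for pr in PrFetcher._fetch_prs(gh_client, config):
    print(pr)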
Example #18
File: main.py Project: harishma/sympy-bot
 def get(self):
     q = PullRequest.all()
     q.order("last_updated")
     # This is the request that wasn't updated for the longest time:
     p = q.get()
     if p is None:
         last_update = None
         last_update_pretty = "never"
     else:
         last_update = p.last_updated
         last_update_pretty = pretty_date(last_update)
     q = PullRequest.all()
     q.filter("state =", "open")
     q.order("last_updated")
     # This is the open request that wasn't updated for the longest time:
     p = q.get()
     if p is None:
         last_quick_update = None
         last_quick_update_pretty = "never"
     else:
         last_quick_update = p.last_updated
         last_quick_update_pretty = pretty_date(last_quick_update)
     p_mergeable = PullRequest.all()
     p_mergeable.filter("mergeable =", True)
     p_mergeable.filter("state =", "open")
     p_mergeable.order("-created_at")
     p_nonmergeable = PullRequest.all()
     p_nonmergeable.filter("mergeable =", False)
     p_nonmergeable.filter("state =", "open")
     p_nonmergeable.order("-created_at")
     self.render(
         "index.html",
         {
             "pullrequests_mergeable": p_mergeable,
             "pullrequests_nonmergeable": p_nonmergeable,
             "last_update": last_update,
             "last_update_pretty": last_update_pretty,
             "last_quick_update": last_quick_update,
             "last_quick_update_pretty": last_quick_update_pretty,
         },
     )
Example #19
File: main.py Project: asmeurer/sympy-bot
 def get(self):
     q = PullRequest.all()
     q.order("last_updated")
     # This is the request that wasn't updated for the longest time:
     p = q.get()
     if p is None:
         last_update = None
         last_update_pretty = "never"
     else:
         last_update = p.last_updated
         last_update_pretty = pretty_date(last_update)
     q = PullRequest.all()
     q.filter("state =", "open")
     q.order("last_updated")
     # This is the open request that wasn't updated for the longest time:
     p = q.get()
     if p is None:
         last_quick_update = None
         last_quick_update_pretty = "never"
     else:
         last_quick_update = p.last_updated
         last_quick_update_pretty = pretty_date(last_quick_update)
     p_mergeable = PullRequest.all()
     p_mergeable.filter("mergeable =", True)
     p_mergeable.filter("state =", "open")
     p_mergeable.order("-created_at")
     p_nonmergeable = PullRequest.all()
     p_nonmergeable.filter("mergeable =", False)
     p_nonmergeable.filter("state =", "open")
     p_nonmergeable.order("-created_at")
     self.render(
         "index.html", {
             "pullrequests_mergeable": p_mergeable,
             "pullrequests_nonmergeable": p_nonmergeable,
             "last_update": last_update,
             "last_update_pretty": last_update_pretty,
             "last_quick_update": last_quick_update,
             "last_quick_update_pretty": last_quick_update_pretty,
         })
Example #20
    def test_PrcoessRepoLargePullRequestCount(self):
        testRepo = 3638964
        testRepoFullName = 'ansible/ansible'

        # Process a known repo
        ProcessRepo(testRepoFullName)

        # verify that a pagination was queued on the pullrequest import
        myRequestCaches = GitHubRequestCache.objects.filter(started_at__isnull = True)
        self.assertEqual(myRequestCaches.count(), 1)

        # process the queued pagination
        ProcessGitHubRequest(1)

        myRepo = Repo.objects.get(pk = testRepo)

        # test that pull requests got imported
        # this repo should have more than 100 pull requests.
        # importing pull requests requires pagination to go over 100 entries
        self.assertGreater(myRepo.pullrequest_set.count(), 100)

        #verify that another request was queued
        myRequestCaches = GitHubRequestCache.objects.filter(started_at__isnull = True)
        self.assertEqual(myRequestCaches.count(), 1)

        #### Test hitting the data cache threshold for large pull request chains.
        # get a random user_id
        myUser = choice(User.objects.all())

        myPullRequest = PullRequest(number = 10000,repo=myRepo, created_at = '2013-01-01 01:01', user = myUser, updated_at='2015-01-01 01:01')
        myPullRequest.save()

        # process the queued pagination
        ProcessGitHubRequest(1)

        # Verify that there are no more queued paginations
        myRequestCaches = GitHubRequestCache.objects.filter(started_at__isnull = True)
        self.assertEqual(myRequestCaches.count(), 0)
Example #21
def reject_pull_request(house_name, user_id, date):
    pigeon_key = ndb.Key(Pigeon, user_id)
    house_list = House.query(House.name == house_name).fetch()
    house_key = house_list[0].key

    pr_list = PullRequest.query(PullRequest.pigeon_key == pigeon_key,
                                PullRequest.house_key == house_key,
                                PullRequest.date_str == date).fetch()
    if pr_list:
        pr = pr_list[0]
        pr.key.delete()
        return
    else:
        return
Example #22
def test_pending_retry_checks_are_left_alone(db_session, default_config):
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(
        pull_request, ['1' * 40, '1' * 40, '1' * 40],
        [[('coucou', 'pending', 12),
          ('blah', 'error', 28)], [], [('coucou', 'error', 12)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    # then we run twice
    processor.run(db_session, retrier, notifier)
    processor.run(db_session, retrier, notifier)

    assert_pr_equal(pull_request, pull_request, ('1' * 40, 'pending'))

    # everything should be the same as if we had just run once
    assert len(retrier.retried) == 1
    assert_checks_equal(pull_request, retrier.retried[0].retrying,
                        [('blah', 28, 1)])
    assert_checks_equal(pull_request, retrier.retried[0].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2
    assert retrier.cleanup_count == 0

    assert len(notifier.retrying()) == 1
    assert notifier.retrying()[0] is retrier.retried[0]
    assert len(notifier) == 1

    # let's look at what's in the DB
    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'pending'))
    assert_checks_equal(pull_request,
                        db_session.query(Check).all(),
                        ['coucou', ('blah', 28, 1)])

    # now let's run a 3rd time, 'coucou' fails
    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()

    assert len(retrier.retried) == 2
    assert_checks_equal(pull_request, retrier.retried[1].retrying,
                        [('coucou', 12, 1)])
    assert_checks_equal(pull_request, retrier.retried[1].retry_pending,
                        [('blah', 28, 1)])
    assert len(retrier.retried[1]) == 2
    assert retrier.cleanup_count == 0
Example #23
def show_all_pull_request(house_name):
    house_list = House.query(House.name == house_name).fetch()
    house_key = house_list[0].key
    pr_list = PullRequest.query(PullRequest.house_key == house_key).order(
        PullRequest.date).fetch()
    if pr_list:
        #print pr_list
        return map(
            lambda s: {
                "user_id": s.pigeon_key.get().pigeon_id,
                "mode": s.mode,
                "newkey": s.new_key,
                "newcontent": s.new_value,
                "date": s.date_str,
                "card_key": s.card_key.id()
            }, pr_list)
    else:
        return
Example #24
def test_resume_after_failure_if_new_patch(db_session, default_config):
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(
        pull_request, ['1' * 40, '1' * 40, '2' * 40],
        [[('coucou', 'pending', 12),
          ('fast_fail', 'error', 28)], [('fast_fail', 'error', 82)],
         [('coucou', 'pending', 13), ('fast_fail', 'error', 93)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    # then we run twice
    processor.run(db_session, retrier, notifier)
    processor.run(db_session, retrier, notifier)

    # we should only have retried once
    assert len(retrier.retried) == 1
    assert_checks_equal(pull_request, retrier.retried[0].retrying,
                        [('fast_fail', 28, 1)])
    assert_checks_equal(pull_request, retrier.retried[0].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2
    # and we should have cleaned up
    assert retrier.cleanup_count == 1

    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'failed'))

    # now let's run again, it's a new patch
    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()

    # we should have retried again
    assert len(retrier.retried) == 2
    assert_checks_equal(pull_request, retrier.retried[1].retrying,
                        [('fast_fail', 93, 1)])
    assert_checks_equal(pull_request, retrier.retried[1].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2

    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('2' * 40, 'pending'))
Example #25
def test_too_many_failures(db_session, default_config):
    pull_request = PullRequest('moby/moby', 34567)
    gh_client = FakeGithubClient(
        pull_request, ['1' * 40, '1' * 40, '1' * 40],
        [[('coucou', 'pending', 12),
          ('fast_fail', 'error', 28)], [('fast_fail', 'error', 82)]])

    processor = PullRequestProcessor(pull_request, gh_client, default_config)
    retrier = FakeRetrier(processor)
    notifier = FakeNotifier(processor)

    # then we run twice
    processor.run(db_session, retrier, notifier)
    processor.run(db_session, retrier, notifier)

    # we should only have retried once
    assert len(retrier.retried) == 1
    assert_checks_equal(pull_request, retrier.retried[0].retrying,
                        [('fast_fail', 28, 1)])
    assert_checks_equal(pull_request, retrier.retried[0].pending, ['coucou'])
    assert len(retrier.retried[0]) == 2
    # and we should have cleaned up
    assert retrier.cleanup_count == 1

    assert len(notifier.retrying()) == 1
    assert notifier.retrying()[0] is retrier.retried[0]
    assert len(notifier.too_many_failures()) == 1
    assert_checks_equal(pull_request,
                        notifier.too_many_failures()[0].too_many_failures,
                        [('fast_fail', 82, 2)])
    assert_checks_equal(pull_request,
                        notifier.too_many_failures()[0].pending, ['coucou'])
    assert len(notifier.too_many_failures()[0]) == 2
    assert len(notifier) == 2

    assert_pr_equal(pull_request,
                    db_session.query(PullRequest).all()[0],
                    ('1' * 40, 'failed'))

    # running again should not do anything
    processor.run(db_session, retrier, notifier)
    gh_client.assert_exhausted()
Example #26
    def retry(self, pr_processor, _pr_checks_status):
        gh_pr = self.__class__._github_pr(pr_processor)
        pr_repo = gh_pr.head.repo.full_name

        git_env = self.__class__._git_env(pr_processor, pr_repo)

        self.__class__._clone_repo_if_needed(pr_repo, git_env)

        self.__class__._git_command(pr_repo, 'clean -fdx')
        self.__class__._git_command(pr_repo, 'fetch origin', env=git_env)
        branch = gh_pr.head.ref
        self.__class__._git_command(pr_repo, 'checkout %s' % (branch, ))
        self.__class__._git_command(pr_repo,
                                    'reset --hard origin/%s' % (branch, ))
        self.__class__._git_command(pr_repo, 'commit --amend --no-edit')

        new_sha = self.__class__._git_command(pr_repo, 'rev-parse HEAD')
        if not PullRequest.is_valid_sha(new_sha):
            raise RuntimeError('New sha is invalid: %s' % (new_sha, ))
        pr_processor.pull_request.last_processed_sha = new_sha

        self.__class__._git_command(pr_repo, 'push --force', env=git_env)
Example #27
def send_pull_request(user_id, house_name, mode, card_key_str, new_key,
                      new_value):
    pigeon_key = ndb.Key(Pigeon, user_id)
    house_list = House.query(House.name == house_name).fetch()
    house_key = house_list[0].key
    card_key = ndb.Key(Card, card_key_str, parent=house_key)

    pr = PullRequest(pigeon_key=pigeon_key,
                     house_key=house_key,
                     card_key=card_key,
                     new_key=new_key,
                     new_value=new_value,
                     mode=mode,
                     date_str='')
    pr.put()
    date2str = str(pr.date)
    str_list = date2str.split('.')
    pr.date_str = str_list[0]
    pr.put()

    return
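Read together with examples #4, #21, and #23, a hedged sketch of the request-then-approve flow these NDB helpers imply; the house, user, and card values below are made up purely for illustration:

# Hypothetical call sequence (illustrative values only).
send_pull_request(user_id='pigeon42', house_name='gryffindor', mode='key',
                  card_key_str='card-1', new_key='motto', new_value='')
pending = list(show_all_pull_request('gryffindor') or [])  # see example #23
if pending:
    approve_pull_request('gryffindor', 'pigeon42', pending[0]['date'])
    # or: reject_pull_request('gryffindor', 'pigeon42', pending[0]['date'])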
Example #28
File: main.py Project: asmeurer/sympy-bot
 def post(self, url_path):
     last_row = UploadURL.all().order("-created_at").get()
     if last_row:
         if last_row.url_path == url_path:
             try:
                 payload = json.loads(self.request.get("payload"))
                 logging.info(payload)
             except json.JSONDecodeError:
                 self.error(400)
                 self.response.out.write("Incorrect request format\n")
             user_repo = payload["repository"]["full_name"]
             # Download complete pull request with information about mergeability
             pull_request = github_get_pull_request(user_repo,
                                                    payload["number"])
             num = payload["number"]
             # Get the old entity or create a new one:
             p = PullRequest.all()
             p.filter("num =", int(num))
             p = p.get()
             if p is None:
                 p = PullRequest(num=num)
             # Update all data that we can from GitHub:
             p.url = pull_request["html_url"]
             p.state = pull_request["state"]
             p.title = pull_request["title"]
             p.body = pull_request["body"]
             p.mergeable = pull_request["mergeable"]
             if pull_request["head"]["repo"]:
                 p.repo = pull_request["head"]["repo"]["url"]
             p.branch = pull_request["head"]["ref"]
             p.author_name = pull_request["user"].get("name", "")
             p.author_email = pull_request["user"].get("email", "")
             created_at = pull_request["created_at"]
             created_at = datetime.strptime(created_at,
                                            "%Y-%m-%dT%H:%M:%SZ")
             p.created_at = created_at
             u = User.all()
             u.filter("login ="******"user"]["login"])
             u = u.get()
             if u is None:
                 u = User(login=pull_request["user"]["login"])
                 u.id = pull_request["user"]["id"]
                 u.avatar_url = pull_request["user"]['avatar_url']
                 u.url = pull_request["user"]["url"]
                 u.put()
             p.author = u
             p.put()
         else:
             self.error(404)
             self.response.out.write("Requesting URL doesn't exist\n")
     else:
         self.error(500)
         self.response.out.write("URL for posting data not defined yet\n")
Example #29
    def save(self, requester, repo, assigner=None):
        commit_msg = self.cleaned_data["commit_msg"]
        from_head = self.cleaned_data["from_head"]
        to_head = self.cleaned_data["to_head"]
        commit_hexsha = self.cleaned_data["commit"]
        comment = self.cleaned_data["comment"]

        pull = PullRequest()
        pull.commit_msg = commit_msg
        pull.from_head = from_head
        pull.to_head = to_head
        pull.create_commit_hexsha = commit_hexsha
        pull.requester = requester
        pull.repo = repo
        if assigner:
            pull.assigner = assigner

        if comment:
            pull.comment = comment

        pull.save()

        return pull
Example #30
def to_row(repo, author, pr):
    """
    Convert PR elements to a row of data.

    After processing the input repo, author and PR, the last part is to
    get the counts for each possible review action and add them as columns to
    the row (using zero as default value).

    :param github.Repository.Repository repo: GitHub repo object.
    :param github.NamedUser.NamedUser author: GitHub user object.
    :param github.PullRequest.PullRequest pr: GitHub PR object.

    :return dict out_row: dict of data around a PR's repo, the PR author and
        the PR itself. The status-changed, created, and updated dates are kept
        as datetime.datetime objects.
    """
    pr_data = PullRequest(pr)

    latest_commit_at = pr_data.latest_commit.datetime.date()
    oldest_commit_at = pr_data.oldest_commit.datetime.date()
    days_between_commits = (latest_commit_at - oldest_commit_at + ONE_DAY).days

    latest_commit_author = lib.display(pr_data.latest_commit.author)
    oldest_commit_author = lib.display(pr_data.oldest_commit.author)

    out_row = {
        "Repo Owner": lib.display(repo.owner),
        "Repo Name": repo.name,
        "Repo URL": repo.html_url,
        "Author": lib.display(author),
        "PR ID": f"#{pr_data.number}",
        "PR Title": pr_data.title,
        "PR From Branch": pr_data.from_branch_name,
        "PR To Branch": pr_data.to_branch_name,
        "PR URL": pr_data.url,
        "Jira Ticket": pr_data.jira_ticket,
        "PR Updated At": pr_data.updated_at,
        "PR Created At": pr_data.created_at,
        "Latest Commit At": latest_commit_at,
        "Latest Commit Author": latest_commit_author,
        "Oldest Commit At": oldest_commit_at,
        "Oldest Commit Author": oldest_commit_author,
        "Days Between Commits": days_between_commits,
        "Status": pr_data.status,
        "Merged/Closed WOY": pr_data.status_changed_week_of_year(),
        "Merged/Closed Date": pr_data.status_changed_at(),
        "Merged By": pr_data.merged_by_name(),
        "Reviewers": ", ".join(pr_data.reviewer_names()),
        "Comments": pr_data.comment_count,
        "Commits": pr_data.commit_count,
        "Changed Files": pr_data.changed_files,
        "Added Lines": pr_data.additions,
        "Deleted Lines": pr_data.deletions,
        "Changed Lines": pr_data.additions + pr.deletions,
    }

    review_states = Counter([r.state for r in pr_data.reviews])
    [review_states.setdefault(s, 0) for s in Review.get_states()]
    out_row.update(**dict(review_states))

    return out_row
Example #31
        for comment in cls._get_all_comments_by_user(pr_processor):
            if not comment.body.startswith(cls._PREFIX):
                continue
            context = comment.body[len(cls._PREFIX):]
            if ' ' in context:
                continue
            if retry_pending is None or context not in retry_pending:
                comment.delete()


if __name__ == '__main__':
    from github import Github

    from pr_processor import PullRequestProcessor

    pull_request = PullRequest('kubernetes/kubernetes', 77953)
    # pull_request = PullRequest('moby/moby', 38349)
    config = Config()
    gh_client = Github(config.get('github', 'api_token'))
    processor = PullRequestProcessor(pull_request, gh_client, config)

    if False:
        GitAmendPushRetrier().retry(processor, None)

        print(pull_request.last_processed_sha)

    if False:
        print(CommentsRetrier._get_all_comments_by_user(processor))
        new_comment = CommentsRetrier._post_comment(processor, 'coucou')
        print(new_comment)
        new_comment.delete()
Example #32
    def post(self):
        user_repo = polled_user + "/" + polled_repo
        payload = github_get_pull_request_all_v3(user_repo)
        # checkout mergeability
        for pos in xrange(len(payload)):
            pull = github_get_pull_request(user_repo, payload[pos]["number"])
            payload[pos]["mergeable"] = pull["mergeable"]
        # Process each pull request from payload
        for pull in payload:
            p = PullRequest.all()
            num = pull["number"]
            p.filter("num =", num)
            p = p.get()
            if p is None:
                p = PullRequest(num=num)
            p.url = pull["html_url"]
            p.state = pull["state"]
            p.title = pull["title"]
            p.body = pull["body"]
            p.mergeable = pull["mergeable"]
            if pull["head"]["repo"]:
                p.repo = pull["head"]["repo"]["url"]
            p.branch = pull["head"]["ref"]
            created_at = pull["created_at"]
            created_at = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
            p.created_at = created_at

            # Collect public information about user
            u = User.all()
            login = pull["user"]["login"]
            u.filter("login ="******"user"]["id"]
            u.avatar_url = pull["user"]["avatar_url"]
            u.url = pull["user"]["url"]
            u.put()

            p.author = u
            p.put()
Example #33
 def post(self, url_path):
     last_row = UploadURL.all().order("-created_at").get()
     if last_row:
         if last_row.url_path == url_path:
             try:
                 payload = json.loads(self.request.get("payload"))
                 logging.info(payload)
             except json.JSONDecodeError:
                 self.error(400)
                 self.response.out.write("Incorrect request format\n")
             user_repo = payload["repository"]["full_name"]
             # Download complete pull request with information about mergeability
             pull_request = github_get_pull_request(user_repo, payload["number"])
             num = payload["number"]
             # Get the old entity or create a new one:
             p = PullRequest.all()
             p.filter("num =", int(num))
             p = p.get()
             if p is None:
                 p = PullRequest(num=num)
             # Update all data that we can from GitHub:
             p.url = pull_request["html_url"]
             p.state = pull_request["state"]
             p.title = pull_request["title"]
             p.body = pull_request["body"]
             p.mergeable = pull_request["mergeable"]
             if pull_request["head"]["repo"]:
                 p.repo = pull_request["head"]["repo"]["url"]
             p.branch = pull_request["head"]["ref"]
             p.author_name = pull_request["user"].get("name", "")
             p.author_email = pull_request["user"].get("email", "")
             created_at = pull_request["created_at"]
             created_at = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
             p.created_at = created_at
             u = User.all()
             u.filter("login ="******"user"]["login"])
             u = u.get()
             if u is None:
                 u = User(login=pull_request["user"]["login"])
                 u.id = pull_request["user"]["id"]
                 u.avatar_url = pull_request["user"]['avatar_url']
                 u.url = pull_request["user"]["url"]
                 u.put()
             p.author = u
             p.put()
         else:
             self.error(404)
             self.response.out.write("Requesting URL doesn't exist\n")
     else:
         self.error(500)
         self.response.out.write("URL for posting data not defined yet\n")
Example #34
 def convert_pull_request(self, pr):
     reviewers = [r.username for r in pr.reviewers]
     created_at = dateutil.parser.parse(pr.created_on)
     return PullRequest(reviewers=reviewers, created_at=created_at, url=pr.links['html']['href'],
                        title=pr.title, creator=pr.author.display_name, config=self.config)
Example #35
File: main.py Project: asmeurer/sympy-bot
    def post(self):
        user_repo = polled_user + "/" + polled_repo
        payload = github_get_pull_request_all_v3(user_repo)
        # checkout mergeability
        for pos in xrange(len(payload)):
            pull = github_get_pull_request(user_repo, payload[pos]["number"])
            payload[pos]["mergeable"] = pull["mergeable"]
        # Process each pull request from payload
        for pull in payload:
            p = PullRequest.all()
            num = pull["number"]
            p.filter("num =", num)
            p = p.get()
            if p is None:
                p = PullRequest(num=num)
            p.url = pull["html_url"]
            p.state = pull["state"]
            p.title = pull["title"]
            p.body = pull["body"]
            p.mergeable = pull["mergeable"]
            if pull["head"]["repo"]:
                p.repo = pull["head"]["repo"]["url"]
            p.branch = pull["head"]["ref"]
            created_at = pull["created_at"]
            created_at = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
            p.created_at = created_at

            # Collect public information about user
            u = User.all()
            login = pull["user"]["login"]
            u.filter("login ="******"user"]["id"]
            u.avatar_url = pull["user"]["avatar_url"]
            u.url = pull["user"]["url"]
            u.put()

            p.author = u
            p.put()
Example #36
 def txn():
     assert _type == "pullrequest"
     _num = int(self.request.get("num"))
     pull = github_get_pull_request("sympy/sympy", _num)
     p = PullRequest.all()
     p.filter("num =", int(_num))
     p = p.get()
     if p is None:
         p = PullRequest(num=_num)
     p.url = pull['html_url']
     p.state = pull["state"]
     p.title = pull["title"]
     p.body = pull["body"]
     p.mergeable = pull["mergeable"]
     if pull['head']['repo']:
         p.repo = pull['head']['repo']['url']
     p.branch = pull['head']['ref']
     p.author_name = pull["user"].get("name", "")
     p.author_email = pull["user"].get("email", "")
     created_at = pull["created_at"]
     created_at = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
     p.created_at = created_at
     p.put()
Example #37
    def update(self, full=False):
        data = github_get_pull_request_all_v3("sympy/sympy")
        if full:
            data += github_get_pull_request_all_v3("sympy/sympy", "closed")
        p = PullRequest.all()
        p.filter("state =", "open")
        open_list = [x.num for x in p]
        for pull in data:
            num = pull["number"]
            # Get the old entity or create a new one:
            p = PullRequest.all()
            p.filter("num =", int(num))
            p = p.get()
            if p is None:
                p = PullRequest(num=num)
            # Update all data that we can from GitHub:
            p.url = pull['html_url']
            p.state = pull["state"]
            p.title = pull["title"]
            p.body = pull["body"]
            created_at = pull["created_at"]
            created_at = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
            p.created_at = created_at

            u = User.all()
            u.filter("login ="******"user"]["login"])
            u = u.get()
            if u is None:
                u = User(login=pull["user"]["login"])
                u.put()
            p.author = u

            p.put()
            # Update the rest with a specific query to the pull request:
            if num not in open_list:
                # open_list pull requests will be updated below
                taskqueue.add(url="/worker", queue_name="github",
                        params={"type": "pullrequest", "num": num})
        for num in open_list:
            taskqueue.add(url="/worker", queue_name="github",
                    params={"type": "pullrequest", "num": num})
        if full:
            for u in User.all():
                taskqueue.add(url="/worker", queue_name="github",
                        params={"type": "user", "login": u.login})
Example #38
File: forms.py Project: shitiven/GitPower
    def save(self, requester, repo, assigner = None):
        commit_msg = self.cleaned_data["commit_msg"]
        from_head  = self.cleaned_data["from_head"]
        to_head  = self.cleaned_data["to_head"]
        commit_hexsha  = self.cleaned_data["commit"]
        comment  = self.cleaned_data["comment"]

        pull = PullRequest()
        pull.commit_msg = commit_msg
        pull.from_head  = from_head
        pull.to_head    = to_head
        pull.create_commit_hexsha = commit_hexsha
        pull.requester = requester
        pull.repo = repo
        if assigner:
            pull.assigner = assigner

        if comment:
            pull.comment = comment

        pull.save()

        return pull