Example #1
0
    def test_PrcoessRepoLargePullRequestCount(self):
        """Import a repo with more than 100 pull requests and check that
        the pagination request queue is created, drained, and re-queued
        as expected."""
        knownRepoId = 3638964
        knownRepoFullName = 'ansible/ansible'

        # Import a known repository.
        ProcessRepo(knownRepoFullName)

        # A pagination request should have been queued during the
        # pull-request import.
        pendingRequests = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(pendingRequests.count(), 1)

        # Drain the queued pagination.
        ProcessGitHubRequest(1)

        importedRepo = Repo.objects.get(pk=knownRepoId)

        # Importing more than 100 pull requests requires pagination,
        # so a count above 100 proves the paginated import ran.
        self.assertGreater(importedRepo.pullrequest_set.count(), 100)

        # Another pagination request should now be waiting.
        pendingRequests = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(pendingRequests.count(), 1)

        # Exercise the data-cache threshold for large pull request chains:
        # insert a high-numbered pull request owned by a random user.
        randomUser = choice(User.objects.all())

        extraPullRequest = PullRequest(number=10000,
                                       repo=importedRepo,
                                       created_at='2013-01-01 01:01',
                                       user=randomUser,
                                       updated_at='2015-01-01 01:01')
        extraPullRequest.save()

        # Drain the remaining queued pagination.
        ProcessGitHubRequest(1)

        # No pagination requests should remain in the queue.
        pendingRequests = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(pendingRequests.count(), 0)
Example #2
0
    def test_PrcoessRepoLargePullRequestCount(self):
        """Import a repo with more than 100 pull requests and verify that
        the pagination request queue is created, consumed, and emptied.

        Fixes PEP 8 violations in the original: no spaces around '=' in
        keyword arguments, and one multi-field constructor call split
        across lines instead of a single cramped line.
        """
        testRepo = 3638964
        testRepoFullName = 'ansible/ansible'

        # Process a known repo.
        ProcessRepo(testRepoFullName)

        # Verify that a pagination was queued on the pull-request import.
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 1)

        # Process the queued pagination.
        ProcessGitHubRequest(1)

        myRepo = Repo.objects.get(pk=testRepo)

        # Test that pull requests got imported. This repo should have more
        # than 100 pull requests, and importing that many requires
        # pagination to go over 100 entries.
        self.assertGreater(myRepo.pullrequest_set.count(), 100)

        # Verify that another request was queued.
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 1)

        # Test hitting the data cache threshold for large pull request
        # chains: create a high-numbered pull request for a random user.
        myUser = choice(User.objects.all())

        myPullRequest = PullRequest(number=10000,
                                    repo=myRepo,
                                    created_at='2013-01-01 01:01',
                                    user=myUser,
                                    updated_at='2015-01-01 01:01')
        myPullRequest.save()

        # Process the queued pagination.
        ProcessGitHubRequest(1)

        # Verify that there are no more queued paginations.
        myRequestCaches = GitHubRequestCache.objects.filter(
            started_at__isnull=True)
        self.assertEqual(myRequestCaches.count(), 0)
Example #3
0
    def save(self, requester, repo, assigner=None):
        """Build and persist a PullRequest from this form's cleaned data.

        The requester and repo are always set; the assigner and the
        comment are only set when they are truthy. Returns the saved
        PullRequest instance.
        """
        data = self.cleaned_data

        pull = PullRequest()
        pull.commit_msg = data["commit_msg"]
        pull.from_head = data["from_head"]
        pull.to_head = data["to_head"]
        pull.create_commit_hexsha = data["commit"]
        pull.requester = requester
        pull.repo = repo

        if assigner:
            pull.assigner = assigner

        comment = data["comment"]
        if comment:
            pull.comment = comment

        pull.save()
        return pull
Example #4
0
    def save(self, requester, repo, assigner=None):
        """Create and persist a PullRequest from this form's cleaned data.

        Args:
            requester: the user opening the pull request.
            repo: the repository the pull request belongs to.
            assigner: optional user to assign; only set when truthy.

        Returns:
            The saved PullRequest instance.

        Fixes PEP 8 violations in the original: no spaces around '=' in
        the keyword default, and single (unaligned) spaces around '=' in
        assignments.
        """
        commit_msg = self.cleaned_data["commit_msg"]
        from_head = self.cleaned_data["from_head"]
        to_head = self.cleaned_data["to_head"]
        commit_hexsha = self.cleaned_data["commit"]
        comment = self.cleaned_data["comment"]

        pull = PullRequest()
        pull.commit_msg = commit_msg
        pull.from_head = from_head
        pull.to_head = to_head
        pull.create_commit_hexsha = commit_hexsha
        pull.requester = requester
        pull.repo = repo
        # Assigner and comment are optional; falsy values are skipped.
        if assigner:
            pull.assigner = assigner

        if comment:
            pull.comment = comment

        pull.save()

        return pull