Example #1
  def test_repo_scan_for_commits(self):
    my_project = model_helpers.create_project()
    my_project.put()
    my_repo = model_helpers.create_repo()
    my_repo.put()
    base_url = my_project.canonical_url_template % {'project': my_project.name}

    log_data = {u'log': [
        {
            'commit': 'deadbeef' * 5,
            'message': 'git-svn-id: svn://svn.chromium.org/chrome/trunk/'
                       'src@200000 0039d316-1c4b-4281-b951-d872f2087c98\n'
                       'Cr-Commit-Position: refs/heads/master@{#301813}',
        },
    ]}

    with self.mock_urlfetch() as urlfetch:
      urlfetch.register_handler(
          base_url + 'cool_src/+log/master?format=json&n=1000',
          self._gitiles_json(log_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1000' % ('deadbeef' * 5,),
          self._gitiles_json(log_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1' % ('deadbeef' * 5,),
          self._gitiles_json(log_data))
    controller.scan_repos()
    self.execute_queued_tasks()
    self.assertEqual(1, len(list(models.RevisionMap.query())))
    self.assertEqual(
        'deadbeef' * 5,
        models.RevisionMap.query().fetch()[0].git_sha)
    self.assertEqual(4, len(list(models.NumberingMap.query())))
Example #4
  def test_repo_scan_for_new_commits(self):
    """Test all forms of new commits, before and after what has been seen."""
    my_project = model_helpers.create_project()
    my_project.put()
    my_repo = model_helpers.create_repo()
    my_repo.put()
    base_url = my_project.canonical_url_template % {'project': my_project.name}

    commits = [
        {
            'commit': 'f007beef' * 5,
            'message': '',
        },
        {
            'commit': '000fbeef' * 5,
            'message': '',
        },
        {
            'commit': '700fbeef' * 5,
            'message': '',
        },
        {
            'commit': 'deadbeef' * 5,
            'message': '',
        },
        {
            'commit': 'feedbeef' * 5,
            'message': '',
        },
        {
            'commit': 'f00fbeef' * 5,
            'message': '',
        },
        {
            'commit': 'f33dbeef' * 5,
            'message': '',
        },
    ]

    log_data = {u'log': [
        commits[3],
    ]}

    with self.mock_urlfetch() as urlfetch:
      urlfetch.register_handler(
          base_url + 'cool_src/+log/master?format=json&n=1000',
          self._gitiles_json(log_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1000' % ('deadbeef' * 5,),
          self._gitiles_json(log_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1' % ('deadbeef' * 5,),
          self._gitiles_json(log_data))

    controller.scan_repos()
    self.execute_queued_tasks()

    log_data = {u'log': [
        commits[3],
    ]}

    with self.mock_urlfetch() as urlfetch:
      urlfetch.register_handler(
          base_url + 'cool_src/+log/master?format=json&n=1000',
          self._gitiles_json(log_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1000' % ('deadbeef' * 5,),
          self._gitiles_json(log_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1' % ('deadbeef' * 5,),
          self._gitiles_json(log_data))

    controller.scan_repos()
    self.execute_queued_tasks()

    # Clearing root_commit_scanned and first_commit forces the next scan to
    # walk history from before the commits already seen.
    my_repo = models.Repo.get_key_by_id(my_project.name, my_repo.repo).get()
    my_repo.root_commit_scanned = False
    my_repo.first_commit = None
    my_repo.put()

    log_data = {
        u'log': commits[0:2],
        'next': '000fbeef' * 5,
    }
    ooofbeef_data = {
        u'log': commits[1:3],
        'next': 'deadbeef',
    }
    deadbeef_data = {
        u'log': commits[3:5],
        'next': 'feedbeef' * 5,
    }
    feedbeef_data = {
        u'log': commits[-3:-1],
        'next': 'f00fbeef' * 5,
    }
    toofbeef_data = {
        u'log': commits[2:4],
        'next': 'feedbeef' * 5,
    }
    foofbeef_data = {
        u'log': commits[-2:],
    }
    with self.mock_urlfetch() as urlfetch:
      urlfetch.register_handler(
          base_url + 'cool_src/+log/master?format=json&n=1000',
          self._gitiles_json(log_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=2' % ('f007beef' * 5,),
          self._gitiles_json(log_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1000' % ('000fbeef' * 5,),
          self._gitiles_json(ooofbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=2' % ('000fbeef' * 5,),
          self._gitiles_json(ooofbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1000' % ('deadbeef' * 5,),
          self._gitiles_json(deadbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=2' % ('deadbeef' * 5,),
          self._gitiles_json(deadbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1000' % ('700fbeef' * 5,),
          self._gitiles_json(toofbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1' % ('700fbeef' * 5,),
          self._gitiles_json(toofbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=2' % ('700fbeef' * 5,),
          self._gitiles_json(toofbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1000' % ('feedbeef' * 5,),
          self._gitiles_json(feedbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=2' % ('feedbeef' * 5,),
          self._gitiles_json(feedbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=1000' % ('f00fbeef' * 5,),
          self._gitiles_json(foofbeef_data))
      urlfetch.register_handler(
          base_url + 'cool_src/+log/%s?format=json&n=2' % ('f00fbeef' * 5,),
          self._gitiles_json(foofbeef_data))
    controller.scan_repos()
    self.execute_queued_tasks()

    self.assertEqual(7, len(list(models.RevisionMap.query())))
    my_repo = models.Repo.get_key_by_id(my_project.name, my_repo.repo).get()
    self.assertTrue(my_repo.root_commit_scanned)
Example #6
File: views.py  Project: eunchong/infra
  def get(self):
    projects = controller.scan_repos()
    for project in projects:  # pragma: no cover
      logging.info('launching pipeline: %s' % project)
    self.response.write('pipelines: %s' % '<br>'.join(projects))