def test_project_scan_lag(self):
  """Test that the project_scan_lag endpoint properly calculates lag."""
  model_helpers.create_project().put()
  my_repo = model_helpers.create_repo()
  my_repo.put()

  generated_time = u'1970-01-02T00:00:00.000000'
  response = self._make_api_call(
      'get_project_lag_list', params={'generated': generated_time})
  expected = {
      u'projects': [
          {
              u'project': my_repo.project,
              u'total_active_repos': u'1',
              u'repos_without_root': u'1',
              u'repos_with_root': u'0',
              u'scanned_repos': u'1',
              u'unscanned_repos': u'0',
              u'generated': generated_time,
              u'most_lagging_repo': u'%s:%s' % (
                  my_repo.project, my_repo.repo),
              u'max': float(24 * 60 * 60),
              u'min': float(24 * 60 * 60),
              u'p50': float(24 * 60 * 60),
              u'p75': float(24 * 60 * 60),
              u'p90': float(24 * 60 * 60),
              u'p95': float(24 * 60 * 60),
              u'p99': float(24 * 60 * 60),
          },
      ],
      u'generated': generated_time,
  }
  self.assertEqual(expected, response.json)

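# Why every percentile above is 86400.0: the repo stored by
# model_helpers.create_repo() is assumed to leave a single scanned repo whose
# newest commit is exactly one day older than the requested 'generated'
# timestamp, so the project's lag sample set is the single value
# 24 * 60 * 60 seconds. Every percentile of a one-element distribution is
# that element, hence min == max == p50 == p75 == p90 == p95 == p99.
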
def test_repo_scan_for_commits(self):
  my_project = model_helpers.create_project()
  my_project.put()
  my_repo = model_helpers.create_repo()
  my_repo.put()
  base_url = my_project.canonical_url_template % {'project': my_project.name}

  log_data = {u'log': [
      {
          'commit': 'deadbeef' * 5,
          'message': ('git-svn-id: svn://svn.chromium.org/chrome/trunk/'
                      'src@200000 0039d316-1c4b-4281-b951-d872f2087c98\n'
                      'Cr-Commit-Position: refs/heads/master@{#301813}'),
      },
  ]}
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(
        base_url + 'cool_src/+log/master?format=json&n=1000',
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1000' % ('deadbeef' * 5,),
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1' % ('deadbeef' * 5,),
        self._gitiles_json(log_data))

    controller.scan_repos()
    self.execute_queued_tasks()

  self.assertEqual(1, len(list(models.RevisionMap.query())))
  self.assertEqual(
      'deadbeef' * 5, models.RevisionMap.query().fetch()[0].git_sha)
  self.assertEqual(4, len(list(models.NumberingMap.query())))

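# A note on the final assertion: the mocked commit message carries two
# numberings, a git-svn-id revision (src@200000) and a Cr-Commit-Position
# (refs/heads/master@{#301813}). Four NumberingMap entries is consistent
# with each numbering being indexed in both directions (number -> commit and
# commit -> number), but the exact keying scheme is internal to
# models.NumberingMap and is an assumption here.
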
def test_get_projects(self):
  """Test that all projects are listed."""
  my_project = model_helpers.create_project()
  my_project.put()

  response = self._make_api_call('get_projects')
  expected = {u'items': [
      my_project.ToMessage(),
  ]}
  resp = model_helpers.convert_items_to_protos(models.Project, response.json)
  self.assertEqual(expected, resp)

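# The endpoint returns plain JSON, so model_helpers.convert_items_to_protos()
# presumably re-parses the response's items as models.Project messages; the
# assertion then compares like with like against ToMessage() on the stored
# entity rather than comparing raw dicts to messages.
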
def test_calculate_lag_stats(self):
  model_helpers.create_project().put()
  my_repo = model_helpers.create_repo()
  my_repo.put()
  second_repo = model_helpers.create_repo()
  second_repo.repo = 'cooler'
  second_repo.root_commit_scanned = True
  second_repo.last_scanned = None
  second_repo.put()

  generated_time = datetime.datetime(1970, 1, 2)
  expected = models.ProjectLagList(
      generated=generated_time,
      projects=[
          models.ProjectLagStats(
              project=my_repo.project,
              total_active_repos=2,
              repos_without_root=1,
              repos_with_root=1,
              scanned_repos=1,
              unscanned_repos=1,
              generated=generated_time,
              most_lagging_repo='%s:%s' % (my_repo.project, my_repo.repo),
              max=float(24 * 60 * 60),
              min=float(24 * 60 * 60),
              p50=float(24 * 60 * 60),
              p75=float(24 * 60 * 60),
              p90=float(24 * 60 * 60),
              p95=float(24 * 60 * 60),
              p99=float(24 * 60 * 60),
          ),
      ],
  )
  generated = controller.calculate_lag_stats(generated=generated_time)
  self.assertEqual(expected, generated)

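# How the expected counts fall out of the fixtures: second_repo has
# root_commit_scanned=True but last_scanned=None, so it counts toward
# repos_with_root and unscanned_repos, while my_repo is the opposite on both
# axes. Only the scanned repo contributes a lag sample, so it is also the
# most_lagging_repo and, as in test_project_scan_lag above, the one-element
# sample makes all the percentile fields equal 86400.0.
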
def test_project_repo_scan(self):
  my_project = model_helpers.create_project()
  my_project.put()
  base_url = my_project.canonical_url_template % {'project': my_project.name}

  repo_data = {'cool_src': {}}
  log_data = {u'log': []}
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(
        base_url + '?format=json&n=1000',
        self._gitiles_json(repo_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/master?format=json&n=1',
        self._gitiles_json(log_data))

    controller.scan_projects_for_repos()
    self.execute_queued_tasks()

  self.assertEqual(1, len(controller.get_active_repos(my_project.name)))

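# scan_projects_for_repos() is a two-step scan: the project root listing
# (?format=json&n=1000) discovers candidate repos, and a one-commit log
# probe (+log/master?format=json&n=1) on each candidate presumably decides
# whether it is recorded as active, which is why both handlers are
# registered above.
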
def test_empty_calculate_lag_stats(self):
  my_project = model_helpers.create_project()
  my_project.put()

  generated_time = datetime.datetime(1970, 1, 2)
  expected = models.ProjectLagList(
      generated=generated_time,
      projects=[
          models.ProjectLagStats(
              project=my_project.name,
              total_active_repos=0,
              repos_without_root=0,
              repos_with_root=0,
              scanned_repos=0,
              unscanned_repos=0,
              generated=generated_time,
          ),
      ],
  )
  generated = controller.calculate_lag_stats(generated=generated_time)
  self.assertEqual(expected, generated)

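# With no repos at all there are no lag samples, so only the counting fields
# appear in the expected ProjectLagStats; most_lagging_repo and the
# min/max/percentile fields are presumably left unset on the message.
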
def test_repo_scan_for_new_commits(self):
  """Test all forms of new commits, before and after what has been seen."""
  my_project = model_helpers.create_project()
  my_project.put()
  my_repo = model_helpers.create_repo()
  my_repo.put()
  base_url = my_project.canonical_url_template % {'project': my_project.name}

  commits = [
      {'commit': 'f007beef' * 5, 'message': ''},
      {'commit': '000fbeef' * 5, 'message': ''},
      {'commit': '700fbeef' * 5, 'message': ''},
      {'commit': 'deadbeef' * 5, 'message': ''},
      {'commit': 'feedbeef' * 5, 'message': ''},
      {'commit': 'f00fbeef' * 5, 'message': ''},
      {'commit': 'f33dbeef' * 5, 'message': ''},
  ]

  # First scan: only deadbeef is visible on master.
  log_data = {u'log': [
      commits[3],
  ]}
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(
        base_url + 'cool_src/+log/master?format=json&n=1000',
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1000' % ('deadbeef' * 5,),
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1' % ('deadbeef' * 5,),
        self._gitiles_json(log_data))

    controller.scan_repos()
    self.execute_queued_tasks()

  # Second scan over the same data: nothing new should be ingested.
  log_data = {u'log': [
      commits[3],
  ]}
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(
        base_url + 'cool_src/+log/master?format=json&n=1000',
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1000' % ('deadbeef' * 5,),
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1' % ('deadbeef' * 5,),
        self._gitiles_json(log_data))

    controller.scan_repos()
    self.execute_queued_tasks()

  # Force a rescan all the way back to the root commit.
  my_repo = models.Repo.get_key_by_id(my_project.name, my_repo.repo).get()
  my_repo.root_commit_scanned = False
  my_repo.first_commit = None
  my_repo.put()

  log_data = {
      u'log': commits[0:2],
      'next': '000fbeef' * 5,
  }
  ooofbeef_data = {
      u'log': commits[1:3],
      'next': 'deadbeef',
  }
  deadbeef_data = {
      u'log': commits[3:5],
      'next': 'feedbeef' * 5,
  }
  feedbeef_data = {
      u'log': commits[-3:-1],
      'next': 'f00fbeef' * 5,
  }
  toofbeef_data = {
      u'log': commits[2:4],
      'next': 'feedbeef' * 5,
  }
  foofbeef_data = {
      u'log': commits[-2:],
  }
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(
        base_url + 'cool_src/+log/master?format=json&n=1000',
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=2' % ('f007beef' * 5,),
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1000' % ('000fbeef' * 5,),
        self._gitiles_json(ooofbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=2' % ('000fbeef' * 5,),
        self._gitiles_json(ooofbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1000' % ('deadbeef' * 5,),
        self._gitiles_json(deadbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=2' % ('deadbeef' * 5,),
        self._gitiles_json(deadbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1000' % ('700fbeef' * 5,),
        self._gitiles_json(toofbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1' % ('700fbeef' * 5,),
        self._gitiles_json(toofbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=2' % ('700fbeef' * 5,),
        self._gitiles_json(toofbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1000' % ('feedbeef' * 5,),
        self._gitiles_json(feedbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=2' % ('feedbeef' * 5,),
        self._gitiles_json(feedbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=1000' % ('f00fbeef' * 5,),
        self._gitiles_json(foofbeef_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/%s?format=json&n=2' % ('f00fbeef' * 5,),
        self._gitiles_json(foofbeef_data))

    controller.scan_repos()
    self.execute_queued_tasks()

  self.assertEqual(7, len(list(models.RevisionMap.query())))
  my_repo = models.Repo.get_key_by_id(my_project.name, my_repo.repo).get()
  self.assertTrue(my_repo.root_commit_scanned)

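# The fake pages above emulate gitiles pagination: a +log response may carry
# a 'next' key naming the commit the following page starts from, and a page
# without 'next' means the walk bottomed out at the root commit, which is
# what flips root_commit_scanned back to True. A minimal sketch of that
# walk, where fetch_page is a hypothetical stand-in for the mocked urlfetch
# plus JSON decoding:
#
#   def walk_log(fetch_page, start):
#     commits, cursor = [], start
#     while cursor is not None:
#       page = fetch_page(cursor)  # one gitiles +log page
#       commits.extend(c['commit'] for c in page['log'])
#       cursor = page.get('next')  # absent on the final (root) page
#     return commits
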
def test_project_repo_scan_active(self):
  my_project = model_helpers.create_project()
  my_project.put()
  base_url = my_project.canonical_url_template % {'project': my_project.name}

  repo_data = {
      'cool_src': {},
      'cooler_src': {},
      'uncool_src': {},
  }
  log_data = {u'log': []}
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(
        base_url + '?format=json&n=1000',
        self._gitiles_json(repo_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/master?format=json&n=1',
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cooler_src/+log/master?format=json&n=1',
        self._gitiles_json(log_data))
    # Don't register a handler for uncool_src, so its log fetch fails and
    # the repo is never marked active.

    controller.scan_projects_for_repos()
    self.execute_queued_tasks()

  self.assertEqual(2, len(controller.get_active_repos(my_project.name)))

  # Now test that active repos become inactive when they go away.
  repo_data = {
      'cooler_src': {},
  }
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(
        base_url + '?format=json&n=1000',
        self._gitiles_json(repo_data))
    urlfetch.register_handler(
        base_url + 'cooler_src/+log/master?format=json&n=1',
        self._gitiles_json(log_data))

    controller.scan_projects_for_repos()
    self.execute_queued_tasks()

  self.assertEqual(1, len(controller.get_active_repos(my_project.name)))

  # And test that they can come back.
  repo_data = {
      'cool_src': {},
      'cooler_src': {},
  }
  with self.mock_urlfetch() as urlfetch:
    urlfetch.register_handler(
        base_url + '?format=json&n=1000',
        self._gitiles_json(repo_data))
    urlfetch.register_handler(
        base_url + 'cool_src/+log/master?format=json&n=1',
        self._gitiles_json(log_data))
    urlfetch.register_handler(
        base_url + 'cooler_src/+log/master?format=json&n=1',
        self._gitiles_json(log_data))

    controller.scan_projects_for_repos()
    self.execute_queued_tasks()

  self.assertEqual(2, len(controller.get_active_repos(my_project.name)))

def test_get_projects(self):
  """Test that get_projects returns all stored projects."""
  my_project = model_helpers.create_project()
  my_project.put()
  self.assertEqual([my_project], controller.get_projects())