def __init__(self, commit_position, code_review_url, author=None,
             committer=None):
  self.commit_position = commit_position
  self.revision = 'rev_{}'.format(commit_position)
  self.code_review_url = code_review_url
  self.review_change_id = str(commit_position)
  self.review_server_host = 'host'
  self.author = author or Contributor('author', '*****@*****.**',
                                      '2018-05-17 00:49:48')
  self.committer = committer or Contributor('committer', '*****@*****.**',
                                            '2018-05-17 00:49:48')
def testIsAuthoredByNoAutoRevertAccount(self, mock_author):
  author = Contributor(
      '*****@*****.**', '*****@*****.**',
      datetime.strptime('Wed Jun 11 19:35:32 2014', '%a %b %d %H:%M:%S %Y'))
  mock_author.return_value = author
  self.assertFalse(git.IsAuthoredByNoAutoRevertAccount('rev1'))
def __init__(self, revision, touched_files, **kwargs):
  self.author = kwargs.get('author') or Contributor(
      'name', '*****@*****.**',
      # For testing purposes, derive the date from the revision as well.
      datetime.strptime('Jun %s 04:35:32 2015' % revision,
                        '%b %d %H:%M:%S %Y'))
  self.touched_files = touched_files
  self.revision = revision
def testFindCulpritSucceeds(self, mock_find_culprit):
  crash_identifiers = self.GetDummyChromeCrashData()['crash_identifiers']
  analysis = FracasCrashAnalysis.Create(crash_identifiers)
  analysis.status = analysis_status.RUNNING
  analysis.put()

  dummy_cl = ChangeLog(
      Contributor('AUTHOR_NAME', 'AUTHOR_EMAIL', 'AUTHOR_TIME'),
      Contributor('COMMITTER_NAME', 'COMMITTER_EMAIL', 'COMMITTER_TIME'),
      'REVISION',
      'COMMIT_POSITION',
      'MESSAGE',
      'TOUCHED_FILES',
      'COMMIT_URL',
  )
  dummy_project_path = 'PROJECT_PATH'
  dummy_suspect = Suspect(dummy_cl, dummy_project_path)
  dummy_culprit = Culprit(
      project='PROJECT',
      components=['COMPONENT_1', 'COMPONENT_2'],
      suspected_cls=[dummy_suspect],
      # N.B., we must use a list here for the assertion to work.
      # TODO(wrengr): fix that.
      regression_range=['VERSION_0', 'VERSION_1'],
      algorithm='ALGORITHM',
      success=True,
  )
  mock_find_culprit.return_value = dummy_culprit

  pipeline = crash_pipeline.CrashAnalysisPipeline(CrashClient.FRACAS,
                                                  crash_identifiers)
  pipeline.start()
  self.execute_queued_tasks()

  analysis = FracasCrashAnalysis.Get(crash_identifiers)
  self.assertEqual(analysis_status.COMPLETED, analysis.status)
  self.assertTrue(analysis.result['found'])
  self.assertTrue(analysis.found_suspects)
  self.assertTrue(analysis.found_project)
  self.assertTrue(analysis.found_components)
  dummy_suspect, dummy_tags = dummy_culprit.ToDicts()
  self.assertDictEqual(analysis.result, dummy_suspect)
def ChangeLogFromDict(data):
  touched_files = [
      FileChangeInfo.FromDict(touched_file)
      for touched_file in data.get('touched_files', [])
  ]
  author = Contributor(
      data.get('author', {}).get('name', 'author'),
      data.get('author', {}).get('email', 'email'),
      data.get('author', {}).get('time', 'time'))
  committer = Contributor(
      data.get('committer', {}).get('name', 'committer'),
      data.get('committer', {}).get('email', 'email'),
      data.get('committer', {}).get('time', 'time'))
  return ChangeLog(author, committer, data.get('revision'),
                   data.get('commit_position'), data.get('message'),
                   touched_files, data.get('commit_url'),
                   data.get('code_review_url'), data.get('reverted_revision'),
                   data.get('review_server_host'), data.get('review_change_id'))
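# A minimal usage sketch of ChangeLogFromDict (the input dict below is
# hypothetical; only the keys read by the function above are assumed).  Any
# key that is omitted falls back to the defaults used in the function:
# placeholder author/committer fields, an empty touched_files list, and None
# for the remaining ChangeLog arguments.
change_log = ChangeLogFromDict({
    'revision': 'abc123',
    'commit_position': 654321,
    'message': 'Fix crash in Foo.',
    'author': {
        'name': 'Jane Doe',
        'email': 'jane@example.com',
        'time': '2018-05-17 00:49:48',
    },
    # Each entry here would be parsed via FileChangeInfo.FromDict; left empty
    # to keep the sketch self-contained.
    'touched_files': [],
})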
def _inner(_self, _revision, n):
  result = []
  end_commit_position = 100
  end_datetime = SOME_TIME
  for i in range(n):
    result.append(
        MockedChangeLog(
            end_commit_position - i, 'url',
            committer=Contributor('committer', '*****@*****.**',
                                  end_datetime - (i * delta))))
  return result, next_rev
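# A minimal sketch of exercising _inner directly.  SOME_TIME, delta, and
# next_rev are assumed to be defined in the scope that encloses _inner (it is
# written as a nested mock helper); the values and module-level placement
# below are illustrative only.
from datetime import datetime, timedelta

SOME_TIME = datetime(2018, 1, 1)
delta = timedelta(hours=1)
next_rev = 'rev_99'

# _inner ignores its first two arguments (a mocked self and a starting
# revision) and returns n MockedChangeLog objects with decreasing commit
# positions and committer timestamps spaced `delta` apart, plus the revision
# to continue paging from.
logs, continuation = _inner(None, 'rev_100', 3)
assert [log.commit_position for log in logs] == [100, 99, 98]
assert continuation == 'rev_99'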
def testUpdateFlakeAnalysisDataPointsPipeline(self, mocked_change_log):
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.Save()

  commit_position = 1000
  pass_rate = 0.5
  revision = 'r1000'
  expected_time = datetime(2018, 9, 18, 0, 0, 0)

  # Mock the change log so the revision resolves to a known commit timestamp.
  committer = Contributor(name='name', email='email', time=expected_time)
  change_log = ChangeLog(None, committer, revision, None, None, None, None,
                         None)
  mocked_change_log.return_value = change_log

  # Flakiness measurement that the pipeline should convert into a data point.
  flakiness = Flakiness(
      build_number=123,
      build_url='url',
      commit_position=commit_position,
      total_test_run_seconds=100,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=50,
      pass_rate=pass_rate,
      revision=revision,
      try_job_url=None,
      task_ids=ListOfBasestring.FromSerializable(['task_id']))

  # Data point expected to be appended to the analysis.
  expected_data_point = DataPoint.Create(
      build_number=123,
      build_url='url',
      commit_position=commit_position,
      elapsed_seconds=100,
      error=None,
      failed_swarming_task_attempts=0,
      iterations=50,
      pass_rate=pass_rate,
      git_hash=revision,
      try_job_url=None,
      task_ids=['task_id'],
      commit_timestamp=expected_time)

  update_data_points_input = UpdateFlakeAnalysisDataPointsInput(
      analysis_urlsafe_key=analysis.key.urlsafe(), flakiness=flakiness)

  pipeline_job = UpdateFlakeAnalysisDataPointsPipeline(update_data_points_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  self.assertEqual(1, len(analysis.data_points))
  self.assertEqual(expected_data_point, analysis.data_points[0])
def testIsAuthoredByAutoRevertAccount(self, mock_author):
  emails = [
      'chromium-internal-autoroll@'
      'skia-corp.google.com.iam.gserviceaccount.com',
      '*****@*****.**',
      '*****@*****.**'
  ]
  for email in emails:
    author = Contributor(
        'autoroller', email,
        datetime.strptime('Wed Jun 11 19:35:32 2014', '%a %b %d %H:%M:%S %Y'))
    mock_author.return_value = author
    self.assertTrue(git.IsAuthoredByNoAutoRevertAccount('rev1'))
def _ContributorFromDict(self, data):
  return Contributor(data['name'],
                     commit_util.NormalizeEmail(data['email']),
                     self._GetDateTimeFromString(data['time']))
def testGetAuthor(self, mock_change_log):
  revision = 'rev2'
  mock_change_log.return_value = self._MockGetChangeLog(revision)
  self.assertEqual(
      Contributor('author', '*****@*****.**', '2018-05-17 00:49:48'),
      git.GetAuthor(revision))
def __init__(self, date, touched_files):
  self.author = Contributor(
      'name', '*****@*****.**',
      datetime.strptime('Jun %s 04:35:32 2015' % date, '%b %d %H:%M:%S %Y'))
  self.touched_files = touched_files