def testUpdateSuspectedBuild(self, mock_build_number):
  """A one-build regression range sets both suspected build id and number."""
  suspected_build_number = 120
  suspected_build_id = 1200
  mock_build_number.return_value = suspected_build_number
  lower_commit = 90
  upper_commit = 100

  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  # Flaky at the upper bound, stable at the lower bound.
  analysis.data_points = [
      DataPoint.Create(commit_position=upper_commit, pass_rate=0.4),
      DataPoint.Create(commit_position=lower_commit, pass_rate=1.0),
  ]
  analysis.Save()

  lower_target = IsolatedTarget.Create(suspected_build_id - 1, '', '', 'm',
                                       'b', '', '', '', '', '', '',
                                       lower_commit, '')
  upper_target = IsolatedTarget.Create(suspected_build_id, '', '', 'm', 'b',
                                       '', '', '', '', '', '', upper_commit,
                                       '')

  analysis.UpdateSuspectedBuild(lower_target, upper_target)

  self.assertEqual(suspected_build_id, analysis.suspected_build_id)
  self.assertEqual(suspected_build_number,
                   analysis.suspected_flake_build_number)
def testGenerateCommitIDsForBoundingTargetsQueryGit(self, mock_revisions):
  """Bounding revisions are resolved via a git query when not cached."""
  existing_data_points = [
      DataPoint.Create(commit_position=1010, git_hash='r1010'),
      DataPoint.Create(commit_position=1000, git_hash='r1000'),
  ]
  # The mocked git query maps each commit position to its revision.
  mock_revisions.return_value = {
      1003: 'r1003',
      1004: 'r1004',
      1005: 'r1005',
      1006: 'r1006',
      1007: 'r1007',
      1008: 'r1008',
      1009: 'r1009',
      1010: 'r1010'
  }

  lower_target = IsolatedTarget.Create(67890, '', '', 'm', 'b', '', '', '',
                                       '', '', '', 1003, None)
  upper_target = IsolatedTarget.Create(67890, '', '', 'm', 'b', '', '', '',
                                       '', '', '', 1008, None)

  expected_lower = CommitID(commit_position=1003, revision='r1003')
  expected_upper = CommitID(commit_position=1008, revision='r1008')
  self.assertEqual(
      (expected_lower, expected_upper),
      next_commit_position_utils.GenerateCommitIDsForBoundingTargets(
          existing_data_points, lower_target, upper_target))
  # The query walks back from the latest known data point to the lower bound.
  mock_revisions.assert_called_once_with('r1010', 1010, 1003)
def testGetBoundingIsolatedTargets(self):
  """Returns the stored targets that bound the requested commit position."""
  master_name = 'm'
  builder_name = 'b'
  target_name = 'browser_tests'
  build_id = 10000

  def _MakeTarget(target_build_id, isolated_hash, commit_position, revision):
    # All other identifying fields are shared between the two targets.
    return IsolatedTarget.Create(target_build_id, 'chromium', 'ci',
                                 master_name, builder_name,
                                 'chromium.googlesource.com', 'chromium/src',
                                 'refs/heads/master', '', target_name,
                                 isolated_hash, commit_position, revision)

  lower_target = _MakeTarget(build_id - 1, 'hash_1', 1000, 'r1000')
  lower_target.put()
  upper_target = _MakeTarget(build_id, 'hash_2', 1010, 'r1010')
  upper_target.put()

  # 1005 falls strictly between the two stored positions 1000 and 1010.
  self.assertEqual(
      (lower_target, upper_target),
      step_util.GetBoundingIsolatedTargets(master_name, builder_name,
                                           target_name, 1005))
def testUpdateSuspectedBuildRegressionRangeTooWide(self):
  """No suspected build is set when data points don't pin a single build."""
  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  # Data points at 80 and 100 straddle the targets' 90-100 range too loosely.
  analysis.data_points = [
      DataPoint.Create(commit_position=100),
      DataPoint.Create(commit_position=80),
  ]
  analysis.Save()

  lower_target = IsolatedTarget.Create(999, '', '', 'm', 'b', '', '', '', '',
                                       '', '', 90, '')
  upper_target = IsolatedTarget.Create(1000, '', '', 'm', 'b', '', '', '', '',
                                       '', '', 100, '')

  analysis.UpdateSuspectedBuild(lower_target, upper_target)

  self.assertIsNone(analysis.suspected_flake_build_number)
def testGetIsolateShaForTargetPipeline(self):
  """End-to-end: the pipeline outputs the isolated hash of the given target."""
  expected_isolated_hash = 'isolated_hash'
  isolated_target = IsolatedTarget.Create(
      100, 'chromium', 'ci', 'm', 'b', 'chromium.googlesource.com',
      'chromium/src', 'refs/heads/master', '', 'browser_tests',
      expected_isolated_hash, 1000, 'r1000')
  isolated_target.put()

  pipeline_input = GetIsolateShaForTargetInput(
      isolated_target_urlsafe_key=isolated_target.key.urlsafe())

  pipeline_job = GetIsolateShaForTargetPipeline(pipeline_input)
  pipeline_job.start()
  self.execute_queued_tasks()

  # Reload the pipeline to read its recorded default output.
  pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
  pipeline_output = pipeline_job.outputs.default.value

  self.assertEqual(expected_isolated_hash,
                   pipeline_output.get('isolate_sha'))
def testBuildUrl(self):
  """build_url is derived from the target's buildbucket build id."""
  buildbucket_id = 10000
  isolated_target = IsolatedTarget.Create(
      buildbucket_id, 'chromium', 'ci', 'chromium.linux', 'Linux Builder',
      'chromium.googlesource.com', 'chromium/src', 'refs/heads/master', '',
      'browser_tests', 'a1b2c3d4', 55000, '55000')
  self.assertEqual('https://ci.chromium.org/b/10000',
                   isolated_target.build_url)
def testIsolatedHash(self):
  """GetIsolatedHash returns the hash the target was created with."""
  expected_hash = 'isolated_hash'
  isolated_target = IsolatedTarget.Create(
      10000, 'chromium', 'ci', 'chromium.linux', 'Linux Builder',
      'chromium.googlesource.com', 'chromium/src', 'refs/heads/master', '',
      'browser_tests', expected_hash, 55000, '55000')
  self.assertEqual(expected_hash, isolated_target.GetIsolatedHash())
def testGenerateCommitIDsForBoundingTargets(self):
  """With no data points, commit IDs come straight from the targets."""
  lower_target = IsolatedTarget.Create(67890, '', '', 'm', 'b', '', '', '',
                                       '', '', '', 1000, 'r1000')
  upper_target = IsolatedTarget.Create(67890, '', '', 'm', 'b', '', '', '',
                                       '', '', '', 1010, 'r1010')

  expected_bounds = (CommitID(commit_position=1000, revision='r1000'),
                     CommitID(commit_position=1010, revision='r1010'))
  self.assertEqual(
      expected_bounds,
      next_commit_position_utils.GenerateCommitIDsForBoundingTargets(
          [], lower_target, upper_target))
def setUp(self):
  """Seeds 100 IsolatedTargets at commit positions 55000, 55013, 55026, ..."""
  super(IsolatedTargetTest, self).setUp()
  for offset in range(100):
    position = 55000 + offset * 13
    target = IsolatedTarget.Create(
        843400990909000 + offset, 'chromium', 'ci', 'chromium.linux',
        'Linux Builder', 'chromium.googlesource.com', 'chromium/src',
        'refs/heads/master', '', 'browser_tests', 'abcdef%dabcdef' % offset,
        position, '%d' % position)
    target.put()
def testUpdateSuspectedBuildExistingSuspectedBuild(self):
  """An already-set suspected build number is left untouched."""
  lower_commit = 90
  upper_commit = 100
  build_id = 1000

  analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
  analysis.data_points = [
      DataPoint.Create(commit_position=upper_commit),
      DataPoint.Create(commit_position=lower_commit),
  ]
  # Simulate a previous run having already identified a suspect.
  analysis.suspected_flake_build_number = 123
  analysis.Save()

  lower_target = IsolatedTarget.Create(build_id - 1, '', '', 'm', 'b', '', '',
                                       '', '', '', '', lower_commit, '')
  upper_target = IsolatedTarget.Create(build_id, '', '', 'm', 'b', '', '', '',
                                       '', '', '', upper_commit, '')

  analysis.UpdateSuspectedBuild(lower_target, upper_target)

  self.assertEqual(123, analysis.suspected_flake_build_number)
def testGetLatestCommitPositionAndRevisionWithTargets(self, mocked_revision,
                                                      mocked_target):
  """Latest commit position/revision are taken from the newest target."""
  expected_commit_position = 1000
  expected_revision = 'r1000'
  mocked_revision.return_value = {'git_sha': expected_revision}

  latest_target = IsolatedTarget.Create(87654321, '', '', 'm', 'b', '', '',
                                        '', '', 't', '',
                                        expected_commit_position, None)
  mocked_target.return_value = [latest_target]

  self.assertEqual(
      (expected_commit_position, expected_revision),
      build_util.GetLatestCommitPositionAndRevision('m', 'b', 't'))
def testGetIsolateShaForCommitPositionPipelineCommitLevel(
    self, mocked_reference_build, mocked_cache, mocked_dimensions):
  """Falls back to a flake try job when no isolated target matches.

  The requested commit position (1000) is below the only stored
  IsolatedTarget's commit position (1001), so the pipeline must compile at
  the exact revision via RunFlakeTryJobPipeline instead of reusing an
  existing isolate.

  NOTE(review): the mocked_* parameters are presumably injected by
  @mock.patch decorators outside this chunk -- confirm against the full file.
  """
  master_name = 'm'
  builder_name = 'b'
  build_number = 100
  step_name = 's'
  test_name = 't'
  dimensions = ['dimensions']
  # Analysis wants position 1000, but the nearest stored isolate was built at
  # position 1001, forcing the try-job path.
  requested_commit_position = 1000
  containing_build_commit_position = 1001
  containing_build_revision = 'r1001'
  requested_revision = 'r1000'
  expected_sha = 'sha1'
  cache_name = 'cache'
  try_job_id = 'try_job_id'
  url = 'url'
  isolate_target_name = 'browser_tests'
  step_metadata = StepMetadata(
      canonical_step_name=None,
      dimensions=None,
      full_step_name=None,
      isolate_target_name=isolate_target_name,
      patched=True,
      swarm_task_ids=None,
      waterfall_buildername=None,
      waterfall_mastername=None)
  build_id = 100
  luci_name = 'chromium'
  bucket_name = 'ci'
  gitiles_host = 'chromium.googlesource.com'
  gitiles_project = 'chromium/src'
  gitiles_ref = 'refs/heads/master'
  gerrit_patch = ''
  isolated_hash = 'isolated_hash'
  # Stored target at position 1001; it does NOT cover position 1000.
  isolated_target = IsolatedTarget.Create(
      build_id, luci_name, bucket_name, master_name, builder_name,
      gitiles_host, gitiles_project, gitiles_ref, gerrit_patch,
      isolate_target_name, isolated_hash, containing_build_commit_position,
      containing_build_revision)
  isolated_target.put()
  mocked_cache.return_value = cache_name
  mocked_dimensions.return_value = dimensions
  # Expected try-job output: isolate target name -> compiled isolate sha.
  expected_isolated_tests = IsolatedTests()
  expected_isolated_tests[isolate_target_name] = expected_sha
  build = BuildInfo(master_name, builder_name, build_number)
  build.commit_position = containing_build_commit_position
  mocked_reference_build.return_value = build
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()
  try_job = FlakeTryJob.Create(master_name, builder_name, step_name,
                               test_name, requested_revision)
  try_job.put()
  run_flake_try_job_parameters = RunFlakeTryJobParameters(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      revision=requested_revision,
      flake_cache_name=cache_name,
      isolate_target_name=isolate_target_name,
      dimensions=ListOfBasestring.FromSerializable(dimensions),
      urlsafe_try_job_key=try_job.key.urlsafe())
  get_sha_input = GetIsolateShaForCommitPositionParameters(
      analysis_urlsafe_key=unicode(analysis.key.urlsafe()),
      commit_position=requested_commit_position,
      revision=requested_revision,
      dimensions=ListOfBasestring.FromSerializable(dimensions),
      step_metadata=step_metadata,
      upper_bound_build_number=analysis.build_number)
  expected_try_job_report = FlakeTryJobReport(
      isolated_tests=expected_isolated_tests,
      last_checked_out_revision=None,
      previously_cached_revision=None,
      previously_checked_out_revision=None,
      metadata=None)
  expected_try_job_result = FlakeTryJobResult(
      report=expected_try_job_report, url=url, try_job_id=try_job_id)
  get_isolate_sha_for_try_job_pipeline = GetIsolateShaForTryJobParameters(
      try_job_result=expected_try_job_result, step_name=step_name)
  # Stub out the sub-pipelines so no real try job or swarming call happens.
  self.MockAsynchronousPipeline(RunFlakeTryJobPipeline,
                                run_flake_try_job_parameters,
                                expected_try_job_report)
  self.MockSynchronousPipeline(GetIsolateShaForTryJobPipeline,
                               get_isolate_sha_for_try_job_pipeline,
                               expected_sha)
  pipeline_job = GetIsolateShaForCommitPositionPipeline(get_sha_input)
  pipeline_job.start()
  self.execute_queued_tasks()
def testGetIsolateShaForCommitPositionPipelineMatchingTarget(
    self, mocked_reference_build):
  """Reuses an existing isolated target when it matches the exact position.

  A target already exists at the requested commit position (under the
  reference build's parent master/builder), so the pipeline should delegate
  to GetIsolateShaForTargetPipeline rather than launching a try job.

  NOTE(review): mocked_reference_build is presumably injected by a
  @mock.patch decorator outside this chunk -- confirm against the full file.
  """
  master_name = 'm'
  builder_name = 'b'
  parent_mastername = 'p_m'
  parent_buildername = 'p_b'
  build_number = 100
  build_id = 123
  test_name = 't'
  requested_commit_position = 1000
  requested_revision = 'r1000'
  expected_sha = 'sha1'
  build_url = 'url'
  luci_name = 'chromium'
  bucket_name = 'ci'
  gitiles_host = 'chromium.googlesource.com'
  gitiles_project = 'chromium/src'
  gitiles_ref = 'refs/heads/master'
  gerrit_patch = ''
  isolate_target_name = 'browser_tests'
  step_name = 's'
  isolated_hash = 'isolated_hash'
  # The matching-target path reports a build_url but no try_job_url.
  expected_output = GetIsolateShaOutput(
      isolate_sha=expected_sha,
      build_number=None,
      build_url=build_url,
      try_job_url=None)
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.Save()
  # Reference build points to the parent master/builder, which is where the
  # isolated target is registered below.
  build = BuildInfo(master_name, builder_name, build_number)
  build.commit_position = requested_commit_position
  build.parent_mastername = parent_mastername
  build.parent_buildername = parent_buildername
  mocked_reference_build.return_value = build
  # Target at exactly the requested commit position.
  isolated_target = IsolatedTarget.Create(
      build_id, luci_name, bucket_name, parent_mastername, parent_buildername,
      gitiles_host, gitiles_project, gitiles_ref, gerrit_patch,
      isolate_target_name, isolated_hash, requested_commit_position,
      requested_revision)
  isolated_target.put()
  step_metadata = StepMetadata(
      canonical_step_name=None,
      dimensions=None,
      full_step_name=None,
      isolate_target_name=isolate_target_name,
      patched=True,
      swarm_task_ids=None,
      waterfall_buildername=None,
      waterfall_mastername=None)
  get_sha_input = GetIsolateShaForCommitPositionParameters(
      analysis_urlsafe_key=unicode(analysis.key.urlsafe()),
      commit_position=requested_commit_position,
      dimensions=ListOfBasestring.FromSerializable([]),
      revision=requested_revision,
      step_metadata=step_metadata,
      upper_bound_build_number=analysis.build_number)
  get_sha_for_target_input = GetIsolateShaForTargetInput(
      isolated_target_urlsafe_key=isolated_target.key.urlsafe())
  # Expect delegation to the target sub-pipeline with the matching target.
  self.MockSynchronousPipeline(GetIsolateShaForTargetPipeline,
                               get_sha_for_target_input, expected_output)
  pipeline_job = GetIsolateShaForCommitPositionPipeline(get_sha_input)
  pipeline_job.start()
  self.execute_queued_tasks()
  pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
  pipeline_output = pipeline_job.outputs.default.value
  self.assertEqual(expected_output.ToSerializable(), pipeline_output)
def testNextCommitPositionPipelineContinueAnalysis(
    self, mock_reference_build, mock_heuristic, mock_next_commit,
    mock_bound_commits):
  """Continues the analysis with the next commit snapped to build bounds.

  The calculated next commit (999) is snapped to the nearest isolated
  target's commit position (990); no culprit is identified yet.

  NOTE(review): the mock_* parameters are presumably injected by
  @mock.patch decorators outside this chunk -- confirm against the full file.
  """
  master_name = 'm'
  builder_name = 'b'
  parent_mastername = 'p_m'
  parent_buildername = 'p_b'
  build_number = 100
  build_id = 10000
  step_name = 's'
  test_name = 't'
  start_commit_position = 1000
  expected_next_commit_id = CommitID(commit_position=990, revision='r990')
  # The reference build routes target lookups to the parent master/builder.
  reference_build = BuildInfo(master_name, builder_name, build_number)
  reference_build.commit_position = start_commit_position
  reference_build.parent_mastername = parent_mastername
  reference_build.parent_buildername = parent_buildername
  mock_reference_build.return_value = reference_build
  # Heuristic analysis is disabled for this scenario.
  mock_heuristic.return_value = False
  # Bisection proposes 999; the bound-commit lookup below snaps it to 990.
  calculated_next_commit_id = CommitID(commit_position=999, revision='r999')
  mock_next_commit.return_value = (calculated_next_commit_id, None)
  target_name = 'browser_tests'
  step_metadata = StepMetadata(
      canonical_step_name=None,
      dimensions=None,
      full_step_name=None,
      isolate_target_name=target_name,
      patched=True,
      swarm_task_ids=None,
      waterfall_buildername=None,
      waterfall_mastername=None)
  luci_name = 'chromium'
  bucket_name = 'ci'
  gitiles_host = 'chromium.googlesource.com'
  gitiles_project = 'chromium/src'
  gitiles_ref = 'refs/heads/master'
  gerrit_patch = ''
  lower_bound_target = IsolatedTarget.Create(
      build_id - 1, luci_name, bucket_name, parent_mastername,
      parent_buildername, gitiles_host, gitiles_project, gitiles_ref,
      gerrit_patch, target_name, 'hash_1',
      expected_next_commit_id.commit_position, None)
  lower_bound_target.put()
  upper_bound_target = IsolatedTarget.Create(
      build_id, luci_name, bucket_name, parent_mastername, parent_buildername,
      gitiles_host, gitiles_project, gitiles_ref, gerrit_patch, target_name,
      'hash_2', start_commit_position, None)
  upper_bound_target.put()
  mock_bound_commits.return_value = (
      expected_next_commit_id,
      CommitID(commit_position=start_commit_position, revision='r1000'))
  analysis = MasterFlakeAnalysis.Create(master_name, builder_name,
                                        build_number, step_name, test_name)
  analysis.data_points = [
      DataPoint.Create(commit_position=start_commit_position)
  ]
  analysis.Save()
  next_commit_position_input = NextCommitPositionInput(
      analysis_urlsafe_key=analysis.key.urlsafe(),
      commit_position_range=IntRange(lower=None,
                                     upper=start_commit_position),
      step_metadata=step_metadata)
  pipeline_job = NextCommitPositionPipeline(next_commit_position_input)
  pipeline_job.start()
  self.execute_queued_tasks()
  pipeline_job = pipelines.pipeline.Pipeline.from_id(pipeline_job.pipeline_id)
  next_commit_position_output = pipeline_job.outputs.default.value
  self.assertFalse(pipeline_job.was_aborted)
  # Analysis continues: no culprit yet, next commit snapped to 990.
  self.assertIsNone(next_commit_position_output['culprit_commit_id'])
  self.assertEqual(expected_next_commit_id.ToSerializable(),
                   next_commit_position_output['next_commit_id'])
  mock_bound_commits.assert_called_once_with(
      analysis.data_points, lower_bound_target, upper_bound_target)
def _IngestProto(build_id):
  """Process a build described in a proto, i.e. buildbucket v2 api format.

  Fetches the build, extracts its swarm-hash output properties and stores one
  IsolatedTarget entity per (property, target) pair found in them.

  Args:
    build_id (int): Buildbucket v2 id of the build to ingest. Must be truthy.

  Returns:
    A dict {'data': {'created_rows': [...]}} with the key pairs of the
    created entities on success; an error response if the build could not be
    fetched; or None when the build has no swarm_hashes property or no
    mastername property.
  """
  assert build_id
  build = GetV2Build(
      build_id,
      fields=FieldMask(
          paths=['id', 'output.properties', 'input', 'status', 'builder']))

  if not build:
    return BaseHandler.CreateError(
        'Could not retrieve build #%d from buildbucket, retry' % build_id, 404)

  # Sanity check.
  assert build_id == build.id

  properties_struct = build.output.properties
  commit = build.input.gitiles_commit
  patches = build.input.gerrit_changes

  # Convert the Struct to standard dict, to use .get, .iteritems etc.
  properties = dict(properties_struct.items())

  # Keep only output properties whose name matches the swarm-hashes pattern.
  swarm_hashes_properties = {}
  for k, v in properties.iteritems():
    if _PROP_NAME_REGEX.match(k):
      swarm_hashes_properties[k] = v

  if not swarm_hashes_properties:
    logging.debug('Build %d does not have swarm_hashes property', build_id)
    return

  # 'target_mastername' (set by builders that build on behalf of another
  # master) takes precedence over plain 'mastername'.
  master_name = properties.get('target_mastername',
                               properties.get('mastername'))

  if not master_name:
    logging.error('Build %d does not have expected "mastername" property',
                  build_id)
    return

  luci_project = build.builder.project
  luci_bucket = build.builder.bucket
  luci_builder = properties.get('target_buildername') or build.builder.builder

  if commit.host:
    gitiles_host = commit.host
    gitiles_project = commit.project
    gitiles_ref = commit.ref or 'refs/heads/master'
  else:
    # Non-ci build, use 'repository' property instead to get base revision
    # information.
    repo_url = urlparse.urlparse(properties.get('repository', ''))
    gitiles_host = repo_url.hostname or ''
    gitiles_project = repo_url.path or ''

    # Trim "/" prefix so that "/chromium/src" becomes
    # "chromium/src", also remove ".git" suffix if present.
    if gitiles_project.startswith('/'):  # pragma: no branch
      gitiles_project = gitiles_project[1:]
    if gitiles_project.endswith('.git'):  # pragma: no branch
      gitiles_project = gitiles_project[:-len('.git')]
    gitiles_ref = properties.get('gitiles_ref', 'refs/heads/master')

  # Encode the first gerrit patch (if any) as "host/change/patchset".
  gerrit_patch = None
  if len(patches) > 0:
    gerrit_patch = '/'.join(
        map(str, [patches[0].host, patches[0].change, patches[0].patchset]))

  entities = []
  for prop_name, swarm_hashes in swarm_hashes_properties.iteritems():
    # The property name itself encodes the ref, the commit position and
    # whether the build had a patch applied.
    ref, commit_position, with_patch = _DecodeSwarmingHashesPropertyName(
        prop_name)
    for target_name, isolated_hash in swarm_hashes.items():
      entities.append(
          IsolatedTarget.Create(
              build_id=build_id,
              luci_project=luci_project,
              bucket=luci_bucket,
              master_name=master_name,
              builder_name=luci_builder,
              gitiles_host=gitiles_host,
              gitiles_project=gitiles_project,
              gitiles_ref=gitiles_ref or ref,
              gerrit_patch=gerrit_patch if with_patch else '',
              target_name=target_name,
              isolated_hash=isolated_hash,
              commit_position=commit_position,
              revision=properties.get('got_revision')))
  result = [key.pairs() for key in ndb.put_multi(entities)]
  return {'data': {'created_rows': result}}