def SaveFailedBuild(context, build, build_failure_type):
  """Persists a LuciFailedBuild entity for the given failed build.

  Args:
    context (findit_v2.services.context.Context): Scope of the analysis.
    build (buildbucket build.proto): ALL info about the build.
    build_failure_type (str): Type of failures in build.

  Returns:
    The LuciFailedBuild entity that was saved.
  """
  builder = build.builder
  repo_url = git.GetRepoUrlFromContext(context)
  # Resolve the commit position of the revision the build ran at.
  commit_position = git.GetCommitPositionFromRevision(
      context.gitiles_id, repo_url=repo_url)

  build_entity = LuciFailedBuild.Create(
      luci_project=builder.project,
      luci_bucket=builder.bucket,
      luci_builder=builder.builder,
      build_id=build.id,
      legacy_build_number=build.number,
      gitiles_host=context.gitiles_host,
      gitiles_project=context.gitiles_project,
      gitiles_ref=context.gitiles_ref,
      gitiles_id=context.gitiles_id,
      commit_position=commit_position,
      status=build.status,
      create_time=build.create_time.ToDatetime(),
      start_time=build.start_time.ToDatetime(),
      end_time=build.end_time.ToDatetime(),
      build_failure_type=build_failure_type)
  build_entity.put()
  return build_entity
def testGetCommitPositionFromRevision(self, mocked_cl_info):
  """Tests that the commit position is read out of the mocked CL info."""
  revision = 'r1'
  commit_position = 1000
  # The mocked helper maps the requested revision to its change-log info.
  mocked_cl_info.return_value = {
      revision: {
          'revision': revision,
          'repo_name': 'chromium',
          'commit_position': commit_position,
          'url': 'url',
          'author': '*****@*****.**'
      }
  }
  self.assertEqual(commit_position,
                   git.GetCommitPositionFromRevision(revision))
def GetLatestCommitPositionAndRevision(master_name, builder_name, target_name):
  """Gets the latest commit position and revision for a configuration.

  Args:
    master_name (str): The name of the master to query.
    builder_name (str): The name of the builder to query.
    target_name (str): The desired target name.

  Returns:
    (int, str): The latest commit position known and its corresponding
        revision, or (None, None) when no build could be found.
  """
  latest_targets = IsolatedTarget.FindLatestIsolateByMaster(
      master_name, builder_name, services_constants.GITILES_HOST,
      services_constants.GITILES_PROJECT, services_constants.GITILES_REF,
      target_name)
  if latest_targets:
    newest_target = latest_targets[0]
    commit_position = newest_target.commit_position
    revision = newest_target.revision
    if not revision:
      # Historical data doesn't have revision; resolve it via crrev.
      commit_info = crrev.RedirectByCommitPosition(FinditHttpClient(),
                                                   commit_position)
      assert commit_info is not None, 'No info: r%d' % commit_position
      revision = commit_info['git_sha']
    return commit_position, revision

  # Fallback to buildbot for builds not yet migrated to LUCI.
  # TODO (crbug.com/804617): Remove fallback logic after migration is complete.
  luci_project, luci_bucket = buildbot.GetLuciProjectAndBucketForMaster(
      master_name)
  search_builds_response = buildbucket_client.SearchV2BuildsOnBuilder(
      BuilderID(project=luci_project, bucket=luci_bucket, builder=builder_name),
      page_size=1)
  if not search_builds_response:
    # Something is wrong. Calling code should be responsible for checking for
    # the return value.
    return None, None

  latest_build = search_builds_response.builds[0]
  revision = latest_build.input.gitiles_commit.id
  repo_url = git.GetRepoUrlFromV2Build(latest_build)
  commit_position = git.GetCommitPositionFromRevision(
      revision, repo_url=repo_url)
  return commit_position, revision
def SaveFlakeCulpritsForSuspectedRevisions(analysis_urlsafe_key,
                                           suspected_revisions,
                                           repo_name='chromium'):
  """Saves each suspect to the datastore as a FlakeCulprit.

  Also updates a MasterFlakeAnalysis' heuristic analysis results to include
  each suspect.

  Args:
    analysis_urlsafe_key (str): The urlsafe key of the MasterFlakeAnalysis to
        update.
    suspected_revisions (list): A list of revisions suspected to have caused
        the flakiness to create FlakeCulprits for.
    repo_name (str): Name of the repository the revisions belong to.
        Defaults to 'chromium'.
  """
  analysis = ndb.Key(urlsafe=analysis_urlsafe_key).get()
  assert analysis

  for revision in suspected_revisions:
    commit_position = git.GetCommitPositionFromRevision(revision)
    # Fixed typo in the assertion message ('Canot' -> 'Cannot').
    assert commit_position, ('Cannot create FlakeCulprit without commit '
                             'position')
    suspect = (
        FlakeCulprit.Get(repo_name, revision) or
        FlakeCulprit.Create(repo_name, revision, commit_position))

    if suspect.url is None:
      commits_info = git.GetCommitsInfo([revision])
      if commits_info:
        suspect.url = commits_info[revision]['url']
        suspect.put()
      else:
        # Without change-log info there is no usable culprit; skip it.
        logging.error('Unable to retrieve change logs for %s', revision)
        continue

    # Save each culprit to the analysis' list of heuristic culprits.
    suspect_urlsafe_key = suspect.key.urlsafe()
    if suspect_urlsafe_key not in analysis.suspect_urlsafe_keys:
      analysis.suspect_urlsafe_keys.append(suspect_urlsafe_key)

  analysis.heuristic_analysis_status = analysis_status.COMPLETED
  analysis.put()
def ExtractBuildInfoFromV2Build(master_name, builder_name, build_number, build):
  """Generates BuildInfo using bb v2 build info.

  This conversion is needed to keep Findit v1 running, will be deprecated in
  v2 (TODO: crbug.com/966982).

  Args:
    master_name (str): The name of the master.
    builder_name (str): The name of the builder.
    build_number (int): The build number.
    build (build_pb2.Build): All info about the build.

  Returns:
    (BuildInfo)
  """
  build_info = BuildInfo(master_name, builder_name, build_number)

  input_properties = json_format.MessageToDict(build.input.properties)
  runtime = input_properties.get('$recipe_engine/runtime') or {}
  repo_url = git.GetRepoUrlFromV2Build(build)

  build_info.chromium_revision = build.input.gitiles_commit.id
  build_info.commit_position = git.GetCommitPositionFromRevision(
      build.input.gitiles_commit.id, repo_url=repo_url)
  build_info.build_start_time = build.create_time.ToDatetime()
  build_info.build_end_time = build.end_time.ToDatetime()
  build_info.completed = bool(build_info.build_end_time)
  build_info.result = build.status
  build_info.parent_buildername = input_properties.get('parent_buildername')
  build_info.parent_mastername = input_properties.get('parent_mastername')
  build_info.buildbucket_id = str(build.id)
  build_info.buildbucket_bucket = build.builder.bucket
  build_info.is_luci = runtime.get('is_luci')
  build_info.blame_list = GetBlameListForV2Build(build)

  # Step categories:
  # 1. A step is passed if it is in SUCCESS status.
  # 2. A step is failed if it is in FAILURE status.
  # 3. A step is not passed if it is not in SUCCESS status. This category
  #    includes steps in statuses: FAILURE, INFRA_FAILURE, CANCELED, etc.
  unfinished_statuses = (common_pb2.STATUS_UNSPECIFIED, common_pb2.SCHEDULED,
                         common_pb2.STARTED)
  for step in build.steps:
    name = step.name
    status = step.status
    if status in unfinished_statuses:
      # Step hasn't completed yet; it belongs to no category.
      continue

    if status != common_pb2.SUCCESS:
      build_info.not_passed_steps.append(name)

    if name == 'Failure reason':
      # 'Failure reason' is always red when the build breaks or has exception,
      # but it is not a failed step.
      continue

    if not step.logs:
      # Skip wrapping steps.
      continue

    if status == common_pb2.SUCCESS:
      build_info.passed_steps.append(name)
    elif status == common_pb2.FAILURE:
      build_info.failed_steps.append(name)

  return build_info
def _OldEnough(try_bot_cache, bot_id):
  """Checks if the build in the given bot's cache is older than threshold."""
  cached_cp = try_bot_cache.full_build_commit_positions[bot_id]
  tip_of_tree_cp = git.GetCommitPositionFromRevision('HEAD')
  # Stale when the cached build trails tip-of-tree by more than the threshold.
  return tip_of_tree_cp - cached_cp > STALE_CACHE_AGE
def SaveCompileAnalysis(context, build, failures_without_existing_group,
                        should_group_failures):
  """Creates and saves CompileFailureAnalysis entity for the build being
  analyzed if there are first failures in the build.

  Args:
    context (findit_v2.services.context.Context): Scope of the analysis.
    build (buildbucket build.proto): ALL info about the build.
    failures_without_existing_group (dict): A dict for failures that happened
      the first time in current build and with no matching group.
      {
        'failures': {
          'compile': {
            'output_targets': ['target4', 'target1', 'target2'],
            'last_passed_build': {
              'id': 8765432109,
              'number': 122,
              'commit_id': 'git_sha1'
            },
          },
        },
        'last_passed_build': {
          'id': 8765432109,
          'number': 122,
          'commit_id': 'git_sha1'
        }
      }
    should_group_failures (bool): Project config for if failures should be
      grouped to reduce duplicated analyses.

  Returns:
    The saved CompileFailureAnalysis entity.
  """
  luci_project = context.luci_project_name
  project_api = projects.GetProjectAPI(luci_project)
  assert project_api, 'Unsupported project {}'.format(luci_project)

  rerun_builder = project_api.GetRerunBuilderId(build)

  # Gets keys to the compile failures that failed the first time in the build.
  # They will be the failures to analyze in the analysis.
  failure_keys = _GetCompileFailureKeys(build, failures_without_existing_group)

  repo_url = git.GetRepoUrlFromContext(context)
  last_passed_gitiles_id = (
      failures_without_existing_group['last_passed_build']['commit_id'])
  last_passed_cp = git.GetCommitPositionFromRevision(
      last_passed_gitiles_id, repo_url, ref=context.gitiles_ref)
  first_failed_cp = git.GetCommitPositionFromRevision(
      context.gitiles_id, repo_url, ref=context.gitiles_ref)

  if should_group_failures:
    _CreateAndSaveFailureGroupEntity(context, build, failure_keys,
                                     last_passed_gitiles_id, last_passed_cp,
                                     first_failed_cp)

  analysis = CompileFailureAnalysis.Create(
      luci_project=luci_project,
      luci_bucket=build.builder.bucket,
      luci_builder=build.builder.builder,
      build_id=build.id,
      gitiles_host=context.gitiles_host,
      gitiles_project=context.gitiles_project,
      gitiles_ref=context.gitiles_ref,
      last_passed_gitiles_id=last_passed_gitiles_id,
      last_passed_commit_position=last_passed_cp,
      first_failed_gitiles_id=context.gitiles_id,
      first_failed_commit_position=first_failed_cp,
      rerun_builder_id=rerun_builder,
      compile_failure_keys=failure_keys)
  analysis.Save()
  return analysis