def GetComments(issue_id, monorail_project='chromium'):
  """Returns a list of Monorail Comment objects given an issue id."""
  issue_tracker_api = IssueTrackerAPI(
      monorail_project, use_staging=appengine_util.IsStaging())
  try:
    return issue_tracker_api.getComments(issue_id)
  except HttpError as e:
    logging.warning('Failed to get comments of issue %d: %s', issue_id, e)
    return []
def HandleGet(self):
  """Updates the metrics based on Monorail bugs."""
  issue_tracker_api = IssueTrackerAPI(
      'chromium', use_staging=appengine_util.IsStaging())
  for metric, client_to_query in METRIC_TO_CLIENT_TO_QUERY.iteritems():
    for client, query in client_to_query.iteritems():
      issues = issue_tracker_api.getIssues(query)
      logging.info('Fetched %d issues for client %s using query %s',
                   len(issues), client, query)
      getattr(monitoring, metric).set(
          len(issues), fields={'client_id': client})
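# An illustrative sketch of the shape METRIC_TO_CLIENT_TO_QUERY is assumed to
# have in the handler above: metric name -> {client id -> Monorail query}.
# The concrete metric name, client id, and query below are assumptions for
# illustration only, not values from the source.
METRIC_TO_CLIENT_TO_QUERY = {
    'flake_bugs': {
        'findit': 'label:Test-Flaky is:open',
    },
}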
def CreateBug(issue, project_id='chromium'):
  """Creates a bug with the given information.

  Returns:
    (int) id of the bug that was filed.
  """
  assert issue
  issue_tracker_api = IssueTrackerAPI(
      project_id, use_staging=appengine_util.IsStaging())
  issue_tracker_api.create(issue)
  return issue.id
def UpdateBug(issue, comment, project_id='chromium'):
  """Updates a bug with the given information and comment."""
  assert issue
  issue_tracker_api = IssueTrackerAPI(
      project_id, use_staging=appengine_util.IsStaging())
  try:
    issue_tracker_api.update(issue, comment, send_email=True)
  except HttpError as e:
    logging.warning('Failed to update monorail issue %d: %s.', issue.id, e)
    return issue.id

  return issue.id
def GetMergedDestinationIssueForId(issue_id,
                                   monorail_project='chromium',
                                   force_follow_merge_chain=False):
  """Given an id, traverses the merge chain to get the destination issue.

  Args:
    issue_id: The id to get the merged destination issue for.
    monorail_project: The Monorail project the issue is on.
    force_follow_merge_chain: True to traverse the merge chain regardless of
      the status of the requested issue; False to only traverse when the issue
      is in Duplicate status. issue.merged_into is the most recent issue it was
      merged into, so if an issue was merged and then unmerged, this field will
      still have a value.

  Returns:
    The destination issue if the original issue was merged, otherwise the issue
    itself. Returns None if there is an exception while communicating with
    Monorail.

    NOTE: If there is a cycle in the merge chain, the first visited issue in
    the cycle will be returned.
  """
  if issue_id is None:
    return None

  issue_tracker_api = IssueTrackerAPI(
      monorail_project, use_staging=appengine_util.IsStaging())
  issue = GetMonorailIssueForIssueId(
      issue_id, issue_tracker_api=issue_tracker_api)

  visited_issues = set()
  while issue and issue.merged_into and (force_follow_merge_chain or
                                         issue.status == 'Duplicate'):
    logging.info('Issue %s was merged into %s on project: %s.', issue.id,
                 issue.merged_into, monorail_project)
    visited_issues.add(issue)
    merged_issue = GetMonorailIssueForIssueId(
        issue.merged_into, issue_tracker_api=issue_tracker_api)
    if not merged_issue:
      # Cannot access merged_issue; it could be a restricted issue.
      return issue

    issue = merged_issue
    if issue in visited_issues:
      # There is a cycle in the merge chain; bail out.
      break

  return issue
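# A minimal test-style sketch (not part of the original module) of how the
# cycle handling above could be exercised. It assumes the functions live in a
# module importable as monorail_util; the module path, the mock-based issues,
# and the test name are illustrative assumptions.
import mock

from services import monorail_util  # Assumed module path for the code above.


@mock.patch.object(
    monorail_util.appengine_util, 'IsStaging', return_value=False)
@mock.patch.object(monorail_util, 'IssueTrackerAPI')
@mock.patch.object(monorail_util, 'GetMonorailIssueForIssueId')
def testMergeChainWithCycle(self, mocked_get_issue, *_):
  # Two issues merged into each other form a cycle: 1 -> 2 -> 1.
  issue_a = mock.Mock(id=1, merged_into=2, status='Duplicate')
  issue_b = mock.Mock(id=2, merged_into=1, status='Duplicate')
  mocked_get_issue.side_effect = (
      lambda issue_id, issue_tracker_api=None: {1: issue_a, 2: issue_b}[
          issue_id])

  # The first revisited issue in the cycle is returned instead of looping
  # forever.
  self.assertEqual(
      issue_a, monorail_util.GetMergedDestinationIssueForId(1))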
def AsyncProcessFlakeReport(flake_analysis_request, user_email, is_admin):
  """Pushes a task onto the backend to process the flake report."""
  if appengine_util.IsStaging():
    # Bail out on staging.
    logging.info(
        'Got flake_analysis_request for %s on staging. No flake '
        'analysis runs on staging.', flake_analysis_request.name)
    return

  target = appengine_util.GetTargetNameForModule(constants.WATERFALL_BACKEND)
  payload = pickle.dumps((flake_analysis_request, user_email, is_admin))
  taskqueue.add(
      url=constants.WATERFALL_PROCESS_FLAKE_ANALYSIS_REQUEST_URL,
      payload=payload,
      target=target,
      queue_name=constants.WATERFALL_FLAKE_ANALYSIS_REQUEST_QUEUE)
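# A minimal, self-contained sketch of the payload round trip used above: the
# tuple pushed onto the task queue is pickled here and would be unpickled by
# the backend handler. The FakeFlakeAnalysisRequest type and its fields are
# illustrative assumptions, not the real request model.
import collections
import pickle

FakeFlakeAnalysisRequest = collections.namedtuple(
    'FakeFlakeAnalysisRequest', ['name'])

payload = pickle.dumps(
    (FakeFlakeAnalysisRequest(name='suite.test'), 'user@chromium.org', False))
flake_analysis_request, user_email, is_admin = pickle.loads(payload)
assert flake_analysis_request.name == 'suite.test'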
def GetIssuesClosedWithinAWeek(query, monorail_project):
  """Searches for bugs that match the query and were closed within a week.

  Args:
    query: A query to search for bugs on Monorail.
    monorail_project: The Monorail project to search in.

  Returns:
    A list of recently closed bugs that match the query.
  """
  issue_tracker_api = IssueTrackerAPI(
      monorail_project, use_staging=appengine_util.IsStaging())
  issues = issue_tracker_api.getIssues(query)
  if not issues:
    return []

  return [issue for issue in issues if IsIssueClosedWithinAWeek(issue)]
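# A hedged sketch of the IsIssueClosedWithinAWeek helper referenced above,
# which is not shown in this excerpt. It assumes the Monorail Issue object
# exposes a naive UTC `closed` datetime attribute; both the attribute name and
# the implementation are assumptions for illustration.
from datetime import datetime, timedelta


def IsIssueClosedWithinAWeek(issue):
  """Returns True if the issue was closed within the last 7 days."""
  return bool(issue.closed) and (
      datetime.utcnow() - issue.closed < timedelta(days=7))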
def GetMergedDestinationIssueForId(issue_id, monorail_project='chromium'):
  """Given an id, traverses the merge chain to get the destination issue.

  Args:
    issue_id: The id to get the merged destination issue for.
    monorail_project: The Monorail project the issue is on.

  Returns:
    The destination issue if the original issue was merged, otherwise the issue
    itself. Returns None if there is an exception while communicating with
    Monorail.

    NOTE: If there is a cycle in the merge chain, the first visited issue in
    the cycle will be returned.
  """
  if issue_id is None:
    return None

  issue_tracker_api = IssueTrackerAPI(
      monorail_project, use_staging=appengine_util.IsStaging())
  issue = GetMonorailIssueForIssueId(
      issue_id, issue_tracker_api=issue_tracker_api)

  visited_issues = set()
  while issue and issue.merged_into:
    logging.info('Issue %s was merged into %s on project: %s.', issue.id,
                 issue.merged_into, monorail_project)
    visited_issues.add(issue)
    merged_issue = GetMonorailIssueForIssueId(
        issue.merged_into, issue_tracker_api=issue_tracker_api)
    if not merged_issue:
      # Cannot access merged_issue; it could be a restricted issue.
      return issue

    issue = merged_issue
    if issue in visited_issues:
      # There is a cycle in the merge chain; bail out.
      break

  return issue
def GetMonorailIssueForIssueId(issue_id,
                               monorail_project='chromium',
                               issue_tracker_api=None):
  """Returns a Monorail Issue object representation given an issue_id.

  Args:
    issue_id (int): The id to query Monorail with.
    monorail_project (str): The project name to query Monorail with.
    issue_tracker_api (IssueTrackerAPI): When provided, there is no need to
      create a new one.

  Returns:
    (Issue): An Issue object representing what is currently stored on Monorail.
  """
  issue_tracker_api = issue_tracker_api or IssueTrackerAPI(
      monorail_project, use_staging=appengine_util.IsStaging())
  try:
    return issue_tracker_api.getIssue(issue_id)
  except HttpError as e:
    logging.warning('Failed to download monorail issue %d: %s.', issue_id, e)
    return None
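# A small usage sketch, relying on the surrounding module's imports: when
# looking up several issues, a single IssueTrackerAPI instance can be created
# once and passed in so the client is not re-created per lookup. The issue ids
# below are illustrative assumptions.
issue_tracker_api = IssueTrackerAPI(
    'chromium', use_staging=appengine_util.IsStaging())
for issue_id in (123, 456, 789):
  issue = GetMonorailIssueForIssueId(
      issue_id, issue_tracker_api=issue_tracker_api)
  if issue:
    logging.info('Issue %d status: %s', issue_id, issue.status)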
def GetOpenIssues(query, monorail_project):
  """Searches for open bugs that match the query.

  This method wraps a call to IssueTrackerAPI.getIssues(). It is needed
  because it is unclear from the API name and documentation whether the
  returned issues are all open, so the `open` property is checked to make sure
  that only open bugs are considered.

  Args:
    query: A query to search for bugs on Monorail.
    monorail_project: The Monorail project to search in.

  Returns:
    A list of open bugs that match the query.
  """
  issue_tracker_api = IssueTrackerAPI(
      monorail_project, use_staging=appengine_util.IsStaging())
  issues = issue_tracker_api.getIssues(query)
  if not issues:
    return []

  return [issue for issue in issues if issue.open]
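# A minimal usage sketch for GetOpenIssues, relying on the surrounding
# module's imports. The query string and project are illustrative assumptions;
# any valid Monorail search query would be handled the same way.
open_flake_bugs = GetOpenIssues(
    'label:Test-Flaky is:open summary:"suite.test"', 'chromium')
for bug in open_flake_bugs:
  logging.info('Open flaky-test bug: %s', bug.id)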
def AnalyzeBuildFailures(self, request):
  """Returns analysis results for the given build failures in the request.

  Analysis of build failures will be triggered automatically on demand.

  Args:
    request (_BuildFailureCollection): A list of build failures.

  Returns:
    _BuildFailureAnalysisResultCollection
    A list of analysis results for the given build failures.
  """
  _ValidateOauthUser()

  results = []
  supported_builds = []
  confidences = SuspectedCLConfidence.Get()

  for build in request.builds:
    master_name = buildbot.GetMasterNameFromUrl(build.master_url)
    if not (master_name and waterfall_config.MasterIsSupported(master_name)):
      logging.info('%s/%s/%s is not supported', build.master_url,
                   build.builder_name, build.build_number)
      continue

    supported_builds.append({
        'master_name': master_name,
        'builder_name': build.builder_name,
        'build_number': build.build_number,
        'failed_steps': sorted(build.failed_steps),
    })

    # If the build failure was already analyzed and a new analysis is
    # scheduled to analyze new failed steps, the returned WfAnalysis will
    # still have the result from the last completed analysis.
    # If there is no analysis yet, no result is returned.
    heuristic_analysis = WfAnalysis.Get(master_name, build.builder_name,
                                        build.build_number)
    if not heuristic_analysis:
      continue

    self._GenerateResultsForBuild(build, heuristic_analysis, results,
                                  confidences)

  logging.info('%d build failure(s), while %d are supported',
               len(request.builds), len(supported_builds))

  if appengine_util.IsStaging():
    # Findit staging accepts requests, but does not actually run any analyses.
    logging.info('Got build failure requests on staging. No analysis runs on '
                 'staging.')
    return _BuildFailureAnalysisResultCollection(results=[])

  try:
    supported_builds.sort()
    _AsyncProcessFailureAnalysisRequests(supported_builds)
  except Exception:  # pragma: no cover.
    # If we fail to post a task to the task queue, ignore it and wait for the
    # next request.
    logging.exception('Failed to add analysis request to task queue: %s',
                      repr(supported_builds))

  return _BuildFailureAnalysisResultCollection(results=results)
def testNonStagingApp(self, _):
  self.assertFalse(appengine_util.IsStaging())
def testStagingApp(self, _):
  self.assertTrue(appengine_util.IsStaging())
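# A minimal sketch (an assumption, not the verified implementation) of the
# appengine_util.IsStaging helper both tests above exercise: it would compare
# the current GAE application id against a staging naming convention. The
# '-staging' suffix is an illustrative guess.
from google.appengine.api import app_identity


def IsStaging():
  """Returns True if the app is running as the staging instance."""
  return app_identity.get_application_id().endswith('-staging')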