def get_usage():
    nrql = "SELECT%20max(needInvestigation)%20FROM%20push_health_need_investigation%20FACET%20revision%20SINCE%201%20DAY%20AGO%20TIMESERIES%20where%20repo%3D'{}'%20AND%20appName%3D'{}'".format(
        'try', 'treeherder-prod'
    )
    new_relic_url = '{}?nrql={}'.format(settings.NEW_RELIC_INSIGHTS_API_URL, nrql)
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'X-Query-Key': settings.NEW_RELIC_INSIGHTS_API_KEY,
    }

    # TODO: make this check happen during deploy or setup?  Not here.
    if not settings.NEW_RELIC_INSIGHTS_API_KEY:
        logger.error('NEW_RELIC_INSIGHTS_API_KEY not set.')

    resp = make_request(new_relic_url, headers=headers)
    data = resp.json()
    push_revisions = [facet['name'] for facet in data['facets']]
    pushes = Push.objects.filter(revision__in=push_revisions)

    results = [
        {
            'push': PushSerializer(pushes.get(revision=facet['name'])).data,
            'peak': get_peak(facet),
            'latest': get_latest(facet),
        }
        for facet in data['facets']
    ]

    return results
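# A minimal usage sketch for get_usage() above, assuming Django settings provide
# NEW_RELIC_INSIGHTS_API_URL / NEW_RELIC_INSIGHTS_API_KEY and this module is importable
# (e.g. from a Django shell). For readability, the URL-encoded NRQL sent above decodes to:
#   SELECT max(needInvestigation) FROM push_health_need_investigation
#   FACET revision SINCE 1 DAY AGO TIMESERIES where repo='try' AND appName='treeherder-prod'
# The keys read below ('push', 'peak', 'latest') come from the dicts built in get_usage();
# the 'revision' field being present in PushSerializer output is an assumption.
def print_usage_summary():
    for entry in get_usage():
        print(
            '{revision}: peak={peak}, latest={latest}'.format(
                revision=entry['push']['revision'],
                peak=entry['peak'],
                latest=entry['latest'],
            )
        )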
def get_commit_history(repository, revision, push):
    mozciPush = MozciPush([revision], repository.name)
    parent = mozciPush.parent
    parent_sha = parent.revs[-1]
    parents = Push.objects.filter(repository__name=parent.branch, revision=parent_sha)
    parent_repo = Repository.objects.get(name=parent.branch)
    parent_push = parents[0] if len(parents) else None

    resp = {
        'parentSha': parent_sha,
        'exactMatch': False,
        'parentPushRevision': None,
        'parentRepository': RepositorySerializer(parent_repo).data,
        'id': None,
        'jobCounts': None,
        'revisions': [
            CommitSerializer(commit).data for commit in push.commits.all().order_by('-id')
        ],
        'revisionCount': push.commits.count(),
        'currentPush': PushSerializer(push).data,
    }

    if parent_push:
        resp.update(
            {
                # This will be the revision of the parent, as long as we could find a Push in
                # Treeherder for it.
                'parentPushRevision': parent_push.revision,
                'id': parent_push.id,
                'jobCounts': parent_push.get_status(),
                'exactMatch': parent_sha == parent_push.revision,
            }
        )

    return resp
def retrieve(self, request, project, pk=None):
    """
    GET method implementation for detail view of ``push``
    """
    try:
        push = Push.objects.get(repository__name=project, id=pk)
        serializer = PushSerializer(push)
        return Response(serializer.data)
    except Push.DoesNotExist:
        return Response("No push with id: {0}".format(pk), status=HTTP_404_NOT_FOUND)
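# A brief sketch of exercising the retrieve() endpoint above with Django's test client.
# The URL pattern ('/api/project/<project>/push/<pk>/') and the 'autoland' project name are
# assumptions about how this ViewSet is routed; adjust to the project's actual router config.
# Intended to run inside the project's Django/pytest test setup, not standalone.
def test_push_detail_not_found(client):
    response = client.get('/api/project/autoland/push/99999999/')
    # retrieve() returns HTTP 404 with a plain string body when the push id is unknown
    assert response.status_code == 404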
def get_commit_history(mozciPush, push):
    parent = None
    parent_sha = None
    parent_push = None

    try:
        parent = mozciPush.parent
    except Exception as e:
        logger.error('Could not retrieve parent push because {}'.format(e))

    if parent:
        parent_sha = parent.revs[-1]
        parents = Push.objects.filter(
            repository__name=parent.branch, revision=parent_sha
        ).select_related('repository')
        parent_push = parents[0] if len(parents) else None

    revisions = [
        CommitSerializer(commit).data for commit in push.commits.all().order_by('-id')
    ]

    resp = {
        'parentSha': parent_sha,
        'exactMatch': False,
        'parentPushRevision': None,
        'id': None,
        'revisions': revisions,
        'revisionCount': len(revisions),
        'currentPush': PushSerializer(push).data,
    }

    if parent_push:
        resp.update(
            {
                # This will be the revision of the parent, as long as we could find a Push in
                # Treeherder for it.
                'parentRepository': parent_push.repository.name,
                'parentPushRevision': parent_push.revision,
                'id': parent_push.id,
                'exactMatch': parent_sha == parent_push.revision,
            }
        )

    return resp
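# A hedged sketch of how get_commit_history(mozciPush, push) above might be wired up from a
# view or task: look up the local Push, build the mozci Push wrapper for the head revision
# (same constructor as used earlier in this module), and hand both to the helper.
# 'commit_history_for' is a hypothetical helper name; the project/revision values are inputs.
def commit_history_for(project_name, revision):
    # raises Push.DoesNotExist if the revision is not ingested; handle upstream as needed
    push = Push.objects.get(repository__name=project_name, revision=revision)
    mozci_push = MozciPush([revision], project_name)
    return get_commit_history(mozci_push, push)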
def get_response_object(parent_sha, revisions, revision_count, push, repository, current_push):
    """Build a response object that shows the parent and commit history.

    parent_sha -- The SHA of the parent of the latest commit
    revisions -- The revisions/commits of the current Push
    revision_count -- The count of those revisions (may be different from len(revisions)
        because we only keep so many actual revisions in Treeherder, even if the Push has more).
    push -- The Push for the parent.  This might be the actual parent Push, or the closest
        thing we could find.  Could also be the Push for the commit of the ``parent_sha``.
    repository -- The repository of the parent.  If we can't find a parent Push, then this
        will be the repository of the current Push.
    """
    resp = {
        'parentSha': parent_sha,
        'exactMatch': False,
        'parentPushRevision': None,
        'parentRepository': RepositorySerializer(repository).data,
        'id': None,
        'jobCounts': None,
        'revisions': revisions,
        'revisionCount': revision_count,
        'parentPush': None,
        'currentPush': PushSerializer(current_push).data,
    }

    if push:
        resp.update(
            {
                # This will be the revision of the parent, as long as we could find a Push in
                # Treeherder for it.
                'parentPushRevision': push.revision,
                'id': push.id,
                'jobCounts': push.get_status(),
                'exactMatch': parent_sha == push.revision,
                'parentPush': push,
            }
        )

    return resp
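# A hedged sketch of feeding get_response_object() above from pieces computed by the
# commit-history helpers: serialized commits of the current push, the candidate parent Push
# (or None), and the parent repository. 'build_parent_response' is a hypothetical wrapper;
# the inputs simply mirror the parameters documented in the docstring.
def build_parent_response(current_push, parent_sha, parent_push, parent_repo):
    revisions = [
        CommitSerializer(commit).data
        for commit in current_push.commits.all().order_by('-id')
    ]
    return get_response_object(
        parent_sha,
        revisions,
        current_push.commits.count(),
        parent_push,  # may be None when no matching Push was found
        parent_repo,
        current_push,
    )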
def list(self, request, project):
    """
    GET method for list of ``push`` records with revisions
    """
    # What is the upper limit on the number of pushes returned by the api
    MAX_PUSH_COUNT = 1000

    # make a mutable copy of these params
    filter_params = request.query_params.copy()

    # This will contain some meta data about the request and results
    meta = {}

    # support ranges for date as well as revisions(changes) like old tbpl
    for param in ["fromchange", "tochange", "startdate", "enddate", "revision"]:
        v = filter_params.get(param, None)
        if v:
            del filter_params[param]
            meta[param] = v

    try:
        repository = Repository.objects.get(name=project)
    except Repository.DoesNotExist:
        return Response({
            "detail": "No project with name {}".format(project)
        }, status=HTTP_404_NOT_FOUND)

    pushes = Push.objects.filter(repository=repository).order_by('-time')

    for (param, value) in iteritems(meta):
        if param == 'fromchange':
            frompush_time = Push.objects.values_list('time', flat=True).get(
                repository=repository, revision__startswith=value)
            pushes = pushes.filter(time__gte=frompush_time)
            filter_params.update({
                "push_timestamp__gte": to_timestamp(frompush_time)
            })
        elif param == 'tochange':
            topush_time = Push.objects.values_list('time', flat=True).get(
                repository=repository, revision__startswith=value)
            pushes = pushes.filter(time__lte=topush_time)
            filter_params.update({
                "push_timestamp__lte": to_timestamp(topush_time)
            })
        elif param == 'startdate':
            pushes = pushes.filter(time__gte=to_datetime(value))
            filter_params.update({
                "push_timestamp__gte": to_timestamp(to_datetime(value))
            })
        elif param == 'enddate':
            real_end_date = to_datetime(value) + datetime.timedelta(days=1)
            pushes = pushes.filter(time__lte=real_end_date)
            filter_params.update({
                "push_timestamp__lt": to_timestamp(real_end_date)
            })
        elif param == 'revision':
            # revision can be either the revision of the push itself, or
            # any of the commits it refers to
            pushes = pushes.filter(commits__revision__startswith=value)
            rev_key = "revisions_long_revision" \
                if len(meta['revision']) == 40 else "revisions_short_revision"
            filter_params.update({rev_key: meta['revision']})

    for param in ['push_timestamp__lt', 'push_timestamp__lte',
                  'push_timestamp__gt', 'push_timestamp__gte']:
        if filter_params.get(param):
            # translate push timestamp directly into a filter
            try:
                value = datetime.datetime.fromtimestamp(
                    float(filter_params.get(param)))
            except ValueError:
                return Response({
                    "detail": "Invalid timestamp specified for {}".format(param)
                }, status=HTTP_400_BAD_REQUEST)
            pushes = pushes.filter(**{
                param.replace('push_timestamp', 'time'): value
            })

    for param in ['id__lt', 'id__lte', 'id__gt', 'id__gte', 'id']:
        try:
            value = int(filter_params.get(param, 0))
        except ValueError:
            return Response({
                "detail": "Invalid value specified for {}".format(param)
            }, status=HTTP_400_BAD_REQUEST)
        if value:
            pushes = pushes.filter(**{param: value})

    id_in = filter_params.get("id__in")
    if id_in:
        try:
            id_in_list = [int(id) for id in id_in.split(',')]
        except ValueError:
            return Response({"detail": "Invalid id__in specification"},
                            status=HTTP_400_BAD_REQUEST)
        pushes = pushes.filter(id__in=id_in_list)

    author = filter_params.get("author")
    if author:
        pushes = pushes.filter(author=author)

    try:
        count = int(filter_params.get("count", 10))
    except ValueError:
        return Response({"detail": "Valid count value required"},
                        status=HTTP_400_BAD_REQUEST)

    if count > MAX_PUSH_COUNT:
        msg = "Specified count exceeds api limit: {}".format(MAX_PUSH_COUNT)
        return Response({"detail": msg}, status=HTTP_400_BAD_REQUEST)

    # we used to have a "full" parameter for this endpoint so you could
    # specify to not fetch the revision information if it was set to
    # false. however AFAIK no one ever used it (default was to fetch
    # everything), so let's just leave it out. it doesn't break
    # anything to send extra data when not required.
    pushes = pushes.select_related('repository').prefetch_related('commits')[:count]
    serializer = PushSerializer(pushes, many=True)

    meta['count'] = len(pushes)
    meta['repository'] = project
    meta['filter_params'] = filter_params

    resp = {
        'meta': meta,
        'results': serializer.data
    }

    return Response(resp)
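# A brief sketch of query strings the list() endpoint above accepts, using Django's test
# client inside the project's test setup. The '/api/project/<project>/push/' route, the
# 'autoland' project name, the author address, and the date format are assumptions; the
# filter parameters themselves (author, count, startdate, enddate, etc.) come from the
# handler above.
def test_push_list_filters(client):
    # last 5 pushes by a given author
    resp = client.get('/api/project/autoland/push/?author=someone@mozilla.com&count=5')
    assert resp.status_code == 200
    assert len(resp.json()['results']) <= 5

    # date-range query; enddate is effectively inclusive (the handler adds one day)
    resp = client.get('/api/project/autoland/push/?startdate=2021-01-01&enddate=2021-01-02')
    assert resp.status_code == 200

    # counts above MAX_PUSH_COUNT (1000) are rejected with HTTP 400
    resp = client.get('/api/project/autoland/push/?count=2000')
    assert resp.status_code == 400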
def health_summary(self, request, project):
    """
    Return a calculated summary of the health of this push.
    """
    revision = request.query_params.get('revision')
    author = request.query_params.get('author')
    count = request.query_params.get('count')
    all_repos = request.query_params.get('all_repos')
    with_history = request.query_params.get('with_history')
    with_in_progress_tests = request.query_params.get('with_in_progress_tests', False)

    if revision:
        try:
            pushes = Push.objects.filter(
                revision__in=revision.split(','), repository__name=project
            )
        except Push.DoesNotExist:
            return Response(
                "No push with revision: {0}".format(revision), status=HTTP_404_NOT_FOUND
            )
    else:
        try:
            pushes = (
                Push.objects.filter(author=author)
                .select_related('repository')
                .prefetch_related('commits')
                .order_by('-time')
            )
            if not all_repos:
                pushes = pushes.filter(repository__name=project)

            pushes = pushes[: int(count)]
        except Push.DoesNotExist:
            return Response(
                "No pushes found for author: {0}".format(author), status=HTTP_404_NOT_FOUND
            )

    data = []
    commit_history = None

    for push in list(pushes):
        result_status, jobs = get_test_failure_jobs(push)
        test_result, push_health_test_failures = get_test_failures(
            push,
            jobs,
            result_status,
        )
        build_result, push_health_build_failures, builds_in_progress_count = get_build_failures(
            push
        )
        lint_result, push_health_lint_failures, linting_in_progress_count = get_lint_failures(
            push
        )
        test_failure_count = len(push_health_test_failures['needInvestigation'])
        build_failure_count = len(push_health_build_failures)
        lint_failure_count = len(push_health_lint_failures)
        test_in_progress_count = None

        if with_history:
            serializer = PushSerializer([push], many=True)
            commit_history = serializer.data
        if with_in_progress_tests:
            test_in_progress_count = get_test_in_progress_count(push)

        data.append(
            {
                'revision': push.revision,
                'repository': push.repository.name,
                'testFailureCount': test_failure_count,
                'testInProgressCount': test_in_progress_count,
                'buildFailureCount': build_failure_count,
                'buildInProgressCount': builds_in_progress_count,
                'lintFailureCount': lint_failure_count,
                'lintingInProgressCount': linting_in_progress_count,
                'needInvestigation': test_failure_count
                + build_failure_count
                + lint_failure_count,
                'status': push.get_status(),
                'history': commit_history,
                'metrics': {
                    'linting': {
                        'name': 'Linting',
                        'result': lint_result,
                    },
                    'tests': {
                        'name': 'Tests',
                        'result': test_result,
                    },
                    'builds': {
                        'name': 'Builds',
                        'result': build_result,
                    },
                },
            }
        )

    return Response(data)
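# A hedged sketch of querying the health_summary() endpoint above. The
# '/api/project/<project>/push/health_summary/' route is an assumption about how this action
# is exposed by the router, and 'test_push' is a hypothetical fixture providing a stored Push;
# the response keys asserted on come straight from the dicts built in the handler.
def test_health_summary_by_revision(client, test_push):
    resp = client.get(
        '/api/project/{}/push/health_summary/?revision={}'.format(
            test_push.repository.name, test_push.revision
        )
    )
    assert resp.status_code == 200
    summary = resp.json()[0]
    assert summary['revision'] == test_push.revision
    # needInvestigation is the sum of the three failure counts, as computed in the handler
    assert summary['needInvestigation'] == (
        summary['testFailureCount']
        + summary['buildFailureCount']
        + summary['lintFailureCount']
    )
    assert set(summary['metrics']) == {'linting', 'tests', 'builds'}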