def get_usage():
    """Query New Relic Insights for push-health "need investigation" counts.

    Runs a pre-encoded NRQL query (faceted by push revision, last day,
    timeseries) against the Insights query API, matches each returned
    revision to a local ``Push`` row, and returns a list of dicts with
    the serialized push plus its peak and latest counts.

    Returns:
        list[dict]: one entry per facet whose revision exists locally,
        each with keys ``push``, ``peak`` and ``latest``.
    """
    # NRQL is pre-URL-encoded; repo and appName are substituted in.
    nrql = "SELECT%20max(needInvestigation)%20FROM%20push_health_need_investigation%20FACET%20revision%20SINCE%201%20DAY%20AGO%20TIMESERIES%20where%20repo%3D'{}'%20AND%20appName%3D'{}'".format(
        'try', 'treeherder-prod')
    new_relic_url = '{}?nrql={}'.format(settings.NEW_RELIC_INSIGHTS_API_URL, nrql)
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'X-Query-Key': settings.NEW_RELIC_INSIGHTS_API_KEY,
    }

    # TODO: make this check happen during deploy or setup? Not here.
    if not settings.NEW_RELIC_INSIGHTS_API_KEY:
        logger.error('NEW_RELIC_INSIGHTS_API_KEY not set.')

    resp = make_request(new_relic_url, headers=headers)
    data = resp.json()

    push_revisions = [facet['name'] for facet in data['facets']]
    pushes = Push.objects.filter(revision__in=push_revisions)
    # Build the revision -> Push mapping once instead of issuing one
    # ``pushes.get()`` query per facet (previously N+1 queries, and a
    # ``Push.DoesNotExist`` crash for revisions missing locally).
    push_by_revision = {push.revision: push for push in pushes}

    results = [
        {
            'push': PushSerializer(push_by_revision[facet['name']]).data,
            'peak': get_peak(facet),
            'latest': get_latest(facet),
        }
        for facet in data['facets']
        # Skip facets whose revision has no matching local push.
        if facet['name'] in push_by_revision
    ]
    return results
def create_bug(self, request):
    """
    Create a bugzilla bug with passed params

    Validates the API key, crash-signature length and summary before
    POSTing to the Bugzilla REST endpoint; returns the new bug id on
    success, or a 400 response describing the failure.
    """
    if settings.BUGFILER_API_KEY is None:
        return Response({"failure": "Bugzilla API key not set!"},
                        status=HTTP_400_BAD_REQUEST)

    params = request.data

    # Arbitrarily cap crash signatures at 2048 characters to prevent perf issues on bmo
    crash_signature = params.get("crash_signature")
    if crash_signature and len(crash_signature) > 2048:
        return Response(
            {
                "failure": "Crash signature can't be more than 2048 characters."
            },
            status=HTTP_400_BAD_REQUEST)

    # Fix: a missing summary previously raised AttributeError on
    # ``None.encode`` (HTTP 500); reject it explicitly instead.
    summary = params.get("summary")
    if not summary:
        return Response({"failure": "Must provide a bug summary."},
                        status=HTTP_400_BAD_REQUEST)
    summary = summary.encode("utf-8").strip()

    description = u"**Filed by:** {}\n{}".format(
        request.user.email.replace('@', " [at] "),
        params.get("comment", "")).encode("utf-8")
    url = settings.BUGFILER_API_URL + "/rest/bug"
    headers = {
        'x-bugzilla-api-key': settings.BUGFILER_API_KEY,
        'Accept': 'application/json'
    }
    data = {
        'type': "defect",
        'product': params.get("product"),
        'component': params.get("component"),
        'summary': summary,
        'keywords': params.get("keywords"),
        'blocks': params.get("blocks"),
        'depends_on': params.get("depends_on"),
        'see_also': params.get("see_also"),
        'version': params.get("version"),
        'cf_crash_signature': params.get("crash_signature"),
        'severity': params.get("severity"),
        'priority': params.get("priority"),
        'description': description,
        'comment_tags': "treeherder",
    }

    try:
        response = make_request(url, method='POST', headers=headers, json=data)
    except requests.exceptions.HTTPError as e:
        # Prefer Bugzilla's structured error message; fall back to raw text.
        try:
            message = e.response.json()['message']
        except (ValueError, KeyError):
            message = e.response.text
        return Response({"failure": message}, status=HTTP_400_BAD_REQUEST)

    return Response({"success": response.json()["id"]})
def parse(self):
    """
    Iterate over each line of the log, running each parser against it.

    Stream lines from the gzip file and run each parser against it,
    building the ``artifact`` as we go.
    """
    with make_request(self.url, stream=True) as response:
        size_in_bytes = int(response.headers.get('Content-Length', -1))

        # Temporary annotation of log size to help set thresholds in bug 1295997.
        newrelic.agent.add_custom_parameter('unstructured_log_size', size_in_bytes)
        newrelic.agent.add_custom_parameter(
            'unstructured_log_encoding',
            response.headers.get('Content-Encoding', 'None'))

        if size_in_bytes > MAX_DOWNLOAD_SIZE_IN_BYTES:
            raise LogSizeException(
                'Download size of %i bytes exceeds limit' % size_in_bytes)

        # Lines must be explicitly decoded since `iter_lines()`` returns bytes by default
        # and we cannot use its `decode_unicode=True` mode, since otherwise Unicode newline
        # characters such as `\u0085` (which can appear in test output) are treated the same
        # as `\n` or `\r`, and so split into unwanted additional lines by `iter_lines()`.
        for raw_line in response.iter_lines():
            # Using `replace` to prevent malformed unicode (which might possibly exist
            # in test message output) from breaking parsing of the rest of the log.
            decoded_line = raw_line.decode('utf-8', 'replace')
            for builder in self.builders:
                try:
                    builder.parse_line(decoded_line)
                except EmptyPerformanceData:
                    logger.warning(
                        "We have parsed an empty PERFHERDER_DATA for %s", self.url)

    # gather the artifacts from all builders
    for builder in self.builders:
        # Run end-of-parsing actions for this parser,
        # in case the artifact needs clean-up/summarising.
        builder.finish_parse()
        artifact = builder.get_artifact()
        if builder.name == 'performance_data' and not artifact[builder.name]:
            continue
        self.artifacts[builder.name] = artifact
def download_artifact(root_url, task_id, path):
    """
    Downloads a Taskcluster artifact.
    Supports specific file formats like json and yaml.

    Returns either the parsed json, the parsed yaml or the plain response.
    """
    artifact_url = taskcluster_urls.api(
        root_url, 'queue', 'v1', 'task/{}/artifacts/{}'.format(task_id, path))

    if path.endswith(".json"):
        return fetch_json(artifact_url)
    # Accept both common YAML extensions (previously only ".yml" parsed).
    if path.endswith((".yml", ".yaml")):
        return yaml.safe_load(fetch_text(artifact_url))
    return make_request(artifact_url)
def create_bug(self, request):
    """
    Create a bugzilla bug with passed params

    Validates the API key, crash-signature length and summary, and for
    security issues resolves the product's default security group before
    POSTing to the Bugzilla REST endpoint. Returns the new bug id on
    success, or a 400 response describing the failure.
    """
    if settings.BUGFILER_API_KEY is None:
        return Response({"failure": "Bugzilla API key not set!"},
                        status=HTTP_400_BAD_REQUEST)

    params = request.data

    # Arbitrarily cap crash signatures at 2048 characters to prevent perf issues on bmo
    crash_signature = params.get("crash_signature")
    if crash_signature and len(crash_signature) > 2048:
        return Response(
            {
                "failure": "Crash signature can't be more than 2048 characters."
            },
            status=HTTP_400_BAD_REQUEST,
        )

    # Fix: a missing summary previously raised AttributeError on
    # ``None.encode`` (HTTP 500); reject it explicitly instead.
    summary = params.get("summary")
    if not summary:
        return Response({"failure": "Must provide a bug summary."},
                        status=HTTP_400_BAD_REQUEST)
    summary = summary.encode("utf-8").strip()

    description = u"**Filed by:** {}\n{}".format(
        request.user.email.replace('@', " [at] "),
        params.get("comment", "")).encode("utf-8")
    url = settings.BUGFILER_API_URL + "/rest/bug"
    headers = {
        'x-bugzilla-api-key': settings.BUGFILER_API_KEY,
        'Accept': 'application/json'
    }
    data = {
        'type': "defect",
        'product': params.get("product"),
        'component': params.get("component"),
        'summary': summary,
        'keywords': params.get("keywords"),
        'whiteboard': params.get("whiteboard"),
        'regressed_by': params.get("regressed_by"),
        'see_also': params.get("see_also"),
        'version': params.get("version"),
        'cf_crash_signature': params.get("crash_signature"),
        'severity': params.get("severity"),
        'priority': params.get("priority"),
        'description': description,
        'comment_tags': "treeherder",
    }

    if params.get("is_security_issue"):
        # Security bugs must be restricted to the product's default
        # security group; refuse to file one if no group is configured.
        security_group_list = list(
            BugzillaSecurityGroup.objects.filter(
                product=data.get("product")).values_list("security_group", flat=True))
        if len(security_group_list) == 0:
            return Response(
                {
                    "failure": "Cannot file security bug for product without default security group in Bugzilla."
                },
                status=HTTP_400_BAD_REQUEST,
            )
        data["groups"] = security_group_list

    try:
        response = make_request(url, method='POST', headers=headers, json=data)
    except requests.exceptions.HTTPError as e:
        # Prefer Bugzilla's structured error message; fall back to raw text.
        try:
            message = e.response.json()['message']
        except (ValueError, KeyError):
            message = e.response.text
        return Response({"failure": message}, status=HTTP_400_BAD_REQUEST)

    return Response({"success": response.json()["id"]})
def reopen_request(url, method, headers, json):
    """Forward a request to ``make_request``; the response is discarded."""
    request_kwargs = {
        'method': method,
        'headers': headers,
        'json': json,
    }
    make_request(url, **request_kwargs)
def request(path="", method="GET"):
    """Issue a request against the Heroku apps API (v3) for *path*.

    Returns whatever ``make_request`` returns for the built URL.
    """
    heroku_url = "https://api.heroku.com/apps/{}".format(path)
    # Heroku requires the versioned Accept header on every API call.
    accept_headers = {'Accept': 'application/vnd.heroku+json; version=3'}
    return make_request(heroku_url, method=method, headers=accept_headers)