    def send_request(self):
        """
        Send request to Elasticsearch.
        """
        es_url = "%s/bugs/bug_info/" % settings.ES_HOST
        logger.info("Submitting %s job %s's classification of bug %s to Elasticsearch", self.project, self.job_id, self.bug_id)
        try:
            make_request(es_url, method='POST', json=self.body)
        except requests.exceptions.HTTPError as e:
            r = e.response
            logger.error("HTTPError %s submitting to %s: %s", r.status_code, es_url, r.text)
            raise
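Every example on this page calls a shared make_request helper that is never shown. The sketch below is only an assumption about its shape, not the project's implementation: a thin wrapper around the requests library that raises requests.exceptions.HTTPError for error responses and forwards keyword arguments such as json, headers, auth and stream.

# Hypothetical sketch of the make_request helper assumed by these examples.
# Only the behaviour the snippets rely on is modelled here: forward keyword
# arguments to requests and raise HTTPError on a 4xx/5xx status.
import requests


def make_request(url, method='GET', timeout=30, **kwargs):
    response = requests.request(method, url, timeout=timeout, **kwargs)
    response.raise_for_status()  # raises requests.exceptions.HTTPError
    return response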
    def send_request(self):
        """
        Submit classification report to Elasticsearch, via OrangeFactor's API.
        """
        url = settings.ORANGEFACTOR_SUBMISSION_URL
        auth = HawkAuth(id=settings.ORANGEFACTOR_HAWK_ID, key=settings.ORANGEFACTOR_HAWK_KEY)
        logger.info("Submitting %s job %s's classification of bug %s to OrangeFactor", self.project, self.job_id, self.bug_id)
        try:
            make_request(url, method='POST', json=self.body, auth=auth)
        except requests.exceptions.HTTPError as e:
            r = e.response
            logger.error("HTTPError %s submitting to %s: %s", r.status_code, url, r.text)
            raise
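HawkAuth presumably comes from the requests_hawk package, which plugs Hawk request signing into requests; a minimal usage sketch under that assumption, with placeholder credentials:

# Assumed origin of HawkAuth; the id/key values are placeholders.
from requests_hawk import HawkAuth

auth = HawkAuth(id='orangefactor-client-id', key='not-a-real-secret')
# The auth object is passed straight through to requests via auth=auth.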
Example #4
    def send_request(self):
        """
        Send request to Elasticsearch.
        """
        es_url = "%s/bugs/bug_info/" % settings.ES_HOST
        logger.info(
            "Submitting %s job %s's classification of bug %s to Elasticsearch",
            self.project, self.job_id, self.bug_id)
        try:
            make_request(es_url, method='POST', json=self.body)
        except requests.exceptions.HTTPError as e:
            r = e.response
            logger.error("HTTPError %s submitting to %s: %s", r.status_code,
                         es_url, r.text)
            raise
Example #5
    def create_bug(self, request):
        """
        Create a bugzilla bug with passed params
        """

        if settings.BZ_API_KEY is None:
            return Response({"failure": "Bugzilla API key not defined. This shouldn't happen."}, status=status.HTTP_400_BAD_REQUEST)
        else:
            params = request.data
            url = settings.BZ_API_URL + "/rest/bug"
            headers = {
                'x-bugzilla-api-key': settings.BZ_API_KEY
            }
            data = {
                'product': params["product"],
                'component': params["component"],
                'summary': params["summary"],
                'keywords': params["keywords"],
                'version': params["version"],
                'description': "Filed by: " + request.user.username + "\n\n" + params["description"],
                'comment_tags': "treeherder",
            }

            try:
                response = make_request(url, method='POST', headers=headers, json=data)
            except requests.exceptions.HTTPError as e:
                response = e.response
                try:
                    rsperror = response.json()['message']
                except (ValueError, KeyError):
                    rsperror = response.text
                return Response({"failure": rsperror}, status=status.HTTP_400_BAD_REQUEST)

            return Response({"success": response.json()["id"]})
    def parse(self):
        """
        Iterate over each line of the log, running each parser against it.

        Stream lines from the gzip file and run each parser against it,
        building the ``artifact`` as we go.
        """
        with closing(make_request(self.url, stream=True)) as response:
            # Temporary annotation of log size to help set thresholds in bug 1295997.
            newrelic.agent.add_custom_parameter(
                'unstructured_log_size',
                int(response.headers.get('Content-Length', -1))
            )
            newrelic.agent.add_custom_parameter(
                'unstructured_log_encoding',
                response.headers.get('Content-Encoding', 'None')
            )
            for line in response.iter_lines():
                for builder in self.builders:
                    builder.parse_line(line)

        # gather the artifacts from all builders
        for builder in self.builders:
            # Run end-of-parsing actions for this parser,
            # in case the artifact needs clean-up/summarising.
            builder.finish_parse()
            name = builder.name
            artifact = builder.get_artifact()
            if name == 'performance_data' and not artifact[name]:
                continue
            self.artifacts[name] = artifact
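The parse() loop above only assumes that each builder exposes a name plus parse_line, finish_parse and get_artifact. A toy builder illustrating that assumed interface (not one of the project's real parsers):

# Toy builder showing the interface parse() relies on; the real parsers are
# far more involved, this only mirrors the method names and artifact shape.
class LineCountBuilder:
    name = 'line_count'

    def __init__(self):
        self.count = 0

    def parse_line(self, line):
        # Called once per streamed log line.
        self.count += 1

    def finish_parse(self):
        # End-of-parsing hook; nothing to summarise in this toy example.
        pass

    def get_artifact(self):
        # parse() stores this dict under self.artifacts[self.name].
        return {self.name: {'lines': self.count}}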
Example #7
    def create_bug(self, request):
        """
        Create a bugzilla bug with passed params
        """
        if settings.BUGFILER_API_KEY is None:
            return Response({"failure": "Bugzilla API key not set!"},
                            status=HTTP_400_BAD_REQUEST)

        params = request.data

        # Arbitrarily cap crash signatures at 2048 characters to prevent perf issues on bmo
        crash_signature = params.get("crash_signature")
        if crash_signature and len(crash_signature) > 2048:
            return Response(
                {
                    "failure":
                    "Crash signature can't be more than 2048 characters."
                },
                status=HTTP_400_BAD_REQUEST)

        description = u"**Filed by:** {}\n{}".format(
            request.user.email.replace('@', " [at] "),
            params.get("comment", "")).encode("utf-8")
        summary = params.get("summary").encode("utf-8").strip()
        url = settings.BUGFILER_API_URL + "/rest/bug"
        headers = {
            'x-bugzilla-api-key': settings.BUGFILER_API_KEY,
            'Accept': 'application/json'
        }
        data = {
            'type': "defect",
            'product': params.get("product"),
            'component': params.get("component"),
            'summary': summary,
            'keywords': params.get("keywords"),
            'blocks': params.get("blocks"),
            'depends_on': params.get("depends_on"),
            'see_also': params.get("see_also"),
            'version': params.get("version"),
            'cf_crash_signature': params.get("crash_signature"),
            'severity': params.get("severity"),
            'priority': params.get("priority"),
            'description': description,
            'comment_tags': "treeherder",
        }

        try:
            response = make_request(url,
                                    method='POST',
                                    headers=headers,
                                    json=data)
        except requests.exceptions.HTTPError as e:
            try:
                message = e.response.json()['message']
            except (ValueError, KeyError):
                message = e.response.text
            return Response({"failure": message}, status=HTTP_400_BAD_REQUEST)

        return Response({"success": response.json()["id"]})
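For orientation, a request body for the view above might look like the following. The keys mirror the params.get(...) calls in the handler; every value is made up for illustration:

# Illustrative payload for the create_bug view above; values are invented.
example_payload = {
    "product": "Testing",
    "component": "General",
    "summary": "Intermittent test_example.py | single tracking bug",
    "keywords": "intermittent-failure",
    "blocks": "",
    "depends_on": "",
    "see_also": "",
    "version": "unspecified",
    "crash_signature": "",
    "severity": "S4",
    "priority": "P5",
    "comment": "Relevant log excerpt goes here",
}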
Example #8
    def create_bug(self, request):
        """
        Create a bugzilla bug with passed params
        """

        if settings.BZ_API_KEY is None:
            return Response(
                {
                    "failure":
                    "Bugzilla API key not defined. This shouldn't happen."
                },
                status=status.HTTP_400_BAD_REQUEST)
        else:
            params = request.data
            url = settings.BZ_API_URL + "/rest/bug"
            headers = {
                'x-bugzilla-api-key': settings.BZ_API_KEY,
                'Accept': 'application/json'
            }
            data = {
                'product':
                params["product"],
                'component':
                params["component"],
                'summary':
                params["summary"],
                'keywords':
                params["keywords"],
                'version':
                params["version"],
                'description':
                "Filed by: " + request.user.username + "\n\n" +
                params["description"],
                'comment_tags':
                "treeherder",
            }

            try:
                response = make_request(url,
                                        method='POST',
                                        headers=headers,
                                        json=data)
            except requests.exceptions.HTTPError as e:
                response = e.response
                try:
                    rsperror = response.json()['message']
                except (ValueError, KeyError):
                    rsperror = response.text
                return Response({"failure": rsperror},
                                status=status.HTTP_400_BAD_REQUEST)

            return Response({"success": response.json()["id"]})
Example #9
    def create_bug(self, request):
        """
        Create a bugzilla bug with passed params
        """
        if settings.BUGFILER_API_KEY is None:
            return Response({"failure": "Bugzilla API key not set!"},
                            status=HTTP_400_BAD_REQUEST)

        params = request.data

        # Arbitrarily cap crash signatures at 2048 characters to prevent perf issues on bmo
        crash_signature = params.get("crash_signature")
        if crash_signature and len(crash_signature) > 2048:
            return Response({"failure": "Crash signature can't be more than 2048 characters."},
                            status=HTTP_400_BAD_REQUEST)

        description = u"**Filed by:** {}\n{}".format(
            request.user.email.replace('@', " [at] "),
            params.get("comment", "")
        ).encode("utf-8")
        summary = params.get("summary").encode("utf-8").strip()
        url = settings.BUGFILER_API_URL + "/rest/bug"
        headers = {
            'x-bugzilla-api-key': settings.BUGFILER_API_KEY,
            'Accept': 'application/json'
        }
        data = {
            'product': params.get("product"),
            'component': params.get("component"),
            'summary': summary,
            'keywords': params.get("keywords"),
            'blocks': params.get("blocks"),
            'depends_on': params.get("depends_on"),
            'see_also': params.get("see_also"),
            'version': params.get("version"),
            'cf_crash_signature': params.get("crash_signature"),
            'severity': params.get("severity"),
            'priority': params.get("priority"),
            'description': description,
            'comment_tags': "treeherder",
        }

        try:
            response = make_request(url, method='POST', headers=headers, json=data)
        except requests.exceptions.HTTPError as e:
            try:
                message = e.response.json()['message']
            except (ValueError, KeyError):
                message = e.response.text
            return Response({"failure": message}, status=HTTP_400_BAD_REQUEST)

        return Response({"success": response.json()["id"]})
Example #10
    def parse(self):
        """
        Iterate over each line of the log, running each parser against it.

        Stream lines from the gzip file and run each parser against it,
        building the ``artifact`` as we go.
        """
        with make_request(self.url, stream=True) as response:
            download_size_in_bytes = int(
                response.headers.get('Content-Length', -1))

            # Temporary annotation of log size to help set thresholds in bug 1295997.
            newrelic.agent.add_custom_parameter('unstructured_log_size',
                                                download_size_in_bytes)
            newrelic.agent.add_custom_parameter(
                'unstructured_log_encoding',
                response.headers.get('Content-Encoding', 'None'))

            if download_size_in_bytes > MAX_DOWNLOAD_SIZE_IN_BYTES:
                raise LogSizeException(
                    'Download size of %i bytes exceeds limit' %
                    download_size_in_bytes)

            # Lines must be explicitly decoded since `iter_lines()` returns bytes by default
            # and we cannot use its `decode_unicode=True` mode, since otherwise Unicode newline
            # characters such as `\u0085` (which can appear in test output) are treated the same
            # as `\n` or `\r`, and so split into unwanted additional lines by `iter_lines()`.
            for line in response.iter_lines():
                for builder in self.builders:
                    try:
                        # Using `replace` to prevent malformed unicode (which might possibly exist
                        # in test message output) from breaking parsing of the rest of the log.
                        builder.parse_line(line.decode('utf-8', 'replace'))
                    except EmptyPerformanceData:
                        logger.warning(
                            "We have parsed an empty PERFHERDER_DATA for %s",
                            self.url)

        # gather the artifacts from all builders
        for builder in self.builders:
            # Run end-of-parsing actions for this parser,
            # in case the artifact needs clean-up/summarising.
            builder.finish_parse()
            name = builder.name
            artifact = builder.get_artifact()
            if name == 'performance_data' and not artifact[name]:
                continue
            self.artifacts[name] = artifact
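This variant of parse() references MAX_DOWNLOAD_SIZE_IN_BYTES, LogSizeException and EmptyPerformanceData, none of which appear in the snippet. Plausible minimal definitions are sketched below; the size limit is chosen arbitrarily for illustration:

# Assumed definitions for names referenced by parse() above; the limit value
# is illustrative and not taken from the source.
MAX_DOWNLOAD_SIZE_IN_BYTES = 5 * 1024 * 1024


class LogSizeException(Exception):
    """Raised when a log's Content-Length exceeds the download limit."""


class EmptyPerformanceData(Exception):
    """Raised by a performance parser when PERFHERDER_DATA contains no data."""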
Example #11
    def create_bug(self, request):
        """
        Create a bugzilla bug with passed params
        """
        if settings.BZ_API_KEY is None:
            return Response(
                {
                    "failure":
                    "Bugzilla API key not defined. This shouldn't happen."
                },
                status=HTTP_400_BAD_REQUEST)

        params = request.data
        description = "Filed by: {}\n\n{}".format(
            request.user.email.replace('@', " [at] "),
            params.get("comment", ""))
        url = settings.BZ_API_URL + "/rest/bug"
        headers = {
            'x-bugzilla-api-key': settings.BZ_API_KEY,
            'Accept': 'application/json'
        }
        data = {
            'product': params.get("product"),
            'component': params.get("component"),
            'summary': params.get("summary"),
            'keywords': params.get("keywords"),
            'blocks': params.get("blocks"),
            'depends_on': params.get("depends_on"),
            'see_also': params.get("see_also"),
            'version': params.get("version"),
            'description': description,
            'comment_tags': "treeherder",
        }

        try:
            response = make_request(url,
                                    method='POST',
                                    headers=headers,
                                    json=data)
        except requests.exceptions.HTTPError as e:
            try:
                message = e.response.json()['message']
            except (ValueError, KeyError):
                message = e.response.text
            return Response({"failure": message}, status=HTTP_400_BAD_REQUEST)

        return Response({"success": response.json()["id"]})
    def parse(self):
        """
        Iterate over each line of the log, running each parser against it.

        Stream lines from the gzip file and run each parser against it,
        building the ``artifact`` as we go.
        """
        with make_request(self.url, stream=True) as response:
            download_size_in_bytes = int(response.headers.get('Content-Length', -1))

            # Temporary annotation of log size to help set thresholds in bug 1295997.
            newrelic.agent.add_custom_parameter(
                'unstructured_log_size',
                download_size_in_bytes
            )
            newrelic.agent.add_custom_parameter(
                'unstructured_log_encoding',
                response.headers.get('Content-Encoding', 'None')
            )

            if download_size_in_bytes > MAX_DOWNLOAD_SIZE_IN_BYTES:
                raise LogSizeException('Download size of %i bytes exceeds limit' % download_size_in_bytes)

            # Lines must be explicitly decoded since `iter_lines()` returns bytes by default
            # and we cannot use its `decode_unicode=True` mode, since otherwise Unicode newline
            # characters such as `\u0085` (which can appear in test output) are treated the same
            # as `\n` or `\r`, and so split into unwanted additional lines by `iter_lines()`.
            for line in response.iter_lines():
                for builder in self.builders:
                    # Using `replace` to prevent malformed unicode (which might possibly exist
                    # in test message output) from breaking parsing of the rest of the log.
                    builder.parse_line(line.decode('utf-8', 'replace'))

        # gather the artifacts from all builders
        for builder in self.builders:
            # Run end-of-parsing actions for this parser,
            # in case the artifact needs clean-up/summarising.
            builder.finish_parse()
            name = builder.name
            artifact = builder.get_artifact()
            if name == 'performance_data' and not artifact[name]:
                continue
            self.artifacts[name] = artifact
Example #13
    def create_bug(self, request):
        """
        Create a bugzilla bug with passed params
        """
        if settings.BZ_API_KEY is None:
            return Response({"failure": "Bugzilla API key not defined. This shouldn't happen."},
                            status=HTTP_400_BAD_REQUEST)

        params = request.data
        description = "Filed by: {}\n\n{}".format(
            request.user.email.replace('@', " [at] "),
            params.get("comment", "")
        )
        url = settings.BZ_API_URL + "/rest/bug"
        headers = {
            'x-bugzilla-api-key': settings.BZ_API_KEY,
            'Accept': 'application/json'
        }
        data = {
            'product': params.get("product"),
            'component': params.get("component"),
            'summary': params.get("summary"),
            'keywords': params.get("keywords"),
            'blocks': params.get("blocks"),
            'depends_on': params.get("depends_on"),
            'see_also': params.get("see_also"),
            'version': params.get("version"),
            'description': description,
            'comment_tags': "treeherder",
        }

        try:
            response = make_request(url, method='POST', headers=headers, json=data)
        except requests.exceptions.HTTPError as e:
            try:
                message = e.response.json()['message']
            except (ValueError, KeyError):
                message = e.response.text
            return Response({"failure": message}, status=HTTP_400_BAD_REQUEST)

        return Response({"success": response.json()["id"]})
Example #15
    def parse(self):
        """
        Iterate over each line of the log, running each parser against it.

        Stream lines from the gzip file and run each parser against it,
        building the ``artifact`` as we go.
        """
        with closing(make_request(self.url, stream=True)) as response:
            for line in response.iter_lines():
                for builder in self.builders:
                    builder.parse_line(line)

        # gather the artifacts from all builders
        for builder in self.builders:
            # Run end-of-parsing actions for this parser,
            # in case the artifact needs clean-up/summarising.
            builder.finish_parse()
            name = builder.name
            artifact = builder.get_artifact()
            if name == 'performance_data' and not artifact[name]:
                continue
            self.artifacts[name] = artifact
Example #16
    def get_log_handle(self, url):
        """Hook to get a handle to the log with this url"""
        response = make_request(url)
        return closing(BytesIO(response.content))
Example #17
    def list(self, request, project):
        """
        GET method implementation for log slicer

        Receives a line range and job_id and returns those lines
        """
        job_id = request.query_params.get("job_id")
        log_name = request.query_params.get("name")
        if log_name:
            log_names = [log_name]
        else:
            log_names = ["buildbot_text", "builds-4h"]
        format = 'json' if log_name == 'mozlog_json' else 'text'

        file = None

        start_line = request.query_params.get("start_line")
        end_line = request.query_params.get("end_line")
        if not start_line or not end_line:
            return Response(
                "``start_line`` and ``end_line`` parameters are both required",
                400)

        try:
            start_line = abs(int(start_line))
            end_line = abs(int(end_line))
        except ValueError:
            return Response("parameters could not be converted to integers",
                            400)

        if start_line >= end_line:
            return Response("``end_line`` must be larger than ``start_line``",
                            400)

        try:
            job = Job.objects.get(repository__name=project,
                                  project_specific_id=job_id)
        except Job.DoesNotExist:
            return Response("Job does not exist", 404)

        try:
            url = JobLog.objects.filter(
                job=job, name__in=log_names)[0:1].values_list('url',
                                                              flat=True)[0]
        except JobLog.DoesNotExist:
            return Response("Job log does not exist", 404)

        try:
            file = filesystem.get(url)
            if not file:
                r = make_request(url)
                try:
                    file = gzip.GzipFile(fileobj=BytesIO(r.content))
                    # read 16 bytes, just to make sure the file is gzipped
                    file.read(16)
                    file.seek(0)
                    filesystem.set(url, file.fileobj)
                except IOError:
                    # file is not gzipped, but we should still store / read
                    # it as such, to save space
                    file = BytesIO(r.content)
                    gz_file_content = BytesIO()
                    with gzip.GzipFile('none', 'w',
                                       fileobj=gz_file_content) as gz:
                        gz.write(r.content)
                    filesystem.set(url, gz_file_content)
            else:
                file = gzip.GzipFile(fileobj=file)

            lines = []
            for i, line in enumerate(file):
                if i < start_line:
                    continue
                elif i >= end_line:
                    break

                if format == 'json':
                    lines.append({"data": json.loads(line), "index": i})
                else:
                    lines.append({"text": line, "index": i})

            return Response(lines)

        finally:
            if file:
                file.close()
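The caching branch in the handler above compresses logs that arrive un-gzipped before storing them. A standalone sketch of that gzip round trip, independent of the filesystem cache used by the view:

import gzip
from io import BytesIO

# Raw log bytes are written into an in-memory gzip stream (mirroring what the
# handler does before filesystem.set), then read back line by line when served.
raw = b"first log line\nsecond log line\n"

gz_file_content = BytesIO()
with gzip.GzipFile('none', 'w', fileobj=gz_file_content) as gz:
    gz.write(raw)

gz_file_content.seek(0)
with gzip.GzipFile(fileobj=gz_file_content) as gz:
    for index, line in enumerate(gz):
        print(index, line)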
Example #18
    def list(self, request, project):
        """
        GET method implementation for log slicer

        Receives a line range and job_id and returns those lines
        """
        job_id = request.query_params.get("job_id")
        log_name = request.query_params.get("name")
        if log_name:
            log_names = [log_name]
        else:
            log_names = ["buildbot_text", "builds-4h"]
        format = 'json' if log_name == 'mozlog_json' else 'text'

        file = None

        start_line = request.query_params.get("start_line")
        end_line = request.query_params.get("end_line")
        if not start_line or not end_line:
            return Response("``start_line`` and ``end_line`` parameters are both required", 400)

        try:
            start_line = abs(int(start_line))
            end_line = abs(int(end_line))
        except ValueError:
            return Response("parameters could not be converted to integers", 400)

        if start_line >= end_line:
            return Response("``end_line`` must be larger than ``start_line``", 400)

        try:
            job = Job.objects.get(repository__name=project,
                                  project_specific_id=job_id)
        except Job.DoesNotExist:
            return Response("Job does not exist", 404)

        try:
            url = JobLog.objects.filter(
                job=job, name__in=log_names)[0:1].values_list('url',
                                                              flat=True)[0]
        except JobLog.DoesNotExist:
            return Response("Job log does not exist", 404)

        try:
            file = filesystem.get(url)
            if not file:
                r = make_request(url)
                try:
                    file = gzip.GzipFile(fileobj=BytesIO(r.content))
                    # read 16 bytes, just to make sure the file is gzipped
                    file.read(16)
                    file.seek(0)
                    filesystem.set(url, file.fileobj)
                except IOError:
                    # file is not gzipped, but we should still store / read
                    # it as such, to save space
                    file = BytesIO(r.content)
                    gz_file_content = BytesIO()
                    with gzip.GzipFile('none', 'w', fileobj=gz_file_content) as gz:
                        gz.write(r.content)
                    filesystem.set(url, gz_file_content)
            else:
                file = gzip.GzipFile(fileobj=file)

            lines = []
            for i, line in enumerate(file):
                if i < start_line:
                    continue
                elif i >= end_line:
                    break

                if format == 'json':
                    lines.append({"data": json.loads(line), "index": i})
                else:
                    lines.append({"text": line, "index": i})

            return Response(lines)

        finally:
            if file:
                file.close()