Example #1
0
    def _parse_batch_api_response(self, response):
        """Parse one part of the MIME multipart server response.

    Args:
      response: One part of the MIME multipart message, string.
    Raises:
      ValueError: if an invalid HTTP header is encountered.
    Returns:
      An instance of GoogleComputeEngineBase.BATCH_RESPONSE named tuple.
    """
        # First line is the HTTP status line, e.g. "HTTP/1.1 200 OK".
        status_line, payload = response.split('\n', 1)
        pieces = status_line.split(None, 2)
        if len(pieces) <= 1:
            raise ValueError('Invalid HTTP server response.')
        status = pieces[1]
        reason = pieces[2] if len(pieces) > 2 else ''

        # Parse the remaining headers + body with the incremental parser.
        feed_parser = FeedParser()
        feed_parser.feed(payload)
        message = feed_parser.close()
        message['status'] = status
        http_response = httplib2.Response(message)
        http_response.reason = reason
        return GoogleComputeEngineBase.BATCH_RESPONSE(http_response,
                                                      message.get_payload())
Example #2
0
 def __init__(self, message=None):
     """Initialize a Message instance."""
     # Build the message with a header-only incremental parser.
     feedparser = FeedParser(email.message.Message)
     # Stop interpreting structure after the header block; the rest of
     # the data is treated as a raw payload.
     feedparser._set_headersonly()
     # NOTE(review): only the first 4096 bytes are read and fed -- any
     # content beyond that is dropped.  Presumably only the headers are
     # needed here; confirm larger messages are handled by callers.
     # Also, the message=None default will raise AttributeError on
     # .read() if no message is passed -- presumably guarded elsewhere.
     data = message.read(4096)
     feedparser.feed(data)
     self._become_message(feedparser.close())
Example #3
0
    async def _deserialize_response(self, payload):
        """Convert string into httplib2 response and content.

    Args:
      payload: string, headers and body as a string.

    Returns:
      A pair (resp, content), such as would be returned from httplib2.request.
    """
        # Peel off the status line, e.g. "HTTP/1.1 200 OK".
        status_line, payload = payload.split("\n", 1)
        protocol, status, reason = status_line.split(" ", 2)

        # Feed the remaining headers (and body) through an email parser.
        feed_parser = FeedParser()
        feed_parser.feed(payload)
        parsed_headers = feed_parser.close()
        parsed_headers["status"] = status

        # Build the httplib2.Response from the parsed headers.
        resp = httplib2.Response(parsed_headers)
        resp.reason = reason
        # "HTTP/1.1" -> 11, "HTTP/1.0" -> 10.
        resp.version = int(protocol.split("/", 1)[1].replace(".", ""))

        # The body is everything after the first blank line.
        content = payload.split("\r\n\r\n", 1)[1]

        return resp, content
Example #4
0
def get_email_message_from_file(filename="notification.email"):
    """Load a mail from *filename* and parse it into an email.message."""
    parser = FeedParser()
    parser.feed(get_string_from_file(filename))
    return parser.close()
 def message_from_response(self, headers, body):
     """Build an email.message object from HTTP response headers and body.

     Args:
       headers: mapping of header name -> header value.
       body: string, the response body.

     Returns:
       The parsed message object produced by FeedParser.
     """
     fp = FeedParser()
     # .items() works on both Python 2 and 3; the previous .iteritems()
     # is Python-2-only and raises AttributeError on Python 3 dicts.
     for header, value in headers.items():
         fp.feed("%s: %s\n" % (header, Header(value).encode()))
     fp.feed("\n")
     fp.feed(body)
     response = fp.close()
     return response
Example #6
0
def from_string(data, base, plugin, enabled=False):
    """Parse *data* into a PluginMessage and attach bookkeeping fields."""
    feed = FeedParser(PluginMessage)
    feed.feed(data)
    msg = feed.close()
    # Annotate the freshly parsed message with its origin and state.
    msg.is_new = False
    msg.enabled = enabled
    msg.plugin = plugin
    msg.base = base
    return msg
Example #7
0
 def parse(self, fp, headersonly=False):
     """Parse all data readable from *fp* into a message structure."""
     parser = FeedParser(self._class, policy=self.policy)
     if headersonly:
         parser._set_headersonly()
     # Stream the file through the feed parser in fixed-size chunks.
     while True:
         chunk = fp.read(8192)
         if not chunk:
             break
         parser.feed(chunk)
     return parser.close()
Example #8
0
    def __break_into_parts(self):
        """Replay the stored headers and body and split them into MIME parts."""
        parser = FeedParser()
        # Feed the headers, a blank separator line, then the body --
        # exactly as they would appear on the wire.
        for header_line in self.get_headers_array():
            parser.feed(header_line + "\r\n")
        parser.feed("\r\n")
        parser.feed(self.__body)
        return parser.close().get_payload()
Example #9
0
    def _break_into_parts(self):
        """Reassemble the response headers/body and split into MIME parts."""
        parser = FeedParser()
        headers = self._response.headers
        # Replay each header line, then the blank separator, then the body.
        for name in headers:
            parser.feed(name + ':' + headers[name] + "\r\n")
        parser.feed("\r\n")
        parser.feed(self.text())
        return parser.close().get_payload()
Example #10
0
    def __init__(self):
        """Set up an empty parser state for a single mail message."""
        # Incremental parser that raw mail data will be fed into.
        self.raw_stream = FeedParser()

        # Fields populated as a message is parsed; all start empty.
        self.raw_data = ''
        self.raw_msg = ''
        self.headers = {}
        self.body = ''
        # Addressing and metadata extracted from the headers.
        self.sender = ''
        self.to = ''
        self.subject = ''
        self.date_s = ''
        self.date_d = ''
Example #11
0
    def parse(self, fp, headersonly=False):
        """Create a message structure from the data in a file.

        Reads everything from *fp* and returns the root of the resulting
        message structure.  When *headersonly* is true, parsing stops
        after the header block instead of consuming the whole file.
        """
        feed = FeedParser(self._class, policy=self.policy)
        if headersonly:
            feed._set_headersonly()
        # Pump the file through the parser in 8 KiB chunks until EOF.
        chunk = fp.read(8192)
        while chunk:
            feed.feed(chunk)
            chunk = fp.read(8192)
        return feed.close()
Example #12
0
    async def handle_response(self, response, content):
        """Process a batched (multipart/mixed) HTTP response.

        Successful sub-responses are appended to self.completed_responses;
        failed sub-requests are re-queued in self.requests, a 401 triggers a
        token refresh, and 403/429 grow the exponential backoff delay.

        Args:
          response: HTTP response object; only its 'content-type' header is
            read here (it carries the multipart boundary).
          content: string, the multipart response body.
        """
        # Re-attach the content-type header so FeedParser can split the
        # body on the MIME boundary it declares.
        header = f"content-type: {response.headers['content-type']}\r\n\r\n"
        for_parser = header + content
        parser = FeedParser()
        parser.feed(for_parser)
        mime_response = parser.close()

        failed_requests = []
        # Separation of the multipart response message.
        error_401, error_403, error_429 = False, False, False
        for part in mime_response.get_payload():
            # Content-ID maps each part back to the request that produced it.
            http_request_idx = int(self._header_to_id(part["Content-ID"]))
            http_request = self.requests[http_request_idx]

            # NOTE: rebinds the 'response'/'content' parameters per part.
            response, content = await asyncio.create_task(
                self._deserialize_response(part.get_payload()))
            parsed_response = json.loads(content)
            if isinstance(parsed_response,
                          dict) and 'error' in parsed_response:
                # Auth (401) and rate-limit (403/429) errors are retried;
                # any other error is logged and the request dropped.
                error_code = parsed_response['error']['code']
                if error_code == 429: error_429 = True
                elif error_code == 403: error_403 = True
                elif error_code == 401: error_401 = True
                else:
                    LOG.error(
                        f"BatchApiRequest: Unhandled error in one of the responses: {parsed_response}."
                        f"\n\tRequest uri, method, headers: {http_request.uri}, {http_request.method},"
                        f"{http_request.headers}")
                    continue
                failed_requests.append(http_request)
            else:
                self.completed_responses.append(parsed_response)

        # Only the failed requests remain queued for the next attempt.
        self.requests = failed_requests
        if error_401:
            # Credentials likely expired: refresh the cached OAuth token.
            self.access_token = await asyncio.create_task(
                get_cached_token(GMAIL_TOKEN_ID))
        if error_403 or error_429:
            # Exponential backoff before the caller retries the batch.
            LOG.warning(
                f"One or more responses failed with rate limit exceeded(403={error_403}, 429={error_429}), "
                f"waiting {self.backoff} seconds.")
            await asyncio.sleep(self.backoff)
            self.backoff *= 2
            if self.backoff > 32:
                # TODO: Backoff is too high, just throw an error.
                pass
Example #13
0
    def lineReceived(self, line):
        """Handle one line from the transport.

        Parses the line as an event header block; if a payload is announced
        via Content-Length, switches to raw mode to read it, otherwise
        dispatches the message immediately.
        """
        log.debug("Line In: %s" % line)
        self.parser = FeedParser(Event)
        self.parser.feed(line.decode())
        self.message = self.parser.close()
        # If the Content-Type has not already been consumed (state is not
        # READ_CONTENT) and a Content-Length header is present, read the
        # rest of the message as raw bytes and set it as the payload.
        if 'Content-Length' in self.message and self.state != 'READ_CONTENT':
            if self.enterRawMode():
                log.debug("Entering raw mode to read message payload")
                return
        try:
            self.inspectMessage()
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed; everything else is logged.
            log.error("Exception in message processing ", exc_info=True)
Example #14
0
 def headers_factory(_, fp, *args):
     """Read and parse HTTP headers from *fp*, enforcing line/count limits."""
     header_count = 0
     feedparser = FeedParser(OldMessage)
     try:
         while True:
             line = fp.readline(client._MAXLINE + 1)
             # Over-long header line: report it via the message status.
             if len(line) > client._MAXLINE:
                 msg = OldMessage()
                 msg.status = 'Line too long'
                 return msg
             header_count += 1
             if header_count > client._MAXHEADERS:
                 raise client.HTTPException("got more than %d headers" %
                                            client._MAXHEADERS)
             feedparser.feed(line.decode('iso-8859-1'))
             # Blank line or EOF terminates the header block.
             if line in (b'\r\n', b'\n', b''):
                 return feedparser.close()
     finally:
         # break the recursive reference chain
         feedparser.__dict__.clear()
Example #15
0
    def _send_batch_request(self, requests):
        """Sends a batch of requests to the server and processes the HTTP responses.

    Args:
      requests: List of GoogleComputeEngineBase.API_REQUEST named tuples. Must
        contain <= MAX_BATCH_SIZE elements.

    Raises:
      ValueError: If requests has more than MAX_BATCH_SIZE elements.

    Returns:
      List of GoogleComputeEngineBase.BATCH_RESPONSE named tuples, one for
      each element of request parameter.
    """
        if len(requests) > MAX_BATCH_SIZE:
            raise ValueError('Too many requests provided'
                             '(maximum is {0})'.format(MAX_BATCH_SIZE))

        # Wrap each request in an application/http MIME part; the Content-ID
        # records the request's index so responses can be re-matched below.
        batch = _BatchApiRequest()
        base = urlparse.urlsplit(self.base_url)
        base_path = base.path.rstrip('/')
        for i, request in enumerate(requests):
            msg = MIMENonMultipart('application', 'http')
            msg.add_header('Content-ID', '<{0}>'.format(i))
            msg.set_payload(
                self._serialize_batch_api_request(base_path, request))
            batch.attach(msg)

        batch_string = batch.as_string()
        content_type = 'multipart/mixed; boundary="{0}"'.format(
            batch.get_boundary())

        # POST the combined multipart body to the service's /batch endpoint.
        url = urlparse.urlunsplit((base.scheme, base.netloc, 'batch',
                                   self._create_url_query(None), None))
        response, data = self._send_request(url, 'POST', batch_string,
                                            content_type)

        # A failure at the batch level applies to every request in it.
        if response.status >= 300:
            error = gce.GceError(message=response.reason,
                                 status=response.status)
            return [error] * len(requests)  # Return all errors.
        elif not data:
            error = gce.GceError(message='Server returned no data',
                                 status=response.status)
            return [error] * len(requests)  # Return all errors.

        # Process successful response.
        # Re-attach the content-type header so FeedParser can split the
        # multipart payload on its declared boundary.
        data = 'content-type: {0}\r\n\r\n'.format(
            response['content-type']) + data
        parser = FeedParser()
        parser.feed(data)
        response = parser.close()

        responses = []
        for part in response.get_payload():
            # Recover the original request index from the part's Content-ID.
            responses.append(
                (int(RESPONSE_ID_REGEX.match(part['Content-ID']).group(1)),
                 self._parse_batch_api_response(part.get_payload())))

        # Return responses in the same order the requests were given.
        responses.sort(key=lambda r: r[0])
        return [r[1] for r in responses]
Example #16
0
    def raw_info(self, pkg, chkdistro):
        """Look up binary and source package info for *pkg* in a distribution.

        Args:
          pkg: package name, optionally followed by a distribution name.
          chkdistro: default distribution to query when *pkg* names none.

        Returns:
          A dict for the newest matching package (with 'Distribution' and
          'Architectures' keys added), or an error string.
        """
        if not pkg.strip():
            return ''
        # Sanitise the package name: keep only alphanumerics and .-_+
        _pkg = ''.join([x for x in pkg.strip().split(None,1)[0] if x.isalnum() or x in '.-_+'])
        distro = chkdistro
        # A second word, if present, selects a different distribution.
        if len(pkg.strip().split()) > 1:
            distro = ''.join([x for x in pkg.strip().split(None,2)[1] if x.isalnum() or x in '-._+'])
        if distro not in self.distros:
            return "%r is not a valid distribution: %s" % (distro, ", ".join(self.distros))

        pkg = _pkg

        # 'apt-cache show' for the binary package, 'showsrc' for the source.
        try:
            data = apt_cache(self.aptdir, distro, ['show', pkg])
        except subprocess.CalledProcessError as e:
            data = e.output
        try:
            data2 = apt_cache(self.aptdir, distro, ['showsrc', pkg])
        except subprocess.CalledProcessError as e:
            data2 = e.output
        if not data or 'E: No packages found' in data:
            return 'Package %s does not exist in %s' % (pkg, distro)

        # Pick the binary stanza with the highest version; '0~' sorts
        # before every real version string.
        maxp = {'Version': '0~'}
        packages = [x.strip() for x in data.split('\n\n')]
        for p in packages:
            if not p.strip():
                continue
            parser = FeedParser()
            parser.feed(p)
            p = parser.close()
            if type(p) == type(""):
                self.log.error("PackageInfo/packages: apt returned an error, do you have the deb-src URLs in %s.list?" % distro)
                # Typo fix: was "Package lookup faild".
                return "Package lookup failed"
            if not p.get("Version", None):
                continue
            if apt.apt_pkg.version_compare(maxp['Version'], p['Version']) <= 0:
                maxp = p
            del parser

        # Same selection over the source stanzas.
        maxp2 = {'Version': '0~'}
        packages2 = [x.strip() for x in data2.split('\n\n')]
        for p in packages2:
            if not p.strip():
                continue
            parser = FeedParser()
            parser.feed(p)
            p = parser.close()
            if type(p) == type(""):
                self.log.error("PackageInfo/packages: apt returned an error, do you have the deb-src URLs in %s.list?" % distro)
                return "Package lookup failed"
            # Use .get(): a stanza without a Version header is skipped
            # instead of raising KeyError (matches the binary loop above).
            if not p.get("Version", None):
                continue
            if apt.apt_pkg.version_compare(maxp2['Version'], p['Version']) <= 0:
                maxp2 = p
            del parser

        # Only mention architectures when the source package is restricted
        # to specific ones (i.e. not 'any'/'all').
        archs = ''
        if 'Architecture' in maxp2:
            archs = [_.strip() for _ in maxp2['Architecture'].split() if _.strip()]
            for arch in archs:
                if arch not in ('any', 'all'):
                    continue
                else:
                    archs = ''
                    break

            if archs:
                archs = ' (Only available for %s)' % '; '.join(archs)

        maxp["Distribution"] = distro
        maxp["Architectures"] = archs
        return maxp