Example #1
def get_email_message_from_file(filename="notification.email"):
    """Load a mail and parse it into a email.message."""
    email_string = get_string_from_file(filename)
    parser = FeedParser()
    parser.feed(email_string)
    email_message = parser.close()
    return email_message
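For comparison, a minimal stand-alone sketch of the same feed/close pattern; the message text below is invented for illustration and replaces the get_string_from_file helper:
from email.feedparser import FeedParser

raw = "From: alice@example.com\r\nSubject: Hello\r\n\r\nBody text"  # invented sample
parser = FeedParser()
parser.feed(raw)
msg = parser.close()
print(msg["Subject"])       # -> Hello
print(msg.get_payload())    # -> Body text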
Example #2
  def _parse_batch_api_response(self, response):
    """Parses an individual part of the MIME multipart server response.

    Args:
      response: One part of the MIME multipart message, string.
    Raises:
      ValueError: if an invalid HTTP header is encountered.
    Returns:
      An instance of GoogleComputeEngineBase.BATCH_RESPONSE named tuple.
    """
    status, payload = response.split('\n', 1)
    split = status.split(None, 2)
    if len(split) > 1:
      status = split[1]
      reason = split[2] if len(split) > 2 else ''
    else:
      raise ValueError('Invalid HTTP server response.')

    parser = FeedParser()
    parser.feed(payload)
    msg = parser.close()
    msg['status'] = status
    http_response = httplib2.Response(msg)
    http_response.reason = reason
    payload = msg.get_payload()
    return GoogleComputeEngineBase.BATCH_RESPONSE(http_response, payload)
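A stand-alone sketch of the same status-line split and header parse on an invented batch part; it uses only the standard library and omits the httplib2.Response and BATCH_RESPONSE wrapping from the method above:
from email.feedparser import FeedParser

# invented MIME part: status line, one header, blank line, body
part = 'HTTP/1.1 200 OK\nContent-Type: application/json\n\n{"kind": "demo"}'
status_line, payload = part.split('\n', 1)
_, status, reason = status_line.split(None, 2)

parser = FeedParser()
parser.feed(payload)
msg = parser.close()
print(status, reason)        # -> 200 OK
print(msg.get_payload())     # -> {"kind": "demo"}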
Example #4
class mail:
    """Class to filter mail."""
    def __init__(self):
        """Initialise class"""
        # initiate class for feedparser
        self.raw_stream = FeedParser()

        # variables for parsed mail data
        self.raw_data = ''
        self.raw_msg = ''
        self.headers = {}
        self.body = ''
        self.sender = ''
        self.to = ''
        self.subject = ''
        self.date_s = ''
        self.date_d = ''

    def feed(self, r1):
        """Read data before parsing."""
        self.raw_data = ''.join(r1)

    def parse(self):
        """Parse raw data using FeedParser to extract body and headers."""

        # pass raw data to feedparser instance
        self.raw_stream.feed(self.raw_data)

        # close the feedparser and retrieve the parsed message
        self.raw_msg = self.raw_stream.close()
        # Mail processing
        # copy the parsed headers into a dict
        for each_key in self.raw_msg.keys():
            self.headers[each_key] = self.raw_msg.get(each_key)

        # mail related variables
        # Get payload without parsing if it is not multipart
        if self.raw_msg.is_multipart() == False:
            self.body = h.html_to_text(self.raw_msg.get_payload())
        # If message is multi-part and has both html/text parts,
        # get only the text message
        elif self.raw_msg.get_content_type() == 'multipart/alternative':
            for part in self.raw_msg.walk():
                if h.contains(part.get_content_type(), 'text/plain'):
                    self.body = part.get_payload(decode=True)
            self.body = h.html_to_text(self.body)
        else:
            # If message is multi-part and encoded with base-64, combine plain
            # text and html text and strip all html tags
            for part in self.raw_msg.walk():
                if h.contains(part.get_content_type(), 'text'):
                    self.body = self.body + part.get_payload(decode=True)
            self.body = h.html_to_text(self.body)

        # Store data into essential variables
        self.sender = self.headers['From'].lower()
        self.to = self.headers['To'].lower()
        self.date_s = self.headers['Date']
        self.date_d = h.c_date(self.date_s)
        self.subject = self.headers['Subject'].lower()
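The helper module h used above is not shown, so here is a self-contained sketch of just the multipart/alternative branch on an invented message, skipping the html_to_text step:
from email.feedparser import FeedParser
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# build an invented multipart/alternative message to parse
alt = MIMEMultipart('alternative')
alt.attach(MIMEText('plain body', 'plain'))
alt.attach(MIMEText('<p>html body</p>', 'html'))

parser = FeedParser()
parser.feed(alt.as_string())
msg = parser.close()

body = ''
for part in msg.walk():
    if part.get_content_type() == 'text/plain':
        body = part.get_payload(decode=True).decode()
print(body)    # -> plain body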
Example #5
    def _parse_batch_api_response(self, response):
        """Parses an individual part of the MIME multipart server response.

    Args:
      response: One part of the MIME multipart message, string.
    Raises:
      ValueError: if an invalid HTTP header is encountered.
    Returns:
      An instance of GoogleComputeEngineBase.BATCH_RESPONSE named tuple.
    """
        status, payload = response.split('\n', 1)
        split = status.split(None, 2)
        if len(split) > 1:
            status = split[1]
            reason = split[2] if len(split) > 2 else ''
        else:
            raise ValueError('Invalid HTTP server response.')

        parser = FeedParser()
        parser.feed(payload)
        msg = parser.close()
        msg['status'] = status
        http_response = httplib2.Response(msg)
        http_response.reason = reason
        payload = msg.get_payload()
        return GoogleComputeEngineBase.BATCH_RESPONSE(http_response, payload)
Example #6
class mail:
    """Class to filter mail."""
    def __init__(self):
        """Initialise class"""
        # initiate class for feedparser
        self.raw_stream = FeedParser()

        # variables for parsed mail data
        self.raw_data   = ''
        self.raw_msg    = ''
        self.headers    = {}
        self.body    = ''
        self.sender  = ''
        self.to      = ''
        self.subject = ''
        self.date_s  = ''
        self.date_d  = ''

    def feed(self, r1):
        """Read data before parsing."""
        self.raw_data = ''.join(r1) 

    def parse(self):
        """Parse raw data using FeedParser to extract body and headers."""

        # pass raw data to feedparser instance
        self.raw_stream.feed(self.raw_data)

        # close the feedparser and retrieve the parsed message
        self.raw_msg = self.raw_stream.close()
        # Mail processing
        # copy the parsed headers into a dict
        for each_key in self.raw_msg.keys():
            self.headers[each_key] = self.raw_msg.get(each_key)

        # mail related variables
        # Get payload without parsing if it is not multipart
        if self.raw_msg.is_multipart() == False:
            self.body    = h.html_to_text(self.raw_msg.get_payload())
        # If message is multi-part and has both html/text parts,
        # get only the text message
        elif self.raw_msg.get_content_type() == 'multipart/alternative':
            for part in self.raw_msg.walk():
                if h.contains(part.get_content_type(),'text/plain'):
                    self.body =  part.get_payload(decode=True)
            self.body = h.html_to_text(self.body)
        else:
            # If message is multi-part and encoded with base-64, combine plain 
            # text and html text and strip all html tags
            for part in self.raw_msg.walk():
                if h.contains(part.get_content_type(), 'text'):
                    self.body = self.body + part.get_payload(decode=True)
            self.body = h.html_to_text(self.body)

        # Store data into essential variables
        self.sender  = self.headers['From'].lower()
        self.to      = self.headers['To'].lower()
        self.date_s  = self.headers['Date']
        self.date_d = h.c_date(self.date_s)
        self.subject = self.headers['Subject'].lower()
Example #7
    async def _deserialize_response(self, payload):
        """Convert string into httplib2 response and content.

    Args:
      payload: string, headers and body as a string.

    Returns:
      A pair (resp, content), such as would be returned from httplib2.request.
    """
        # Strip off the status line
        status_line, payload = payload.split("\n", 1)
        protocol, status, reason = status_line.split(" ", 2)

        # Parse the rest of the response
        parser = FeedParser()
        parser.feed(payload)
        msg = parser.close()
        msg["status"] = status

        # Create httplib2.Response from the parsed headers.
        resp = httplib2.Response(msg)
        resp.reason = reason
        resp.version = int(protocol.split("/", 1)[1].replace(".", ""))

        content = payload.split("\r\n\r\n", 1)[1]

        return resp, content
Example #8
  def _send_batch_request(self, requests):
    """Sends a batch of requests to the server and processes the HTTP responses.

    Args:
      requests: List of GoogleComputeEngineBase.API_REQUEST named tuples. Must
        contain <= MAX_BATCH_SIZE elements.

    Raises:
      ValueError: If requests has more than MAX_BATCH_SIZE elements.

    Returns:
      List of GoogleComputeEngineBase.BATCH_RESPONSE named tuples, one for
      each element of the requests parameter.
    """
    if len(requests) > MAX_BATCH_SIZE:
      raise ValueError('Too many requests provided '
                       '(maximum is {0})'.format(MAX_BATCH_SIZE))

    batch = _BatchApiRequest()
    base = urlparse.urlsplit(self.base_url)
    base_path = base.path.rstrip('/')
    for i, request in enumerate(requests):
      msg = MIMENonMultipart('application', 'http')
      msg.add_header('Content-ID', '<{0}>'.format(i))
      msg.set_payload(self._serialize_batch_api_request(base_path, request))
      batch.attach(msg)

    batch_string = batch.as_string()
    content_type = 'multipart/mixed; boundary="{0}"'.format(
        batch.get_boundary())

    url = urlparse.urlunsplit((base.scheme, base.netloc, 'batch',
                               self._create_url_query(None), None))
    response, data = self._send_request(url, 'POST', batch_string, content_type)

    if response.status >= 300:
      error = gce.GceError(
          message=response.reason, status=response.status)
      return [error] * len(requests)  # Return all errors.
    elif not data:
      error = gce.GceError(
          message='Server returned no data', status=response.status)
      return [error] * len(requests)  # Return all errors.

    # Process successful response.
    data = 'content-type: {0}\r\n\r\n'.format(response['content-type']) + data
    parser = FeedParser()
    parser.feed(data)
    response = parser.close()

    responses = []
    for part in response.get_payload():
      responses.append((
          int(RESPONSE_ID_REGEX.match(part['Content-ID']).group(1)),
          self._parse_batch_api_response(part.get_payload())))

    responses.sort(key=lambda r: r[0])
    return [r[1] for r in responses]
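A hedged, self-contained sketch of the "prepend a Content-Type header, then parse the whole multipart body" trick used above; the boundary and the single part are invented:
from email.feedparser import FeedParser

# invented multipart body with one application/http part
body = ('--BATCH\r\n'
        'Content-Type: application/http\r\n'
        'Content-ID: <response-0>\r\n'
        '\r\n'
        'HTTP/1.1 200 OK\r\n'
        '\r\n'
        '--BATCH--\r\n')
data = 'content-type: multipart/mixed; boundary="BATCH"\r\n\r\n' + body

parser = FeedParser()
parser.feed(data)
response = parser.close()
for part in response.get_payload():
    print(part['Content-ID'], repr(part.get_payload()))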
Example #9
def from_string(data, base, plugin, enabled=False):
    parser = FeedParser(PluginMessage)
    parser.feed(data)
    message = parser.close()
    message.is_new = False
    message.enabled = enabled
    message.plugin = plugin
    message.base = base
    return message
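PluginMessage is not defined in this snippet; a hedged sketch of the same custom-factory pattern, with an invented email.message.Message subclass standing in for it:
from email.feedparser import FeedParser
from email.message import Message

class PluginMessage(Message):   # illustrative stand-in for the real class
    is_new = True

parser = FeedParser(PluginMessage)
parser.feed("Name: demo-plugin\n\nsome body\n")   # invented message text
message = parser.close()
print(type(message).__name__, message['Name'])    # -> PluginMessage demo-plugin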
Example #10
def from_string(data, base, plugin, enabled=False):
    parser = FeedParser(PluginMessage)
    parser.feed(data)
    message = parser.close()
    message.is_new = False
    message.enabled = enabled
    message.plugin = plugin
    message.base = base
    return message
Example #11
def load_message(fp):
    """Load message from a file handle.

    Reads data from a file handle and parses it
    into a message object.
    """
    parser = FeedParser()
    for line in fp.readlines():
        parser.feed(line)
    
    return parser.close()
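A hedged usage sketch for load_message, assuming the function above is in scope with FeedParser imported from email.feedparser; io.StringIO stands in for a real file handle and the message text is invented:
import io

fp = io.StringIO("Subject: incremental parse\n\nline one\nline two\n")  # invented
msg = load_message(fp)
print(msg["Subject"])           # -> incremental parse
print(repr(msg.get_payload()))  # -> 'line one\nline two\n'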
Example #12
    def __break_into_parts(self):

        p = FeedParser()
        for h in self.get_headers_array():
            p.feed(h + "\r\n")
        p.feed("\r\n")
        p.feed(self.__body)
        msg = p.close()
        parts = msg.get_payload()
        return parts
Example #13
    def __init__(self):
        """Initialise class"""
        # initiate class for feedparser
        self.raw_stream = FeedParser()

        # variables for parsed mail data
        self.raw_data = ''
        self.raw_msg = ''
        self.headers = {}
        self.body = ''
        self.sender = ''
        self.to = ''
        self.subject = ''
        self.date_s = ''
        self.date_d = ''
Example #14
 def __init__(self):
     self._body_start = 0
     self._filename = None
     self._fp = StringIO()
     self._headers = None
     self._parser = FeedParser(Message)
     self._rollover = 262144  # 256kb
Example #15
    async def handle_response(self, response, content):
        header = f"content-type: {response.headers['content-type']}\r\n\r\n"
        for_parser = header + content
        parser = FeedParser()
        parser.feed(for_parser)
        mime_response = parser.close()

        failed_requests = []
        # Separation of the multipart response message.
        error_401, error_403, error_429 = False, False, False
        for part in mime_response.get_payload():
            http_request_idx = int(self._header_to_id(part["Content-ID"]))
            http_request = self.requests[http_request_idx]

            response, content = await asyncio.create_task(
                self._deserialize_response(part.get_payload()))
            parsed_response = json.loads(content)
            if isinstance(parsed_response,
                          dict) and 'error' in parsed_response:
                error_code = parsed_response['error']['code']
                if error_code == 429: error_429 = True
                elif error_code == 403: error_403 = True
                elif error_code == 401: error_401 = True
                else:
                    LOG.error(
                        f"BatchApiRequest: Unhandled error in one of the responses: {parsed_response}."
                        f"\n\tRequest uri, method, headers: {http_request.uri}, {http_request.method},"
                        f"{http_request.headers}")
                    continue
                failed_requests.append(http_request)
            else:
                self.completed_responses.append(parsed_response)

        self.requests = failed_requests
        if error_401:
            self.access_token = await asyncio.create_task(
                get_cached_token(GMAIL_TOKEN_ID))
        if error_403 or error_429:
            LOG.warning(
                f"One or more responses failed with rate limit exceeded(403={error_403}, 429={error_429}), "
                f"waiting {self.backoff} seconds.")
            await asyncio.sleep(self.backoff)
            self.backoff *= 2
            if self.backoff > 32:
                # TODO: Backoff is too high, just throw an error.
                pass
Example #16
    def lineReceived(self, line):
        log.debug("Line In: %s" % line)
        self.parser = FeedParser(Event)
        self.parser.feed(line.decode())  # update: line (to) line.decode()
        self.message = self.parser.close()
        # if self.state is not READ_CONTENT (i.e Content-Type is already read) and the Content-Length is present
        # read rest of the message and set it as payload

        if 'Content-Length' in self.message and self.state != 'READ_CONTENT':
            # update: self.message.has_key('Content-Length') (to) in
            if self.enterRawMode():
                log.debug("Entering raw mode to read message payload")
                return
        try:
            self.inspectMessage()
        except:
            log.error("Exception in message processing ", exc_info=True)
Example #17
    def __break_into_parts(self):

        p = FeedParser()
        for h in self.get_headers_array():
            p.feed(h + "\r\n")
        p.feed("\r\n")
        p.feed(self.__body)
        msg = p.close()
        parts = msg.get_payload()
        return parts
Example #18
 def __init__(self, message=None):
     """Initialize a Message instance."""
     feedparser = FeedParser(email.message.Message)
     feedparser._set_headersonly()
     data = message.read(4096)
     feedparser.feed(data)
     self._become_message(feedparser.close())
Example #19
    def _break_into_parts(self):
        p = FeedParser()

        for h in self._response.headers:
            p.feed(h + ':' + self._response.headers[h] + "\r\n")

        p.feed("\r\n")
        p.feed(self.text())

        msg = p.close()

        parts = msg.get_payload()

        return parts
Example #20
 def message_from_response(self, headers, body):
     fp = FeedParser()
     for header, value in headers.iteritems():
         fp.feed("%s: %s\n" % (header, Header(value).encode()))
     fp.feed("\n")
     fp.feed(body)
     response = fp.close()
     return response
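The snippet above is Python 2 (iteritems); a hedged Python 3 sketch of the same header-injection pattern, with invented header and body values:
from email.feedparser import FeedParser
from email.header import Header

headers = {"Subject": "Grüße", "X-Demo": "1"}   # invented values
body = "body text"

fp = FeedParser()
for header, value in headers.items():
    fp.feed("%s: %s\n" % (header, Header(value).encode()))
fp.feed("\n")
fp.feed(body)
response = fp.close()
print(response["X-Demo"])       # -> 1
print(response["Subject"])      # RFC 2047 encoded, e.g. =?utf-8?...?=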
Example #21
 def headers_factory(_, fp, *args):
     headers = 0
     feedparser = FeedParser(OldMessage)
     try:
         while True:
             line = to_local(fp.readline(client._MAXLINE + 1))
             if len(line) > client._MAXLINE:
                 ret = OldMessage()
                 ret.status = 'Line too long'
                 return ret
             headers += 1
             if headers > client._MAXHEADERS:
                 raise client.HTTPException("got more than %d headers" % client._MAXHEADERS)
             feedparser.feed(line)
             if line in ('\r\n', '\n', ''):
                 return feedparser.close()
     finally:
         # break the recursive reference chain
         feedparser.__dict__.clear()
Example #22
 def headers_factory(_, fp, *args):
     headers = 0
     feedparser = FeedParser(OldMessage)
     try:
         while True:
             line = fp.readline(client._MAXLINE + 1)
             if len(line) > client._MAXLINE:
                 ret = OldMessage()
                 ret.status = 'Line too long'
                 return ret
             headers += 1
             if headers > client._MAXHEADERS:
                 raise client.HTTPException("got more than %d headers" %
                                            client._MAXHEADERS)
             feedparser.feed(line.decode('iso-8859-1'))
             if line in (b'\r\n', b'\n', b''):
                 return feedparser.close()
     finally:
         # break the recursive reference chain
         feedparser.__dict__.clear()
Example #23
 def parse(self, fp, headersonly=False):
     feedparser = FeedParser(self._class, policy=self.policy)
     if headersonly:
         feedparser._set_headersonly()
     while True:
         data = fp.read(8192)
         if not data:
             break
         feedparser.feed(data)
     return feedparser.close()
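A hedged sketch of the headers-only mode this parse() exposes; note that _set_headersonly() is a private helper of email.feedparser, shown here purely for illustration:
import io
from email.feedparser import FeedParser

# invented message: one header, then a body line that looks like a header
fp = io.StringIO("Subject: demo\n\nFrom: this line stays in the body\n")
feedparser = FeedParser()
feedparser._set_headersonly()
while True:
    data = fp.read(8192)
    if not data:
        break
    feedparser.feed(data)
msg = feedparser.close()
print(msg["Subject"])             # -> demo
print(repr(msg.get_payload()))    # body kept as an unparsed string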
Example #24
 def __init__(self, message=None):
     """Initialize a Message instance."""
     feedparser = FeedParser(email.message.Message)
     feedparser._set_headersonly()
     data = message.read(4096)
     feedparser.feed(data)
     self._become_message(feedparser.close())
Example #25
    def _break_into_parts(self):
        p = FeedParser()

        for h in self._response.headers:
            p.feed(h + ':' + self._response.headers[h] + "\r\n")

        p.feed("\r\n")
        p.feed(self.text())

        msg = p.close()

        parts = msg.get_payload()

        return parts
Example #26
def message_from_string_safe(text):
    'handle failures in the email standard library by bypassing the StringIO blowup'
    try: # use the standard library by default
        return email.message_from_string(text)
    except UnicodeEncodeError: # code copied from email.parser.Parser.parse()
        feedparser = FeedParser(Message)
        feedparser._set_headersonly()
        feedparser.feed(text)
        return feedparser.close()
Example #27
    def __init__(self):
        """Initialise class"""
        # initiate class for feedparser
        self.raw_stream = FeedParser()

        # variables for parsed mail data
        self.raw_data   = ''
        self.raw_msg    = ''
        self.headers    = {}
        self.body    = ''
        self.sender  = ''
        self.to      = ''
        self.subject = ''
        self.date_s  = ''
        self.date_d  = ''
Example #28
    def parse(self, fp, headersonly=False):
        """Create a message structure from the data in a file.

        Reads all the data from the file and returns the root of the message
        structure.  Optional headersonly is a flag specifying whether to stop
        parsing after reading the headers or not.  The default is False,
        meaning it parses the entire contents of the file.
        """
        feedparser = FeedParser(self._class, policy=self.policy)
        if headersonly:
            feedparser._set_headersonly()
        while True:
            data = fp.read(8192)
            if not data:
                break
            feedparser.feed(data)
        return feedparser.close()
Example #29
 def __init__(self, fd):
   self.message = ''
   self.attachments = list()
   self.headers = {
       'subject': 'None',
       'sent': int(time.time()),
       'sender': 'Anonymous',
       'email': '*****@*****.**',
       'group_name': '',
       'parent': '',
       'sage': False,
       'public_key': ''
   }
   self._signature = None
   self.signature_valid = None
   self._fd = fd
   self._parser = FeedParser()
   self._parse_headers()
   if self.headers and self.headers['public_key']:
     self._check_signature()
Example #30
    def parse(self, fp, headersonly=False):
        """Create a message structure from the data in a file.

        Reads all the data from the file and returns the root of the message
        structure.  Optional headersonly is a flag specifying whether to stop
        parsing after reading the headers or not.  The default is False,
        meaning it parses the entire contents of the file.
        """
        feedparser = FeedParser(self._class)
        if headersonly:
            feedparser._set_headersonly()
        while True:
            data = fp.read(8192)
            if not data:
                break
            feedparser.feed(data)
        return feedparser.close()
Example #31
def parse_mbox(mbox_path, open_function = open):
    """
    parse_mbox(string_path, [open_function = open]) -> email.Message (generator)

    Iterates over the messages of a mbox file and return a Python email.Message
    Object (generator, can be used in for loops). The optional second argument
    must be a callable used to open the file (it must have an interface
    similar to the built-in "open" function).

    If the file cannot be opened or read, an IOError exception will be raised.
    If there are problems with the file contents or if any argument has the
    wrong type, a TypeError exception will be raised with more details on the
    problem.

    """

    # Test the function arguments if they are ok...
    if not isinstance(mbox_path,str):
        try:
            mbox_path = str(mbox_path)
        except:
            raise TypeError("'mbox_path' argument must be a mbox path string.")

    if not callable(open_function):
        raise TypeError("'open_function' argument must be a callable object.")

    # Try to open the file... any problems the exceptions will be raised!
    mbox_file = open_function(mbox_path)

    # A blank line indicates that a new message may begin in mboxo format
    # first_line tells us we don't have to deliver the last message
    last_line_blank = True
    first_line = True
    # MailParser can feed from the read lines and reconstruct the message
    MailParser = FeedParser()
    for line in mbox_file:
        # If it's the beginning of the file or the last line was left blank
        # and the line begins with "From " then this is a
        # new message!
        if line[0:5] == 'From ' and last_line_blank:
            # If this is not the first line, return the last message
            if not first_line:
                # Return last message contents
                yield (MailParser.close())
                # Delete the last MessageParser reference (for the garbage
                # collector)
                del(MailParser)
                # Create a new MessageParser
                MailParser = FeedParser()
            else:
                first_line = False
        else:
            if line in ['\n','\r','\r\n']:
                last_line_blank = True
            # Keep reading..
            #print "alimentei com : %s " % line
            MailParser.feed(line)
        #if line_number > 10000:
        #    break
    # If there is still one message to be delivered:
    last_message = MailParser.close()
    if last_message:
        yield (last_message)
    mbox_file.close()
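A hedged usage sketch: since open_function only needs to return an iterable of lines with a close() method, io.StringIO can stand in for a real mbox file here (the two-message mbox text is invented):
import io

SAMPLE_MBOX = (                     # invented mboxo-style content
    "From alice Mon Jan  1 00:00:00 2024\n"
    "From: alice@example.com\n"
    "Subject: first\n"
    "\n"
    "hello\n"
    "\n"
    "From bob Mon Jan  1 00:01:00 2024\n"
    "From: bob@example.com\n"
    "Subject: second\n"
    "\n"
    "world\n"
)

for message in parse_mbox("in-memory.mbox", lambda path: io.StringIO(SAMPLE_MBOX)):
    print(message["Subject"])       # -> first, then second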
Example #32
    def _send_batch_request(self, requests):
        """Sends a batch of requests to the server and processes the HTTP responses.

    Args:
      requests: List of GoogleComputeEngineBase.API_REQUEST named tuples. Must
        contain <= MAX_BATCH_SIZE elements.

    Raises:
      ValueError: If requests has more than MAX_BATCH_SIZE elements.

    Returns:
      List of GoogleComputeEngineBase.BATCH_RESPONSE named tuples, one for
      each element of the requests parameter.
    """
        if len(requests) > MAX_BATCH_SIZE:
            raise ValueError('Too many requests provided '
                             '(maximum is {0})'.format(MAX_BATCH_SIZE))

        batch = _BatchApiRequest()
        base = urlparse.urlsplit(self.base_url)
        base_path = base.path.rstrip('/')
        for i, request in enumerate(requests):
            msg = MIMENonMultipart('application', 'http')
            msg.add_header('Content-ID', '<{0}>'.format(i))
            msg.set_payload(
                self._serialize_batch_api_request(base_path, request))
            batch.attach(msg)

        batch_string = batch.as_string()
        content_type = 'multipart/mixed; boundary="{0}"'.format(
            batch.get_boundary())

        url = urlparse.urlunsplit((base.scheme, base.netloc, 'batch',
                                   self._create_url_query(None), None))
        response, data = self._send_request(url, 'POST', batch_string,
                                            content_type)

        if response.status >= 300:
            error = gce.GceError(message=response.reason,
                                 status=response.status)
            return [error] * len(requests)  # Return all errors.
        elif not data:
            error = gce.GceError(message='Server returned no data',
                                 status=response.status)
            return [error] * len(requests)  # Return all errors.

        # Process successful response.
        data = 'content-type: {0}\r\n\r\n'.format(
            response['content-type']) + data
        parser = FeedParser()
        parser.feed(data)
        response = parser.close()

        responses = []
        for part in response.get_payload():
            responses.append(
                (int(RESPONSE_ID_REGEX.match(part['Content-ID']).group(1)),
                 self._parse_batch_api_response(part.get_payload())))

        responses.sort(key=lambda r: r[0])
        return [r[1] for r in responses]
Example #33
    def raw_info(self, pkg, chkdistro):
        if not pkg.strip():
            return ''
        _pkg = ''.join([x for x in pkg.strip().split(None,1)[0] if x.isalnum() or x in '.-_+'])
        distro = chkdistro
        if len(pkg.strip().split()) > 1:
            distro = ''.join([x for x in pkg.strip().split(None,2)[1] if x.isalnum() or x in '-._+'])
        if distro not in self.distros:
            return "%r is not a valid distribution: %s" % (distro, ", ".join(self.distros))

        pkg = _pkg

        try:
            data = apt_cache(self.aptdir, distro, ['show', pkg])
        except subprocess.CalledProcessError as e:
            data = e.output
        try:
            data2 = apt_cache(self.aptdir, distro, ['showsrc', pkg])
        except subprocess.CalledProcessError as e:
            data2 = e.output
        if not data or 'E: No packages found' in data:
            return 'Package %s does not exist in %s' % (pkg, distro)
        maxp = {'Version': '0~'}
        packages = [x.strip() for x in data.split('\n\n')]
        for p in packages:
            if not p.strip():
                continue
            parser = FeedParser()
            parser.feed(p)
            p = parser.close()
            if type(p) == type(""):
                self.log.error("PackageInfo/packages: apt returned an error, do you have the deb-src URLs in %s.list?" % distro)
                return "Package lookup faild"
            if not p.get("Version", None):
                continue
            if apt.apt_pkg.version_compare(maxp['Version'], p['Version']) <= 0:
                maxp = p
            del parser
        maxp2 = {'Version': '0~'}
        packages2 = [x.strip() for x in data2.split('\n\n')]
        for p in packages2:
            if not p.strip():
                continue
            parser = FeedParser()
            parser.feed(p)
            p = parser.close()
            if type(p) == type(""):
                self.log.error("PackageInfo/packages: apt returned an error, do you have the deb-src URLs in %s.list?" % distro)
                return "Package lookup faild"
            if not p['Version']:
                continue
            if apt.apt_pkg.version_compare(maxp2['Version'], p['Version']) <= 0:
                maxp2 = p
            del parser
        archs = ''
        if 'Architecture' in maxp2:
            archs = [_.strip() for _ in maxp2['Architecture'].split() if _.strip()]
            for arch in archs:
                if arch not in ('any', 'all'):
                    continue
                else:
                    archs = ''
                    break

            if archs:
                archs = ' (Only available for %s)' % '; '.join(archs)

        maxp["Distribution"] = distro
        maxp["Architectures"] = archs
        return maxp
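The core trick above is that apt-cache "show" output is an RFC 822-style record, so each stanza can be fed straight into a FeedParser; a stand-alone sketch with an invented record:
from email.feedparser import FeedParser

record = "Package: hello\nVersion: 2.10-3\nArchitecture: amd64\n"   # invented record
parser = FeedParser()
parser.feed(record)
p = parser.close()
print(p["Package"], p.get("Version"))    # -> hello 2.10-3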
Example #34
 def _parse_headers(self, lines):
     FeedParser._parse_headers(self, lines)
     if self._cur.get_content_type() == 'application/http-response':
         self._set_headersonly()
Example #35
class MessageParser(object):

  def __init__(self, fd):
    self.message = ''
    self.attachments = list()
    self.headers = {
        'subject': 'None',
        'sent': int(time.time()),
        'sender': 'Anonymous',
        'email': '*****@*****.**',
        'group_name': '',
        'parent': '',
        'sage': False,
        'public_key': ''
    }
    self._signature = None
    self.signature_valid = None
    self._fd = fd
    self._parser = FeedParser()
    self._parse_headers()
    if self.headers and self.headers['public_key']:
      self._check_signature()

  def _parse_headers(self):
    headers_found = False
    line = self._fd.readline()
    while line:
      self._parser.feed(line)
      head, _, data = line.partition(': ')
      head = head.lower()
      data = data[:-1]
      if head == 'subject':
        self.headers['subject'] = basicHTMLencode(data[4:]) if data.lower().startswith('re: ') else basicHTMLencode(data)
      elif head == 'date':
        sent_tz = parsedate_tz(data)
        if sent_tz:
          offset = 0
          if sent_tz[-1]:
            offset = sent_tz[-1]
          self.headers['sent'] = timegm((datetime(*sent_tz[:6]) - timedelta(seconds=offset)).timetuple())
      elif head == 'from':
        sender, _, email = data.rpartition(' <')
        email = email.replace('>', '')
        if sender:
          self.headers['sender'] = sender
        if email:
          self.headers['email'] = email
      elif head == 'references':
        self.headers['parent'] = data.split(' ')[0]
      elif head == 'newsgroups':
        self.headers['group_name'] = data.split(';')[0].split(',')[0]
      elif head == 'x-sage':
        self.headers['sage'] = True
      elif head == 'x-pubkey-ed25519':
        self.headers['public_key'] = data
      elif head == 'x-signature-ed25519-sha512':
        self._signature = data
      elif line == '\n':
        headers_found = True
        break
      line = self._fd.readline()
    if not headers_found:
      self.headers = None

  def _check_signature(self):
    bodyoffset = self._fd.tell()
    hasher = sha512()
    oldline = None
    for line in self._fd:
      if oldline:
        hasher.update(oldline)
      oldline = line.replace("\n", "\r\n")
    hasher.update(oldline.replace("\r\n", ""))
    self._fd.seek(bodyoffset)
    try:
      nacl.signing.VerifyKey(unhexlify(self.headers['public_key'])).verify(hasher.digest(), unhexlify(self._signature))
    except:
      self.headers['public_key'] = ''
      self.signature_valid = False
    else:
      self.signature_valid = True
    del hasher

  @staticmethod
  def _read_filedata(part):
    data = dict()
    data['obj'] = part.get_payload(decode=True)
    data['hash'] = sha1(data['obj']).hexdigest()
    data['name'] = 'empty_file_name' if part.get_filename() is None or part.get_filename().strip() == '' else basicHTMLencode(part.get_filename())
    data['ext'] = os.path.splitext(data['name'])[1].lower()
    data['type'] = mimetypes.types_map.get(data['ext'], '/')
    if data['type'] == '/':
      # MIME type not detected from the file extension. Use the declared mimetype to guess the extension. Ignore unknown mimetypes.
      test_ext = mimetypes.guess_extension(part.get_content_type())
      if test_ext:
        data['ext'] = test_ext
        data['type'] = mimetypes.types_map.get(data['ext'], '/')
        data['name'] += data['ext']
    if len(data['name']) > 512:
      data['name'] = data['name'][:512] + '...'
    data['maintype'], data['subtype'] = data['type'].split('/', 2)
    return data

  def parse_body(self):
    self._parser.feed(self._fd.read())
    result = self._parser.close()
    self._parser = None
    if result.is_multipart():
      if len(result.get_payload()) == 1 and result.get_payload()[0].get_content_type() == "multipart/mixed":
        result = result.get_payload()[0]
      for part in result.get_payload():
        if part.get_content_type().lower() == 'text/plain':
          self.message += part.get_payload(decode=True)
        else:
          self.attachments.append(self._read_filedata(part))
    else:
      if result.get_content_type().lower() == 'text/plain':
        self.message += result.get_payload(decode=True)
      else:
        self.attachments.append(self._read_filedata(result))
    del result
    self.message = basicHTMLencode(self.message)
Example #36
    def _parse (self):
        """Parse the BDF mime structure and record the locations of the binary
        blobs. Sets up various data fields in the BDFData object."""

        feedparser = FeedParser (Message)
        binarychunks = {}
        sizeinfo = None
        headxml = None
        self.fp.seek (0, 0)

        while True:
            data = self.fp.readline ()
            if not data:
                break

            feedparser.feed (data)

            skip = (data == '\n' and
                    len (feedparser._msgstack) == 3 and
                    feedparser._msgstack[-1].get_content_type () in ('application/octet-stream',
                                                                     'binary/octet-stream'))
            if skip:
                # We just finished reading the headers for a huge binary blob.
                # Time to remember where the data chunk is and pretend it doesn't
                # exist.
                msg = feedparser._msgstack[-1]
                ident = msg['Content-Location']
                assert ident.endswith ('.bin'), 'confusion #1 in hacky MIME parsing!'
                binarychunks[ident] = self.fp.tell ()
                if sizeinfo is None:
                    headxml, sizeinfo, tagpfx = _extract_size_info (feedparser)
                kind = ident.split ('/')[-1]
                assert kind in sizeinfo, 'no size info for binary chunk kind %s in MIME!' % kind
                self.fp.seek (sizeinfo[kind] + 1, 1) # skip ahead by data chunk size
                sample = self.fp.read (16)
                assert sample.startswith ('--MIME'), 'crap, unexpected chunk size in MIME parsing: %r' % sample
                self.fp.seek (-16, 1) # go back

            # check that two major kinds of data are read at least once
            if any([k.split('/')[3] == '3' for k in binarychunks.iterkeys()]):
                break

        if headxml is None:
            raise RuntimeError ('never found any binary data')

        self.mimemsg = feedparser.close ()
        self.headxml = headxml
        self.sizeinfo = sizeinfo
        self.binarychunks = binarychunks

        headsize, intsize = self.calc_intsize()

        # Compute some miscellaneous parameters that we'll need.
#        self.n_integrations = len (self.mimemsg.get_payload ()) - 1
        self.n_integrations = os.stat(self.fp.name).st_size/intsize
        self.n_antennas = int (headxml.find (tagpfx + nanttag).text)
        self.n_baselines = (self.n_antennas * (self.n_antennas - 1)) // 2

        ds = headxml.find (tagpfx + dstag)
        nbb = 0
        nspw = 0
        nchan = 0
        crosspolstr = None

        for bb in ds.findall (tagpfx + basebandtag):
            nbb += 1

            for spw in bb.getchildren ():
                nspw += 1
                nchan += int (spw.get ('numSpectralPoint'))

                if crosspolstr is None:
                    crosspolstr = spw.get ('crossPolProducts')
                elif spw.get ('crossPolProducts') != crosspolstr:
                    raise Exception ('can only handle spectral windows with identical cross pol products')

        self.n_basebands = nbb
        self.n_spws = nspw
        self.n_channels = nchan
        self.crosspols = crosspolstr.split ()
        self.n_pols = len(self.crosspols)

        # if bdf info pkl not present, write it
        if os.path.exists(os.path.dirname(self.pklname)) and self.pklname and (not os.path.exists(self.pklname)):
            logger.info('Writing bdf pkl info to %s...' % (self.pklname))
            with open(self.pklname,'wb') as pkl:
                # Compute some miscellaneous parameters that we'll need.
                pickle.dump( (self.mimemsg, self.headxml, self.sizeinfo, self.binarychunks, self.n_integrations, self.n_antennas, self.n_baselines, self.n_basebands, self.n_spws, self.n_channels, self.crosspols), pkl)

        return self # convenience
Example #37
class Spool(object):
    """
    Thin wrapper around either a file-object or a cStringIO object. Allows
    use of the body attribute without losing functionality by avoiding
    the class level methods of the transaction.
    """

    @property
    def body_start(self):
        return self._body_start

    @property
    def headers(self):
        return self._headers

    @property
    def name(self):
        return self._filename

    def __init__(self):
        self._body_start = 0
        self._filename = None
        self._fp = StringIO()
        self._headers = None
        self._parser = FeedParser(Message)
        self._rollover = 262144  # 256kb

    def __getattr__(self, key):
        return getattr(self._fp, key)

    def __iter__(self):
        return iter(self._fp)

    def end_headers(self):
        """
        Close off the parser and return the headers.
        """
        self._body_start = self._fp.tell()
        self._headers = self._parser.close()
        log.debug("headers marked as closed at %d chars", self._body_start)
        return self._headers

    def flush(self):
        """
        Flushes the data held in memory to a temporary file on disk.
        """
        # Check to see if the data has been flushed already
        if self._filename:
            return

        log.debug("flushing spool to disk")

        # Create the named temporary file to write the data to
        fp = self._fp
        newfp = self._fp = NamedTemporaryFile(dir="/tmp", prefix="")
        if fp:
            newfp.write(fp.getvalue())
            newfp.seek(fp.tell(), 0)
            fp.close()

        self._filename = newfp.name

        log.debug("flushed to disk with name %s", self._filename)

    def write(self, data):
        """
        Write data to the end of the email.

        :param data: The data to add to the end of the email
        :type data: str
        """

        datalen = len(data)
        log.debug("writing %d bytes of data", datalen)

        if (self._fp.tell() + datalen) > self._rollover:
            self.flush()

        self._fp.write(data)
        self._parser.feed(data)
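A hedged usage sketch for Spool, assuming the module-level names the class relies on (StringIO as io.StringIO, Message as email.message.Message, FeedParser, NamedTemporaryFile, and a log object) are available; the message text is invented:
spool = Spool()
spool.write("Subject: spooled demo\r\n\r\n")   # invented headers
headers = spool.end_headers()                  # records where the body starts
spool.write("hello spool\r\n")                 # invented body
print(headers["Subject"])                      # -> spooled demo
print(spool.body_start)                        # offset of the body in the spool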
Example #38
class FSProtocol(basic.LineReceiver):
    """FreeSWITCH EventSocket protocol implementation.
    
    All the FreeSWITCH api and dptool commands are defined in this class
    """
    delimiter = b"\n\n"
    jobType = False
    state = "READ_CONTENT"

    def connectionMade(self):
        self.contentCallbacks = {
            "auth/request": self.auth,
            "api/response": self.onAPIReply,
            "command/reply": self.onCommandReply,
            "text/event-plain": self.onEvent,
            "text/disconnect-notice": self.disconnectNotice
        }
        self.pendingJobs = []
        self.pendingBackgroundJobs = {}
        self.eventCallbacks = {}
        self.customEventCallbacks = {}
        self.subscribedEvents = []
        log.info("Connected to FreeSWITCH")

    def connectionLost(self, reason):
        log.info("Cleaning up")
        self.disconnectedFromFreeSWITCH()

    def disconnectedFromFreeSWITCH(self):
        """Over-ride this to get notified of FreeSWITCH disconnection"""
        pass

    def registerEvent(self, event, subscribe, function, *args, **kwargs):
        """Register a callback for the event 
        event -- (str) Event name as sent by FreeSWITCH , Custom events should give subclass also 
                                eg : CUSTOM conference::maintenance
        subscribe -- (bool) if True subscribe to this event
        function -- callback function accepts a event dictionary as first argument
        args -- argument to be passed to callback function
        kwargs -- keyword arguments to be passed to callback function
        
        returns instance of  EventCallback , keep a reference of this around if you want to de-register it later
        """
        if subscribe:
            if self.needToSubscribe(event):
                self.subscribeEvents(event)
        ecb = EventCallback(event, function, *args, **kwargs)
        ecb_list = self.eventCallbacks.get(event, [])
        event_callbacks = self.eventCallbacks
        #handle CUSTOM events
        if event.startswith("CUSTOM"):
            subclass = event.split(' ')
            event = subclass[1]
            ecb.subclass = event
            ecb_list = self.customEventCallbacks.get(event, [])
            event_callbacks = self.customEventCallbacks
        ecb_list.append(ecb)
        event_callbacks[event] = ecb_list
        return ecb

    def needToSubscribe(self, event):
        """Decide if we need to subscribe to an event or not by comparing
           the event provided against already subscribeEvents
        
        event -- (str) event name 
        
        returns bool
        """
        if "all" in self.subscribedEvents:
            return False
        if event in self.subscribedEvents:
            return False
        if 'myevents' in self.subscribedEvents:
            return False
        else:
            return True

    def deregisterEvent(self, ecb):
        """De-register a callback for the given event
        
        ecb -- (EventCallback) instance of EventCallback object
        """
        callbacks_list = self.eventCallbacks
        if ecb.eventname == 'CUSTOM':
            callbacks_list = self.customEventCallbacks
        ecbs = callbacks_list[ecb.eventname]
        try:
            ecbs.remove(ecb)
        except ValueError:
            log.error("%s already deregistered " % ecb)

    def dataReceived(self, data):
        """
        We override this Twisted method to avoid being disconnected by the default MAX_LENGTH when a message exceeds
        that limit
        """
        if self._busyReceiving:
            self._buffer += data
            return

        try:
            self._busyReceiving = True
            self._buffer += data
            while self._buffer and not self.paused:
                if self.line_mode:
                    try:
                        line, self._buffer = self._buffer.split(
                            self.delimiter, 1)
                    except ValueError:
                        return
                    else:
                        why = self.lineReceived(line)
                        if why or self.transport and self.transport.disconnecting:
                            return why
                else:
                    data = self._buffer
                    self._buffer = b''
                    why = self.rawDataReceived(data)
                    if why:
                        return why
        finally:
            self._busyReceiving = False

    def lineReceived(self, line):
        log.debug("Line In: %s" % line)
        self.parser = FeedParser(Event)
        self.parser.feed(line.decode())  # update: line (to) line.decode()
        self.message = self.parser.close()
        # if self.state is not READ_CONTENT (i.e Content-Type is already read) and the Content-Length is present
        # read rest of the message and set it as payload

        if 'Content-Length' in self.message and self.state != 'READ_CONTENT':
            # update: self.message.has_key('Content-Length') (to) in
            if self.enterRawMode():
                log.debug("Entering raw mode to read message payload")
                return
        try:
            self.inspectMessage()
        except:
            log.error("Exception in message processing ", exc_info=True)

    def rawDataReceived(self, data):
        """Read length of raw data specified by self.contentLength and set it as message payload """
        log.debug("Data In : %s" % data)
        self.rawdataCache = b''.join([self.rawdataCache, data])  # update: b
        if len(self.rawdataCache) >= self.contentLength - 1:
            self.clearLineBuffer()
            extra = self.rawdataCache[self.contentLength:]
            currentResult = self.rawdataCache[:self.contentLength]
            self.message.set_payload(currentResult)
            try:
                self.inspectMessage()
            except:
                log.error("Exception in message processing ", exc_info=True)
            self.setLineMode(extra)

    def enterRawMode(self):
        """
        Change to raw mode from line mode if self.contentLength > 0
        """
        self.contentLength = int(self.message['Content-Length'].strip())
        if self.contentLength > 0:
            self.rawdataCache = b''  # update
            self.setRawMode()
            return True
        return False

    def inspectMessage(self):
        """Inspect message and dispatch based on self.state or Content-Type of message """
        if self.state == "READ_EVENT":
            return self.dispatchEvent()
        if self.state == "READ_CHANNELINFO":
            return self.onConnect()
        if self.state == 'READ_API':
            return self.fireAPIDeferred()
        if "Content-Type" not in self.message:  # update: self.message.has_key("Content-Type") (to) in
            return
        ct = self.message['Content-Type']
        try:
            cb = self.contentCallbacks[ct]
            cb()
        except KeyError:
            log.error("Got unimplemented Content-Type : %s" % ct)

    def dispatchEvent(self):
        self.state = "READ_CONTENT"
        eventname = self.message['Event-Name']
        # Handle background job event
        if eventname == "BACKGROUND_JOB":
            try:
                df = self.pendingBackgroundJobs.pop(self.message['Job-UUID'])
                df.callback(self.message)
            except KeyError:
                log.error("Stray BACKGROUND_JOB event received %s",
                          self.message['Job-UUID'])
            except:
                log.error("Error in BACKGROUND_JOB event handler",
                          exc_info=True)
        if eventname == 'CUSTOM':
            self.message.decode()
            ecbs = self.customEventCallbacks.get(
                self.message['Event-Subclass'], None)
        else:
            ecbs = self.eventCallbacks.get(eventname, None)
        if ecbs is None:
            return

        for ecb in ecbs:
            try:
                ecb.func(self.message, *ecb.args, **ecb.kwargs)
            except:
                log.error(
                    "Message %s\nError in event handler %s on event %s:" %
                    (self.message, ecb.func, eventname),
                    exc_info=True)

    def onConnect(self):
        """Channel Information is ready to be read.
        """
        log.info("onconnect")

    def auth(self):
        """
        FreeSWITCH is requesting to authenticate         
        """
        pass

    def fireAPIDeferred(self):
        self.state = 'READ_CONTENT'
        df = self.pendingJobs.pop(0)
        df.callback(self.message)

    def onAPIReply(self):
        """
        Handle API reply 
        """

        if "Content-Length" not in self.message:  # update: has_key (to) in
            self.currentDeferred = self.pendingJobs.pop(0)
            return self.currentDeferred.callback(self.message)
        if self.enterRawMode():
            self.state = "READ_API"
            log.debug("Entering raw mode to read API response")
            return
        else:
            self.currentDeferred.callback(self.message)

    def onCommandReply(self):
        """
        Handle CommandReply        
        """

        if "Job-UUID" in self.message:  # update: has_ket (to) in
            return
        try:
            df = self.pendingJobs.pop(0)
        except IndexError:
            log.error(
                "Command reply message received with out pending deferred %s" %
                self.message)
            return
        if self.message['Reply-Text'].startswith("+OK"):
            df.callback(self.message)
        else:
            e = CommandError(self.message['Reply-Text'])
            df.errback(e)

    def onEvent(self):
        """
        Handle a new event
        """
        self.state = "READ_EVENT"

    def disconnectNotice(self):
        """
        Handle disconnect notice 
        """

        if "Content-Length" not in self.message:  # update: has_key (to) in
            return self.disconnectNoticeReceived(self.message)
        self.contentLength = int(self.message['Content-Length'])
        if self.contentLength > 0:
            self.currentDeferred = defer.Deferred()
            log.info("Enter raw mode to read disconnect notice")
            self.rawdataCache = b''  # update
            self.setRawMode()
        else:
            self.disconnectNoticeReceived(self.message)

    def disconnectNoticeReceived(self, msg):
        """Override this to receive disconnect notice from FreeSWITCH"""
        log.error("disconnectNoticeReceived not implemented")
        log.info(msg)

    def sendData(self, cmd, args=''):
        df = defer.Deferred()
        # self.pendingJobs.append((cmd, df))
        self.pendingJobs.append(df)
        if args:
            cmd = ' '.join([cmd, args])
        self.sendLine(cmd.encode())  # update: encode()
        log.debug("Line Out: %r" % cmd)
        return df

    def sendMsg(self, msg):
        """Send message to FreeSWITCH
        
        msg -- (event) Event object 
        
        """
        df = defer.Deferred()
        self.pendingJobs.append(df)
        msg = msg.as_string(True)
        self.transport.write(msg)
        log.debug("Line Out: %r" % msg)
        return df

    def sendCommand(self, cmd, args='', uuid='', lock=True):
        msg = Event()
        if uuid:
            msg.set_unixfrom("SendMsg %s" % uuid)
        else:
            msg.set_unixfrom("SendMsg")
        msg['call-command'] = "execute"
        msg['execute-app-name'] = cmd
        if args:
            msg['execute-app-arg'] = args
        if lock:
            msg['event-lock'] = "true"
        return self.sendMsg(msg)

    def sendAPI(self, apicmd, background=jobType):
        if background:
            return self.sendBGAPI(apicmd)
        else:
            return self.sendData("api", apicmd)

    def sendBGAPI(self, apicmd):
        jobid = str(uuid.uuid1())
        apicmd = ' '.join(['bgapi', apicmd])
        apicmd = '\n'.join([apicmd, "Job-UUID:%s" % jobid])

        backgroundJobDeferred = defer.Deferred()
        self.pendingBackgroundJobs[jobid] = backgroundJobDeferred

        log.debug("Line Out: %r", apicmd)
        self.sendLine(apicmd.encode())  # update: encode()
        return backgroundJobDeferred

    def subscribeEvents(self, events):
        """Subscribe to FreeSWITCH events.
        
        events -(str) 'all'  subscribe to all events or event names separated by space
        this method can subscribe to multiple events but if the event is of CUSTOM type
        then only one CUSTOM event with subclass should be given
        """
        _events = []
        if not events.startswith("CUSTOM"):
            _events = events.split(' ')
        for event in _events:
            self.subscribedEvents.append(event)  # update: checking

        return self.sendData("event plain", events)

    def myevents(self, uuid=''):
        """Tie up the connection to particular channel events"""
        self.subscribedEvents.append("myevents")
        if uuid:
            return self.sendData("myevents %s" % uuid)
        else:
            return self.sendData("myevents")

    def apiAvmd(self, uuid, start=True, background=jobType):
        """Execute avmd on provided channel. 
        uuid (str) -- uuid of the target channel
        start (bool)  -- If True avmd will start if false avmd will be stopped
        """
        if start:
            return self.sendAPI("avmd %s start" % uuid, background)
        else:
            return self.sendAPI("avmd %s stop" % uuid, background)

    def apiConferenceDial(self, name, url, background=jobType):
        """Dial the given url from conference 
        
        name -- (str) name of the conference 
        url -- (str) FreeSWITCH compatible call URL"""
        cmd = 'conference %s dial %s' % (name, url)
        return self.sendAPI(cmd, background)

    def apiConferenceKick(self, name, member, background=jobType):
        """Kick the given member from conference 
        
        name -- (str) name of the conference 
        member -- (str) member id or all or last 
        """
        cmd = "conference %s kick %s" % (name, member)
        return self.sendAPI(cmd, background)

    def apiConferenceList(self, name=None, delim=None, background=jobType):
        """List the conference 
        name - (str) name of the conference. if not given all the conferences will be listed 
        delim - (str) delimiter to use for separating values """

        cmd = "conference"
        if name is not None:
            cmd = ' '.join([cmd, name, 'list'])
        else:
            cmd = ' '.join([cmd, 'list'])

        if delim is not None:
            cmd = ' '.join([cmd, 'delim', delim])
        return self.sendAPI(cmd, background)

    def apiConferenceListCount(self, name, background=True):
        """Return number of members in the conference
        
        name -- (str) name of the conference
        """
        cmd = 'conference %s list count' % name
        return self.sendAPI(cmd, background)

    def apiConferenceVolume(self,
                            name,
                            member,
                            value=0,
                            direction='out',
                            background=jobType):
        """Set volume of conference 
        
        name -- (str) name of the conference 
        member -- (str) member id or all or last
        value -- (int) 0 - 4
        direction -- (str) in or out"""

        cmd = "conference %s volume_%s %s %s" % (name, direction, member,
                                                 value)
        return self.sendAPI(cmd, background)

    def apiConferenceMute(self, name, member, background=jobType):
        """Mute given member in a conference
        
        name -- (str) name of the conference
        member -- (str) member id or all or last
        """
        cmd = "conference %s mute %s" % (name, member)
        return self.sendAPI(cmd, background)

    def apiConferencePlay(self,
                          name,
                          filename,
                          member=None,
                          background=jobType):
        """Playback given file in conference
        
        name -- (str) name of the conference
        filename -- (str) name of the audio file to be played in conference
        member -- (str) member id in conference
        """
        if member:
            cmd = "conference %s play %s %s" % (name, filename, member)
        else:
            cmd = "conference %s play %s" % (
                name,
                filename,
            )
        return self.sendAPI(cmd, background)

    def apiConferenceStop(self,
                          name,
                          fid=None,
                          member=None,
                          background=jobType):
        """Stop an ongoing/queued playback in conference
        
        name -- (str) name of the conference
        fid -- (str) file ID to stop takes all|async|current|last
        member -- (str) member id in conference
        """
        if member:
            # a member-specific stop needs a file ID; default to 'current' when none is given
            cmd = "conference %s stop %s %s" % (name, fid or 'current', member)
        elif fid:
            cmd = "conference %s stop %s" % (name, fid)
        else:
            cmd = "conference %s stop" % (name, )
        return self.sendAPI(cmd, background)

    def apiConferenceUnMute(self, name, member, background=jobType):
        """UnMute given member in a conference
        
        name -- (str) name of the conference
        member -- (str) member id or all or last
        """
        cmd = "conference %s unmute %s" % (name, member)
        return self.sendAPI(cmd, background)

    def apiDomainExists(self, domain, background=jobType):
        """Check whether the given domain exists

        domain -- (str) name of the domain to check
        """
        cmd = "domain_exists %s" % domain
        return self.sendAPI(cmd, background)

    def apiGlobalGetVar(self, variable='', background=jobType):
        """Get the value of a global variable
        
        variable -- name of the variable
        
        returns the value of the provided global variable if
        argument variable is not present then all global variables are returned.
        """
        apicmd = ' '.join(["global_getvar", variable])
        df = self.sendAPI(apicmd, background)
        if variable != '':
            return df
        else:
            finalDF = defer.Deferred()
            df.addCallback(self._parseGlobalGetVar, finalDF)
            return finalDF

    def _parseGlobalGetVar(self, result, df):
        result = result.get_payload()
        res = result.strip().split("\n")
        finalResult = {}
        try:
            for r in res:
                k, v = r.split("=", 1)
                finalResult[k] = v
        except Exception as err:
            log.error(err)
            df.errback(err)
        else:
            df.callback(finalResult)
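
    # Usage sketch: calling apiGlobalGetVar() with no variable routes the
    # result through _parseGlobalGetVar, so the callback receives a dict of
    # variable name -> value ('hostname' below is just an illustrative key).
    #
    #   df = fs.apiGlobalGetVar()
    #   df.addCallback(lambda globals_: print(globals_.get("hostname")))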

    def apiGlobalSetVar(self, variable, value, background=jobType):
        """Set the value of a global variable

        variable -- name of the variable whose value needs to be set
        value -- value of the variable to be set
        """
        # global_setvar expects "<variable>=<value>"
        apicmd = ' '.join(["global_setvar", '='.join([variable, value])])
        return self.sendAPI(apicmd, background)

    def apiHupAll(self,
                  cause='NORMAL_CLEARING',
                  variable='',
                  value='',
                  background=jobType):
        """Hangup all the existing channels 
        
        cause -- cause for hanging up 
        variable -- channel having the provided variable will be checked for hangup
        value -- value of the variable. Hangup only if this matches
        """
        apicmd = ' '.join(['hupall', cause, variable, value]).strip()
        return self.sendAPI(apicmd, background)
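
    # Usage sketch (illustrative variable/value): hang up only the channels
    # that have the channel variable 'customer_id' set to '42'.
    #
    #   fs.apiHupAll(cause='NORMAL_CLEARING', variable='customer_id', value='42')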

    def apiLoad(self, module_name, background=jobType):
        """Load external module 
        
        module_name -- (str) name of the module 
        """
        apicmd = ' '.join(["load", module_name])
        return self.sendAPI(apicmd, background)

    def apiReload(self, module_name, background=jobType):
        """Reload and external module
        
        module_name -- (str) name of the module 
        """
        apicmd = ' '.join(["reload", module_name])
        return self.sendAPI(apicmd, background)

    def apiReloadXML(self, background=jobType):
        """Reload XML configuration
        """
        apicmd = "reloadxml"
        return self.sendAPI(apicmd, background)

    def apiStatus(self, background=jobType):
        """Fetch freeswitch status
        """
        apicmd = "status"
        return self.sendAPI(apicmd, background)

    def apiUnload(self, module_name, background=jobType):
        """Unload external module 
        
        module_name -- (str) name of the module to unload 
        """
        apicmd = ' '.join(["unload", module_name])
        return self.sendAPI(apicmd, background)

    def apiVersion(self, background=jobType):
        """Fetch FreeSWITCH version"""
        apicmd = "version"
        return self.sendAPI(apicmd, background)

    #Call management
    def apiOriginate(self,
                     url,
                     application='',
                     appargs='',
                     extension='',
                     dialplan='',
                     context='',
                     cidname='',
                     cidnum='',
                     timeout='',
                     channelvars=None,
                     background=jobType):
        """Originate a new channel and connect it back to the specified extension or application
        
        url -- (str) call URL. Should be a valid FreeSWITCH supported URL
        extension -- (str) extension number that the originated call has to be connected back to. Make sure
                        that you also provide dialplan and context when this arg is given.
        application -- (str) application name to connect to; either extension or application has to be provided
        appargs -- (str) application arguments
        dialplan -- (str) FreeSWITCH dialplan
        context -- (str) context to look for the extension in
        cidname -- (str) outbound caller ID name
        cidnum -- (str) outbound caller ID number
        timeout -- (int/str) originate timeout in seconds
        channelvars -- (dict) key/value pairs of channel variables to be set on the originated channel
        """
        apicmd = "originate"
        if channelvars:
            # prepend the channel variables as an originate {key=value,...} prefix
            pairs = ['='.join([k, str(v)]) for k, v in channelvars.items()]
            url = "{" + ','.join(pairs) + "}" + url
        apicmd = ' '.join([apicmd, url])

        if application:
            application = "&" + application
            if appargs:
                appargs = "(" + appargs + ")"
                application = "'%s'" % ''.join([application, appargs])
                apicmd = ' '.join([apicmd, application])
            else:
                apicmd = ' '.join([apicmd, application])
        else:
            apicmd = ' '.join([apicmd, extension])
        apicmd = ' '.join(
            [apicmd, dialplan, context, cidname, cidnum, str(timeout)])
        return self.sendAPI(apicmd, background)
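
    # Usage sketch (assumes `fs` is a connected instance; the gateway and
    # caller ID values are illustrative): originate a call, park it, and set
    # a channel variable on the new leg.
    #
    #   df = fs.apiOriginate("sofia/gateway/mygw/15551234567",
    #                        application="park",
    #                        channelvars={"origination_caller_id_number": "1000"})
    #   df.addCallback(lambda msg: print(msg.get_payload()))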

    def apiPause(self, uuid, flag=True, background=jobType):
        """Pause or unpause the given channel

        uuid -- (str) uuid of the target channel
        flag -- (bool) True to pause, False to unpause
        """
        if flag:
            apicmd = ' '.join(['pause', uuid, 'on'])
        else:
            apicmd = ' '.join(['pause', uuid, 'off'])
        return self.sendAPI(apicmd, background)

    def apiUUIDBreak(self, uuid, all=True, background=jobType):
        """Break out of media being sent to a channel. For example, 
        if an audio file is being played to a channel, issuing uuid_break 
        will discontinue the media and the call will move on in the dialplan, 
        script, or whatever is controlling the call.
        
        uuid - (str) uuid of the target channel 
        all - (bool) to break all queued up audio files or only the current one
        """
        if all:
            apicmd = ' '.join(['uuid_break', uuid, 'all'])
        else:
            apicmd = ' '.join(['uuid_break', uuid])
        return self.sendAPI(apicmd, background)

    def apiUUIDBridge(self, uuid1, uuid2, background=jobType):
        """Bridge two active channel uuids 
        
        uuid1 -- (str) Channel 1 uuid 
        uuid2 -- (str) Second channel uuid 
        """
        apicmd = ' '.join(['uuid_bridge', uuid1, uuid2])
        return self.sendAPI(apicmd, background)

    def apiUUIDBroadcast(self, uuid, path, leg='aleg', background=jobType):
        """Play a <path> file to a specific <uuid> call. 
        
        uuid -- (str) uuid of the target channel
        path -- (str) path of the file to be played
        leg -- (str) which leg to play the file on; possible options are aleg, bleg, both. Defaults to aleg
        """
        apicmd = ' '.join(["uuid_broadcast", uuid, path, leg])
        return self.sendAPI(apicmd, background)
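
    # Usage sketch (uuid and file path are illustrative): play a prompt to
    # both legs of an answered call.
    #
    #   fs.apiUUIDBroadcast(uuid, "/tmp/prompt.wav", leg="both")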

    def apiUUIDChat(self, uuid, msg, background=jobType):
        """Send a chat message to target channel
        
        uuid -- (str) uuid of the target channel
        msg -- (str) chat message to be sent
        """
        apicmd = ' '.join(['uuid_chat', uuid, msg])
        return self.sendAPI(apicmd, background)

    def apiUUIDDeflect(self, uuid, uri, background=jobType):
        """Deflect an answered SIP call off of FreeSWITCH by sending the REFER method. 
        
        uuid_deflect waits for the final response from the far end to be reported. 
        It returns the sip fragment from that response as the text in the FreeSWITCH response to uuid_deflect. 
        If the far end reports the REFER was successful, then FreeSWITCH will issue a bye on the channel. 
        
        uuid -- (str) uuid of the target channel 
        uri -- (str) destination sip URI
        """
        apicmd = ' '.join(['uuid_deflect', uuid, uri])
        return self.sendAPI(apicmd, background)

    def apiUUIDDisplace(self,
                        uuid,
                        switch,
                        path,
                        limit,
                        mux=True,
                        background=jobType):
        """Displace the audio for the target <uuid> with the specified audio <path>. 
        
        uuid -- (str) uuid of the target channel 
        switch -- (str) possible options are start,stop
        path -- (str) path of the file to be played 
        limit -- (int/str) number of seconds before terminating the displacement 
        mux -- (bool) cause the original audio to be mixed, i.e. you can still converse with
                the other party while the file is playing
        """
        limit = str(limit)
        if mux:
            apicmd = ' '.join(['uuid_displace', uuid, switch, path, limit, "mux"])
        else:
            apicmd = ' '.join(['uuid_displace', uuid, switch, path, limit])
        return self.sendAPI(apicmd, background)
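
    # Usage sketch (illustrative values): displace the channel audio with a
    # file for 30 seconds while still mixing in the original audio, then stop
    # the displacement early if needed.
    #
    #   fs.apiUUIDDisplace(uuid, "start", "/tmp/announcement.wav", 30, mux=True)
    #   fs.apiUUIDDisplace(uuid, "stop", "/tmp/announcement.wav", 30)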

    def apiUUIDExists(self, uuid, background=jobType):
        """Check if a given uuid exists 
        
        uuid -- (str) uuid of the target channel
        """
        apicmd = ' '.join(['uuid_exists', uuid])
        return self.sendAPI(apicmd, background)

    def apiUUIDFlushDTMF(self, uuid, background=jobType):
        """Flush queued DTMF digits 
        
        uuid -- (str) uuid of the target channel"""
        apicmd = ' '.join(["uuid_flush_dtmf", uuid])
        return self.sendAPI(apicmd, background)

    def apiUUIDHold(self, uuid, off=False, background=jobType):
        """Place a call on hold
        
        uuid -- (str) uuid of the target channel
        off -- (bool) turn on or turn off hold 
        """
        if off:
            apicmd = ' '.join(['uuid_hold', 'off', uuid])
        else:
            apicmd = ' '.join(['uuid_hold', uuid])
        return self.sendAPI(apicmd, background)

    def apiUUIDKill(self, uuid, cause='', background=jobType):
        """Kill a given channel
        
        uuid -- (str) uuid of the target channel
        cause -- (str) hangup reason"""
        if cause:
            apicmd = ' '.join(['uuid_kill', uuid, cause])
        else:
            apicmd = ' '.join(['uuid_kill', uuid])
        return self.sendAPI(apicmd, background)

    def apiUUIDMedia(self, uuid, off=False, background=jobType):
        """Reinvite a channel bridging media 
        
        uuid -- (str) uuid of the target channel
        off -- (bool) if True, reinvite the channel to take the media off (bypass media)
        """
        if off:
            apicmd = ' '.join(['uuid_media', 'off', uuid])
        else:
            apicmd = ' '.join(['uuid_media', uuid])
        return self.sendAPI(apicmd, background)

    def apiUUIDPark(self, uuid, background=jobType):
        """Park a given channel
        
        uuid -- (str) uuid of the target channel
        """
        apicmd = ' '.join(['uuid_park', uuid])
        return self.sendAPI(apicmd, background)

    def apiUUIDRecord(self, uuid, start, path, limit=None, background=jobType):
        """Record channel to given path
        
        uuid--(str) uuid of the target channel
        start--(bool) start or stop recording
        path --(str) path of file to where channel should be recorded """
        if start:
            flag = 'start'
        else:
            flag = 'stop'
        apicmd = ' '.join(['uuid_record', uuid, flag, path])
        if limit:
            apicmd = ' '.join([apicmd, limit])
        return self.sendAPI(apicmd, background)
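
    # Usage sketch (illustrative path): start recording a channel, then stop
    # the same recording later by passing start=False with the same path.
    #
    #   fs.apiUUIDRecord(uuid, True, "/tmp/call.wav")
    #   fs.apiUUIDRecord(uuid, False, "/tmp/call.wav")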

    def apiUUIDSendDTMF(self, uuid, dtmf, background=jobType):
        """Send dtmf to given channel 
        
        uuid -- (str) uuid of the target channel
        dtmf -- (str) DTMF data to be sent 
        """
        apicmd = ' '.join(['uuid_send_dtmf', uuid, dtmf])
        return self.sendAPI(apicmd, background)

    # dp tools for dp tools uuid is optional in outbound socket connection
    def answer(self, uuid='', lock=True):
        """Answer channel
        
        uuid -- (str) uuid of target channel
        lock -- (bool) lock the channel until execution is finished 
        """
        return self.sendCommand('answer', '', uuid, lock)

    def avmd(self, start=True, uuid='', lock=True):
        """Start or stop avmd on current channel
        """
        if start:
            return self.sendCommand("avmd", '', uuid, lock)
        else:
            return self.sendCommand("avmd", "stop", uuid, lock)

    def hangup(self, uuid='', lock=True):
        """Hangup current channel
        """
        return self.sendCommand("hangup", '', uuid, lock)

    def bridge(self, endpoints=None, uuid='', lock=True):
        """Bridge an endpoint to the given channel

        endpoints -- (list) list of endpoint FreeSWITCH URIs
        """
        endpoints = ','.join(endpoints or [])
        return self.sendCommand("bridge", endpoints, uuid, lock)

    def flowBreak(self, clear_queue=False, uuid='', lock=True):
        """Break the current action that is being performed on call """
        if clear_queue:
            return self.sendCommand("break", "all", uuid, lock)
        return self.sendCommand("break", '', uuid, lock=True)

    def conference(self, confname, uuid='', lock=True):
        """Connect the channel to give conference
        
        confname -- (str) conference name
        """
        cmd = "conference"
        args = confname
        return self.sendCommand(cmd, args, uuid, lock)

    def endlessPlayback(self, path, uuid='', lock=True):
        """
        Play a file endlessly
        """
        return self.sendCommand("endless_playback", path, uuid, lock)

    def playback(self, path, terminators=None, uuid='', lock=True):
        """Playback given file name on channel
        
        path -- (str) path of the file to be played '"""

        self.set("playback_terminators", terminators or "none", uuid, lock)
        return self.sendCommand("playback", path, uuid, lock)

    def playbackSync(self, *args, **kwargs):
        """Like playback, but the returned Deferred fires only after the
        playback has completed (on CHANNEL_EXECUTE_COMPLETE)."""
        finalDF = defer.Deferred()
        df = self.playback(*args, **kwargs)
        df.addCallback(self.playbackSyncSuccess, finalDF)
        df.addErrback(self.playbackSyncFailed, finalDF)
        return finalDF

    def playbackSyncSuccess(self, result, finalDF):
        ecb = self.registerEvent("CHANNEL_EXECUTE_COMPLETE", True,
                                 self.playbackSyncComplete, finalDF)
        finalDF.ecb = ecb

    def playbackSyncFailed(self, error, finalDF):
        finalDF.errback(error)

    def playbackSyncComplete(self, event, finalDF):
        if event['Application'] == 'playback':
            self.deregisterEvent(finalDF.ecb)
            finalDF.callback(event)
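
    # Usage sketch (illustrative file path): playbackSync fires its Deferred
    # only once playback has finished, which requires the
    # CHANNEL_EXECUTE_COMPLETE event to be subscribed.
    #
    #   df = fs.playbackSync("/tmp/welcome.wav")
    #   df.addCallback(lambda event: print(event['Application']))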

    def say(self,
            module='en',
            say_type='NUMBER',
            say_method="PRONOUNCED",
            text='',
            uuid='',
            lock=True):
        """Speak the given text using the FreeSWITCH say engine

        module -- (str) say module/language, e.g. 'en'
        say_type -- (str) type of the data, e.g. NUMBER
        say_method -- (str) pronunciation method, e.g. PRONOUNCED
        text -- (str) text to be spoken
        """
        arglist = [module, say_type, say_method, text]
        arglist = map(str, arglist)
        data = ' '.join(arglist)
        return self.sendCommand("say", data, uuid, lock)

    def set(self, variable, value, uuid='', lock=True):
        """Set a channel variable 
        
        variable -- (str) name of the channel variable
        value -- (str) value of the channel variable
        """
        args = '='.join([variable, value])
        return self.sendCommand("set", args, uuid, lock)

    def playAndGetDigits(self,
                         min,
                         max,
                         tries=3,
                         timeout=4000,
                         terminators='#',
                         filename='',
                         invalidfile='',
                         varname='',
                         regexp=r'\d',
                         uuid='',
                         lock=True):
        """Play the given sound file and get back caller's DTMF
        min -- (int) minimum digits length
        max -- (int) maximum digits length
        tires -- (int) number of times to play the audio file default is 3
        timeout -- (int) time to wait after fileblack in milliseconds . default is 4000
        filename -- (str) name of the audio file to be played 
        varname -- (str) DTMF digit value will be set as value to the variable of this name
        regexp -- (str) regurlar expression to match the DTMF 
        uuid -- (str) uuid of the target channel 
        
        Make sure CHANNEL_EXECUTE_COMPLETE  event is subcribed otherwise finalDF will never get invoked
        """
        arglist = [
            min, max, tries, timeout, terminators, filename, invalidfile,
            varname, regexp
        ]
        arglist = map(str, arglist)

        data = ' '.join(arglist)

        finalDF = defer.Deferred()
        df = self.sendCommand("play_and_get_digits", data, uuid, lock)
        df.addCallback(self._playAndGetDigitsSuccess, finalDF, varname)
        df.addErrback(self._playAndGetDigitsFailure, finalDF)
        return finalDF

    def _playAndGetDigitsSuccess(self, msg, finalDF, varname):
        """Successfully executed playAndGetDigits. Register a callback to catch DTMF"""

        ecb = self.registerEvent("CHANNEL_EXECUTE_COMPLETE", True,
                                 self._checkPlaybackResult, finalDF, varname)
        finalDF.ecb = ecb

    def _playAndGetDigitsFailure(self, error, finalDF):
        """Failed to execute playAndGetDigits, invoke finalDF errback"""
        finalDF.errback(error)

    def _checkPlaybackResult(self, event, finalDF, varname):
        if event['Application'] == "play_and_get_digits":
            self.deregisterEvent(finalDF.ecb)
            if ("variable_" + varname) in event:
                finalDF.callback(event['variable_' + varname])
            else:
                finalDF.callback(None)
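
    # Usage sketch (file names and variable name are illustrative): prompt for
    # a 4-digit PIN; the callback receives the collected digits, or None if
    # nothing matched. CHANNEL_EXECUTE_COMPLETE must be subscribed.
    #
    #   df = fs.playAndGetDigits(4, 4, tries=3, timeout=5000,
    #                            filename="/tmp/enter_pin.wav",
    #                            invalidfile="/tmp/invalid.wav",
    #                            varname="pin")
    #   df.addCallback(lambda digits: print(digits))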

    def schedHangup(self, secs, uuid='', lock=True):
        """Schedule hangup 
        
        secs -- (int/str) seconds to wait before hangup
        """
        args = "+" + str(secs)
        return self.sendCommand("sched_hangup", args, uuid, lock)

    def record(self,
               path,
               time_limit_secs=' ',
               silence_thresh=' ',
               silence_hits=' ',
               terminators='',
               uuid='',
               lock=True):
        """Record the call audio to the given path using the record dialplan application

        path -- (str) path of the file to record to
        time_limit_secs -- (int/str) maximum recording length in seconds
        silence_thresh -- (int/str) silence threshold
        silence_hits -- (int/str) how many silence hits before terminating
        terminators -- (str) DTMF digits that stop the recording; defaults to "none"
        """
        terminators = terminators or 'none'
        self.set("playback_terminators", terminators, uuid, lock)
        args = ' '.join([path, str(time_limit_secs), str(silence_thresh), str(silence_hits)])
        return self.sendCommand("record", args, uuid, lock)

    def recordSession(self, filename, uuid='', lock=True):
        """Record entire session using record_session dialplan tool"""
        return self.sendCommand("record_session", filename, uuid, lock)

    def stopRecordSession(self, path, uuid='', lock=True):
        """Stop recording session """
        return self.sendCommand("stop_record_session", path, uuid, lock)

    # The following commands work on commercial mod_amd module
    def voice_start(self, uuid='', lock=True):
        """Start AMD on current channel"""
        return self.sendCommand("voice_start", uuid=uuid, lock=lock)

    def voice_stop(self, uuid='', lock=True):
        """Stop AMD on current channel"""
        return self.sendCommand("voice_stop", uuid=uuid, lock=lock)