示例#1
0
 def __init__(self, server_url, parse_event_body=True,
              last_event_seen=None):
     """Initialize the synchronous client state.

     'server_url' is the stream URL to poll; 'last_event_seen' (an
     event id) lets the server resume after already-received events.

     """
     self.server_url = server_url
     self.parse_event_body = parse_event_body
     self.stream_finished = False
     self.last_event_seen = last_event_seen
     self.deserializer = Deserializer()
示例#2
0
 def __init__(self,
              server_url,
              parse_event_body=True,
              last_event_seen=None):
     """Set up the client's configuration and parsing state."""
     self.server_url = server_url
     self.deserializer = Deserializer()
     self.parse_event_body = parse_event_body
     self.last_event_seen = last_event_seen
     self.stream_finished = False
示例#3
0
class SynchronousClient(object):
    """Synchronous client.

    This client should be used in long-polling mode.

    """
    def __init__(self, server_url, parse_event_body=True,
                 last_event_seen=None):
        """Create a client that polls the stream at 'server_url'.

        'parse_event_body': when True, event bodies are parsed by the
        deserializer.
        'last_event_seen': event id used to ask the server for events
        missed since that event.

        """
        self.server_url = server_url
        self.last_event_seen = last_event_seen
        self.deserializer = Deserializer()
        self.parse_event_body = parse_event_body
        self.stream_finished = False

    def receive_events(self):
        """Poll the server once and return the received events.

        Updates 'last_event_seen' and sets 'stream_finished' when a
        'Stream-Finished' command is seen.  Command events are filtered
        out of the returned list.

        """
        url = self.server_url
        if self.last_event_seen is not None:
            url += '?last-seen=' + self.last_event_seen
        connection = urllib2.urlopen(url)
        # Close the connection even if read()/deserialize raise;
        # the original code leaked the connection on errors.
        try:
            data = connection.read()
        finally:
            connection.close()
        evs = self.deserializer.deserialize(data, complete=True,
                                            parse_body=self.parse_event_body)
        if evs:
            self.last_event_seen = evs[-1].event_id
        for event in evs:
            if (isinstance(event, Command)
                    and event.command == 'Stream-Finished'):
                self.stream_finished = True
                break
        return [e for e in evs if not isinstance(e, Command)]
示例#4
0
class SynchronousClient(object):
    """Synchronous client.

    This client should be used in long-polling mode.

    """
    def __init__(self,
                 server_url,
                 parse_event_body=True,
                 last_event_seen=None):
        """Create a client that polls the stream at 'server_url'.

        'parse_event_body': when True, event bodies are parsed by the
        deserializer.
        'last_event_seen': event id used to ask the server for events
        missed since that event.

        """
        self.server_url = server_url
        self.last_event_seen = last_event_seen
        self.deserializer = Deserializer()
        self.parse_event_body = parse_event_body
        self.stream_finished = False

    def receive_events(self):
        """Poll the server once and return the received events.

        Updates 'last_event_seen' and sets 'stream_finished' when a
        'Stream-Finished' command is seen.  Command events are filtered
        out of the returned list.

        """
        url = self.server_url
        if self.last_event_seen is not None:
            url += '?last-seen=' + self.last_event_seen
        connection = urllib2.urlopen(url)
        # Close the connection even if read() raises; the original
        # code leaked the connection on errors.
        try:
            data = connection.read()
        finally:
            connection.close()
        evs = self.deserializer.deserialize(data,
                                            complete=True,
                                            parse_body=self.parse_event_body)
        if evs:
            self.last_event_seen = evs[-1].event_id
        for event in evs:
            if (isinstance(event, Command)
                    and event.command == 'Stream-Finished'):
                self.stream_finished = True
                break
        return [e for e in evs if not isinstance(e, Command)]
示例#5
0
    def __init__(self, url, event_callback=None, error_callback=None,
                 connection_close_callback=None,
                 source_start_callback=None, source_finish_callback=None,
                 label=None, retrieve_missing_events=False,
                 ioloop=None, parse_event_body=True, separate_events=True,
                 reconnect=True, disable_compression=False):
        """Create a new client for the stream at 'url'.

        'event_callback' is invoked for every received event (or with a
        whole list of events when 'separate_events' is not set).  The
        error, connection-close and source start/finish callbacks are
        invoked on the corresponding conditions.

        A 'label' (string) identifies this client so that the id of the
        latest received event can be persisted; set
        'retrieve_missing_events' to True to ask for missed events on
        the next run (this requires a non-empty label).

        The client blocks on 'ioloop' (or Tornado's default one) upon
        calling 'start()'.  When 'reconnect' is True (the default), the
        client reconnects automatically after losing the connection,
        following an exponential back-off mechanism.

        """
        if retrieve_missing_events and not label:
            raise ValueError('Retrieving missing events'
                             ' requires a client label')
        if isinstance(event_callback, Filter):
            # A Filter object may be passed instead of a plain callable.
            event_callback = event_callback.filter_event
        self.url = url
        self.label = label
        self.event_callback = event_callback
        self.error_callback = error_callback
        self.connection_close_callback = connection_close_callback
        self.source_start_callback = source_start_callback
        self.source_finish_callback = source_finish_callback
        if label:
            # The guard above guarantees that 'retrieve_missing_events'
            # implies a label: overwrite the status file unless old
            # state must be preserved for retrieving missed events.
            self.status_file = self._create_status_file(
                not retrieve_missing_events)
        else:
            self.status_file = None
        self.last_event_id = None
        self.parse_event_body = parse_event_body
        self.separate_events = separate_events
        self.reconnect = reconnect
        self.disable_compression = disable_compression
        self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
        self.reconnection = ReconnectionManager()
        self._deserializer = Deserializer()
        self._closed = False
        self._looping = False
示例#6
0
    def __init__(self, url, event_callback=None, error_callback=None,
                 connection_close_callback=None, source_start_callback=None,
                 source_finish_callback=None, ioloop=None,
                 parse_event_body=True, separate_events=True, reconnect=True,
                 disable_compression=False):
        """Create a new client for the stream at 'url'.

        'event_callback' is invoked for every received event (or with a
        whole list of events when 'separate_events' is not set).  The
        error, connection-close and source start/finish callbacks are
        invoked on the corresponding conditions.

        The client blocks on 'ioloop' (or Tornado's default one) upon
        calling the 'start()' method.

        """
        if isinstance(event_callback, Filter):
            # A Filter object may be passed instead of a plain callable.
            event_callback = event_callback.filter_event
        self.url = url
        self.event_callback = event_callback
        self.error_callback = error_callback
        self.connection_close_callback = connection_close_callback
        self.source_start_callback = source_start_callback
        self.source_finish_callback = source_finish_callback
        self.parse_event_body = parse_event_body
        self.separate_events = separate_events
        self.reconnect = reconnect
        self.disable_compression = disable_compression
        self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
        self.last_event = None
        self.connection_attempts = 0
        self._deserializer = Deserializer()
        self._closed = False
        self._looping = False
示例#7
0
def count_events(filename):
    """Counts the number of events in a file and their total size.

    'filename' may name a plain file or a gzip-compressed one (the
    '.gz' suffix selects gzip).  Bodies are not parsed.

    Returns a tuple (num_events, num_bytes).

    """
    num_bytes = 0
    num_events = 0
    if filename.endswith('.gz'):
        file_ = gzip.GzipFile(filename, 'r')
    else:
        file_ = open(filename, 'r')
    deserializer = Deserializer()
    # Close the file even if deserialization raises; the original
    # code leaked the file object on errors.
    try:
        while True:
            data = file_.read(1024)
            if not data:
                break
            evs = deserializer.deserialize(data, parse_body=False,
                                           complete=False)
            num_bytes += len(data)
            num_events += len(evs)
    finally:
        file_.close()
    return num_events, num_bytes
示例#8
0
def count_events(filename):
    """Counts the number of events in a file and their total size.

    Returns a tuple (num_events, num_bytes).

    """
    num_events = 0
    num_bytes = 0
    if filename.endswith('.gz'):
        stream = gzip.GzipFile(filename, 'r')
    else:
        stream = open(filename, 'r')
    deserializer = Deserializer()
    # Read fixed-size chunks until an empty read signals end of file.
    for chunk in iter(lambda: stream.read(1024), ''):
        events = deserializer.deserialize(chunk, parse_body=False,
                                          complete=False)
        num_bytes += len(chunk)
        num_events += len(events)
    stream.close()
    return num_events, num_bytes
示例#9
0
    def __init__(self,
                 url,
                 event_callback=None,
                 error_callback=None,
                 connection_close_callback=None,
                 source_start_callback=None,
                 source_finish_callback=None,
                 ioloop=None,
                 parse_event_body=True,
                 separate_events=True,
                 reconnect=True):
        """Create a new client for the stream at 'url'.

        'event_callback' is invoked for every received event (or with a
        whole list of events when 'separate_events' is not set).  The
        error, connection-close and source start/finish callbacks are
        invoked on the corresponding conditions.

        The client blocks on 'ioloop' (or Tornado's default one) upon
        calling the 'start()' method.

        """
        if isinstance(event_callback, Filter):
            # A Filter object may be passed instead of a plain callable.
            event_callback = event_callback.filter_event
        # Public configuration.
        self.url = url
        self.event_callback = event_callback
        self.error_callback = error_callback
        self.connection_close_callback = connection_close_callback
        self.source_start_callback = source_start_callback
        self.source_finish_callback = source_finish_callback
        self.parse_event_body = parse_event_body
        self.separate_events = separate_events
        self.reconnect = reconnect
        self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
        self.last_event = None
        self.connection_attempts = 0
        # Internal state.
        self._closed = False
        self._looping = False
        self._compressed = False
        self._deserializer = Deserializer()
示例#10
0
class AsyncStreamingClient(object):
    """Asynchronous client for a single event source.

    If you need to receive events from several sources, use the class
    'Client' instead.

    """
    def __init__(self, url, event_callback=None, error_callback=None,
                 connection_close_callback=None,
                 source_start_callback=None, source_finish_callback=None,
                 ioloop=None, parse_event_body=True, separate_events=True,
                 reconnect=True):
        """Creates a new client for a given stream URL.

        The client connects to the stream URL given by 'url'.  For
        every single received event, the 'event_callback' function is
        invoked. It receives an event object as parameter.

        If 'separate_events' is set to None, then the event callback
        will receive a list of events instead of a single events.

        If a 'ioloop' object is given, the client will block on it
        apon calling the 'start()' method. If not, it will block on
        the default 'ioloop' of Tornado.

        """
        if isinstance(event_callback, Filter):
            # A Filter object may be passed instead of a plain callable.
            event_callback = event_callback.filter_event
        self.url = url
        self.event_callback = event_callback
        self.error_callback = error_callback
        self.connection_close_callback = connection_close_callback
        self.source_start_callback = source_start_callback
        self.source_finish_callback = source_finish_callback
        self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
        self.parse_event_body = parse_event_body
        self.separate_events = separate_events
        self._closed = False
        self._looping = False
        self._compressed = False
        self._deserializer = Deserializer()
        self.last_event = None
        self.reconnect = reconnect
        self.connection_attempts = 0

    def start(self, loop=False):
        """Starts the client.

        This function has to be called in order to connect to the
        streams and begin to receive events.

        If 'loop' is True (the default is False), the server will
        block on the ioloop until 'close()' is called.

        """
        self._connect()
        if loop:
            self._looping = True
            self.ioloop.start()
            self._looping = False

    def stop(self, notify_connection_close=True):
        """Stops and closes this client.

        The client can no longer be used in the future.

        Unless the keywork parameter `notify_connection_close` is set
        to false, the connection close callback will be invoked if set
        for this client.

        If the server is blocked on the ioloop in the 'start()'
        method, it is released.

        Note: if the backend behind
        'tornado.httpclient.AsyncHTTPClient()' is 'SimpleHTTPClient',
        invoking 'stop()' does not actually close the HTTP connections
        (as of Tornado branch master september 1st 2011).

        """
        self._finish_internal(notify_connection_close)

    def _connect(self):
        """Open the streaming HTTP request, resuming after 'last_event'."""
        http_client = AsyncHTTPClient()
        if self.last_event is None:
            url = self.url
        else:
            url = self.url + '?last-seen=' + self.last_event
        # Timeouts of 0 disable them: the streaming request stays open.
        req = HTTPRequest(url, streaming_callback=self._stream_callback,
                          request_timeout=0, connect_timeout=0)
        http_client.fetch(req, self._request_callback)
        self.connection_attempts += 1

    def _reconnect(self):
        """Schedule a reconnection attempt after a randomized delay."""
        logging.info('Reconnecting to the stream...')
        t = 3 + random.expovariate(0.3)
        self.ioloop.add_timeout(datetime.timedelta(seconds=t), self._connect)

    def _finish_internal(self, notify_connection_close):
        """Close the client at most once, optionally notifying the callback."""
        if self._closed:
            return
        if (notify_connection_close
                and self.connection_close_callback is not None):
            self.connection_close_callback(self)
        if self._looping:
            self.ioloop.stop()
            self._looping = False
        self._closed = True

    def _stream_callback(self, data):
        """Handle a chunk of streamed data; a chunk proves the link works."""
        self.connection_attempts = 0
        self._process_received_data(data)

    def _request_callback(self, response):
        """Handle request completion or error and decide whether to finish."""
        if response.error:
            # Retry on transient errors (never on HTTP 4xx), up to 5
            # consecutive attempts, unless closed or reconnect disabled.
            # NOTE(review): 'response.error.code' assumes an HTTPError;
            # other error types may lack 'code' — confirm upstream.
            if (self.connection_attempts < 5
                    and not response.error.code // 100 == 4
                    and not self._closed
                    and self.reconnect):
                self._reconnect()
                finish = False
            else:
                if self.error_callback is not None:
                    self.error_callback('Error in HTTP request',
                                        http_error=response.error)
                finish = True
        elif len(response.body) > 0:
            self._process_received_data(response.body)
            finish = True
        else:
            # Bug fix: 'finish' was previously left unassigned when the
            # request completed without error and with an empty body,
            # raising UnboundLocalError below.
            finish = True
        if finish:
            logging.info('Finishing client')
            self._finish_internal(True)

    def _process_received_data(self, data):
        """Deserialize 'data' and dispatch events to the event callback."""
        global transferred_bytes
        transferred_bytes += len(data)
        evs = self._deserialize(data, parse_body=self.parse_event_body)
        for e in evs:
            logger.logger.event_delivered(e)
        if self.event_callback is not None:
            if not self.separate_events:
                self.event_callback(evs)
            else:
                for ev in evs:
                    self.event_callback(ev)
        if len(evs) > 0:
            # Remember the latest event id for the reconnection URL.
            self.last_event = evs[-1].event_id

    def _reset_compression(self):
        """Enable zlib decompression for all subsequent data."""
        self._compressed = True
        self._decompressor = zlib.decompressobj()

    def _deserialize(self, data, parse_body=True):
        """Deserialize events from 'data', handling in-band commands.

        Returns the list of received events; 'Set-Compression' switches
        the remainder of the stream to zlib, and 'Stream-Finished'
        closes the client.

        """
        evs = []
        event = None
        compressed_len = len(data)
        if self._compressed:
            data = self._decompressor.decompress(data)
        logger.logger.data_received(compressed_len, len(data))
        self._deserializer.append_data(data)
        event = self._deserializer.deserialize_next(parse_body=parse_body)
        while event is not None:
            if isinstance(event, Command):
                if event.command == 'Set-Compression':
                    # Everything after this command is compressed:
                    # reset the deserializer and re-process the rest.
                    self._reset_compression()
                    pos = self._deserializer.data_consumed()
                    self._deserializer.reset()
                    evs.extend(self._deserialize(data[pos:], parse_body))
                    return evs
                elif event.command == 'Event-Source-Started':
                    if self.source_start_callback:
                        self.source_start_callback()
                    evs.append(event)
                elif event.command == 'Event-Source-Finished':
                    if self.source_finish_callback:
                        self.source_finish_callback()
                    evs.append(event)
                elif event.command == 'Stream-Finished':
                    self._finish_internal(True)
            else:
                evs.append(event)
            event = self._deserializer.deserialize_next(parse_body=parse_body)
        return evs
示例#11
0
class AsyncStreamingClient(object):
    """Asynchronous client for a single event source.

    If you need to receive events from several sources, use the class
    'Client' instead.

    """
    def __init__(self, url, event_callback=None, error_callback=None,
                 connection_close_callback=None,
                 source_start_callback=None, source_finish_callback=None,
                 label=None, retrieve_missing_events=False,
                 ioloop=None, parse_event_body=True, separate_events=True,
                 reconnect=True, disable_compression=False):
        """Creates a new client for a given stream URL.

        The client connects to the stream URL given by 'url'.  For
        every single received event, the 'event_callback' function is
        invoked. It receives an event object as parameter.

        If 'separate_events' is set to None, then the event callback
        will receive a list of events instead of a single events.

        A 'label' (string) may be set to this client. Setting a label
        allows the client to save the id of the latest event it received
        and ask for missed events when the client is run again.
        Set 'retrieve_missing_events' to True in order to do that.
        If 'retrieve_missing_events' is True, a non-empty label must be set.

        If a 'ioloop' object is given, the client will block on it
        apon calling the 'start()' method. If not, it will block on
        the default 'ioloop' of Tornado.

        When 'reconnect' is True (which is the default), the client tries
        to automatically reconnect when it loses connection
        with the server, following an exponential back-off mechanism.

        """
        if retrieve_missing_events and not label:
            raise ValueError('Retrieving missing events'
                             ' requires a client label')
        if isinstance(event_callback, Filter):
            # A Filter object may be passed instead of a plain callable.
            event_callback = event_callback.filter_event
        self.url = url
        self.event_callback = event_callback
        self.error_callback = error_callback
        self.connection_close_callback = connection_close_callback
        self.source_start_callback = source_start_callback
        self.source_finish_callback = source_finish_callback
        self.label = label
        # With 'retrieve_missing_events' the status file is opened in
        # append mode so a previously saved event id is preserved;
        # otherwise it is truncated.
        if retrieve_missing_events:
            self.status_file = self._create_status_file(False)
        elif label:
            self.status_file = self._create_status_file(True)
        else:
            self.status_file = None
        self.last_event_id = None
        self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
        self.parse_event_body = parse_event_body
        self.separate_events = separate_events
        self._closed = False
        self._looping = False
        self._deserializer = Deserializer()
        self.reconnect = reconnect
        self.disable_compression = disable_compression
        self.reconnection = ReconnectionManager()
#        self.data_history = []

    def start(self, loop=False):
        """Starts the client.

        This function has to be called in order to connect to the
        streams and begin to receive events.

        If 'loop' is True (the default is False), the server will
        block on the ioloop until 'close()' is called.

        """
        self._connect()
        if loop:
            self._looping = True
            self.ioloop.start()
            self._looping = False

    def stop(self, notify_connection_close=True):
        """Stops and closes this client.

        The client can no longer be used in the future.

        Unless the keywork parameter `notify_connection_close` is set
        to false, the connection close callback will be invoked if set
        for this client.

        If the server is blocked on the ioloop in the 'start()'
        method, it is released.

        Note: if the backend behind
        'tornado.httpclient.AsyncHTTPClient()' is 'SimpleHTTPClient',
        invoking 'stop()' does not actually close the HTTP connections
        (as of Tornado branch master september 1st 2011).

        """
        self._finish_internal(notify_connection_close)

    def _connect(self):
        """Open the streaming HTTP request, resuming after the saved event.

        Negotiates deflate compression unless 'disable_compression' is
        set.  Timeouts of 0 keep the streaming request open.

        """
        http_client = AsyncHTTPClient()
        last_event_received = self._read_last_event_id()
        if last_event_received is None:
            url = self.url
        else:
            url = self.url + '?last-seen=' + last_event_received
        if not self.disable_compression:
            headers = {'Accept-Encoding': 'deflate;q=1, identity;q=0.5'}
        else:
            headers = {'Accept-Encoding': 'identity'}
        req = HTTPRequest(url, streaming_callback=self._stream_callback,
                          headers=headers,
                          request_timeout=0, connect_timeout=0)
        http_client.fetch(req, self._request_callback)
        # Pre-emptively count this attempt as a failure; it is undone
        # by notify_success() once data actually arrives.
        self.reconnection.notify_failure()
        logging.info('Connecting to {}'.format(self.url))

    def _reconnect(self):
        """Schedule a reconnection after the manager's back-off delay."""
        t = self.reconnection.compute_delay()
        logging.info('Disconnected from {}. Next attempt in {:.02f}s'\
                     .format(self.url, t))
        self.ioloop.add_timeout(datetime.timedelta(seconds=t), self._connect)

    def _finish_internal(self, notify_connection_close):
        """Close the client at most once, optionally notifying the callback."""
        if self._closed:
            return
        if (notify_connection_close
            and self.connection_close_callback is not None):
            self.connection_close_callback(self)
        if self._looping:
            self.ioloop.stop()
            self._looping = False
        self._closed = True

    def _stream_callback(self, data):
        """Handle a streamed chunk; receiving data resets the back-off."""
        self.reconnection.notify_success()
        self._process_received_data(data)

    def _request_callback(self, response):
        """Handle request completion or error and decide whether to finish."""
        if response.error:
            # Retry on transient errors, never on HTTP 4xx.
            # NOTE(review): 'response.error.code' assumes an HTTPError;
            # other error types may lack 'code' — confirm upstream.
            if (not response.error.code // 100 == 4
                and not self._closed
                and self.reconnect):
                self._reconnect()
                finish = False
            else:
                if self.error_callback is not None:
                    self.error_callback('Error in HTTP request',
                                        http_error=response.error)
                finish = True
        else:
            # NOTE(review): a clean end of the request always triggers a
            # reconnection here, apparently even when the client was
            # finished by a 'Stream-Finished' command — confirm intended.
            if len(response.body) > 0:
                self._process_received_data(response.body)
            self._reconnect()
            finish = False
        if finish:
            logging.info('Finishing client')
            self._finish_internal(True)

    def _process_received_data(self, data):
        """Deserialize 'data', dispatch events and persist the last id."""
        global transferred_bytes
        transferred_bytes += len(data)
        evs = self._deserialize(data, parse_body=self.parse_event_body)
        if self.event_callback is not None:
            if not self.separate_events:
                self.event_callback(evs)
            else:
                for ev in evs:
                    self.event_callback(ev)
        if len(evs) > 0:
            self._write_last_event_id(evs[-1])

    def _deserialize(self, data, parse_body=True):
        """Deserialize events from 'data', handling in-band commands.

        Source start/finish commands trigger their callbacks and are
        kept in the returned list; 'Stream-Finished' closes the client.

        """
        evs = []
        event = None
        self._deserializer.append_data(data)
        event = self._deserializer.deserialize_next(parse_body=parse_body)
        while event is not None:
            if isinstance(event, Command):
                if event.command == 'Event-Source-Started':
                    if self.source_start_callback:
                        self.source_start_callback()
                    evs.append(event)
                elif event.command == 'Event-Source-Finished':
                    if self.source_finish_callback:
                        self.source_finish_callback()
                    evs.append(event)
                elif event.command == 'Stream-Finished':
                    self._finish_internal(True)
                    ## logging.info('Stream finished')
            else:
                evs.append(event)
            event = self._deserializer.deserialize_next(parse_body=parse_body)
        return evs

    def _create_status_file(self, overwrite):
        """Create (or truncate, if 'overwrite') the per-URL status file.

        The file lives under a hidden per-label directory and stores the
        id of the last event received from this URL.

        """
        dirname = '.ztreamy-client-' + self.label
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        status_file = os.path.join(dirname, base64.urlsafe_b64encode(self.url))
        if overwrite:
            mode = 'w'
        else:
            # Append mode creates the file without discarding a
            # previously saved event id.
            mode = 'a'
        with open(status_file, mode=mode):
            pass
        return status_file

    def _write_last_event_id(self, event):
        """Persist the last event id to disk, or in memory if unlabeled."""
        if self.status_file is not None:
            with open(self.status_file, mode='w') as f:
                f.write(event.event_id)
        else:
            self.last_event_id = event.event_id

    def _read_last_event_id(self):
        """Return the saved last event id, or None if nothing was saved."""
        if self.status_file is not None:
            with open(self.status_file) as f:
                event_id_read = f.read().strip()
            if event_id_read:
                event_id = event_id_read
            else:
                event_id = None
        else:
            event_id = self.last_event_id
        return event_id
示例#12
0
class AsyncStreamingClient(object):
    """Asynchronous client for a single event source.

    If you need to receive events from several sources, use the class
    'Client' instead.

    """
    def __init__(self,
                 url,
                 event_callback=None,
                 error_callback=None,
                 connection_close_callback=None,
                 source_start_callback=None,
                 source_finish_callback=None,
                 ioloop=None,
                 parse_event_body=True,
                 separate_events=True,
                 reconnect=True,
                 disable_compression=False):
        """Creates a new client for a given stream URL.

        The client connects to the stream URL given by 'url'.  For
        every single received event, the 'event_callback' function is
        invoked. It receives an event object as parameter.

        If 'separate_events' is set to None, then the event callback
        will receive a list of events instead of a single events.

        If a 'ioloop' object is given, the client will block on it
        apon calling the 'start()' method. If not, it will block on
        the default 'ioloop' of Tornado.

        """
        if isinstance(event_callback, Filter):
            # A Filter object may be passed instead of a plain callable.
            event_callback = event_callback.filter_event
        self.url = url
        self.event_callback = event_callback
        self.error_callback = error_callback
        self.connection_close_callback = connection_close_callback
        self.source_start_callback = source_start_callback
        self.source_finish_callback = source_finish_callback
        self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
        self.parse_event_body = parse_event_body
        self.separate_events = separate_events
        self._closed = False
        self._looping = False
        self._deserializer = Deserializer()
        self.last_event = None
        self.reconnect = reconnect
        self.disable_compression = disable_compression
        self.connection_attempts = 0


#        self.data_history = []

    def start(self, loop=False):
        """Starts the client.

        This function has to be called in order to connect to the
        streams and begin to receive events.

        If 'loop' is True (the default is False), the server will
        block on the ioloop until 'close()' is called.

        """
        self._connect()
        if loop:
            self._looping = True
            self.ioloop.start()
            self._looping = False

    def stop(self, notify_connection_close=True):
        """Stops and closes this client.

        The client can no longer be used in the future.

        Unless the keywork parameter `notify_connection_close` is set
        to false, the connection close callback will be invoked if set
        for this client.

        If the server is blocked on the ioloop in the 'start()'
        method, it is released.

        Note: if the backend behind
        'tornado.httpclient.AsyncHTTPClient()' is 'SimpleHTTPClient',
        invoking 'stop()' does not actually close the HTTP connections
        (as of Tornado branch master september 1st 2011).

        """
        self._finish_internal(notify_connection_close)

    def _connect(self):
        """Open the streaming HTTP request, resuming after 'last_event'.

        Negotiates deflate compression unless 'disable_compression' is
        set.  Timeouts of 0 keep the streaming request open.

        """
        http_client = AsyncHTTPClient()
        if self.last_event is None:
            url = self.url
        else:
            url = self.url + '?last-seen=' + self.last_event
        if not self.disable_compression:
            headers = {'Accept-Encoding': 'deflate;q=1, identity;q=0.5'}
        else:
            headers = {'Accept-Encoding': 'identity'}
        req = HTTPRequest(url,
                          streaming_callback=self._stream_callback,
                          headers=headers,
                          request_timeout=0,
                          connect_timeout=0)
        http_client.fetch(req, self._request_callback)
        self.connection_attempts += 1

    def _reconnect(self):
        """Schedule a reconnection attempt after a randomized delay."""
        logging.info('Reconnecting to the stream...')
        t = 3 + random.expovariate(0.3)
        self.ioloop.add_timeout(datetime.timedelta(seconds=t), self._connect)

    def _finish_internal(self, notify_connection_close):
        """Close the client at most once, optionally notifying the callback."""
        if self._closed:
            return
        if (notify_connection_close
                and self.connection_close_callback is not None):
            self.connection_close_callback(self)
        if self._looping:
            self.ioloop.stop()
            self._looping = False
        self._closed = True

    def _stream_callback(self, data):
        """Handle a streamed chunk; data arrival resets the retry counter."""
        self.connection_attempts = 0
        self._process_received_data(data)

    def _request_callback(self, response):
        """Handle request completion or error and decide whether to finish."""
        if response.error:
            # Retry on transient errors (never on HTTP 4xx), up to 5
            # consecutive attempts, unless closed or reconnect disabled.
            # NOTE(review): 'response.error.code' assumes an HTTPError;
            # other error types may lack 'code' — confirm upstream.
            if (self.connection_attempts < 5
                    and not response.error.code // 100 == 4
                    and not self._closed and self.reconnect):
                self._reconnect()
                finish = False
            else:
                if self.error_callback is not None:
                    self.error_callback('Error in HTTP request',
                                        http_error=response.error)
                finish = True
        else:
            if len(response.body) > 0:
                self._process_received_data(response.body)
            finish = True
        if finish:
            logging.info('Finishing client')
            self._finish_internal(True)

    def _process_received_data(self, data):
        """Deserialize 'data' and dispatch events to the event callback."""
        global transferred_bytes
        transferred_bytes += len(data)
        evs = self._deserialize(data, parse_body=self.parse_event_body)
        for e in evs:
            logger.logger.event_delivered(e)
        if self.event_callback is not None:
            if not self.separate_events:
                self.event_callback(evs)
            else:
                for ev in evs:
                    self.event_callback(ev)
        if len(evs) > 0:
            # Remember the latest event id for the reconnection URL.
            self.last_event = evs[-1].event_id

    def _deserialize(self, data, parse_body=True):
        """Deserialize events from 'data', handling in-band commands.

        Source start/finish commands trigger their callbacks and are
        kept in the returned list; 'Stream-Finished' closes the client.

        """
        evs = []
        event = None
        # NOTE(review): no decompression happens in this variant, so
        # both lengths logged below are the raw data length.
        compressed_len = len(data)
        logger.logger.data_received(compressed_len, len(data))
        self._deserializer.append_data(data)
        event = self._deserializer.deserialize_next(parse_body=parse_body)
        while event is not None:
            if isinstance(event, Command):
                if event.command == 'Event-Source-Started':
                    if self.source_start_callback:
                        self.source_start_callback()
                    evs.append(event)
                elif event.command == 'Event-Source-Finished':
                    if self.source_finish_callback:
                        self.source_finish_callback()
                    evs.append(event)
                elif event.command == 'Stream-Finished':
                    self._finish_internal(True)
                    ## logging.info('Stream finished')
            else:
                evs.append(event)
            event = self._deserializer.deserialize_next(parse_body=parse_body)
        return evs