class SseStream(object):
    def __init__(self, queue, bidder_id=None, client_id=None, timeout=None):
        self.queue = queue
        self.client_id = client_id
        self.bidder_id = bidder_id
        if timeout:
            self.sse = PySse(default_retry=0)
            spawn(sse_timeout, queue, timeout)
        else:
            self.sse = PySse(default_retry=2000)

    def __iter__(self):
        self.sse = PySse()
        # TODO: https://app.asana.com/0/17412748309135/22939294056733
        yield CHUNK
        for data in self.sse:
            yield data.encode('u8')

        while True:
            message = self.queue.get()
            if message["event"] == "StopSSE":
                return
            LOGGER.debug(' '.join([
                'Event Message to bidder:',
                str(self.bidder_id), ' Client:',
                str(self.client_id), 'MSG:',
                str(repr(message))
            ]))
            self.sse.add_message(message['event'], json.dumps(message['data']))
            for data in self.sse:
                yield data.encode('u8')
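
The SseStream class above pulls several names from its host module: PySse is the Sse class from the sse package, spawn comes from gevent, CHUNK is a padding prelude that forces intermediaries to flush, and sse_timeout closes idle streams by stopping the queue. A minimal sketch of those assumed helpers, inferred from how they are used here rather than taken from the original project:

# Assumed helpers for the class above; names and values are illustrative only.
import json
import logging

from gevent import sleep, spawn
from sse import Sse as PySse

LOGGER = logging.getLogger(__name__)
# Roughly 2 KB of comment padding so proxies flush the response immediately.
CHUNK = (':' + ' ' * 2048 + '\n\n').encode('utf-8')

def sse_timeout(queue, timeout):
    # After `timeout` seconds, ask the stream to stop by queueing a StopSSE event.
    sleep(timeout)
    queue.put({'event': 'StopSSE', 'data': {}})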
Example #2
def auctions_proxy(auction_doc_id, path):
    auctions_server.logger.debug('Auction_doc_id: {}'.format(auction_doc_id))
    proxy_path = auctions_server.proxy_mappings.get(str(auction_doc_id),
                                                    auctions_server.redis.get,
                                                    (str(auction_doc_id), ),
                                                    max_age=60)
    auctions_server.logger.debug('Proxy path: {}'.format(proxy_path))
    if proxy_path:
        request.environ['PATH_INFO'] = '/' + path
        auctions_server.logger.debug('Start proxy to path: {}'.format(path))
        return StreamProxy(
            proxy_path,
            auction_doc_id=str(auction_doc_id),
            event_sources_pool=auctions_server.event_sources_pool,
            event_source_connection_limit=auctions_server.
            config['event_source_connection_limit'],
            pool=auctions_server.proxy_connection_pool,
            backend="gevent")
    elif path == 'login' and auction_doc_id in auctions_server.db:
        return redirect((url_for('auction_url',
                                 auction_doc_id=auction_doc_id,
                                 wait=1,
                                 **request.args)))
    elif path == 'event_source':
        events_close = PySse()
        events_close.add_message("Close", "Disable")
        return Response(events_close,
                        mimetype='text/event-stream',
                        content_type='text/event-stream')

    return abort(404)
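
proxy_mappings behaves like a small expiring cache: get() returns the stored value for the auction id and calls the supplied fetch callable (here auctions_server.redis.get) with the given argument tuple whenever the entry is missing or older than max_age seconds. A rough stand-in, inferred from the call site only:

# Illustrative stand-in for auctions_server.proxy_mappings; the real
# implementation in the project may differ.
import time

class ExpiringMappings(object):
    def __init__(self):
        self._store = {}

    def get(self, key, fetch, args=(), max_age=60):
        value, stamp = self._store.get(key, (None, 0))
        if value is None or time.time() - stamp > max_age:
            value = fetch(*args)
            self._store[key] = (value, time.time())
        return value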
def auctions_proxy(auction_doc_id, path):
    auctions_server.logger.debug('Auction_doc_id: {}'.format(auction_doc_id))
    proxy_path = auctions_server.proxy_mappings.get(
        str(auction_doc_id),
        get_mapping,
        (auctions_server.config['REDIS'], str(auction_doc_id), False),
        max_age=60)
    auctions_server.logger.debug('Proxy path: {}'.format(proxy_path))
    if proxy_path:
        request.environ['PATH_INFO'] = '/' + path
        auctions_server.logger.debug('Start proxy to path: {}'.format(path))
        return StreamProxy(
            proxy_path,
            auction_doc_id=str(auction_doc_id),
            event_sources_pool=auctions_server.event_sources_pool,
            event_source_connection_limit=auctions_server.
            config['event_source_connection_limit'],
            pool=auctions_server.proxy_connection_pool,
            backend='gevent')
    elif path == 'login' and auction_doc_id in auctions_server.db:
        if 'X-Forwarded-For' in request.headers:
            url = urlunparse(
                urlparse(request.url)._replace(
                    netloc=request.headers['Host'])).replace('/login', '')
            auctions_server.logger.info(
                'Redirecting login path to {}'.format(url))
            return redirect(url)
    elif path == 'event_source':
        events_close = PySse()
        events_close.add_message('Close', 'Disable')
        return Response(events_close,
                        mimetype='text/event-stream',
                        content_type='text/event-stream')
    return abort(404)
def eventstream():
    sse = Sse()
    pubsub = redis.pubsub()
    while True:
        for event in event_types:
            pubsub.subscribe(event)
        with Timeout(TIMEOUT) as timeout:
            try:
                for message in pubsub.listen():
                    if message['type'] != "message":
                        continue
                    try:
                        data = json.loads(message['data'])
                    except ValueError:  # broken json
                        continue
                    if 'site_id' not in data or data['site_id'] != SITE_ID:
                        continue

                    sse.add_message(message['channel'], str(message['data']))
                    for event in sse:
                        yield str(event)
                    sse.flush()

                    timeout.cancel()
                    timeout.start()

            # heartbeat, to detect if a user is disconnected
            except Timeout as t:
                if t is not timeout:  # not our timeout
                    raise
                yield ":\n\n"  # heartbeat message

            finally:
                pubsub.close()
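
The generator above subscribes to one Redis channel per event type and uses a gevent Timeout as a heartbeat: if no message arrives within TIMEOUT seconds it yields a bare SSE comment so broken client connections are detected. The matching publisher side, inferred only from the filtering in the loop, would publish JSON carrying the site_id on the per-event-type channel, for example:

# Hypothetical publisher counterpart; channel names and SITE_ID mirror the
# consumer above, the rest is illustrative.
import json
import redis

connection = redis.StrictRedis()

def publish_event(event_type, payload, site_id):
    payload = dict(payload, site_id=site_id)
    connection.publish(event_type, json.dumps(payload))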
Example #8
class DynamicChannelRedisQueueView(RedisQueueView):
    def get_redis_channel(self):
        return self.kwargs.get('channel') or self.redis_channel

    def _iterator(self):
        yield u":" + (" " * 2048) + "\n" + ":retry 2000\n"
        for subiterator in self.iterator():
            msg = u''
            for bufferitem in self.sse:
                msg = msg + bufferitem
            yield msg

    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        # This is basically the same method as in RedisQueueView.dispatch,
        # which should be a BaseSseView.dispatch. The only thing we modify
        # here is an extra header, called X-Accel-Buffering, which disables
        # buffering of this view by a webserver, for example nginx needs
        # that: http://wiki.nginx.org/X-accel#X-Accel-Buffering .

        # Also, we close db connection, as it won't be needed here.

        self.sse = Sse()

        self.request = request
        self.args = args
        self.kwargs = kwargs

        from django import db
        db.close_connection()

        response = HttpResponse(self._iterator(), content_type="text/event-stream")
        response['Cache-Control'] = 'no-cache'
        response['Software'] = 'django-sse'
        response['X-Accel-Buffering'] = 'no'
        return response


    def iterator(self):
        if settings.TESTING:
            #
            # When testing with current Django (1.5.1), the LiveServerTestCase
            # server runs only one thread. So, if we listened for Redis
            # messages, we would block the only socket of the test server. To
            # be able to test JavaScript in web browsers (EventSource
            # support) we just fake incoming messages. Yes, this does not
            # test our Redis communication properly. On the other hand, I'd
            # rather leave Redis communication untested - that's the job of
            # the django-sse package - and focus on testing browsers with
            # EventSource support.
            #
            for message in testutil.MESSAGES:
                self.sse.add_message("message", message)
            testutil.MESSAGES = []
            return [1]

        self.sse.add_message('debug', 'Hi browser, you are connected.')
        return RedisQueueView.iterator(self)
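
get_redis_channel() reads the channel from the URL kwargs, so the view is normally wired with a named group in urls.py. One hypothetical route (newer list-style syntax shown; the Django 1.5-era project the comments mention would use patterns() instead):

# Hypothetical URL wiring; adjust the pattern and import style to your Django version.
from django.conf.urls import url

urlpatterns = [
    url(r'^events/(?P<channel>[\w.-]+)/$', DynamicChannelRedisQueueView.as_view()),
]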
    def __init__(self, queue, bidder_id=None, client_id=None, timeout=None):
        self.queue = queue
        self.client_id = client_id
        self.bidder_id = bidder_id
        if timeout:
            self.sse = PySse(default_retry=0)
            spawn(sse_timeout, queue, timeout)
        else:
            self.sse = PySse(default_retry=2000)
Example #12
    def __iter__(self):
        sse = PySse()
        for data in sse:
            yield str(data)
        for message in self.pubsub.listen():
            if message['type'] == 'message':
                event, data = json.loads(message['data'])
                sse.add_message(event, data)
                for data in sse:
                    yield str(data)
Example #13
    def __iter__(self):
        sse = PySse()
        for data in sse:
            yield data.encode('u8')
        for message in self.pubsub.listen():
            if message['type'] == 'message':
                event, data = json.loads(message['data'])
                sse.add_message(event, data)
                for data in sse:
                    yield data.encode('u8')
Example #14
    def __iter__(self):
        sse = PySse()
        for data in sse:
            yield data.encode('u8')
        for message in self.pubsub.listen():
            if message['type'] == 'message':
                if message['data'] != '_flush':
                    event, data = json.loads(message['data'])
                    sse.add_message(event, data)
                    for data in sse:
                        yield data.encode('u8')
                else:
                    yield ":\n".encode('u8')
Example #15
    def test_add_message__list(self):
        sse = Sse()

        sse.add_message("foo", ["foo-message"])
        sse.add_message("bar", ["bar-message"])

        self.assertEqual(list(sse), [
            'retry: 2000\n\n',
            'event: foo\n',
            'data: foo-message\n',
            '\n',
            'event: bar\n',
            'data: bar-message\n',
            '\n'
        ])
Example #16
class SseNotifier(object):
    """Iterator that yields the published messages in a channel."""
    def __init__(self, backend, channel):
        """Initialise PublishSubscribe instance and channel."""
        self.sse = Sse()
        self.backend = backend
        self.backend.subscribe(channel)

    def __iter__(self):
        """Yield the published messages in a SSE format."""
        for message in self.backend.listen():
            if message['type'] == 'message':
                self.sse.add_message("", message['data'])
                for data in self.sse:
                    yield data.encode('u8')
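
SseNotifier only needs a backend that exposes subscribe() and listen(); a redis-py PubSub object fits that shape. One possible way to expose it over HTTP, shown with Flask purely as an illustration:

# Illustrative wiring only; the redis connection and route name are assumptions.
import redis
from flask import Flask, Response

app = Flask(__name__)

@app.route('/events/<channel>')
def events(channel):
    backend = redis.StrictRedis().pubsub()
    return Response(SseNotifier(backend, channel),
                    mimetype='text/event-stream')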
Example #17
    def test_add_message__simple_text_split(self):
        sse = Sse()
        sse.add_message("foo", "foo\nmessage")
        sse.add_message("bar", "bar\nmessage")

        self.assertEqual(list(sse), [
            'retry: 2000\n\n',
            'event: foo\n',
            'data: foo\n',
            'data: message\n',
            '\n',
            'event: bar\n',
            'data: bar\n',
            'data: message\n',
            '\n'
        ])
Example #19
    def dispatch(self, request, *args, **kwargs):
        self.sse = Sse()

        # Check if there is a channel extension in kwargs.
        # This may be used to separate events of the same kind
        # by some identifier (some object id, for example)
        channel_extension = kwargs.get('channel_extension', '')
        if channel_extension:
            self.channel = '%s/%s' % (self.channel, channel_extension)

        response = HttpResponse(self._generate_content(),
                                content_type="text/event-stream")
        response['Cache-Control'] = 'no-cache'
        response['X-Accel-Buffering'] = 'no'
        response['Software'] = 'django-sse-wrapper'
        return response
Example #20
class StreamHandler(tornado.web.RequestHandler):
    def initialize(self):
        self.set_header('Content-Type', 'text/event-stream')
        self.set_header('Cache-Control', 'no-cache')
        self.set_header('X-Accel-Buffering', 'no')
        self.sse = Sse()
        self.stream = True

    def on_connection_close(self):
        self.stream = False
        super().on_connection_close()

    async def publish(self, message=None):
        try:
            if message is not None:
                self.sse.add_message('message', message)
            for item in self.sse:
                self.write(item)
            await self.flush()
        except StreamClosedError:
            self.stream = False

    async def get(self):
        # Send retry option to client
        await self.publish()

        ts = time.time() - 120  # last 2 minutes
        collection = self.settings['db'].prices
        cursor = collection.find({'ts': {
            '$gt': ts
        }},
                                 cursor_type=CursorType.TAILABLE_AWAIT)
        while self.stream:
            if not cursor.alive:
                cursor = collection.find({'ts': {
                    '$gt': ts
                }},
                                         cursor_type=CursorType.TAILABLE_AWAIT)

            if (await cursor.fetch_next):
                doc = cursor.next_object()
                doc.pop('_id')
                ts = doc['ts']
                await self.publish(json.dumps(doc))
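
StreamHandler expects self.settings['db'] to be a Motor database whose prices collection is capped, since tailable cursors only work on capped collections, and CursorType in the handler comes from pymongo. A hypothetical application wiring for it:

# Illustrative Tornado wiring; connection string and database name are placeholders.
import motor.motor_tornado
import tornado.ioloop
import tornado.web

def make_app():
    client = motor.motor_tornado.MotorClient('mongodb://localhost:27017')
    return tornado.web.Application([(r'/stream', StreamHandler)], db=client.market)

if __name__ == '__main__':
    make_app().listen(8888)
    tornado.ioloop.IOLoop.current().start()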
Example #21
    def test_add_message__simple_text(self):
        sse = Sse()

        sse.add_message("foo", "foo-message")
        sse.add_message("bar", "bar-message")

        self.assertEqual(to_unicode(sse), "retry: 2000\n\nevent: foo\ndata: "
                                          "foo-message\n\nevent: bar\ndata: "
                                          "bar-message\n\n")

        self.assertEqual(list(sse), [
            'retry: 2000\n\n',
            'event: foo\n',
            'data: foo-message\n',
            '\n',
            'event: bar\n',
            'data: bar-message\n',
            '\n'
        ])
Example #22
    def dispatch(self, request, *args, **kwargs):
        self.sse = Sse()

        self.request = request
        self.args = args
        self.kwargs = kwargs

        response = StreamingHttpResponse(self._iterator(), content_type="text/event-stream")
        response['Cache-Control'] = 'no-cache'
        response['Software'] = 'django-sse'
        return response
Example #23
class EventStreamView(View):
    """
    This is the view you must use in your urls.py to expose an event stream.
    """
    channel = DEFAULT_CHANNEL

    def _generate_content(self):
        for subiterator in self.iterator():
            for bufferitem in self.sse:
                yield bufferitem

    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        self.sse = Sse()

        # Check if there is a channel extension in kwargs.
        # This may be used to separate events of the same kind
        # by some identifier (some object id, for example)
        channel_extension = kwargs.get('channel_extension', '')
        if channel_extension:
            self.channel = '%s/%s' % (self.channel, channel_extension)

        response = HttpResponse(self._generate_content(),
                                content_type="text/event-stream")
        response['Cache-Control'] = 'no-cache'
        response['X-Accel-Buffering'] = 'no'
        response['Software'] = 'django-sse-wrapper'
        return response

    def iterator(self):
        # get the class object from settings (or default if not specified).
        Backend = class_from_str(SSE_BACKEND_CLASS)

        # create a backend instance and subscribe the channel.
        backend = Backend()
        backend.subscribe(self.channel)

        for event, data in backend.listen():
            self.sse.add_message(event, data)
            yield
Example #26
def build_sse_msg(message, event=None, id_=None):
    sse = Sse()
    if id_:
        sse.set_event_id(id_)
    sse.add_message(event, message)
    sse_msg = "".join(sse)
    return sse_msg
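
A usage sketch for the helper above; the payload layout follows the sse package behaviour exercised by the tests elsewhere in this listing:

# Example call; the exact ordering of lines is up to the Sse buffer.
msg = build_sse_msg("hello", event="greeting", id_="42")
# msg is a ready-to-send SSE payload containing the default "retry: 2000"
# prelude plus "id: 42", "event: greeting" and "data: hello" lines.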
Example #27
    def send_message(cls, msg):
        """ Sends a message to all live connections """
        id = str(uuid.uuid4())
        event, data = json.loads(msg.body)

        sse = Sse()
        sse.set_event_id(id)
        sse.add_message(event, data)
        message = ''.join(sse)
        cls._cache.append({
            'id': id,
            'channel': msg.channel,
            'body': ''.join(sse),
        })
        if len(cls._cache) > cls._cache_size:
            cls._cache = cls._cache[-cls._cache_size:]

        clients = cls._channels.get(msg.channel, [])
        logger.info('Sending %s "%s" to channel %s for %s clients' % (event, data, msg.channel, len(clients)))
        for client_id in clients:
            client = cls._connections[client_id]
            client.on_message(message)
def build_sse_msg(message, event=None, id_=None):
    sse = Sse()
    if id_:
        sse.set_event_id(id_)
    sse.add_message(event, message)
    sse_msg = "".join(sse)
    print("YYYYYYYYYYY: {}".format(sse_msg))
    return sse_msg
Example #29
    def dispatch(self, request, *args, **kwargs):
        # This is basically the same method as in RedisQueueView.dispatch,
        # which should be a BaseSseView.dispatch. The only thing we modify
        # here is an extra header, called X-Accel-Buffering, which disables
        # buffering of this view by a webserver, for example nginx needs
        # that: http://wiki.nginx.org/X-accel#X-Accel-Buffering .

        # Also, we close db connection, as it won't be needed here.

        self.sse = Sse()

        self.request = request
        self.args = args
        self.kwargs = kwargs

        from django import db
        db.close_connection()

        response = HttpResponse(self._iterator(),
                                content_type="text/event-stream")
        response['Cache-Control'] = 'no-cache'
        response['Software'] = 'django-sse'
        response['X-Accel-Buffering'] = 'no'
        return response
Example #35
    def test_flush_on_iter(self):
        sse = Sse()
        sse.add_message("foo", "bar")

        self.assertEqual(list(sse), ['retry: 2000\n\n', 'event: foo\n', 'data: bar\n', '\n'])
        self.assertEqual(list(sse), [])
Example #36
    def test_constructor(self):
        self.assertEqual(list(Sse()), ['retry: 2000\n\n'])
        self.assertEqual(list(Sse(default_retry=1000)), ['retry: 1000\n\n'])
Example #37
    def test_dinamic_methods(self):
        sse = Sse()
        sse.add_event_foo(text="bar")

        self.assertEqual(list(sse), ['retry: 2000\n\n', 'event: foo\n', 'data: bar\n', '\n'])
Example #38
    def test_flush(self):
        sse = Sse()
        sse.add_message("foo", "bar")

        sse.flush()
        self.assertEqual(len(sse._buffer), 0)
def event_source():
    current_app.logger.debug(
        'Handle event_source request with session {}'.format(
            repr(dict(session))),
        extra=prepare_extra_journal_fields(request.headers))
    if 'remote_oauth' in session and 'client_id' in session:
        bidder_data = get_bidder_id(current_app, session)
        if bidder_data:
            valid_bidder = False
            client_hash = session['client_id']
            bidder = bidder_data['bidder_id']
            for bidder_info in current_app.config['auction'].bidders_data:
                if bidder_info['id'] == bidder:
                    valid_bidder = True
                    break
            if current_app.config['auction'].auction_document.get(
                    'current_phase',
                    '') in ['dutch', 'pre-started', 'pre-sealedbid']:
                valid_bidder = True
            if valid_bidder:
                if bidder not in current_app.auction_bidders:
                    current_app.auction_bidders[bidder] = {
                        "clients": {},
                        "channels": {}
                    }

                if client_hash not in current_app.auction_bidders[bidder]:
                    real_ip = request.environ.get('HTTP_X_REAL_IP', '')
                    if real_ip.startswith('172.'):
                        real_ip = ''
                    current_app.auction_bidders[bidder]["clients"][
                        client_hash] = {
                            'ip':
                            ','.join([
                                request.headers.get('X-Forwarded-For', ''),
                                real_ip
                            ]),
                            'User-Agent':
                            request.headers.get('User-Agent'),
                        }
                    current_app.auction_bidders[bidder]["channels"][
                        client_hash] = Queue()

                current_app.logger.info(
                    'Send identification for bidder: {} with client_hash {}'.
                    format(bidder, client_hash),
                    extra=prepare_extra_journal_fields(request.headers))
                identification_data = {
                    "bidder_id": bidder,
                    "client_id": client_hash,
                    "return_url": session.get('return_url', '')
                }
                if current_app.config['auction'].features:
                    identification_data["coeficient"] = str(
                        current_app.config['auction'].
                        bidders_coeficient[bidder])

                send_event_to_client(bidder, client_hash, identification_data,
                                     "Identification")
                if 'amount' in session:
                    send_event_to_client(bidder, client_hash,
                                         {"last_amount": session['amount']},
                                         "RestoreBidAmount")
                    current_app.logger.debug('Send RestoreBidAmount')
                    del session['amount']

                if not session.get("sse_timeout", 0):
                    current_app.logger.debug('Send ClientsList')
                    send_event(bidder,
                               current_app.auction_bidders[bidder]["clients"],
                               "ClientsList")
                response = Response(
                    SseStream(current_app.auction_bidders[bidder]["channels"]
                              [client_hash],
                              bidder_id=bidder,
                              client_id=client_hash,
                              timeout=session.get("sse_timeout", 0)),
                    direct_passthrough=True,
                    mimetype='text/event-stream',
                    content_type='text/event-stream')
                response.headers['Cache-Control'] = 'no-cache'
                response.headers['X-Accel-Buffering'] = 'no'
                return response
            else:
                current_app.logger.info(
                    'Not valid bidder: bidder_id {} with client_hash {}'.
                    format(bidder, client_hash),
                    extra=prepare_extra_journal_fields(request.headers))

    current_app.logger.debug('Disable event_source for unauthorized user.',
                             extra=prepare_extra_journal_fields(
                                 request.headers))
    events_close = PySse()
    events_close.add_message("Close", "Disable")
    response = Response(iter(
        [bytearray(''.join([x for x in events_close]), 'UTF-8')]),
                        direct_passthrough=True,
                        mimetype='text/event-stream',
                        content_type='text/event-stream')
    response.headers['Cache-Control'] = 'no-cache'
    response.headers['X-Accel-Buffering'] = 'no'
    return response
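
send_event_to_client and send_event are not shown in this listing; from the way SseStream consumes its queue they presumably put {'event': ..., 'data': ...} dicts onto the per-client gevent Queues. A hedged sketch of that assumption:

# Inferred helpers; the real project code may differ.
from flask import current_app

def send_event_to_client(bidder, client_hash, data, event_name):
    queue = current_app.auction_bidders[bidder]["channels"][client_hash]
    queue.put({"event": event_name, "data": data})

def send_event(bidder, data, event_name):
    # Broadcast to every connected client of the bidder.
    for queue in current_app.auction_bidders[bidder]["channels"].values():
        queue.put({"event": event_name, "data": data})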
Example #40
    def initialize(self):
        self.set_header('Content-Type', 'text/event-stream')
        self.set_header('Cache-Control', 'no-cache')
        self.set_header('X-Accel-Buffering', 'no')
        self.sse = Sse()
        self.stream = True
def event_source():
    current_app.logger.debug(
        'Handle event_source request with session {}'.format(repr(dict(session))),
        extra=prepare_extra_journal_fields(request.headers)
    )
    if 'remote_oauth' in session and 'client_id' in session:
        bidder_data = get_bidder_id(current_app, session)
        if bidder_data:
            valid_bidder = False
            client_hash = session['client_id']
            bidder = bidder_data['bidder_id']
            for bidder_info in current_app.config['auction'].bidders_data:
                if bidder_info['id'] == bidder:
                    valid_bidder = True
                    break
            if valid_bidder:
                if bidder not in current_app.auction_bidders:
                    current_app.auction_bidders[bidder] = {
                        "clients": {},
                        "channels": {}
                    }

                if client_hash not in current_app.auction_bidders[bidder]:
                    real_ip = request.environ.get('HTTP_X_REAL_IP', '')
                    if real_ip.startswith('172.'):
                        real_ip = ''
                    current_app.auction_bidders[bidder]["clients"][client_hash] = {
                        'ip': ','.join(
                            [request.headers.get('X-Forwarded-For', ''), real_ip]
                        ),
                        'User-Agent': request.headers.get('User-Agent'),
                    }
                    current_app.auction_bidders[bidder]["channels"][client_hash] = Queue()

                current_app.logger.info(
                    'Send identification for bidder: {} with client_hash {}'.format(bidder, client_hash),
                    extra=prepare_extra_journal_fields(request.headers)
                )
                identification_data = {"bidder_id": bidder,
                                       "client_id": client_hash,
                                       "return_url": session.get('return_url', '')}
                if current_app.config['auction'].features:
                    identification_data["coeficient"] = str(current_app.config['auction'].bidders_coeficient[bidder])

                send_event_to_client(bidder, client_hash, identification_data,
                                     "Identification")
                if 'amount' in session:
                    send_event_to_client(bidder, client_hash,
                                         {"last_amount": session['amount']},
                                         "RestoreBidAmount")
                    current_app.logger.debug('Send RestoreBidAmount')
                    del session['amount']

                if not session.get("sse_timeout", 0):
                    current_app.logger.debug('Send ClientsList')
                    send_event(
                        bidder,
                        current_app.auction_bidders[bidder]["clients"],
                        "ClientsList"
                    )
                response = Response(
                    SseStream(
                        current_app.auction_bidders[bidder]["channels"][client_hash],
                        bidder_id=bidder,
                        client_id=client_hash,
                        timeout=session.get("sse_timeout", 0)
                    ),
                    direct_passthrough=True,
                    mimetype='text/event-stream',
                    content_type='text/event-stream'
                )
                response.headers['Cache-Control'] = 'no-cache'
                response.headers['X-Accel-Buffering'] = 'no'
                return response
            else:
                current_app.logger.info(
                    'Not valid bidder: bidder_id {} with client_hash {}'.format(bidder, client_hash),
                    extra=prepare_extra_journal_fields(request.headers)
                )

    current_app.logger.debug(
        'Disable event_source for unauthorized user.',
        extra=prepare_extra_journal_fields(request.headers)
    )
    events_close = PySse()
    events_close.add_message("Close", "Disable")
    response = Response(
        iter([bytearray(''.join([x for x in events_close]), 'UTF-8')]),
        direct_passthrough=True,
        mimetype='text/event-stream',
        content_type='text/event-stream'
    )
    response.headers['Cache-Control'] = 'no-cache'
    response.headers['X-Accel-Buffering'] = 'no'
    return response
Example #43
    def __init__(self, backend, channel):
        """Initialise PublishSubscribe instance and channel."""
        self.sse = Sse()
        self.backend = backend
        self.backend.subscribe(channel)