Code example #1
File: replay.py  Project: PabloCresp0/ztreamy
def main():
    options = read_cmd_options()
    entity_id = ztreamy.random_id()
    limit = tornado.options.options.limit
    if not tornado.options.options.bayeux:
        publisher_type = 'ztreamy'
    else:
        publisher_type = 'bayeux'
    publishers = [_create_publisher(url, publisher_type=publisher_type) \
                  for url in options.server_urls]
    io_loop = tornado.ioloop.IOLoop.instance()
    if tornado.options.options.distribution is not None:
        time_generator = \
            utils.get_scheduler(tornado.options.options.distribution,
                                initial_delay=tornado.options.options.delay)
    else:
        time_generator = None
    scheduler = RelayScheduler(options.filename, limit, entity_id, io_loop,
                               publishers, tornado.options.options.timescale,
                               time_generator=time_generator,
                               add_timestamp=tornado.options.options.timestamp,
                               initial_delay=tornado.options.options.delay)
    if tornado.options.options.eventlog:
        logger.logger = logger.ZtreamyLogger(entity_id,
                                             'replay-' + entity_id + '.log')
    try:
        io_loop.start()
    except KeyboardInterrupt:
        pass
    finally:
        logger.logger.close()
Code example #2
File: events.py  Project: jvrplmlmn/ztreamy
    def __init__(self, source_id, syntax, body, event_id=None,
                 application_id=None, aggregator_id=[], event_type=None,
                 timestamp=None, extra_headers=None):
        """Creates a new event.

        'body' must be the textual representation of the event, or an
        object providing that textual representation through 'str()'.

        When the created event has to be an instance of a specific
        subclass (e.g. an 'RDFEvent'), the static 'create()' method
        should be used instead.

        """
        self.event_id = event_id or ztreamy.random_id()
        self.source_id = source_id
        self.syntax = syntax
        self.body = body
        # Normalize aggregator_id into a list of strings
        if aggregator_id is None:
            self.aggregator_id = []
        elif type(aggregator_id) is not list:
            self.aggregator_id = [str(aggregator_id)]
        else:
            self.aggregator_id = [str(e) for e in aggregator_id]
        self.event_type = event_type
        self.timestamp = timestamp or ztreamy.get_timestamp()
        self.application_id = application_id
        if extra_headers is not None:
            self.extra_headers = extra_headers
        else:
            self.extra_headers = {}
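A minimal construction sketch for the constructor above. The import path and the concrete values are assumptions; other examples on this page refer to this class as events.Event (and ztreamy.Event at package level).

import ztreamy
from ztreamy import events  # assumed module path for the Event class above

source_id = ztreamy.random_id()
event = events.Event(source_id, 'text/plain', 'Temperature is 25 C',
                     application_id='example-app',
                     event_type='TemperatureReading')
# event_id and timestamp are generated automatically when not supplied
print(event.event_id)
print(event.timestamp)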
Code example #3
File: client.py  Project: jvrplmlmn/ztreamy
def main():
    import time
    def handle_event(event):
        sys.stdout.write(str(event))
    def handle_error(message, http_error=None):
        if http_error is not None:
            logging.error(message + ': ' + str(http_error))
        else:
            logging.error(message)
    def stop_client():
        client.stop()
    options = read_cmd_options()
#    import ztreamy.filters
#    filter = ztreamy.filters.SimpleTripleFilter(handle_event,
#                                        predicate='http://example.com/temp')
    client = Client(options.stream_urls,
                    event_callback=handle_event,
#                    event_callback=filter.filter_event,
                    error_callback=handle_error)
#    tornado.ioloop.IOLoop.instance().add_timeout(time.time() + 6, stop_client)
    node_id = ztreamy.random_id()
    if tornado.options.options.eventlog:
        logger.logger = logger.ZtreamyLogger(node_id,
                                             'client-' + node_id + '.log')
    try:
        client.start(loop=True)
    except KeyboardInterrupt:
        pass
    finally:
        logger.logger.close()
Code example #4
File: replay.py  Project: pCresp0/ztreamy
def main():
    options = read_cmd_options()
    entity_id = ztreamy.random_id()
    limit = tornado.options.options.limit
    if not tornado.options.options.bayeux:
        publisher_type = 'ztreamy'
    else:
        publisher_type = 'bayeux'
    publishers = [_create_publisher(url, publisher_type=publisher_type) \
                  for url in options.server_urls]
    io_loop = tornado.ioloop.IOLoop.instance()
    if tornado.options.options.distribution is not None:
        time_generator = \
            utils.get_scheduler(tornado.options.options.distribution,
                                initial_delay=tornado.options.options.delay)
    else:
        time_generator = None
    scheduler = RelayScheduler(options.filename,
                               limit,
                               entity_id,
                               io_loop,
                               publishers,
                               tornado.options.options.timescale,
                               time_generator=time_generator,
                               add_timestamp=tornado.options.options.timestamp,
                               initial_delay=tornado.options.options.delay)
    if tornado.options.options.eventlog:
        logger.logger = logger.ZtreamyLogger(entity_id,
                                             'replay-' + entity_id + '.log')
    try:
        io_loop.start()
    except KeyboardInterrupt:
        pass
    finally:
        logger.logger.close()
Code example #5
def main():
    import time

    def handle_event(event):
        sys.stdout.write(str(event))

    def handle_error(message, http_error=None):
        if http_error is not None:
            logging.error(message + ': ' + str(http_error))
        else:
            logging.error(message)

    def stop_client():
        client.stop()

    options = read_cmd_options()
    #    import ztreamy.filters
    #    filter = ztreamy.filters.SimpleTripleFilter(handle_event,
    #                                        predicate='http://example.com/temp')
    client = Client(
        options.stream_urls,
        event_callback=handle_event,
        #                    event_callback=filter.filter_event,
        error_callback=handle_error)
    #    tornado.ioloop.IOLoop.instance().add_timeout(time.time() + 6, stop_client)
    node_id = ztreamy.random_id()
    if tornado.options.options.eventlog:
        logger.logger = logger.ZtreamyLogger(node_id,
                                             'client-' + node_id + '.log')
    try:
        client.start(loop=True)
    except KeyboardInterrupt:
        pass
    finally:
        logger.logger.close()
Code example #6
File: plaztreamy.py  Project: dleony/PLA
    def create_source(self):
        source_id = ztreamy.random_id()
        self.config.set('ztreamy', 'source_id', source_id)

        # save the source_id in configuration file
        conf_file = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'conf', 'pla-client.cfg'))
        with open(conf_file, 'wb') as configfile:
            self.config.write(configfile)
        return source_id
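A hypothetical companion helper (not shown in the PLA sources above) that reads the persisted source_id back on a later run, so the client keeps a stable identity instead of generating a new one. The section and option names are taken from create_source above; the function name is illustrative.

import ConfigParser  # Python 2 module, matching the example above

def read_saved_source_id(conf_file):
    # Return the persisted id, or None so the caller can fall back to
    # create_source() and generate a fresh one.
    config = ConfigParser.ConfigParser()
    config.read(conf_file)
    if config.has_option('ztreamy', 'source_id'):
        return config.get('ztreamy', 'source_id')
    return None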
Code example #7
File: source.py  Project: pCresp0/ztreamy
    def _schedule_entry(self, entry):
        if entry.subject not in self.source_ids:
            self.source_ids[entry.subject] = ztreamy.random_id()
        pub = utils.EventPublisher(entry, self.source_ids[entry.subject],
                                   self.publishers)
        fire_time = (self.t0_new
                     + (entry.timestamp - self.t0_original) / self.time_scale)
        self.io_loop.add_timeout(fire_time, pub.publish)
#        print entry.seq_num, 'scheduled for', fire_time
        return fire_time
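The fire_time computation above maps original timestamps onto the replay clock, compressed or stretched by time_scale. A tiny numeric illustration (all values made up):

# With t0_original = 100.0, t0_new = 5000.0 and time_scale = 2.0, an entry
# stamped 10 seconds into the original trace fires 5 seconds into the replay.
t0_original, t0_new, time_scale = 100.0, 5000.0, 2.0
entry_timestamp = 110.0
fire_time = t0_new + (entry_timestamp - t0_original) / time_scale
print(fire_time)  # 5005.0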
Code example #8
File: server.py  Project: pCresp0/ztreamy
    def __init__(self,
                 path,
                 allow_publish=False,
                 buffering_time=None,
                 source_id=None,
                 num_recent_events=2048,
                 event_adapter=None,
                 parse_event_body=True,
                 ioloop=None):
        """Creates a stream object.

        The stream will be served with the specified 'path' prefix,
        which should start with a slash ('/') and should not end with
        one. If 'path' does not start with a slash, this constructor
        inserts it automatically. For example, if the value of 'path'
        is '/mystream' the stream will be served as:

        http://host:port/mystream/stream
        http://host:port/mystream/compressed
        (etc.)

        The stream does not accept events from clients, unless
        'allow_publish' is set to 'True'.

        'buffering_time' controls the period for which events are
        accumulated in a buffer before sending them to the
        clients. Higher times improve CPU performance in the server
        and compression ratios, but increase the latency in the
        delivery of events.

        If an 'ioloop' object is given, it will be used by the internal
        timers of the stream.  If not, the default 'ioloop' of the
        Tornado instance will be used.

        """
        if source_id is not None:
            self.source_id = source_id
        else:
            self.source_id = ztreamy.random_id()
        if path.startswith('/'):
            self.path = path
        else:
            self.path = '/' + path
        self.allow_publish = allow_publish
        self.dispatcher = _EventDispatcher(self,
                                           num_recent_events=num_recent_events)
        self.buffering_time = buffering_time
        self.event_adapter = event_adapter
        self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
        self.parse_event_body = parse_event_body
        self._event_buffer = []
        if buffering_time:
            self.buffer_dump_sched = \
                tornado.ioloop.PeriodicCallback(self._dump_buffer,
                                                buffering_time, self.ioloop)
Code example #9
File: server.py  Project: jvrplmlmn/ztreamy
def main():
    import time
    import tornado.options
    from ztreamy import rdfevents
    source_id = ztreamy.random_id()
    application_id = '1111-1111'

    def stop_server():
        server.stop()

    tornado.options.define('port', default=8888, help='run on the given port',
                           type=int)
    tornado.options.define('buffer', default=None, help='event buffer time (s)',
                           type=float)
    tornado.options.define('eventlog', default=False,
                           help='dump event log',
                           type=bool)
    tornado.options.define('autostop', default=False,
                           help='stop the server when the source finishes',
                           type=bool)
    tornado.options.parse_command_line()
    port = tornado.options.options.port
    if (tornado.options.options.buffer is not None
        and tornado.options.options.buffer > 0):
        buffering_time = tornado.options.options.buffer * 1000
    else:
        buffering_time = None
    server = StreamServer(port,
                 stop_when_source_finishes=tornado.options.options.autostop)
    stream = Stream('/events', allow_publish=True,
                    buffering_time=buffering_time)
    ## relay = RelayStream('/relay', [('http://localhost:' + str(port)
    ##                                + '/stream/priority')],
    ##                     allow_publish=True,
    ##                     buffering_time=buffering_time)
    server.add_stream(stream)
    ## server.add_stream(relay)
    if tornado.options.options.eventlog:
        print stream.source_id
        comments = {'Buffer time (ms)': buffering_time}
#        logger.logger = logger.ZtreamyLogger(stream.source_id,
        logger.logger = logger.CompactServerLogger(stream.source_id,
                                                   'server-' + stream.source_id
                                                   + '.log', comments)
        logger.logger.auto_flush = True
    # Uncomment to test Stream.stop():
    # tornado.ioloop.IOLoop.instance().add_timeout(time.time() + 5, stop_server)
    try:
        server.start()
    except KeyboardInterrupt:
        pass
    finally:
        server.stop()
        logger.logger.close()
Code example #10
File: server.py  Project: PabloCresp0/ztreamy
    def __init__(self, path, allow_publish=False, buffering_time=None,
                 source_id=None, num_recent_events=2048,
                 event_adapter=None,
                 parse_event_body=True, ioloop=None):
        """Creates a stream object.

        The stream will be served with the specified 'path' prefix,
        which should start with a slash ('/') and should not end with
        one. If 'path' does not start with a slash, this constructor
        inserts it automatically. For example, if the value of 'path'
        is '/mystream' the stream will be served as:

        http://host:port/mystream/stream
        http://host:port/mystream/compressed
        (etc.)

        The stream does not accept events from clients, unless
        'allow_publish' is set to 'True'.

        'buffering_time' controls the period for which events are
        accumulated in a buffer before sending them to the
        clients. Higher times improve CPU performance in the server
        and compression ratios, but increase the latency in the
        delivery of events.

        If an 'ioloop' object is given, it will be used by the internal
        timers of the stream.  If not, the default 'ioloop' of the
        Tornado instance will be used.

        """
        if source_id is not None:
            self.source_id = source_id
        else:
            self.source_id = ztreamy.random_id()
        if path.startswith('/'):
            self.path = path
        else:
            self.path = '/' + path
        self.allow_publish = allow_publish
        self.dispatcher = _EventDispatcher(self,
                                           num_recent_events=num_recent_events)
        self.buffering_time = buffering_time
        self.event_adapter = event_adapter
        self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
        self.parse_event_body = parse_event_body
        self._event_buffer = []
        if buffering_time:
            self.buffer_dump_sched = \
                tornado.ioloop.PeriodicCallback(self._dump_buffer,
                                                buffering_time, self.ioloop)
Code example #11
File: processor.py  Project: jfisteus/ztreamy
def main():
    options = read_cmd_options()
    node_id = ztreamy.random_id()
#    import ztreamy.filters
#    filter = ztreamy.filters.SimpleTripleFilter(handle_event,
#                                        predicate='http://example.com/temp')
    if tornado.options.options.eventlog:
        logger.logger = logger.ZtreamyLogger(node_id,
                                             'processor-' + node_id + '.log')
    processor = Processor(options.stream_urls)
    try:
        processor.start(loop=True)
    except KeyboardInterrupt:
        pass
    finally:
        processor.stop()
        logger.logger.close()
Code example #12
File: processor.py  Project: pCresp0/ztreamy
def main():
    options = read_cmd_options()
    node_id = ztreamy.random_id()
    #    import ztreamy.filters
    #    filter = ztreamy.filters.SimpleTripleFilter(handle_event,
    #                                        predicate='http://example.com/temp')
    if tornado.options.options.eventlog:
        logger.logger = logger.ZtreamyLogger(node_id,
                                             'processor-' + node_id + '.log')
    processor = Processor(options.stream_urls)
    try:
        processor.start(loop=True)
    except KeyboardInterrupt:
        pass
    finally:
        processor.stop()
        logger.logger.close()
Code example #13
File: source.py  Project: pCresp0/ztreamy
def main():
    options = read_cmd_options()
    entity_id = ztreamy.random_id()
    publishers = [client.EventPublisher(url) for url in options.server_urls]
    io_loop = tornado.ioloop.IOLoop.instance()
    filename = '../data-abel/EventData-sorted.csv.gz'
    scheduler = EventScheduler(filename, io_loop, publishers,
                               tornado.options.options.timescale,
                               compressed=True)
    if tornado.options.options.eventlog:
        logger.logger = logger.ZtreamyLogger(entity_id,
                                             'replay-' + entity_id + '.log')
    try:
        io_loop.start()
    except KeyboardInterrupt:
        pass
    finally:
        logger.logger.close()
Code example #14
def main():
    options = read_cmd_options()
    entity_id = ztreamy.random_id()
    limit = tornado.options.options.limit
    publishers = [client.EventPublisher(url) for url in options.server_urls]
    io_loop = tornado.ioloop.IOLoop.instance()
    time_generator = utils.get_scheduler(tornado.options.options.distribution)
    scheduler = Scheduler(limit, entity_id, io_loop, publishers,
                          time_generator=time_generator, add_timestamp=True)
    if tornado.options.options.eventlog:
        logger.logger = logger.ZtreamyLogger(entity_id,
                                             'source-' + entity_id + '.log')
    try:
        io_loop.start()
    except KeyboardInterrupt:
        pass
    finally:
        logger.logger.close()
Code example #15
File: server.py  Project: jvrplmlmn/ztreamy
    def get(self):
        event_id = self.get_argument('event-id', default=None)
        if event_id is None:
            event_id = ztreamy.random_id()
        source_id = self.get_argument('source-id')
        syntax = self.get_argument('syntax')
        application_id = self.get_argument('application_id')
        body = self.get_argument('body')
        aggregator_id = events.parse_aggregator_id(
            self.get_argument('aggregator-id', default=''))
        event_type = self.get_argument('event-type', default=None)
        timestamp = self.get_argument('timestamp', default=None)
        event = events.Event(source_id, syntax, body,
                             application_id=application_id,
                             aggregator_id=aggregator_id,
                             event_type=event_type, timestamp=timestamp)
        event.aggregator_id.append(self.stream.source_id)
        self.stream.dispatch_event(event)
        self.finish()
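A sketch of the kind of request this handler accepts, using the query parameter names read by get() above. The endpoint URL is a placeholder (these examples do not show the path under which the handler is mounted), and the snippet is Python 2 like the surrounding code.

import urllib
import urllib2

PUBLISH_URL = 'http://localhost:8888/events/publish'  # hypothetical endpoint

params = {
    'source-id': 'my-source-id',
    'syntax': 'text/plain',
    'application_id': 'example-app',
    'body': 'This is a new event',
    'aggregator-id': '',        # optional; parsed by parse_aggregator_id()
    'event-type': 'TestEvent',  # optional
}
urllib2.urlopen(PUBLISH_URL + '?' + urllib.urlencode(params))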
Code example #16
File: server.py  Project: pCresp0/ztreamy
    def get(self):
        event_id = self.get_argument('event-id', default=None)
        if event_id is None:
            event_id = ztreamy.random_id()
        source_id = self.get_argument('source-id')
        syntax = self.get_argument('syntax')
        application_id = self.get_argument('application_id')
        body = self.get_argument('body')
        aggregator_id = events.parse_aggregator_id(
            self.get_argument('aggregator-id', default=''))
        event_type = self.get_argument('event-type', default=None)
        timestamp = self.get_argument('timestamp', default=None)
        event = events.Event(source_id,
                             syntax,
                             body,
                             application_id=application_id,
                             aggregator_id=aggregator_id,
                             event_type=event_type,
                             timestamp=timestamp)
        event.aggregator_id.append(self.stream.source_id)
        self.stream.dispatch_event(event)
        self.finish()
Code example #17
File: events.py  Project: jvrplmlmn/ztreamy
    def __init__(self,
                 source_id,
                 syntax,
                 body,
                 event_id=None,
                 application_id=None,
                 aggregator_id=[],
                 event_type=None,
                 timestamp=None,
                 extra_headers=None):
        """Creates a new event.

        'body' must be the textual representation of the event, or an
        object providing that textual representation through 'str()'.

        When the created event has to be an instance of a specific
        subclass (e.g. an 'RDFEvent'), the static 'create()' method
        should be used instead.

        """
        self.event_id = event_id or ztreamy.random_id()
        self.source_id = source_id
        self.syntax = syntax
        self.body = body
        # Normalize aggregator_id into a list of strings
        if aggregator_id is None:
            self.aggregator_id = []
        elif type(aggregator_id) is not list:
            self.aggregator_id = [str(aggregator_id)]
        else:
            self.aggregator_id = [str(e) for e in aggregator_id]
        self.event_type = event_type
        self.timestamp = timestamp or ztreamy.get_timestamp()
        self.application_id = application_id
        if extra_headers is not None:
            self.extra_headers = extra_headers
        else:
            self.extra_headers = {}
Code example #18
File: events.py  Project: jfisteus/ztreamy
    def __init__(self, source_id, syntax, body, event_id=None,
                 application_id=None, aggregator_id=[], event_type=None,
                 timestamp=None, extra_headers=None):
        """Creates a new event.

        'body' must be the textual representation of the event, or an
        object providing that textual representation through 'str()'.

        When the created event has to be an instance of a specific
        subclass (e.g. an 'RDFEvent'), the static 'create()' method
        should be used instead.

        """
        if source_id is None:
            raise ValueError('Required event field missing: source_id')
        elif not syntax:
            raise ValueError('Required event field missing: syntax')
        self.event_id = event_id or ztreamy.random_id()
        self.source_id = source_id
        self.syntax = syntax
        self.body = body
        if aggregator_id is None:
            self.aggregator_id = []
        elif type(aggregator_id) is not list:
            self.aggregator_id = [str(aggregator_id)]
        else:
            self.aggregator_id = [str(e) for e in aggregator_id]
        self.event_type = event_type
        self._timestamp = timestamp or ztreamy.get_timestamp()
        self._time = None
        self.application_id = application_id
        self.extra_headers = {}
        if extra_headers is not None:
            # Do this in order to ensure type checking
            for header, value in extra_headers.iteritems():
                self.set_extra_header(header, value)
Code example #19
def main():
    def close_callback(client):
        reconnecting = False
        clients.remove(client)
        if tornado.options.options.reconnect and not client.finished:
            if times_reconnected[0] >= max_reconnections:
                print 'Active clients:', len(clients), '/', options.num_clients
                num_disconnected_clients[0] += 1
                print 'A client got disconnected with times overflown.', \
                      entity_id
            else:
                print 'preparing reconnection', entity_id
                reconnecting = True
                times_reconnected[0] += 1
                if client.no_parse:
                    _invoke_later(connect_new_client_no_parsing)
                else:
                    _invoke_later(connect_new_client_parsing)
        elif not client.finished:
            num_disconnected_clients[0] += 1
            print 'A client got disconnected with reconnect disabled.', \
                  entity_id
        if len(clients) == 0 and not reconnecting:
            tornado.ioloop.IOLoop.instance().stop()

    def connect_new_client_no_parsing():
        times_reconnected[0] += 1
        new_client = BogusClient(options.stream_url,
                                 stats,
                                 True,
                                 close_callback=close_callback)
        clients.append(new_client)
        new_client.start(loop=False)
        print "Created a new non-parsing client for reconnection", entity_id

    def connect_new_client_parsing():
        times_reconnected[0] += 1
        new_client = BogusClient(options.stream_url,
                                 stats,
                                 False,
                                 close_callback=close_callback,
                                 finish_callback=finish_callback)
        clients.append(new_client)
        new_client.start(loop=False)
        print "Created parsing client for reconnection", entity_id

    def finish_callback():
        for client in clients:
            client.finished = True
            client.stop(notify_connection_close=False)
        del clients[:]
        tornado.ioloop.IOLoop.instance().stop()

    options = read_cmd_options()
    no_parse = tornado.options.options.noparse
    assert options.num_clients > 0
    entity_id = ztreamy.random_id()
    num_disconnected_clients = [0]
    times_reconnected = [0]
    max_reconnections = 3 * options.num_clients
    if max_reconnections < 100:
        max_reconnections = 100
    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient",
                              max_clients=options.num_clients)
    clients = []
    if not no_parse:
        stats = _Stats(options.num_clients)
        for i in range(0, options.num_clients):
            clients.append(
                BogusClient(options.stream_url,
                            stats,
                            False,
                            close_callback=close_callback))
    else:
        # One client parses, and the rest don't
        stats = _Stats(1)
        for i in range(0, options.num_clients - 1):
            clients.append(
                BogusClient(options.stream_url,
                            stats,
                            True,
                            close_callback=close_callback))
        clients.append(
            BogusClient(options.stream_url,
                        stats,
                        False,
                        close_callback=close_callback,
                        finish_callback=finish_callback))
    for c in clients:
        c.start(loop=False)
    sched = tornado.ioloop.PeriodicCallback(stats.log_stats, 5000)
    if no_parse:
        saturation_mon = SaturationMonitor(5.0, clients)
        sched = tornado.ioloop.PeriodicCallback(saturation_mon.fire, 5000)
    sched.start()
    if tornado.options.options.eventlog:
        print entity_id
        logger.logger = logger.ZtreamyManycLogger(
            entity_id, 'manyc-' + entity_id + '.log')
        logger.logger.auto_flush = True
    try:
        tornado.ioloop.IOLoop.instance().start()
    except KeyboardInterrupt:
        pass
    finally:
        for c in clients:
            c.stop()
        if num_disconnected_clients[0] > 0:
            logging.warning((str(num_disconnected_clients[0]) +
                             ' clients got disconnected'))
        logger.logger.close()
Code example #20
File: server.py  Project: pCresp0/ztreamy
import random  # needed by random.choice() in publish_hi() and publish_there()

import ztreamy
import tornado.ioloop

# Create a server with two streams
server = ztreamy.StreamServer(9000)

# Create the streams; stream1 allows remote producers to publish through HTTP
stream1 = ztreamy.Stream('/stream1', allow_publish=True)
stream2 = ztreamy.Stream('/stream2')
server.add_stream(stream1)
server.add_stream(stream2)

# Create two publisher objects
publisher1 = ztreamy.LocalEventPublisher(stream1)
publisher2 = ztreamy.LocalEventPublisher(stream2)
source_id = ztreamy.random_id()
application_ids = ['ztreamy-example-a', 'ztreamy-example-b']


# Publish events periodically
def publish_hi():
    print('Publishing "hi"')
    app_id = random.choice(application_ids)
    event = ztreamy.Event(source_id, 'text/plain', 'Hi', application_id=app_id)
    publisher1.publish(event)


def publish_there():
    print('Publishing "there"')
    app_id = random.choice(application_ids)
    event = ztreamy.Event(source_id,
Code example #21
File: server.py  Project: pCresp0/ztreamy
def main():
    import time
    import tornado.options
    from ztreamy import rdfevents
    source_id = ztreamy.random_id()
    application_id = '1111-1111'

    def stop_server():
        server.stop()

    tornado.options.define('port',
                           default=8888,
                           help='run on the given port',
                           type=int)
    tornado.options.define('buffer',
                           default=None,
                           help='event buffer time (s)',
                           type=float)
    tornado.options.define('eventlog',
                           default=False,
                           help='dump event log',
                           type=bool)
    tornado.options.define('autostop',
                           default=False,
                           help='stop the server when the source finishes',
                           type=bool)
    tornado.options.parse_command_line()
    port = tornado.options.options.port
    if (tornado.options.options.buffer is not None
            and tornado.options.options.buffer > 0):
        buffering_time = tornado.options.options.buffer * 1000
    else:
        buffering_time = None
    server = StreamServer(
        port, stop_when_source_finishes=tornado.options.options.autostop)
    stream = Stream('/events',
                    allow_publish=True,
                    buffering_time=buffering_time)
    ## relay = RelayStream('/relay', [('http://localhost:' + str(port)
    ##                                + '/stream/priority')],
    ##                     allow_publish=True,
    ##                     buffering_time=buffering_time)
    server.add_stream(stream)
    ## server.add_stream(relay)
    if tornado.options.options.eventlog:
        print(stream.source_id)
        comments = {'Buffer time (ms)': buffering_time}
        #        logger.logger = logger.ZtreamyLogger(stream.source_id,
        logger.logger = logger.CompactServerLogger(
            stream.source_id, 'server-' + stream.source_id + '.log', comments)
        logger.logger.auto_flush = True
    # Uncomment to test Stream.stop():
    # tornado.ioloop.IOLoop.instance().add_timeout(time.time() + 5, stop_server)
    try:
        server.start()
    except KeyboardInterrupt:
        pass
    finally:
        server.stop()
        logger.logger.close()
Code example #22
File: many_clients.py  Project: jvrplmlmn/ztreamy
def main():
    def close_callback(client):
        reconnecting = False
        clients.remove(client)
        if tornado.options.options.reconnect and not client.finished:
            if times_reconnected[0] >= max_reconnections:
                print 'Active clients:', len(clients), '/', options.num_clients
                num_disconnected_clients[0] += 1
                print 'A client got disconnected with times overflown.', \
                      entity_id
            else:
                print 'preparing reconnection', entity_id
                reconnecting = True
                times_reconnected[0] += 1
                if client.no_parse:
                    _invoke_later(connect_new_client_no_parsing)
                else:
                    _invoke_later(connect_new_client_parsing)
        elif not client.finished:
            num_disconnected_clients[0] += 1
            print 'A client got disconnected with reconnect disabled.', \
                  entity_id
        if len(clients) == 0 and not reconnecting:
            tornado.ioloop.IOLoop.instance().stop()

    def connect_new_client_no_parsing():
        times_reconnected[0] += 1
        new_client = BogusClient(options.stream_url, stats, True,
                                 close_callback=close_callback)
        clients.append(new_client)
        new_client.start(loop=False)
        print "Created a new non-parsing client for reconnection", entity_id

    def connect_new_client_parsing():
        times_reconnected[0] += 1
        new_client = BogusClient(options.stream_url, stats, False,
                                 close_callback=close_callback,
                                 finish_callback=finish_callback)
        clients.append(new_client)
        new_client.start(loop=False)
        print "Created parsing client for reconnection", entity_id

    def finish_callback():
        for client in clients:
            client.finished = True
            client.stop(notify_connection_close=False)
        del clients[:]
        tornado.ioloop.IOLoop.instance().stop()

    options = read_cmd_options()
    no_parse = tornado.options.options.noparse
    assert options.num_clients > 0
    entity_id = ztreamy.random_id()
    num_disconnected_clients = [0]
    times_reconnected = [0]
    max_reconnections = 3 * options.num_clients
    if max_reconnections < 100:
        max_reconnections = 100
    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient",
                              max_clients=options.num_clients)
    clients = []
    if not no_parse:
        stats = _Stats(options.num_clients)
        for i in range(0, options.num_clients):
            clients.append(BogusClient(options.stream_url, stats, False,
                                       close_callback=close_callback))
    else:
        # One client parses, and the rest don't
        stats = _Stats(1)
        for i in range(0, options.num_clients - 1):
            clients.append(BogusClient(options.stream_url, stats, True,
                                       close_callback=close_callback))
        clients.append(BogusClient(options.stream_url, stats, False,
                                   close_callback=close_callback,
                                   finish_callback=finish_callback))
    for c in clients:
        c.start(loop=False)
    sched = tornado.ioloop.PeriodicCallback(stats.log_stats, 5000)
    if no_parse:
        saturation_mon = SaturationMonitor(5.0, clients)
        sched = tornado.ioloop.PeriodicCallback(saturation_mon.fire, 5000)
    sched.start()
    if tornado.options.options.eventlog:
        print entity_id
        logger.logger = logger.ZtreamyManycLogger(entity_id,
                                                'manyc-' + entity_id + '.log')
        logger.logger.auto_flush = True
    try:
        tornado.ioloop.IOLoop.instance().start()
    except KeyboardInterrupt:
        pass
    finally:
        for c in clients:
            c.stop()
        if num_disconnected_clients[0] > 0:
            logging.warning((str(num_disconnected_clients[0])
                             + ' clients got disconnected'))
        logger.logger.close()
Code example #23
File: publisher_async.py  Project: jvrplmlmn/ztreamy
import ztreamy
import tornado.ioloop

# Create a publisher object
stream = 'http://localhost:9000/stream1'
publisher = ztreamy.EventPublisher(stream)
source_id = ztreamy.random_id()

# Publish events periodically
def publish():
    print 'Publishing'
    event = ztreamy.Event(source_id, 'text/plain',  'This is a new event')
    publisher.publish(event)

tornado.ioloop.PeriodicCallback(publish, 10000).start()

try:
    # Block on the ioloop
    tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
    # Allow ctrl-c to finish the program
    pass
finally:
    publisher.close()