def main():
    """Run a bot that follows a Twitter user and publishes events.

    Parses command-line options, builds a ``TwitterFollowerBot`` that
    polls the Twitter API periodically, and runs it until interrupted
    with Ctrl-C, closing the event log on exit.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-t", "--time", dest="time", type=int, default=10,
        # Fixed typo in the original help text: "sucessive" -> "successive".
        help="time between two successive queries to Twitter API "
             "(in seconds, e.g. 10)")
    parser.add_argument(
        "-u", "--user", dest="user", required=True,
        help="print name of the Twitter user to follow (e.g. nordez)")
    parser.add_argument(
        "-a", "--appid", dest="appid", default="TwitterFollowerBot",
        help="application identifier (added to generated events)")
    parser.add_argument(
        "-s", "--sourceid", dest="sourceid", required=True,
        help="source identifier (added to generated events)")
    parser.add_argument(
        "-o", "--output", dest="output", required=True,
        help="URL for output stream where events are published "
             "(e.g. http://localhost:9001/events/publish)")
    options = parser.parse_args()
    publisher = client.EventPublisher(options.output)
    bot = TwitterFollowerBot(publisher, options.user, options.time,
                             options.sourceid, options.appid)
    try:
        bot.start()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the bot; shut it down cleanly.
        # (Removed a redundant `pass` that followed this call.)
        bot.finish()
    finally:
        logger.logger.close()
def main():
    """Start the Twitter streaming sensor.

    Parses command-line options, builds a ``TwitterStreamSensor`` that
    reads the Twitter stream with the given credentials, starts it
    asynchronously and enters the Tornado I/O loop.
    """
    parser = argparse.ArgumentParser()
    # `store_true` is the idiomatic equivalent of the original
    # store_const/const=True/default=False triple.
    parser.add_argument(
        "-g", "--geo", dest="geo", action='store_true',
        help="a boolean flag indicating whether all tweets, or only those "
             "that contain location information should be published")
    parser.add_argument("-u", "--user", dest="user", required=True,
                        help="a Twitter account login")
    parser.add_argument("-p", "--passwd", dest="passwd", required=True,
                        help="a Twitter account password")
    parser.add_argument(
        "-a", "--appid", dest="appid", default="TwitterSensor",
        help="application identifier (added to generated events)")
    parser.add_argument(
        "-s", "--sourceid", dest="sourceid", required=True,
        help="source identifier (added to generated events)")
    parser.add_argument(
        "-o", "--output", dest="output", required=True,
        help="URL for output stream where events are published "
             "(e.g. http://localhost:9001/events/publish)")
    options = parser.parse_args()
    publisher = client.EventPublisher(options.output)
    enc = TwitterStreamSensor(publisher, options.user, options.passwd,
                              options.sourceid, options.appid, options.geo)
    enc.start_async()
    # Block here servicing callbacks until the process is terminated.
    tornado.ioloop.IOLoop.instance().start()
def _create_publisher(url, publisher_type='ztreamy'):
    """Create a publisher object for the given server URL.

    If `url` is '-', events are written to stdout regardless of
    `publisher_type`.  Otherwise `publisher_type` selects the protocol:
    'ztreamy' (the default) or 'bayeux'.

    Raises:
        ValueError: if `publisher_type` is not one of the supported
            values (the original code silently returned None here).
    """
    ioloop = tornado.ioloop.IOLoop.instance()
    if url == '-':
        return utils.StdoutPublisher(ioloop=ioloop)
    elif publisher_type == 'ztreamy':
        return client.EventPublisher(url, io_loop=ioloop)
    elif publisher_type == 'bayeux':
        # Use the path component of the URL as the Bayeux channel name.
        scheme, server, port, path = split_url(url)
        assert scheme == 'http'
        server_url = '{0}://{1}:{2}/'.format(scheme, server, port)
        return BayeuxEventPublisher(server_url, path, io_loop=ioloop)
    else:
        raise ValueError('Unknown publisher type: {0}'.format(publisher_type))
def main():
    """Run the scheduled event source until interrupted.

    Creates one publisher per configured server URL, schedules event
    generation on the Tornado I/O loop and optionally logs the published
    events to a per-source file.
    """
    options = read_cmd_options()
    source_id = ztreamy.random_id()
    cmd_opts = tornado.options.options
    loop = tornado.ioloop.IOLoop.instance()
    # One publisher per destination stream.
    sinks = []
    for url in options.server_urls:
        sinks.append(client.EventPublisher(url))
    scheduler = Scheduler(cmd_opts.limit, source_id, loop, sinks,
                          time_generator=utils.get_scheduler(
                              cmd_opts.distribution),
                          add_timestamp=True)
    if cmd_opts.eventlog:
        # Record every published event to a per-source log file.
        logger.logger = logger.ZtreamyLogger(source_id,
                                             'source-' + source_id + '.log')
    try:
        loop.start()
    except KeyboardInterrupt:
        # Ctrl-C is the expected way to stop the source.
        pass
    finally:
        logger.logger.close()
def main(filename='../data-abel/EventData-sorted.csv.gz'):
    """Replay events from a gzip-compressed CSV file.

    Args:
        filename: path of the compressed CSV file to replay.  Defaults
            to the previously hard-coded data file, so existing callers
            are unaffected; it is now a parameter so other data sets can
            be replayed without editing the code.
    """
    options = read_cmd_options()
    entity_id = ztreamy.random_id()
    publishers = [client.EventPublisher(url) for url in options.server_urls]
    io_loop = tornado.ioloop.IOLoop.instance()
    scheduler = EventScheduler(filename, io_loop, publishers,
                               tornado.options.options.timescale,
                               compressed=True)
    if tornado.options.options.eventlog:
        # Record the replayed events to a per-run log file.
        logger.logger = logger.ZtreamyLogger(entity_id,
                                             'replay-' + entity_id + '.log')
    try:
        io_loop.start()
    except KeyboardInterrupt:
        # Ctrl-C is the expected way to stop the replay.
        pass
    finally:
        logger.logger.close()
def main():
    """Poll a COSM air-quality feed and publish its updates as events."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "-k", "--key", dest="key", required=True,
        help="a COSM key")
    arg_parser.add_argument(
        "-t", "--time", dest="time", type=int, default=30,
        help="period to query COSM (in seconds, defaults to 30)")
    arg_parser.add_argument(
        "-a", "--appid", dest="appid", default="AQESensor",
        help="application identifier (added to generated events)")
    arg_parser.add_argument(
        "-f", "--feed", dest="feed", required=True,
        help="the COSM URL of the JSON feed to be followed")
    arg_parser.add_argument(
        "-o", "--output", dest="output", required=True,
        help="URL for output stream where events are published (e.g. http://localhost:9001/events/publish)")
    args = arg_parser.parse_args()
    sensor = COSMAQESensor(client.EventPublisher(args.output),
                           args.key, args.feed, args.appid)
    # PeriodicCallback takes its period in milliseconds.
    poller = tornado.ioloop.PeriodicCallback(sensor.lookForUpdates,
                                             args.time * 1000)
    poller.start()
    tornado.ioloop.IOLoop.instance().start()
def main():
    """Periodically publish Wikipedia edition events to a stream."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "-t", "--time", dest="time", type=int, default=30,
        help="period to generate new events with Wikipedia editions "
             "(in seconds, defaults to 30)")
    arg_parser.add_argument(
        "-a", "--appid", dest="appid", default="WikipediaSensor",
        help="application identifier (added to generated events)")
    arg_parser.add_argument(
        "-s", "--sourceid", dest="sourceid", required=True,
        help="source identifier (added to generated events)")
    arg_parser.add_argument(
        "-o", "--output", dest="output", required=True,
        help="URL for output stream where events are published "
             "(e.g. http://localhost:9001/events/publish)")
    args = arg_parser.parse_args()
    sensor = WikipediaSensor(client.EventPublisher(args.output),
                             args.sourceid, args.appid)
    # PeriodicCallback takes its period in milliseconds.
    poller = tornado.ioloop.PeriodicCallback(sensor.lookForUpdates,
                                             args.time * 1000)
    poller.start()
    tornado.ioloop.IOLoop.instance().start()
def main():
    """Listen to a tweet stream and periodically publish statistics.

    Wires an ``AsyncStreamingClient`` that feeds ``process_tweet`` with
    incoming events to a ``PeriodicCallback`` that publishes the
    aggregated statistics to the output stream.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "-t", "--time", dest="time", type=int, default=30,
        help="period to generate new events with statistics "
             "(in seconds, defaults to 30)")
    arg_parser.add_argument(
        "-a", "--appid", dest="appid", default="StatsGenerator",
        help="application identifier (added to generated events)")
    arg_parser.add_argument(
        "-s", "--sourceid", dest="sourceid", required=True,
        help="source identifier (added to generated events)")
    arg_parser.add_argument(
        "-i", "--input", dest="input", required=True,
        help="URL for input stream where events are read "
             "(e.g. http://localhost:9001/events/stream)")
    arg_parser.add_argument(
        "-o", "--output", dest="output", required=True,
        help="URL for output stream where events are published "
             "(e.g. http://localhost:9002/events/publish)")
    args = arg_parser.parse_args()
    # Client that listens to the input tweet stream.
    listener = AsyncStreamingClient(args.input,
                                    event_callback=process_tweet,
                                    ioloop=tornado.ioloop.IOLoop.instance())
    # Publisher that pushes the generated statistics events.
    out_publisher = client.EventPublisher(args.output)
    # Publish the accumulated statistics every `time` seconds
    # (PeriodicCallback takes its period in milliseconds).
    publish_cb = functools.partial(publish, args.appid, args.sourceid,
                                   out_publisher)
    ticker = tornado.ioloop.PeriodicCallback(publish_cb, args.time * 1000)
    ticker.start()
    listener.start(loop=True)