def main():
    """Entry point for the indexer service.

    Reads configuration, prepares the database tables and the Elasticsearch
    index, starts a single Indexer worker process, then drains the event
    queue until a shutdown is signalled.
    """
    config = configuration.read()

    with MainContext(config) as main_ctx:
        init_signals(
            main_ctx.shutdown_event, default_signal_handler, default_signal_handler
        )
        create_table_structure(main_ctx.config)

        indexer_cfg = config["Indexer"]
        es = Elasticsearch(indexer_cfg.get("ESConnection"))
        index_name = indexer_cfg.get("ESIndexname")

        # Reuse the current index if one exists; otherwise create it fresh.
        index = get_current_index(es, index_name) or create_index(es, index_name)

        main_ctx.Proc(
            name="Indexer",
            worker_class=Indexer,
            config=indexer_cfg,
        )

        # Main supervision loop: poll the event queue until shutdown.
        while not main_ctx.shutdown_event.is_set():
            event = main_ctx.event_queue.safe_get()
            if not event:
                continue
def main():
    """Entry point for the crawler service.

    Reads configuration, registers the crawling rules in the database, and
    starts the worker pipeline (token generator -> session-day checker /
    URL generator -> downloaders), then drains the event queue until a
    shutdown is signalled.
    """
    config = configuration.read()

    with Context(config) as main_ctx:
        create_table_structure(main_ctx.config)

        # Register all known rules once at startup, then release the connection.
        db = DBInterface(config=main_ctx.config["DEFAULT"])
        # FIX: was `rule.rule_registry.keys`, which passed the bound method
        # object itself (never called) instead of the registered rules; use
        # `.all` to match the post-processing entry point.
        Rules(db).register_rules(rule.rule_registry.all)
        db.close()

        init_signals(
            main_ctx.shutdown_event, default_signal_handler, default_signal_handler
        )

        # Bounded queues provide back-pressure between pipeline stages.
        token_bucket_q = main_ctx.MPQueue(100)
        url_q = main_ctx.MPQueue(10)

        main_ctx.Proc(
            token_bucket_q,
            name="SessionDayChecker",
            worker_class=SessionDayChecker,
            config=config["SessionDayChecker"],
        )

        # Downloader instance count is configurable; defaults to one worker.
        for instance_id in range(int(config["Downloader"].get("Instances", 1))):
            main_ctx.Proc(
                token_bucket_q,
                url_q,
                name="Downloader_{}".format(instance_id),
                worker_class=DocumentDownloader,
                config=config["Downloader"],
            )

        main_ctx.Proc(
            url_q,
            name="DateUrlGenerator",
            worker_class=DateUrlGenerator,
            config=config["DateUrlGenerator"],
        )

        main_ctx.Proc(
            token_bucket_q,
            name="TokenGenerator",
            worker_class=TokenBucketWorker,
            config=config["TokenBucketWorker"],
        )

        # Main supervision loop: poll the event queue until shutdown.
        while not main_ctx.shutdown_event.is_set():
            event = main_ctx.event_queue.safe_get()
            if not event:
                continue
def sessiondaychecker_instance(request, db_interface, config):
    """Pytest fixture: a SessionDayChecker worker wired to a live MainContext.

    FIX: the original ``return`` statement sat inside the ``with`` block, so
    MainContext teardown ran before the test ever received the worker,
    invalidating its queues and shutdown event. ``yield`` keeps the context
    open for the duration of the test and tears it down afterwards.

    ``db_interface`` is accepted for fixture-ordering purposes only; it is
    not used directly here.
    """
    with MainContext(config) as main_ctx:
        init_signals(
            main_ctx.shutdown_event, default_signal_handler, default_signal_handler
        )
        token_bucket_q = main_ctx.MPQueue(1)
        yield SessionDayChecker(
            "name",
            mp.Event(),
            main_ctx.shutdown_event,
            main_ctx.event_queue,
            main_ctx.logger_q,
            main_ctx.config["SessionDayChecker"],
            token_bucket_q,
        )
def tokenbucket_instance(request, db_interface, config):
    """Pytest fixture: a TokenBucketWorker wired to a live MainContext.

    FIX: the original ``return`` statement sat inside the ``with`` block, so
    MainContext teardown ran before the test ever received the worker,
    invalidating its queues and shutdown event. ``yield`` keeps the context
    open for the duration of the test and tears it down afterwards.

    ``db_interface`` is accepted for fixture-ordering purposes only; it is
    not used directly here.
    """
    with MainContext(config) as main_ctx:
        init_signals(
            main_ctx.shutdown_event, default_signal_handler, default_signal_handler
        )
        token_bucket_q = main_ctx.MPQueue(1)
        startup_event = mp.Event()
        yield TokenBucketWorker(
            "name",
            startup_event,
            main_ctx.shutdown_event,
            main_ctx.event_queue,
            main_ctx.logger_q,
            main_ctx.config["TokenBucketWorker"],
            token_bucket_q,
        )
def main():
    """Entry point for the post-processing service.

    Reads configuration, registers the rules in the database, starts a
    configurable number of PostProcessingWorker processes plus one
    scheduler, then drains the event queue until a shutdown is signalled.
    """
    config = configuration.read()

    with Context(config) as main_ctx:
        create_table_structure(main_ctx.config)

        # Register all known rules once at startup, then release the connection.
        db = DBInterface(config=main_ctx.config["DEFAULT"])
        Rules(db).register_rules(rule.rule_registry.all)
        db.close()

        init_signals(
            main_ctx.shutdown_event, default_signal_handler, default_signal_handler
        )

        # Bounded queue feeds documents from the scheduler to the workers.
        document_q = main_ctx.MPQueue(30)

        # Worker instance count is configurable; defaults to one worker.
        worker_count = int(config["PostProcessingWorker"].get("Instances", 1))
        for worker_index in range(worker_count):
            main_ctx.Proc(
                document_q,
                name="PostProcessingWorker_{}".format(worker_index),
                worker_class=PostProcessingWorker,
                config=config["PostProcessingWorker"],
            )

        main_ctx.Proc(
            document_q,
            name="PostProcessingScheduler",
            worker_class=PostProcessingScheduler,
            config=config["PostProcessingScheduler"],
        )

        # Main supervision loop: poll the event queue until shutdown.
        while not main_ctx.shutdown_event.is_set():
            if not main_ctx.event_queue.safe_get():
                continue