def test_canteen_try_init_runs_at_most_once():
    # Given that I have a Canteen
    c = multiprocessing.Value(Canteen)

    # When I run two canteen_try_init blocks
    with canteen_try_init(c) as acquired:
        if acquired:
            canteen_add(c, "hello")

    with canteen_try_init(c) as acquired:
        if acquired:
            canteen_add(c, "goodbye")

    # Then only the first one should run
    assert canteen_get(c) == ["hello"]
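

# The tests in this section assume a small set of canteen primitives: a
# Canteen value shared between processes, a canteen_try_init context manager
# that lets exactly one caller perform initialization, and canteen_add /
# canteen_get helpers for appending and reading paths.  The sketch below is
# one minimal way to satisfy that interface on top of multiprocessing.Value
# and ctypes -- an illustrative stand-in, not the library's actual
# implementation.
from contextlib import contextmanager
from ctypes import Structure, c_bool, c_char


class Canteen(Structure):
    # Fixed-size struct so it can live in shared memory: an "initialized"
    # flag plus a newline-separated buffer of paths.
    _fields_ = [
        ("initialized", c_bool),
        ("paths", c_char * 4096),
    ]


@contextmanager
def canteen_try_init(value):
    # `value` is expected to be a multiprocessing.Value(Canteen); its RLock
    # serializes access, so only the first caller ever sees acquired=True.
    with value.get_lock():
        if value.initialized:
            yield False
        else:
            try:
                yield True
            finally:
                value.initialized = True


def canteen_add(value, path):
    # Append a path to the shared buffer.  Assumes the canteen lock is held,
    # as it is inside a canteen_try_init block.
    current = value.paths.decode("utf-8")
    value.paths = (current + path + "\n").encode("utf-8")


def canteen_get(value):
    # Return the list of paths added so far.
    return [p for p in value.paths.decode("utf-8").split("\n") if p]

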
def worker_process(args, worker_id, logging_pipe, canteen):
    try:
        # Re-seed the random number generator from urandom on
        # supported platforms.  This should make it so that worker
        # processes don't all follow the same sequence.
        random.seed()

        logger = setup_worker_logging(args, worker_id, logging_pipe)
        logger.debug("Loading broker...")
        module, broker = import_broker(args.broker)
        broker.emit_after("process_boot")

        logger.debug("Loading modules...")
        for module in args.modules:
            importlib.import_module(module)

        with canteen_try_init(canteen) as acquired:
            if acquired:
                logger.debug("Sending forks to main process...")
                for middleware in broker.middleware:
                    for fork in middleware.forks:
                        fork_path = "%s:%s" % (fork.__module__, fork.__name__)
                        canteen_add(canteen, fork_path)

        logger.debug("Starting worker threads...")
        worker = Worker(broker, queues=args.queues, worker_threads=args.threads)
        worker.start()
    except ImportError:
        logger.exception("Failed to import module.")
        return sys.exit(RET_IMPORT)
    except ConnectionError:
        logger.exception("Broker connection failed.")
        return sys.exit(RET_CONNECT)

    def termhandler(signum, frame):
        nonlocal running
        if running:
            logger.info("Stopping worker process...")
            running = False
        else:
            logger.warning("Killing worker process...")
            return sys.exit(RET_KILLED)

    logger.info("Worker process is ready for action.")
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    signal.signal(signal.SIGTERM, termhandler)
    if hasattr(signal, "SIGHUP"):
        signal.signal(signal.SIGHUP, termhandler)
    if hasattr(signal, "SIGBREAK"):
        signal.signal(signal.SIGBREAK, termhandler)

    running = True
    while running:
        time.sleep(1)

    worker.stop()
    broker.close()
    logging_pipe.close()
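

# worker_process publishes each fork as a "module:function" string through
# the canteen.  A consumer on the main-process side could turn those strings
# back into callables with importlib, for example via a helper along the
# lines of the sketch below.  resolve_fork_path is a hypothetical name used
# here for illustration only; it is not an existing API.
import importlib


def resolve_fork_path(fork_path):
    # Split "package.module:function" into its module and attribute parts,
    # import the module, then look up the callable on it.
    module_name, _, func_name = fork_path.partition(":")
    module = importlib.import_module(module_name)
    return getattr(module, func_name)

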
def test_canteen_add_adds_paths():
    # Given that I have a Canteen
    c = multiprocessing.Value(Canteen)

    # When I append a couple of paths and mark it ready
    with canteen_try_init(c):
        canteen_add(c, "hello")
        canteen_add(c, "there")

    # Then those paths should be stored in the canteen
    assert canteen_get(c) == ["hello", "there"]