def main(cls):
    """Entry point for the bureaucrat daemon.

    Parses the command line, configures logging and application settings
    from the same config file, then runs a daemon instance inside a
    DaemonContext with SIGTERM/SIGHUP mapped to its cleanup handler.
    """
    opts = parse_cmdline({"pidfile": PID_FILE})

    # Logging and application settings come from one config file.
    logging.config.fileConfig(opts.config, disable_existing_loggers=False)
    parser = ConfigParser()
    parser.read(opts.config)
    Configs.instance(parser)

    instance = cls()

    dctx = daemon.DaemonContext()
    dctx.pidfile = PidFile(opts.pidfile)
    if opts.foreground:
        # Stay attached to the terminal; both streams go to stdout.
        dctx.detach_process = False
        dctx.stdout = sys.stdout
        dctx.stderr = sys.stdout
    dctx.signal_map = {
        signal.SIGTERM: instance.cleanup,
        signal.SIGHUP: instance.cleanup
    }

    with dctx:
        instance.run()
def test_handle_message_start(self):
    """Test Await.handle_message() with 'start' message.

    Arranges a pre-existing subscription in storage, sends a 'start'
    message and checks that the expression consumes it, becomes active
    and registers itself as a subscriber for the event.
    """
    confparser = ConfigParser()
    confparser.add_section('bureaucrat')
    confparser.set('bureaucrat', 'storage_dir', STORAGE_DIR)
    Configs.instance(confparser)
    subscriptions = [{"target": "some-id"}]
    Storage.instance().save("subscriptions", "test_event",
                            json.dumps(subscriptions))
    filename = os.path.join(STORAGE_DIR, "subscriptions/test_event")
    try:
        msg = Message(name='start', target='fake-id_0', origin='fake-id')
        self.fexpr.state = 'ready'
        result = self.fexpr.handle_message(self.ch, msg)
        self.assertEqual(result, 'consumed')
        self.assertEqual(self.fexpr.state, 'active')
        with open(filename) as fhdl:
            subscriptions.append({'target': 'fake-id_0'})
            self.assertEqual(json.load(fhdl), subscriptions)
    finally:
        # Always reset the singletons and remove on-disk state, even
        # when an assertion fails, so state never leaks into other tests.
        Configs._instance = None
        Storage._instance = None
        os.unlink(filename)
        os.rmdir(os.path.join(STORAGE_DIR, "subscriptions"))
        os.removedirs(STORAGE_DIR)
def setUp(self):
    """Prepare the fixture: configure storage and build a workflow."""
    cfg = ConfigParser()
    cfg.add_section('bureaucrat')
    cfg.set('bureaucrat', 'storage_dir', STORAGE_DIR)
    Configs.instance(cfg)
    self.wflow = Workflow.create_from_string(processdsc, 'fake-id')
def setUp(self):
    """Prepare the SUT: configure storage and create a Schedule."""
    cfg = ConfigParser()
    cfg.add_section('bureaucrat')
    cfg.set('bureaucrat', 'storage_dir', STORAGE_DIR)
    Configs.instance(cfg)
    Storage.instance()
    self.ch = Mock()
    self.schedule = Schedule(self.ch)
def run(self):
    """Event cycle.

    Connects to AMQP, declares the work queues, registers the consumer
    callbacks, arms a 60-second timer for schedule handling and blocks
    in the consume loop.
    """
    config = Configs.instance()
    LOG.debug("create connection")
    self.connection = pika.BlockingConnection(config.amqp_params)
    LOG.debug("Bureaucrat connected")
    self.channel = self.connection.channel()
    self.schedule = Schedule(ChannelWrapper(self.channel))

    # Queue name -> consumer callback, in declaration order.
    consumers = (
        ("bureaucrat", self.launch_process),
        (config.message_queue, self.handle_message),
        (config.event_queue, self.handle_event),
        ("bureaucrat_schedule", self.add_schedule)
    )
    for queue_name, _ in consumers:
        self.channel.queue_declare(queue=queue_name, durable=True,
                                   exclusive=False, auto_delete=False)
    self.channel.basic_qos(prefetch_count=1)
    for queue_name, callback in consumers:
        self.channel.basic_consume(queue_name, callback)

    signal.signal(signal.SIGALRM, self.handle_alarm)
    signal.setitimer(signal.ITIMER_REAL, 60, 60)
    self.channel.start_consuming()
def run(self):
    """Event cycle.

    Connects to AMQP, declares the work queues, registers the consumer
    callbacks, arms a 60-second timer for schedule handling and blocks
    in the consume loop.
    """
    config = Configs.instance()
    LOG.debug("create connection")
    self.connection = pika.BlockingConnection(config.amqp_params)
    LOG.debug("Bureaucrat connected")
    self.channel = self.connection.channel()
    self.schedule = Schedule(ChannelWrapper(self.channel))

    # Queue name -> consumer callback, in declaration order.
    consumers = (
        ("bureaucrat", self.launch_process),
        (config.message_queue, self.handle_message),
        (config.event_queue, self.handle_event),
        ("bureaucrat_schedule", self.add_schedule)
    )
    for queue_name, _ in consumers:
        self.channel.queue_declare(queue=queue_name, durable=True,
                                   exclusive=False, auto_delete=False)
    self.channel.basic_qos(prefetch_count=1)
    for queue_name, callback in consumers:
        self.channel.basic_consume(callback, queue=queue_name)

    signal.signal(signal.SIGALRM, self.handle_alarm)
    signal.setitimer(signal.ITIMER_REAL, 60, 60)
    self.channel.start_consuming()
def elaborate(self, participant, origin, payload):
    """Elaborate the payload at a given participant.

    Dispatches on the configured task queue type: plain 'taskqueue'
    workers get a bureaucrat message on their worker queue, 'celery'
    participants get a Celery-formatted task message.
    """
    config = Configs.instance()
    qtype = config.taskqueue_type
    # Both queue types deliver the same logical response message.
    response = {
        "name": 'response',
        "target": origin,
        "origin": origin,
        "payload": payload
    }
    if qtype == 'taskqueue':
        props = pika.BasicProperties(
            delivery_mode=2,
            content_type='application/x-bureaucrat-message')
        self._ch.basic_publish(exchange='',
                               routing_key="worker_%s" % participant,
                               body=json.dumps(response),
                               properties=props)
    elif qtype == 'celery':
        # Message in the format acceptable by Celery. The exact format
        # can be found in celery.app.amqp.TaskProducer.publish_task().
        celery_msg = {
            "task": participant,
            "id": "%s" % uuid.uuid4(),
            "args": (response, ),
            "kwargs": {},
            "retries": 0,
            "eta": None,
            "expires": None,
            "utc": True,
            "callbacks": None,
            "errbacks": None,
            "timelimit": (None, None),
            "taskset": None,
            "chord": None
        }
        exchange = participant.split(".", 1)[0]
        props = pika.BasicProperties(delivery_mode=2,
                                     content_type='application/json',
                                     content_encoding='utf-8')
        self._ch.basic_publish(exchange=exchange,
                               routing_key=exchange,
                               body=json.dumps(celery_msg),
                               properties=props)
    else:
        raise ChannelWrapperError(
            "Unknown task queue type: %s" % config.taskqueue_type)
def elaborate(self, participant, origin, payload):
    """Elaborate the payload at a given participant.

    Publishes a 'response' message for the participant using the
    transport selected by the configured task queue type: a plain
    worker queue for 'taskqueue', a Celery task message for 'celery'.
    """
    config = Configs.instance()
    # Both transports carry the same logical response payload.
    response_body = {
        "name": 'response',
        "target": origin,
        "origin": origin,
        "payload": payload
    }
    queue_type = config.taskqueue_type
    if queue_type == 'taskqueue':
        self._ch.basic_publish(
            exchange='',
            routing_key="worker_%s" % participant,
            body=json.dumps(response_body),
            properties=pika.BasicProperties(
                delivery_mode=2,
                content_type='application/x-bureaucrat-message'))
    elif queue_type == 'celery':
        # Message in the format acceptable by Celery. The exact format
        # can be found in celery.app.amqp.TaskProducer.publish_task().
        task_msg = {
            "task": participant,
            "id": "%s" % uuid.uuid4(),
            "args": (response_body, ),
            "kwargs": {},
            "retries": 0,
            "eta": None,
            "expires": None,
            "utc": True,
            "callbacks": None,
            "errbacks": None,
            "timelimit": (None, None),
            "taskset": None,
            "chord": None
        }
        # The exchange/routing key is the top-level package name of the
        # dotted participant (Celery task) name.
        name = participant.split(".", 1)[0]
        self._ch.basic_publish(
            exchange=name,
            routing_key=name,
            body=json.dumps(task_msg),
            properties=pika.BasicProperties(
                delivery_mode=2,
                content_type='application/json',
                content_encoding='utf-8'))
    else:
        raise ChannelWrapperError(
            "Unknown task queue type: %s" % config.taskqueue_type)
def __init__(self):
    """Initialize the storage singleton.

    Must only be invoked through Storage.instance(); any direct
    construction raises StorageError. Ensures the configured storage
    directory exists on disk.
    """
    if self._instance is not None or not self._is_instantiated:
        raise StorageError(
            "Storage.instance() should be used to get an instance")
    self._bucket_cache = []
    root = Configs.instance().storage_dir
    self.storage_dir = root
    if not os.path.isdir(root):
        os.makedirs(root)
def send(self, message):
    """Send a message to the target with payload attached.

    Serializes the message to JSON and publishes it persistently on the
    configured message queue via the default exchange.
    """
    serialized = json.dumps({
        "name": message.name,
        "target": message.target,
        "origin": message.origin,
        "payload": message.payload
    })
    props = pika.BasicProperties(delivery_mode=2,
                                 content_type=message.content_type,
                                 content_encoding='utf-8')
    self._ch.basic_publish(exchange='',
                           routing_key=Configs.instance().message_queue,
                           body=serialized,
                           properties=props)
def send(self, message):
    """Send a message to the target with payload attached.

    Publishes the message as persistent JSON on the configured message
    queue through the default exchange.
    """
    envelope = {
        "name": message.name,
        "target": message.target,
        "origin": message.origin,
        "payload": message.payload
    }
    queue = Configs.instance().message_queue
    self._ch.basic_publish(
        exchange='',
        routing_key=queue,
        body=json.dumps(envelope),
        properties=pika.BasicProperties(delivery_mode=2,
                                        content_type=message.content_type,
                                        content_encoding='utf-8'))