def __init__(self, configfile):
    """Bootstrap and run a Celery worker for the 'periodic' queue.

    :param configfile: path to the application configuration file,
        forwarded to the base class for parsing/validation.
    """
    # The factory call is kept for its side effect (it initializes the
    # shared Celery application — presumably; confirm against Factories).
    # The previously bound local `app` was never used, so the binding
    # has been dropped.
    Factories.celery_factory()
    super(PeriodicWorker, self).__init__(configfile)
    # Base __init__ sets self.loglevel; start a worker consuming only
    # the 'periodic' queue. run() blocks for the worker's lifetime.
    worker = Application.worker(name='worker.periodic',
                                queues=['periodic'],
                                level=self.loglevel)
    worker.run()
def __init__(self, configfile):
    """Bootstrap and run a Celery worker for the 'matching' queue.

    :param configfile: path to the application configuration file,
        forwarded to the base class for parsing/validation.
    """
    # The factory call is kept for its side effect (it initializes the
    # shared Celery application — presumably; confirm against Factories).
    # The previously bound local `app` was never used, so the binding
    # has been dropped.
    Factories.celery_factory()
    super(MatchingWorker, self).__init__(configfile)
    # Base __init__ sets self.loglevel; start a worker consuming only
    # the 'matching' queue. run() blocks for the worker's lifetime.
    worker = Application.worker(name='worker.matching',
                                queues=['matching'],
                                level=self.loglevel)
    worker.run()
def __init__(self, configfile):
    """Initialise the broker-facing API.

    Registers the [broker]/dsn requirement, lets the base class parse
    and validate the configuration, publishes the broker settings to
    the shared configuration, and wires up the Celery application.

    :param configfile: path to the application configuration file.
    """
    # The base-class validator must know about the extra required key
    # before super().__init__ runs its checks.
    self.expected_keys.update({'broker': ['dsn']})
    super(BrokerApi, self).__init__(configfile)

    dsn = self.configparser.get('broker', 'dsn')
    set_configuration({
        'broker': {'dsn': dsn},
        'app': {'celery_app_name': 'bpm'},
    })

    # Configure the Celery app and install the task routing table.
    celery_app = Factories.celery_factory()
    celery_app.config_from_object(celery_config())
    celery_app.conf.task_routes = ROUTES
def __init__(self, configfile):
    """Parse the application configuration and bootstrap shared services.

    Reads and validates *configfile*, configures logging, publishes the
    database/broker/storage/token settings to the shared configuration,
    configures the Celery application, overrides the DI logger providers,
    and optionally enables task profiling.

    :param configfile: path to the application configuration file.
    :raises IOError: if *configfile* or the referenced logging
        configuration file does not exist.
    """
    if not os.path.exists(configfile):
        raise IOError('No such configuration file: %s' % configfile)
    self.configparser.read(configfile)
    # Subclasses extend self.expected_keys before calling this __init__
    # (see e.g. BrokerApi); validation happens against the merged set.
    check_configuration_keys(self.expected_keys, self.configparser)
    logging_config = self.configparser.get('logging', 'configuration')
    if not os.path.exists(logging_config):
        raise IOError('No such configuration file: %s' % logging_config)
    setup_logging(logging_config)
    database_dsn = self.configparser.get('database', 'dsn')
    broker_dsn = self.configparser.get('broker', 'dsn')
    # Storage endpoint is assembled from two config options,
    # e.g. proto + uri -> "proto://uri".
    storage_uri = '%(proto)s://%(uri)s' % (
        {
            'proto': self.configparser.get('storage', 'proto'),
            'uri': self.configparser.get('storage', 'uri')
        })
    component_token = self.configparser.get('application', 'cpnttoken')
    data = {
        'broker': {
            'dsn': broker_dsn
        },
        'database': {
            'dsn': database_dsn
        },
        'app': {
            'storage_uri': storage_uri
        },
        'tokens': {
            'component': component_token
        }
    }
    set_configuration(data)
    celery_app = Factories.celery_factory()
    celery_app.config_from_object(celery_config())
    celery_app.conf.task_routes = ROUTES
    # Swap the DI container's logger/profiler providers for named
    # stdlib loggers so the whole app logs through 'bpm'.
    Core.logger.override(providers.Singleton(logging.getLogger, 'bpm'))
    Core.profiler.override(
        providers.Singleton(logging.getLogger, 'bpm_profiling'))
    # NOTE(review): getLevelName() of a valid level int always returns a
    # non-empty string, so the `or logging.WARN` fallback is effectively
    # dead — and would yield an int where a name is expected. Confirm
    # and simplify.
    self.loglevel = (logging.getLevelName(
        logging.getLogger().getEffectiveLevel()) or logging.WARN)
    # Profiling is opt-in: only enabled when [application]/profiling
    # exists and parses as true.
    profiling = (
        self.configparser.getboolean('application', 'profiling')
        if (self.configparser.has_section('application')
            and self.configparser.has_option('application', 'profiling'))
        else False)
    if profiling:
        Core.logger().info('Running with profiling active')
        setup_task_profiling()
def identity_and_match_processed_procedures():
    """Promote freshly processed procedures to the matching stage.

    Asks the workflow service for procedures whose processing completed,
    then produces one matching task per procedure on the broker.
    """
    broker = Factories.celery_factory()
    wf = Services.workflow()
    logger = Core.logger()
    procedures = wf.update_processed_procedures()
    for proc in procedures:
        # Produce first, then log: the message is past tense
        # ("has been sent"), so it must not be emitted if send fails.
        produce_matching_task(broker, proc)
        # Lazy %-style logging args: the message is only formatted when
        # the record is actually emitted.
        logger.info('Processed procedure %(uid)s has been sent to matching',
                    {'uid': proc})
def test_manage_processed_procedures(self):
    """Verify that a processed, aged image stack triggers task production.

    Builds a full image stack, marks the image as Processed and backdates
    it, stubs produce_matching_task, runs the periodic job, and asserts
    that the stub was invoked.
    """
    # Only `image`/`image_status` are used below; the rest of the tuple
    # is unpacked for completeness of create_image_stack()'s return.
    (issuer, repository, exam, exam_status, procedure, procedure_status,
     modality_type, sop_class, image_metadata, image,
     image_status) = self.create_image_stack()
    self.em.upd_image_status('uid', image_status.uid,
                             {'status': ImageStatusEnum.Processed.value})
    # Backdate by 11 minutes — presumably past the job's staleness
    # window so the image qualifies for matching; confirm the threshold.
    self.em.upd_image(
        'uid', image.uid,
        {'inserted_on': image.inserted_on + timedelta(minutes=-11)})
    wrapper = ResultWrapper()
    wrapper.EXPECTED_RESPONSE = 'Sent'
    # NOTE(review): `broker` is never used afterwards; kept only for the
    # factory call's side effects, if any — confirm whether it can go.
    broker = Factories.celery_factory()
    # mockito stub: any call to produce_matching_task records 'Sent'
    # into the wrapper instead of touching a real broker.
    (when(periodic).produce_matching_task(...).thenAnswer(wrapper.exec))
    periodic.manage_processed_procedures()
    self.assertEqual(wrapper.RESPONSE, wrapper.EXPECTED_RESPONSE)
    # Remove the mockito stubs so later tests see the real function.
    unstub()
def __init__(self, configfile):
    """Bootstrap and run the Celery beat scheduler.

    Registers the periodic-task setup hook on the Celery app, lets the
    base class parse the configuration, then starts beat (blocking).

    :param configfile: path to the application configuration file.
    """
    celery_app = Factories.celery_factory()
    # Beat needs the schedule registered once the app is configured.
    celery_app.on_after_configure.connect(periodic.setup_periodic_tasks)
    super(PeriodicBeater, self).__init__(configfile)
    scheduler = Application.beat(level=self.loglevel)
    scheduler.run()
# -*- coding: utf-8 -*- from ge.bpmc.app.injection import Contexts, Core, Factories, Services from ge.bpmc.utilities.network import check_broker_state from ge.bpmc.utilities.sqlalchemy import transaction app = Factories.celery_factory() # setup_periodic_tasks is to be wrapped in decorator # @app.on_after_configure.connect by the beat process def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task(15, manage_processed_procedures.s(), name='Manage processed procedures') def produce_matching_task(broker, procedure_uid): check_broker_state(broker) task_fqdn = 'ge.bpmc.tasks.matching.match_procedure_images' broker.send_task(task_fqdn, (procedure_uid, )) @transaction(Core.logger, Contexts.em) def identity_and_match_processed_procedures(): broker = Factories.celery_factory() wf = Services.workflow() logger = Core.logger() procedures = wf.update_processed_procedures() for proc in procedures:
def broker_context(self):
    """Yield the Celery application for use as a broker context/fixture."""
    broker = Factories.celery_factory()
    yield broker