def __init__(self, *args, **kwargs):
    """Bootstrap the DTRS service.

    Loads the "service" and "dtrs" config files, opens a dashi
    connection, and initializes the backing store and core logic.
    Recognized kwargs: amqp_uri, sysname, store (all optional).
    """
    config_files = get_config_paths(["service", "dtrs"])
    self.CFG = bootstrap.configure(config_files)

    self.amqp_uri = kwargs.get('amqp_uri')
    self.sysname = kwargs.get('sysname')
    self.dashi = bootstrap.dashi_connect(
        self.CFG.dtrs.service_name, self.CFG, self.amqp_uri, self.sysname)

    # Use an injected store when one is supplied, otherwise build the
    # configured one; either way it must be initialized before use.
    self.store = kwargs.get('store') or get_dtrs_store(self.CFG)
    self.store.initialize()

    self.core = DTRSCore(self.store)
def __init__(self, *args, **kwargs):
    """Bootstrap the High Availability service.

    Loads config, connects dashi, validates the configured policy name
    against the known policy_map, and builds the HA core. Recognized
    kwargs: exchange, service_name, amqp_uri, sysname,
    process_dispatchers, policy_parameters, process_definition_id,
    policy_interval (all optional; config supplies fallbacks).
    """
    config_files = get_config_paths(["service", "highavailability"])
    self.CFG = bootstrap.configure(config_files)

    exchange = kwargs.get("exchange")
    if exchange:
        # kwarg overrides the exchange from the loaded config
        self.CFG.server.amqp.exchange = exchange

    self.topic = (kwargs.get("service_name")
                  or self.CFG.highavailability.get("service_name")
                  or DEFAULT_TOPIC)
    self.amqp_uri = kwargs.get("amqp_uri") or None
    self.dashi = bootstrap.dashi_connect(
        self.topic, self.CFG, self.amqp_uri, sysname=kwargs.get("sysname"))

    process_dispatchers = (kwargs.get("process_dispatchers")
                           or self.CFG.highavailability.processdispatchers)

    # Reject unknown policy names up front; the stored policy key is
    # always lowercase.
    policy_name = self.CFG.highavailability.policy.name
    normalized_policy = policy_name.lower()
    if normalized_policy not in policy_map:
        raise Exception("HA Service doesn't support '%s' policy" % policy_name)
    self.policy = normalized_policy

    policy_parameters = (kwargs.get("policy_parameters")
                         or self.CFG.highavailability.policy.parameters)
    process_definition_id = (kwargs.get("process_definition_id")
                             or self.CFG.highavailability.process_definition_id)
    self.policy_interval = (kwargs.get("policy_interval")
                            or self.CFG.highavailability.policy.interval)

    self.control = DashiHAProcessControl(self.dashi, process_dispatchers)
    self.core = HighAvailabilityCore(
        self.CFG.highavailability,
        self.control,
        process_dispatchers,
        self.policy,
        parameters=policy_parameters,
        process_definition_id=process_definition_id,
    )
def __init__(self, *args, **kwargs):
    """Bootstrap the EPU worker.

    Loads config, optionally switches to gevent (when the
    EPU_USE_GEVENT environment variable is set), records an init
    event, and connects dashi.
    """
    config_files = get_config_paths(["service", "epu_worker"])
    self.CFG = bootstrap.configure(config_files)
    self.log = logging.getLogger()
    try:
        if os.environ.get('EPU_USE_GEVENT'):
            bootstrap.enable_gevent()
        else:
            self.log.info("Using standard python Threading")
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit. Narrowed to Exception so a
        # missing/broken gevent still falls back to threading without
        # masking interpreter-exit signals.
        self.log.warning("gevent not available. Falling back to threading")
    self.queue_name_work = self.CFG.queue_name_work
    extradict = {"queue_name_work": self.queue_name_work}
    cei_events.event("worker", "init_begin", extra=extradict)
    # NOTE(review): self.topic is never assigned in this method —
    # presumably set by a subclass or mixin before/around __init__;
    # verify, otherwise this line raises AttributeError.
    self.dashi = bootstrap.dashi_connect(self.topic, self.CFG)
def __init__(self):
    """Bootstrap the EPU Management service.

    Loads config, connects dashi, selects a real or mock provisioner
    client, initializes the EPUM store, and assembles the
    EPUManagement core with its notifier and DTRS client.
    """
    config_files = get_config_paths(["service", "epumanagement"])
    self.CFG = bootstrap.configure(config_files)

    self.dashi = bootstrap.dashi_connect(
        self.CFG.epumanagement.service_name, self.CFG)
    self.default_user = self.CFG.epumanagement.get('default_user')

    # TODO: create ION class here or depend on epuagent repo as a dep
    ou_client = MockOUAgentClient()
    statsd_cfg = self.CFG.get('statsd')

    # A truthy 'mock_provisioner' config entry swaps in the mock client.
    if self.CFG.epumanagement.get('mock_provisioner'):
        prov_client = MockProvisionerClient()
    else:
        prov_client = ProvisionerClient(
            self.dashi,
            topic=self.CFG.epumanagement.provisioner_service_name,
            statsd_cfg=statsd_cfg,
            client_name="epumanagement")

    self.service_name = self.CFG.epumanagement.get(
        EPUM_INITIALCONF_SERVICE_NAME, EPUM_DEFAULT_SERVICE_NAME)
    self.proc_name = self.CFG.epumanagement.get(EPUM_INITIALCONF_PROC_NAME, None)
    self.store = get_epum_store(
        self.CFG, service_name=self.service_name, proc_name=self.proc_name)
    self.store.initialize()

    dtrs_client = DTRSClient(
        self.dashi, statsd_cfg=statsd_cfg,
        client_name=self.CFG.epumanagement.service_name)

    self.epumanagement = EPUManagement(
        self.CFG.epumanagement, SubscriberNotifier(self.dashi),
        prov_client, ou_client, dtrs_client,
        store=self.store, statsd_cfg=statsd_cfg)

    # hack to inject epum reference for mock prov client
    if isinstance(prov_client, MockProvisionerClient):
        prov_client._set_epum(self.epumanagement)
def __init__(self, amqp_uri=None, topic="process_dispatcher", registry=None,
             store=None, epum_client=None, notifier=None, definition_id=None,
             domain_config=None, sysname=None):
    """Bootstrap the Process Dispatcher service.

    Loads config, connects dashi, builds the engine registry, store,
    EEAgent/EPUM clients, core, matchmaker, and doctor. All parameters
    are optional overrides; config supplies the fallbacks.
    """
    config_files = get_config_paths(["service", "processdispatcher"])
    self.CFG = bootstrap.configure(config_files)

    self.topic = self.CFG.processdispatcher.get('service_name', topic)
    self.dashi = bootstrap.dashi_connect(
        self.topic, self.CFG, amqp_uri=amqp_uri, sysname=sysname)

    engine_conf = self.CFG.processdispatcher.get('engines', {})
    default_engine = self.CFG.processdispatcher.get('default_engine')
    process_engines = self.CFG.processdispatcher.get('process_engines')
    if default_engine is None and len(engine_conf) == 1:
        # BUG FIX: was engine_conf.keys()[0], which raises TypeError on
        # Python 3 (dict views are not indexable). next(iter(...)) works
        # on both Python 2 and 3 and picks the single configured engine.
        default_engine = next(iter(engine_conf))

    self.store = store or get_processdispatcher_store(self.CFG)
    self.store.initialize()
    self.registry = registry or EngineRegistry.from_config(
        engine_conf, default=default_engine, process_engines=process_engines)
    self.eeagent_client = EEAgentClient(self.dashi)

    domain_definition_id = None
    base_domain_config = None
    # allow disabling communication with EPUM for epuharness case
    if epum_client:
        self.epum_client = epum_client
        domain_definition_id = definition_id
        base_domain_config = domain_config
    elif not self.CFG.processdispatcher.get('static_resources'):
        domain_definition_id = (definition_id
                                or self.CFG.processdispatcher.get('definition_id'))
        base_domain_config = (domain_config
                              or self.CFG.processdispatcher.get('domain_config'))
        epum_service_name = self.CFG.processdispatcher.get(
            'epum_service_name', 'epu_management_service')
        self.epum_client = EPUManagementClient(self.dashi, epum_service_name)
    else:
        self.epum_client = None

    if notifier:
        self.notifier = notifier
    else:
        self.notifier = SubscriberNotifier(self.dashi)

    self.core = ProcessDispatcherCore(
        self.store, self.registry, self.eeagent_client, self.notifier)

    launch_type = self.CFG.processdispatcher.get('launch_type', 'supd')
    restart_throttling_config = self.CFG.processdispatcher.get(
        'restart_throttling_config', {})
    dispatch_retry_seconds = self.CFG.processdispatcher.get(
        'dispatch_retry_seconds')

    self.matchmaker = PDMatchmaker(
        self.core, self.store, self.eeagent_client, self.registry,
        self.epum_client, self.notifier, self.topic, domain_definition_id,
        base_domain_config, launch_type, restart_throttling_config,
        dispatch_retry_seconds)

    self.doctor = PDDoctor(self.core, self.store, config=self.CFG)

    # signaled once the service is fully up and serving requests
    self.ready_event = threading.Event()
def __init__(self, *args, **kwargs):
    """Bootstrap the Provisioner service.

    Loads config, initializes the record store, dashi connection, DTRS
    client, optional contextualization client, core engine, and leader.
    Every component can be overridden via kwargs; otherwise it is built
    from config. kwargs checked: ssl_no_host_check, store, notifier,
    amqp_uri, sysname, statsd, dtrs, contextualization_disabled,
    context_client, default_user, iaas_timeout,
    record_reaping_max_age, core, leader.
    """
    config_files = get_config_paths(["service", "provisioner"])
    self.CFG = bootstrap.configure(config_files)

    ssl_no_host_check = kwargs.get('ssl_no_host_check')
    if ssl_no_host_check is None:
        ssl_no_host_check = self.CFG.get('ssl_no_host_check')
    if ssl_no_host_check:
        # globally disable libcloud's SSL certificate verification
        import libcloud.security
        libcloud.security.VERIFY_SSL_CERT = False

    self.proc_name = self.CFG.provisioner.get('proc_name', "")
    self.store = kwargs.get('store') or get_provisioner_store(
        self.CFG, proc_name=self.proc_name)
    self.store.initialize()

    epum_topic = self.CFG.provisioner.epu_management_service_name
    self.notifier = kwargs.get('notifier') or ProvisionerNotifier(
        self, [epum_topic])

    self.amqp_uri = kwargs.get('amqp_uri')
    self.topic = self.CFG.provisioner.get('service_name')
    self.sysname = kwargs.get('sysname')
    self.dashi = bootstrap.dashi_connect(
        self.topic, self.CFG, self.amqp_uri, self.sysname)

    statsd_cfg = kwargs.get('statsd') or self.CFG.get('statsd')

    dtrs_topic = self.CFG.provisioner.dtrs_service_name
    self.dtrs = kwargs.get('dtrs') or self._get_dtrs(
        dtrs_topic, statsd_cfg=statsd_cfg, client_name=self.topic)

    contextualization_disabled = kwargs.get('contextualization_disabled')
    if contextualization_disabled is None:
        contextualization_disabled = self.CFG.get('contextualization_disabled')
    if contextualization_disabled:
        context_client = None
    else:
        context_client = (kwargs.get('context_client')
                          or self._get_context_client())

    self.default_user = (kwargs.get('default_user')
                         or self.CFG.provisioner.get('default_user'))
    iaas_timeout = (kwargs.get('iaas_timeout')
                    or self.CFG.provisioner.get('iaas_timeout'))
    record_reaping_max_age = (
        kwargs.get('record_reaping_max_age')
        or self.CFG.provisioner.get('record_reaping_max_age'))

    # 'core' is a factory/class; instantiate it with the wired components
    core_factory = kwargs.get('core') or self._get_core()
    self.core = core_factory(
        self.store, self.notifier, self.dtrs, context_client,
        iaas_timeout=iaas_timeout, statsd_cfg=statsd_cfg)

    self.leader = kwargs.get('leader') or ProvisionerLeader(
        self.store, self.core, record_reaping_max_age=record_reaping_max_age)

    # signaled once the service is fully up and serving requests
    self.ready_event = threading.Event()